diff --git a/changelogs/fragments/2202-zos_data_set-Support-noscratch-options.yml b/changelogs/fragments/2202-zos_data_set-Support-noscratch-options.yml new file mode 100644 index 000000000..d1bf0fdad --- /dev/null +++ b/changelogs/fragments/2202-zos_data_set-Support-noscratch-options.yml @@ -0,0 +1,10 @@ +minor_changes: + - zos_data_set - Adds the `noscratch` option to allow uncataloging + a data set without deleting it from the volume's VTOC. + (https://github.com/ansible-collections/ibm_zos_core/pull/2202) +trivial: + - data_set - Internal updates to support the noscratch option. + (https://github.com/ansible-collections/ibm_zos_core/pull/2202) + - test_zos_data_set_func - Added a test case to verify the `noscratch` option + functionality in the zos_data_set module. + (https://github.com/ansible-collections/ibm_zos_core/pull/2202) diff --git a/changelogs/fragments/2207-SYSIN-support-zos_job_output.yml b/changelogs/fragments/2207-SYSIN-support-zos_job_output.yml new file mode 100644 index 000000000..230b112ea --- /dev/null +++ b/changelogs/fragments/2207-SYSIN-support-zos_job_output.yml @@ -0,0 +1,3 @@ +minor_changes: + - zos_job_output - Adds support to query SYSIN DDs from a job with the new `input` option. + (https://github.com/ansible-collections/ibm_zos_core/pull/2207) \ No newline at end of file diff --git a/changelogs/fragments/2210-zos_data_set-Support-noscratch-options.yml b/changelogs/fragments/2210-zos_data_set-Support-noscratch-options.yml new file mode 100644 index 000000000..aa9ff7330 --- /dev/null +++ b/changelogs/fragments/2210-zos_data_set-Support-noscratch-options.yml @@ -0,0 +1,10 @@ +minor_changes: + - zos_data_set - Adds the `noscratch` option to allow uncataloging + a data set without deleting it from the volume's VTOC. + (https://github.com/ansible-collections/ibm_zos_core/pull/2210) +trivial: + - data_set - Internal updates to support the noscratch option. + (https://github.com/ansible-collections/ibm_zos_core/pull/2210) + - test_zos_data_set_func - Added a test case to verify the `noscratch` option + functionality in the zos_data_set module. + (https://github.com/ansible-collections/ibm_zos_core/pull/2210) diff --git a/changelogs/fragments/2213-test-case-conditional-failure-2-19.yml b/changelogs/fragments/2213-test-case-conditional-failure-2-19.yml new file mode 100644 index 000000000..72bf5c59a --- /dev/null +++ b/changelogs/fragments/2213-test-case-conditional-failure-2-19.yml @@ -0,0 +1,16 @@ +trivial: + - test_zos_copy_func.py - Modified test case `test_job_script_async` + to resolve porting issues to Ansible 2.19. + (https://github.com/ansible-collections/ibm_zos_core/pull/2213). + + - test_zos_job_submit_func.py - Modified test case `test_job_submit_async` + to resolve porting issues to Ansible 2.19. + (https://github.com/ansible-collections/ibm_zos_core/pull/2213). + + - test_zos_script_func.py - Modified test case `test_job_script_async` + to resolve porting issues to Ansible 2.19. + (https://github.com/ansible-collections/ibm_zos_core/pull/2213). + + - test_zos_unarchive_func.py - Modified test case `test_zos_unarchive_async` + to resolve porting issues to Ansible 2.19. + (https://github.com/ansible-collections/ibm_zos_core/pull/2213).
\ No newline at end of file diff --git a/changelogs/fragments/2229-job-typrun-support.yml b/changelogs/fragments/2229-job-typrun-support.yml new file mode 100644 index 000000000..fc0596044 --- /dev/null +++ b/changelogs/fragments/2229-job-typrun-support.yml @@ -0,0 +1,7 @@ +minor_changes: + - zos_job_submit - Adds support for jobs with TYPRUN=JCLHOLD and TYPRUN=HOLD. + (https://github.com/ansible-collections/ibm_zos_core/pull/2229). +trivial: + - zos_job_submit - Fixes a regression on ZOAU v1.3.6.0 where a job submitted + with TYPRUN=COPY would return an error. + (https://github.com/ansible-collections/ibm_zos_core/pull/2229). diff --git a/docs/source/modules/zos_job_submit.rst b/docs/source/modules/zos_job_submit.rst index 845478eff..0d51651f9 100644 --- a/docs/source/modules/zos_job_submit.rst +++ b/docs/source/modules/zos_job_submit.rst @@ -383,6 +383,7 @@ jobs "asid": 0, "class": "K", "content_type": "JOB", + "cpu_time": 1, "creation_date": "2023-05-03", "creation_time": "12:13:00", "ddnames": [ @@ -579,10 +580,12 @@ jobs "stepname": "DLORD6" } ], + "execution_node": "STL1", "execution_time": "00:00:10", "job_class": "K", "job_id": "JOB00361", "job_name": "DBDGEN00", + "origin_node": "STL1", "owner": "OMVSADM", "priority": 1, "program_name": "IEBGENER", @@ -763,7 +766,9 @@ jobs Job status `TYPRUN=SCAN` indicates that the job had the TYPRUN parameter with SCAN option. - Job status `NOEXEC` indicates that the job had the TYPRUN parameter with COPY option. + Job status `TYPRUN=COPY` indicates that the job had the TYPRUN parameter with COPY option. + + Job status `HOLD` indicates that the job had the TYPRUN parameter with either the HOLD or JCLHOLD options. Jobs where status can not be determined will result in None (NULL). @@ -858,4 +863,34 @@ jobs | **type**: str | **sample**: IEBGENER + system + The job entry system that MVS uses to do work. + + | **type**: str + | **sample**: STL1 + + subsystem + The job entry subsystem that MVS uses to do work. + + | **type**: str + | **sample**: STL1 + + cpu_time + Sum of the CPU time used by each job step, in microseconds. + + | **type**: int + | **sample**: 5 + + execution_node + Execution node that picked the job and executed it. + + | **type**: str + | **sample**: STL1 + + origin_node + Origin node that submitted the job. + + | **type**: str + | **sample**: STL1 + diff --git a/plugins/module_utils/data_set.py b/plugins/module_utils/data_set.py index 3c055d0ff..778fa7a29 100644 --- a/plugins/module_utils/data_set.py +++ b/plugins/module_utils/data_set.py @@ -241,7 +241,7 @@ def ensure_present( return True @staticmethod - def ensure_absent(name, volumes=None, tmphlq=None): + def ensure_absent(name, volumes=None, tmphlq=None, noscratch=False): """Deletes provided data set if it exists. Parameters @@ -252,13 +252,15 @@ def ensure_absent(name, volumes=None, tmphlq=None): The volumes the data set may reside on. tmphlq : str High Level Qualifier for temporary datasets. + noscratch : bool + If True, the data set is uncataloged but not physically removed from the volume. Returns ------- bool Indicates if changes were made. """ - changed, present = DataSet.attempt_catalog_if_necessary_and_delete(name, volumes, tmphlq=tmphlq) + changed, present = DataSet.attempt_catalog_if_necessary_and_delete(name, volumes, tmphlq=tmphlq, noscratch=noscratch) return changed # ? should we do additional check to ensure member was actually created? 
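(For reference, a minimal sketch of how the new flag is expected to reach ZOAU from `DataSet.ensure_absent()`; the helper name below is hypothetical, the `noscratch` keyword of `datasets.delete()` is assumed to behave exactly as used in the `DataSet.delete()` hunk further on, and the error handling is simplified relative to the module_utils code.)

from zoautil_py import datasets  # standard ZOAU Python bindings used by data_set.py

def uncatalog_without_scratch(name):
    # Equivalent of IDCAMS "DELETE ... NOSCRATCH": the catalog entry is removed,
    # but the Data Set Control Block stays in the volume's VTOC.
    rc = datasets.delete(name, noscratch=True)
    if rc > 0:
        # DataSet.delete() raises DatasetDeleteError here; a plain exception
        # keeps this sketch self-contained.
        raise RuntimeError(f"Deletion of {name} failed with rc={rc}")
    return True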
@@ -1003,7 +1005,7 @@ def attempt_catalog_if_necessary(name, volumes, tmphlq=None): return present, changed @staticmethod - def attempt_catalog_if_necessary_and_delete(name, volumes, tmphlq=None): + def attempt_catalog_if_necessary_and_delete(name, volumes, tmphlq=None, noscratch=False): """Attempts to catalog a data set if not already cataloged, then deletes the data set. This is helpful when a data set currently cataloged is not the data @@ -1019,6 +1021,8 @@ def attempt_catalog_if_necessary_and_delete(name, volumes, tmphlq=None): The volumes the data set may reside on. tmphlq : str High Level Qualifier for temporary datasets. + noscratch : bool + If True, the data set is uncataloged but not physically removed from the volume. Returns ------- @@ -1039,7 +1043,7 @@ def attempt_catalog_if_necessary_and_delete(name, volumes, tmphlq=None): present = DataSet.data_set_cataloged(name, volumes, tmphlq=tmphlq) if present: - DataSet.delete(name) + DataSet.delete(name, noscratch=noscratch) changed = True present = False else: @@ -1074,7 +1078,7 @@ def attempt_catalog_if_necessary_and_delete(name, volumes, tmphlq=None): if present: try: - DataSet.delete(name) + DataSet.delete(name, noscratch=noscratch) except DatasetDeleteError: try: DataSet.uncatalog(name, tmphlq=tmphlq) @@ -1101,14 +1105,14 @@ def attempt_catalog_if_necessary_and_delete(name, volumes, tmphlq=None): present = DataSet.data_set_cataloged(name, volumes, tmphlq=tmphlq) if present: - DataSet.delete(name) + DataSet.delete(name, noscratch=noscratch) changed = True present = False else: present = DataSet.data_set_cataloged(name, None, tmphlq=tmphlq) if present: try: - DataSet.delete(name) + DataSet.delete(name, noscratch=noscratch) changed = True present = False except DatasetDeleteError: @@ -1414,7 +1418,7 @@ def create( return changed @staticmethod - def delete(name): + def delete(name, noscratch=False): """A wrapper around zoautil_py datasets.delete() to raise exceptions on failure. @@ -1428,7 +1432,7 @@ def delete(name): DatasetDeleteError When data set deletion fails. """ - rc = datasets.delete(name) + rc = datasets.delete(name, noscratch=noscratch) if rc > 0: raise DatasetDeleteError(name, rc) @@ -2721,7 +2725,7 @@ def ensure_present(self, tmp_hlq=None, replace=False, force=False): self.set_state("present") return rc - def ensure_absent(self, tmp_hlq=None): + def ensure_absent(self, tmp_hlq=None, noscratch=False): """Removes the data set. Parameters @@ -2734,7 +2738,7 @@ def ensure_absent(self, tmp_hlq=None): int Indicates if changes were made. """ - rc = DataSet.ensure_absent(self.name, self.volumes, tmphlq=tmp_hlq) + rc = DataSet.ensure_absent(self.name, self.volumes, tmphlq=tmp_hlq, noscratch=noscratch) if rc == 0: self.set_state("absent") return rc diff --git a/plugins/module_utils/job.py b/plugins/module_utils/job.py index 53b5a370e..464069725 100644 --- a/plugins/module_utils/job.py +++ b/plugins/module_utils/job.py @@ -58,7 +58,7 @@ ]) -def job_output(job_id=None, owner=None, job_name=None, dd_name=None, dd_scan=True, duration=0, timeout=0, start_time=timer()): +def job_output(job_id=None, owner=None, job_name=None, dd_name=None, sysin=False, dd_scan=True, duration=0, timeout=0, start_time=timer()): """Get the output from a z/OS job based on various search criteria. Keyword Parameters @@ -71,6 +71,8 @@ def job_output(job_id=None, owner=None, job_name=None, dd_name=None, dd_scan=Tru The job name search for (default: {None}). dd_name : str The data definition to retrieve (default: {None}). 
+ sysin : bool + The input DD to retrieve SYSIN value (default: {False}). dd_scan : bool Whether or not to pull information from the dd's for this job {default: {True}}. duration : int @@ -112,6 +114,7 @@ def job_output(job_id=None, owner=None, job_name=None, dd_name=None, dd_scan=Tru job_name=job_name, dd_name=dd_name, duration=duration, + sysin=sysin, dd_scan=dd_scan, timeout=timeout, start_time=start_time @@ -128,6 +131,7 @@ def job_output(job_id=None, owner=None, job_name=None, dd_name=None, dd_scan=Tru owner=owner, job_name=job_name, dd_name=dd_name, + sysin=sysin, dd_scan=dd_scan, duration=duration, timeout=timeout, @@ -287,7 +291,7 @@ def _parse_steps(job_str): return stp -def _get_job_status(job_id="*", owner="*", job_name="*", dd_name=None, dd_scan=True, duration=0, timeout=0, start_time=timer()): +def _get_job_status(job_id="*", owner="*", job_name="*", dd_name=None, sysin=False, dd_scan=True, duration=0, timeout=0, start_time=timer()): """Get job status. Parameters @@ -300,6 +304,8 @@ def _get_job_status(job_id="*", owner="*", job_name="*", dd_name=None, dd_scan=T The job name search for (default: {None}). dd_name : str The data definition to retrieve (default: {None}). + sysin : bool + The input DD SYSIN (default: {False}). dd_scan : bool Whether or not to pull information from the dd's for this job {default: {True}}. duration : int @@ -405,7 +411,7 @@ def _get_job_status(job_id="*", owner="*", job_name="*", dd_name=None, dd_scan=T list_of_dds = [] try: - list_of_dds = jobs.list_dds(entry.job_id) + list_of_dds = jobs.list_dds(entry.job_id, sysin=sysin) except exceptions.DDQueryException: is_dd_query_exception = True @@ -424,7 +430,7 @@ def _get_job_status(job_id="*", owner="*", job_name="*", dd_name=None, dd_scan=T try: # Note, in the event of an exception, eg job has TYPRUN=HOLD # list_of_dds will still be populated with valuable content - list_of_dds = jobs.list_dds(entry.job_id) + list_of_dds = jobs.list_dds(entry.job_id, sysin=sysin) is_jesjcl = True if search_dictionaries("dd_name", "JESJCL", list_of_dds) else False is_job_error_status = True if entry.status in JOB_ERROR_STATUSES else False except exceptions.DDQueryException: diff --git a/plugins/modules/zos_data_set.py b/plugins/modules/zos_data_set.py index d03bbb126..25d2ef073 100644 --- a/plugins/modules/zos_data_set.py +++ b/plugins/modules/zos_data_set.py @@ -298,6 +298,15 @@ type: bool required: false default: false + noscratch: + description: + - "When C(state=absent), specifies whether to keep the data set's entry in the VTOC." + - If C(noscratch=True), the data set is uncataloged but not physically removed from the volume. + The Data Set Control Block is not removed from the VTOC. + - This is the equivalent of using C(NOSCRATCH) in an C(IDCAMS DELETE) command. + type: bool + required: false + default: false volumes: description: - > @@ -575,6 +584,15 @@ type: bool required: false default: false + noscratch: + description: + - "When C(state=absent), specifies whether to keep the data set's entry in the VTOC." + - If C(noscratch=True), the data set is uncataloged but not physically removed from the volume. + The Data Set Control Block is not removed from the VTOC. + - This is the equivalent of using C(NOSCRATCH) in an C(IDCAMS DELETE) command. + type: bool + required: false + default: false extended: description: - Sets the I(extended) attribute for Generation Data Groups. @@ -734,6 +752,12 @@ name: someds.name.here state: absent +- name: Uncatalog a data set but do not remove it from the volume. 
+ zos_data_set: + name: someds.name.here + state: absent + noscratch: true + - name: Delete a data set if it exists. If data set not cataloged, check on volume 222222 for the data set, and then catalog and delete if found. zos_data_set: name: someds.name.here @@ -1404,7 +1428,7 @@ def get_data_set_handler(**params): ) -def perform_data_set_operations(data_set, state, replace, tmp_hlq, force): +def perform_data_set_operations(data_set, state, replace, tmp_hlq, force, noscratch): """Calls functions to perform desired operations on one or more data sets. Returns boolean indicating if changes were made. @@ -1439,7 +1463,7 @@ def perform_data_set_operations(data_set, state, replace, tmp_hlq, force): elif state == "absent" and data_set.data_set_type == "gdg": changed = data_set.ensure_absent(force=force) elif state == "absent": - changed = data_set.ensure_absent(tmp_hlq=tmp_hlq) + changed = data_set.ensure_absent(tmp_hlq=tmp_hlq, noscratch=noscratch) elif state == "cataloged": changed = data_set.ensure_cataloged(tmp_hlq=tmp_hlq) elif state == "uncataloged": @@ -1586,6 +1610,11 @@ def parse_and_validate_args(params): required=False, default=False, ), + noscratch=dict( + type="bool", + required=False, + default=False, + ), ), ), # For individual data set args @@ -1676,6 +1705,11 @@ def parse_and_validate_args(params): required=False, default=False, ), + noscratch=dict( + type="bool", + required=False, + default=False, + ), mutually_exclusive=[ ["batch", "name"], # ["batch", "state"], @@ -1788,6 +1822,11 @@ def run_module(): required=False, default=False, ), + noscratch=dict( + type="bool", + required=False, + default=False, + ), ), ), # For individual data set args @@ -1868,6 +1907,11 @@ def run_module(): required=False, default=False ), + noscratch=dict( + type="bool", + required=False, + default=False + ), ) result = dict(changed=False, message="", names=[]) @@ -1895,6 +1939,8 @@ def run_module(): module.params["replace"] = None if module.params.get("record_format") is not None: module.params["record_format"] = None + if module.params.get("noscratch") is not None: + module.params["noscratch"] = None elif module.params.get("type") is not None: if module.params.get("type") in DATA_SET_TYPES_VSAM: # For VSAM types set the value to nothing and let the code manage it @@ -1921,6 +1967,7 @@ def run_module(): replace=data_set_params.get("replace"), tmp_hlq=data_set_params.get("tmp_hlq"), force=data_set_params.get("force"), + noscratch=data_set_params.get("noscratch"), ) result["changed"] = result["changed"] or current_changed except Exception as e: diff --git a/plugins/modules/zos_job_output.py b/plugins/modules/zos_job_output.py index 19be24856..00fef50ba 100644 --- a/plugins/modules/zos_job_output.py +++ b/plugins/modules/zos_job_output.py @@ -33,6 +33,7 @@ like "*". - If there is no ddname, or if ddname="?", output of all the ddnames under the given job will be displayed. + - If SYSIN DDs are needed, C(input) should be set to C(true). version_added: "1.0.0" author: - "Jack Ho (@jacklotusho)" @@ -61,6 +62,12 @@ (e.g "JESJCL", "?") type: str required: false + input: + description: + - Whether to include SYSIN DDs as part of the output. + type: bool + default: false + required: false attributes: action: @@ -90,6 +97,11 @@ job_name: "*" owner: "IBMUSER" ddname: "?" 
+ +- name: Query a job's output including SYSIN DDs + zos_job_output: + job_id: "JOB00548" + input: true """ RETURN = r""" @@ -496,6 +508,7 @@ def run_module(): job_name=dict(type="str", required=False), owner=dict(type="str", required=False), ddname=dict(type="str", required=False), + input=dict(type="bool", required=False, default=False), ) module = AnsibleModule(argument_spec=module_args, supports_check_mode=True) @@ -505,6 +518,7 @@ def run_module(): job_name=dict(type="job_identifier", required=False), owner=dict(type="str", required=False), ddname=dict(type="str", required=False), + input=dict(type="bool", required=False, default=False), ) try: @@ -521,13 +535,14 @@ def run_module(): job_name = module.params.get("job_name") owner = module.params.get("owner") ddname = module.params.get("ddname") + sysin = module.params.get("input") if not job_id and not job_name and not owner: module.fail_json(msg="Please provide a job_id or job_name or owner") try: results = {} - results["jobs"] = job_output(job_id=job_id, owner=owner, job_name=job_name, dd_name=ddname) + results["jobs"] = job_output(job_id=job_id, owner=owner, job_name=job_name, dd_name=ddname, sysin=sysin) results["changed"] = False except zoau_exceptions.JobFetchException as fetch_exception: module.fail_json( diff --git a/plugins/modules/zos_job_submit.py b/plugins/modules/zos_job_submit.py index 1852327ec..0917fc007 100644 --- a/plugins/modules/zos_job_submit.py +++ b/plugins/modules/zos_job_submit.py @@ -272,7 +272,8 @@ - Job status `SYS` indicates a system failure. - Job status `?` indicates status can not be determined. - Job status `TYPRUN=SCAN` indicates that the job had the TYPRUN parameter with SCAN option. - - Job status `NOEXEC` indicates that the job had the TYPRUN parameter with COPY option. + - Job status `TYPRUN=COPY` indicates that the job had the TYPRUN parameter with COPY option. + - Job status `HOLD` indicates that the job had the TYPRUN parameter with either the HOLD or JCLHOLD options. - Jobs where status can not be determined will result in None (NULL). type: str sample: AC @@ -1116,8 +1117,9 @@ def run_module(): result["execution_time"] = job_output_txt[0].get("execution_time") result["duration"] = duration + job_msg = job_output_txt[0].get("ret_code", {}).get("msg") - if duration >= wait_time_s: + if duration >= wait_time_s and job_msg != "HOLD": result["failed"] = True result["changed"] = False _msg = ("The JCL submitted with job id {0} but appears to be a long " @@ -1161,19 +1163,13 @@ def run_module(): job_ret_code.update({"msg_txt": _msg}) raise Exception(_msg) - if job_ret_code_code is not None and job_ret_code_msg == 'NOEXEC': - job_dd_names = job_output_txt[0].get("ddnames") - jes_jcl_dd = search_dictionaries("ddname", "JESJCL", job_dd_names) - # These are the conditions for a job run with TYPRUN=COPY. - if not jes_jcl_dd: - job_ret_code.update({"msg": "TYPRUN=COPY"}) - _msg = ("The job was run with TYPRUN=COPY. " - "This way, the steps are not executed, but the JCL is validated and stored " - "in the JES spool. " - "Please review the job log for further details.") - job_ret_code.update({"msg_txt": _msg}) - - if job_ret_code_code is None or job_ret_code.get("msg") == 'NOEXEC': + if job_ret_code_msg == 'HOLD': + _msg = ("The job was run with TYPRUN=HOLD or TYPRUN=JCLHOLD " + "to request special job processing. 
This will result in no completion, " + "no return code, no job steps and changed will be set to false.") + job_ret_code.update({"msg_txt": _msg}) + is_changed = False + elif job_ret_code_code is None and job_ret_code.get("msg") == 'NOEXEC': # If there is no job_ret_code_code (Job return code) it may NOT be an error, # some jobs will never return have an RC, eg Started tasks(which are not supported), # so further analyze the @@ -1183,21 +1179,21 @@ def run_module(): jes_jcl_dd = search_dictionaries("ddname", "JESJCL", job_dd_names) # Its possible jobs don't have a JESJCL which are active and this would - # cause an index out of range error. + # mean the job had TYPRUN=COPY. if not jes_jcl_dd: - _msg_detail = " for status {0}.".format(job_ret_code_msg) if job_ret_code_msg else "." - _msg = ("The job return code was not available in the job log, " - "please review the job log{0}".format(_msg_detail)) + job_ret_code.update({"msg": "TYPRUN=COPY"}) + _msg = ("The job was run with TYPRUN=COPY. " + "This way, the steps are not executed, but the JCL is validated and stored " + "in the JES spool. " + "Please review the job log for further details.") job_ret_code.update({"msg_txt": _msg}) - raise Exception(_msg) - - jes_jcl_dd_content = jes_jcl_dd[0].get("content") - jes_jcl_dd_content_str = " ".join(jes_jcl_dd_content) - # The regex can be r"({0})\s*=\s*(COPY|HOLD|JCLHOLD|SCAN)" once zoau support is in. - special_processing_keyword = re.search(r"({0})\s*=\s*(SCAN)" - .format("|".join(JOB_SPECIAL_PROCESSING)), jes_jcl_dd_content_str) + else: + jes_jcl_dd_content = jes_jcl_dd[0].get("content") + jes_jcl_dd_content_str = " ".join(jes_jcl_dd_content) + # The regex can be r"({0})\s*=\s*(COPY|HOLD|JCLHOLD|SCAN)" once zoau support is in. + special_processing_keyword = re.search(r"({0})\s*=\s*(SCAN)" + .format("|".join(JOB_SPECIAL_PROCESSING)), jes_jcl_dd_content_str) - if job_ret_code_msg == 'NOEXEC': job_ret_code.update({"msg": special_processing_keyword[0]}) job_ret_code.update({"code": None}) job_ret_code.update({"msg_code": None}) @@ -1206,19 +1202,17 @@ def run_module(): "return code or job steps and changed will be false." .format(job_submitted_id, special_processing_keyword[0])}) is_changed = False - else: - # The job_ret_code_code is None at this point, but the job_ret_code_msg_code could be populated - # so check both and provide a proper response. + elif job_ret_code_code is None: + # The job_ret_code_code is None at this point, but the job_ret_code_msg_code could be populated + # so check both and provide a proper response. - if job_ret_code_msg_code is None: - _msg_detail = " for status {0}.".format(job_ret_code_msg) if job_ret_code_msg else "." - _msg = ("The job return code was not available in the job log, " - "please review the job log{0}".format(_msg_detail)) - job_ret_code.update({"msg_txt": _msg}) - raise Exception(_msg) + if job_ret_code_msg_code is None: + _msg_detail = " for status {0}.".format(job_ret_code_msg) if job_ret_code_msg else "." 
+ _msg = ("The job return code was not available in the job log, " + "please review the job log{0}".format(_msg_detail)) + job_ret_code.update({"msg_txt": _msg}) + raise Exception(_msg) - # raise Exception("The job return code was not available in the job log, " - # "please review the job log and error {0}.".format(job_ret_code_msg)) elif job_ret_code_code != 0 and max_rc is None: _msg = ("The job return code {0} was non-zero in the " "job output, this job has failed.".format(str(job_ret_code_code))) diff --git a/tests/functional/modules/test_zos_copy_func.py b/tests/functional/modules/test_zos_copy_func.py index a660d7576..6d13aa104 100644 --- a/tests/functional/modules/test_zos_copy_func.py +++ b/tests/functional/modules/test_zos_copy_func.py @@ -257,7 +257,7 @@ async_status: jid: "{{{{ copy_output.ansible_job_id }}}}" register: job_result - until: job_result.finished + until: job_result.finished | bool retries: 10 delay: 30 diff --git a/tests/functional/modules/test_zos_data_set_func.py b/tests/functional/modules/test_zos_data_set_func.py index ec188868d..7346797c5 100644 --- a/tests/functional/modules/test_zos_data_set_func.py +++ b/tests/functional/modules/test_zos_data_set_func.py @@ -1161,3 +1161,151 @@ def test_gdg_deletion_when_absent(ansible_zos_module): assert result.get("changed") is False assert result.get("module_stderr") is None assert result.get("failed") is None + +def test_data_set_delete_with_noscratch(ansible_zos_module, volumes_on_systems): + """ + Tests that 'state: absent' with 'noscratch: true' correctly uncatalogs + a data set but leaves its physical entry in the VTOC. + """ + volumes = Volume_Handler(volumes_on_systems) + volume = volumes.get_available_vol() + hosts = ansible_zos_module + dataset = get_tmp_ds_name(2, 2) + + try: + # Arrange: Create the test data set on the specific volume + hosts.all.zos_data_set( + name=dataset, + type='seq', + state='present', + volumes=[volume], + space_primary=1, + space_type='m' + ) + + # Act: Delete the dataset using the noscratch option + results = hosts.all.zos_data_set( + name=dataset, + state='absent', + noscratch=True + ) + for result in results.contacted.values(): + assert result.get("changed") is True + assert result.get("module_stderr") is None + # Assert 1: Verify the data set is GONE from the catalog. + # This is the first part of the test, where we check that the data set + results = hosts.all.zos_data_set( + name=dataset, + state='absent', + ) + for result in results.contacted.values(): + assert result.get("changed") is False + # catalog_check = hosts.all.command(f"dls '{dataset}'", failed_when=False) + # for result in catalog_check.contacted.values(): + # # Assert that the command failed (non-zero return code) + # assert result.get("rc") != 0 + # Assert 2: Verify the data set is STILL on the volume's VTOC. + # This is the crucial second half of the test. + # We can do this by trying to delete it again, but specifying the volume. + # If this delete reports "changed: true", it's proof that it found and + # deleted the uncataloged data set from the VTOC. + vtoc_check_and_delete = hosts.all.zos_data_set( + name=dataset, + state='absent', + volumes=volume + ) + for result in vtoc_check_and_delete.contacted.values(): + # This assertion proves the data set existed on the volume's VTOC. + assert result.get("changed") is True + finally: + # Cleanup: Perform a final, full delete from the volume since it's still there. + # We provide the volume to ensure it can be found and deleted. 
+ hosts.all.zos_data_set( + name=dataset, + state='absent', + volumes=[volume] + ) + +def test_batch_uncatalog_with_noscratch_suboption(ansible_zos_module, volumes_on_systems): + """ + Tests that the 'noscratch: true' sub-option works correctly when used inside a + batch list to uncatalog multiple data sets. + """ + hosts = ansible_zos_module + volume = Volume_Handler(volumes_on_systems).get_available_vol() + + # Define two separate data sets for the batch operation + dataset_1 = get_tmp_ds_name() + dataset_2 = get_tmp_ds_name() + + try: + # --- Arrange --- + # Create both data sets in a preliminary batch operation so they exist + setup_results = hosts.all.zos_data_set( + batch=[ + {'name': dataset_1, 'type': 'seq', 'state': 'present', 'volumes': volume}, + {'name': dataset_2, 'type': 'seq', 'state': 'present', 'volumes': volume} + ] + ) + for result in setup_results.contacted.values(): + assert result.get("changed") is True + + # --- Act --- + # Run the main test: a batch uncatalog where both items use noscratch + act_results = hosts.all.zos_data_set( + batch=[ + {'name': dataset_1, 'state': 'absent', 'noscratch': True}, + {'name': dataset_2, 'state': 'absent', 'noscratch': True} + ] + ) + # # Assert on the main action results + for result in act_results.contacted.values(): + assert result.get("changed") is True + assert result.get("module_stderr") is None + results = hosts.all.zos_data_set( + name=dataset_1, + state='absent', + ) + for result in results.contacted.values(): + assert result.get("changed") is False + results = hosts.all.zos_data_set( + name=dataset_2, + state='absent', + ) + for result in results.contacted.values(): + assert result.get("changed") is False + + # # --- Verification Assertions --- + # Assert 2: Verify the data set is STILL on the volume's VTOC. + # This is the crucial second half of the test. + # We can do this by trying to delete it again, but specifying the volume. + # If this delete reports "changed: true", it's proof that it found and + # deleted the uncataloged data set from the VTOC. + + vtoc_check_and_delete = hosts.all.zos_data_set( + name=dataset_1, + state='absent', + volumes=volume + ) + for result in vtoc_check_and_delete.contacted.values(): + # This assertion proves the data set existed on the volume's VTOC + assert result.get("changed") is True + + vtoc_check_and_delete = hosts.all.zos_data_set( + name=dataset_2, + state='absent', + volumes=volume + ) + for result in vtoc_check_and_delete.contacted.values(): + # This assertion proves the data set existed on the volume's VTOC + assert result.get("changed") is True + finally: + # --- Cleanup --- + # Ensure both data sets are fully deleted from the volume's VTOC. + # This is critical because the test's main action leaves them on disk. 
+ hosts.all.zos_data_set( + batch=[ + {'name': dataset_1, 'state': 'absent', 'volumes': [volume]}, + {'name': dataset_2, 'state': 'absent', 'volumes': [volume]} + ] + ) diff --git a/tests/functional/modules/test_zos_job_output_func.py b/tests/functional/modules/test_zos_job_output_func.py index e1db120d2..a634507ba 100644 --- a/tests/functional/modules/test_zos_job_output_func.py +++ b/tests/functional/modules/test_zos_job_output_func.py @@ -30,6 +30,19 @@ // """ +JCL_FILE_CONTENTS_SYSIN = """//SYSINS JOB (T043JM,JM00,1,0,0,0),'SYSINS - JRM',CLASS=R, +// MSGCLASS=X,MSGLEVEL=1,NOTIFY=OMVSADM +//STEP1 EXEC PGM=BPXBATCH,PARM='SH sleep 1' +//STDOUT DD SYSOUT=* +//STDERR DD SYSOUT=* +//LISTCAT EXEC PGM=IDCAMS,REGION=4M +//SYSPRINT DD SYSOUT=* +//SYSIN DD * + LISTCAT ENTRIES('TEST.DATASET.JCL') ALL +/* +// +""" + TEMP_PATH = "/tmp/jcl" def test_zos_job_output_no_job_id(ansible_zos_module): @@ -149,6 +162,40 @@ def test_zos_job_output_job_exists_with_filtered_ddname(ansible_zos_module): hosts.all.file(path=TEMP_PATH, state="absent") +def test_zos_job_output_job_exists_with_sysin(ansible_zos_module): + try: + hosts = ansible_zos_module + hosts.all.file(path=TEMP_PATH, state="directory") + hosts.all.zos_data_set( + name="TEST.DATASET.JCL", + type="PS", + state="present" + ) + hosts.all.shell( + cmd=f"echo {quote(JCL_FILE_CONTENTS_SYSIN)} > {TEMP_PATH}/SYSIN" + ) + result = hosts.all.zos_job_submit( + src=f"{TEMP_PATH}/SYSIN", location="uss", volume=None + ) + hosts.all.file(path=TEMP_PATH, state="absent") + sysin = "True" + results = hosts.all.zos_job_output(job_name="SYSINS", input=sysin) + for result in results.contacted.values(): + print(result) + assert result.get("changed") is False + for job in result.get("jobs"): + assert len(job.get("ddnames")) >= 1 + sysin_found = False + for ddname_entry in job.get("ddnames"): + if ddname_entry.get("ddname") == "SYSIN": + sysin_found = True + break + assert sysin_found + finally: + hosts.all.zos_data_set(name="TEST.DATASET.JCL", state="absent") + hosts.all.file(path=TEMP_PATH, state="absent") + + def test_zos_job_submit_job_id_and_owner_included(ansible_zos_module): hosts = ansible_zos_module results = hosts.all.zos_job_output(job_id="STC00*", owner="MASTER") diff --git a/tests/functional/modules/test_zos_job_submit_func.py b/tests/functional/modules/test_zos_job_submit_func.py index fca001a3e..e5f8fc7d5 100644 --- a/tests/functional/modules/test_zos_job_submit_func.py +++ b/tests/functional/modules/test_zos_job_submit_func.py @@ -425,7 +425,7 @@ async_status: jid: "{{{{ job_task.ansible_job_id }}}}" register: job_result - until: job_result.finished + until: job_result.finished | bool retries: 20 delay: 5 """ @@ -1014,8 +1014,8 @@ def test_job_submit_local_jcl_typrun_copy(ansible_zos_module): "to": "IBM-1047" },) for result in results.contacted.values(): - # With ZOAU 1.3.3 changes now code and return msg_code are 0 and 0000 respectively. - # assert result.get("changed") is False + # With ZOAU 1.3.6 changes now code and return msg_code are both None, now + # being consistent with the rest of the possible TYPRUN cases. # When running a job with TYPRUN=COPY, a copy of the JCL will be kept in the JES spool, so # effectively, the system is changed even though the job didn't run. 
assert result.get("changed") is True @@ -1024,12 +1024,9 @@ def test_job_submit_local_jcl_typrun_copy(ansible_zos_module): r'The job was run with TYPRUN=COPY.', repr(result.get("jobs")[0].get("ret_code").get("msg_txt")) ) - assert result.get("jobs")[0].get("ret_code").get("code") == 0 + assert result.get("jobs")[0].get("ret_code").get("code") is None assert result.get("jobs")[0].get("ret_code").get("msg") == 'TYPRUN=COPY' - assert result.get("jobs")[0].get("ret_code").get("msg_code") == '0000' - # assert result.get("jobs")[0].get("ret_code").get("code") is None - # assert result.get("jobs")[0].get("ret_code").get("msg") is None - # assert result.get("jobs")[0].get("ret_code").get("msg_code") is None + assert result.get("jobs")[0].get("ret_code").get("msg_code") is None def test_job_submit_local_jcl_typrun_hold(ansible_zos_module): @@ -1048,11 +1045,11 @@ def test_job_submit_local_jcl_typrun_hold(ansible_zos_module): assert result.get("changed") is False assert result.get("jobs")[0].get("job_id") is not None assert re.search( - r'long running job', + r'The job was run with TYPRUN=HOLD or TYPRUN=JCLHOLD', repr(result.get("jobs")[0].get("ret_code").get("msg_txt")) ) assert result.get("jobs")[0].get("ret_code").get("code") is None - assert result.get("jobs")[0].get("ret_code").get("msg") == "AC" + assert result.get("jobs")[0].get("ret_code").get("msg") == "HOLD" assert result.get("jobs")[0].get("ret_code").get("msg_code") is None @@ -1072,11 +1069,11 @@ def test_job_submit_local_jcl_typrun_jclhold(ansible_zos_module): assert result.get("changed") is False assert result.get("jobs")[0].get("job_id") is not None assert re.search( - r'long running job', + r'The job was run with TYPRUN=HOLD or TYPRUN=JCLHOLD', repr(result.get("jobs")[0].get("ret_code").get("msg_txt")) ) assert result.get("jobs")[0].get("ret_code").get("code") is None - assert result.get("jobs")[0].get("ret_code").get("msg") == "AC" + assert result.get("jobs")[0].get("ret_code").get("msg") == "HOLD" assert result.get("jobs")[0].get("ret_code").get("msg_code") is None diff --git a/tests/functional/modules/test_zos_script_func.py b/tests/functional/modules/test_zos_script_func.py index 2afa75913..3a7359020 100644 --- a/tests/functional/modules/test_zos_script_func.py +++ b/tests/functional/modules/test_zos_script_func.py @@ -87,7 +87,7 @@ async_status: jid: "{{{{ job_task.ansible_job_id }}}}" register: job_result - until: job_result.finished + until: job_result.finished | bool retries: 20 delay: 5 """ diff --git a/tests/functional/modules/test_zos_unarchive_func.py b/tests/functional/modules/test_zos_unarchive_func.py index 22238326a..5b4aff3df 100644 --- a/tests/functional/modules/test_zos_unarchive_func.py +++ b/tests/functional/modules/test_zos_unarchive_func.py @@ -72,7 +72,7 @@ async_status: jid: "{{{{ job_task.ansible_job_id }}}}" register: job_result - until: job_result.finished + until: job_result.finished | bool retries: 20 delay: 5 """
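(For reference, a minimal sketch of exercising the new `sysin` flag directly against the `job_output()` helper changed in plugins/module_utils/job.py above; the collection import path and the helper name are illustrative, while the shape of the returned job and DD dictionaries follows the job.py hunks and the new test_zos_job_output_job_exists_with_sysin test.)

from ansible_collections.ibm.ibm_zos_core.plugins.module_utils.job import job_output

def sysin_dds(job_name):
    # With sysin=True, jobs.list_dds() is asked to include SYSIN DDs, so they
    # appear in each job's "ddnames" list and can be filtered out directly.
    found = []
    for job in job_output(job_name=job_name, sysin=True):
        for dd in job.get("ddnames", []):
            if dd.get("ddname") == "SYSIN":
                found.append(dd)
    return found

# Example: sysin_dds("SYSINS") should return at least one entry for JCL that
# includes an inline SYSIN DD, mirroring the new functional test above.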