diff --git a/plugins/module_utils/backup.py b/plugins/module_utils/backup.py
index eb2a22004..1a21def67 100644
--- a/plugins/module_utils/backup.py
+++ b/plugins/module_utils/backup.py
@@ -34,7 +34,7 @@
     is_member,
     extract_dsname,
     temp_member_name,
-    DataSet,
+    DataSetUtils,
 )
 from ansible_collections.ibm.ibm_zos_core.plugins.module_utils.mvs_cmd import iebcopy
@@ -96,7 +96,7 @@ def mvs_file_backup(dsn, bk_dsn=None, tmphlq=None):
         # added the check for a sub-mmember, just in this case
         if not bk_dsn or "(" not in bk_dsn:
             bk_dsn = extract_dsname(dsn) + "({0})".format(temp_member_name())
-        elif DataSet.is_gds_positive_relative_name(bk_dsn):
+        elif DataSetUtils.is_gds_positive_relative_name(bk_dsn):
             bk_dsn = datasets.create(bk_dsn)
     bk_dsn = _validate_data_set_name(bk_dsn).upper()
@@ -127,7 +127,7 @@ def mvs_file_backup(dsn, bk_dsn=None, tmphlq=None):
         except exceptions.ZOAUException as copy_exception:
             cp_rc = copy_exception.response.rc
     else:
-        if DataSet.is_gds_positive_relative_name(bk_dsn):
+        if DataSetUtils.is_gds_positive_relative_name(bk_dsn):
             cp_rc = datasets.copy(dsn, bk_dsn)
         else:
             cp_rc = _copy_ds(dsn, bk_dsn, tmphlq=tmphlq)
@@ -268,7 +268,7 @@ def _copy_ds(ds, bk_ds, tmphlq=None):
                 ds, out, err
             )
         )
-    if rc != 0 and DataSet.is_empty(ds, tmphlq=tmphlq):
+    if rc != 0 and DataSetUtils.is_empty(ds, tmphlq=tmphlq):
         rc = 0
     return rc
diff --git a/plugins/module_utils/data_set.py b/plugins/module_utils/data_set.py
index 3c055d0ff..17102548d 100644
--- a/plugins/module_utils/data_set.py
+++ b/plugins/module_utils/data_set.py
@@ -44,751 +44,758 @@
     ztypes = ZOAUImportError(traceback.format_exc())
-class DataSet(object):
-    """Perform various data set operations such as creation, deletion and cataloging."""
-
-    # Module args mapped to equivalent ZOAU data set create args
-    _ZOAU_DS_CREATE_ARGS = {
-        "name": "name",
-        "type": "type",
-        "space_primary": "primary_space",
-        "space_secondary": "secondary_space",
-        "record_format": "record_format",
-        "sms_storage_class": "storage_class_name",
-        "sms_data_class": "data_class_name",
-        "sms_management_class": "management_class_name",
-        "record_length": "record_length",
-        "key_offset": "key_offset",
-        "key_length": "key_length",
-        "block_size": "block_size",
-        "directory_blocks": "directory_blocks",
-        "volumes": "volumes",
-    }
-
-    _VSAM_CATALOG_COMMAND_NOT_INDEXED = """ DEFINE CLUSTER -
-    (NAME('{0}') -
-    VOLUMES({1}) -
-    RECATALOG {2}) -
-    DATA(NAME('{0}.DATA'))
+class DataSet():
     """
+    This class represents a z/OS data set that either already exists on the
+    system or is yet to be created. It encapsulates the data set attributes
+    for easy access and provides operations that act on that data set.
- _VSAM_CATALOG_COMMAND_INDEXED = """ DEFINE CLUSTER - - (NAME('{0}') - - VOLUMES({1}) - - RECATALOG {2}) - - DATA(NAME('{0}.DATA')) - - INDEX(NAME('{0}.INDEX')) """ - - _NON_VSAM_UNCATALOG_COMMAND = " UNCATLG DSNAME={0}" - - _VSAM_UNCATALOG_COMMAND = " DELETE '{0}' NOSCRATCH" - - MVS_PARTITIONED = frozenset({"PE", "PO", "PDSE", "PDS"}) - MVS_SEQ = frozenset({"PS", "SEQ", "BASIC"}) - MVS_VSAM = frozenset({"KSDS", "ESDS", "RRDS", "LDS", "VSAM"}) - - @staticmethod - def ensure_present( + def __init__( + self, name, - replace, - type, - raw_name=None, + escape_name=False, + data_set_type=None, + state=None, + organization=None, + record_format=None, + volumes=None, + block_size=None, + record_length=None, space_primary=None, space_secondary=None, space_type=None, - record_format=None, - record_length=None, - block_size=None, directory_blocks=None, key_length=None, key_offset=None, sms_storage_class=None, sms_data_class=None, sms_management_class=None, - volumes=None, - tmp_hlq=None, - force=None, + total_space=None, + used_space=None, + last_referenced=None, + is_cataloged=None, ): - """Creates data set if it does not already exist. + # Different class variables + self.data_set_possible_states = {"unknown", "present", "absent"} + self.name = name + self.organization = organization + self.record_format = record_format + self.volumes = volumes + self.block_size = block_size + self.record_length = record_length + self.total_space = total_space + self.used_space = used_space + self.last_referenced = last_referenced + self.raw_name = name + self.data_set_type = data_set_type + self.state = state + self.space_primary = space_primary + self.space_secondary = space_secondary + self.space_type = space_type + self.directory_blocks = directory_blocks + self.key_length = key_length + self.key_offset = key_offset + self.sms_storage_class = sms_storage_class + self.sms_data_class = sms_data_class + self.sms_management_class = sms_management_class + self.volumes = volumes + self.is_gds_active = False + self.is_cataloged = False + + # If name has escaped chars or is GDS relative name we clean it. + if escape_name: + self.name = DataSet.escape_data_set_name(self.name) + if DataSet.is_gds_relative_name(self.name): + try: + self.name = DataSet.resolve_gds_absolute_name(self.name) + self.is_gds_active = True + except Exception: + # This means the generation is a positive version so is only used for creation. + self.is_gds_active = False + if self.data_set_type and (self.data_set_type.upper() in DataSet.MVS_VSAM or self.data_set_type == "zfs"): + # When trying to create a new VSAM with a specified record format will fail + # with ZOAU + self.record_format = None + + def init_from_zoau_data_set_class(self, zoau_data_set): + print(f"zoau data_set_class {zoau_data_set}") + data_set = DataSet() + + def create(self, tmp_hlq=None, replace=True, force=False): + """Creates the data set in question. + + Returns + ------- + int + Indicates if changes were made. 
+ """ + arguments = { + "name": self.name, + "raw_name": self.raw_name, + "type": self.data_set_type, + "space_primary": self.space_primary, + "space_secondary": self.space_secondary, + "space_type": self.space_type, + "record_format": self.record_format, + "record_length": self.record_length, + "block_size": self.block_size, + "directory_blocks": self.directory_blocks, + "key_length": self.key_length, + "key_offset": self.key_offset, + "sms_storage_class": self.sms_storage_class, + "sms_data_class": self.sms_data_class, + "sms_management_class": self.sms_management_class, + "volumes": self.volumes, + "tmp_hlq": tmp_hlq, + "force": force, + } + formatted_args = DataSet._build_zoau_args(**arguments) + changed = False + if DataSet.data_set_exists(self.name, tmphlq=tmp_hlq): + DataSet.delete(self.name) + changed = True + zoau_data_set = datasets.create(**formatted_args) + if zoau_data_set is not None: + self.set_state("present") + self.name = zoau_data_set.name + return True + return changed + + def ensure_present(self, tmp_hlq=None, replace=False, force=False): + """ Make sure that the data set is created or fail creating it. Parameters ---------- - name : str - The name of the dataset. - raw_name : str - Original name without escaping or gds name resolve operations performed. + tmp_hlq : str + High level qualifier for temporary datasets. replace : bool Used to determine behavior when data set already exists. - type : str, optional - The type of dataset. - Valid options are: SEQ, BASIC, LARGE, PDS, PDSE, LIBRARY, LDS, RRDS, ESDS, KSDS. - Defaults to None. - space_primary : int, optional - The amount of primary space to allocate for the dataset. - Defaults to None. - space_secondary : int, optional - The amount of secondary space to allocate for the dataset. - Defaults to None. - space_type : str, optional - The unit of measurement to use when defining primary and secondary space. - Defaults to None. - record_format : str, optional - The record format to use for the dataset. - Valid options are: F, FB, VB, FBA, VBA, U. - Defaults to None. - record_length : int, optional - The length, in bytes, of each record in the data set. - Defaults to None. - block_size : int, optional - The block size to use for the data set. - Defaults to None. - directory_blocks : int, optional - The number of directory blocks to allocate to the data set. - Defaults to None. - key_length : int, optional - The key length of a record. - Required for Key Sequenced Datasets (KSDS). - Defaults to None. - key_offset : int, optional - The key offset is the position of the first byte of the key - in each logical record of a the specified VSAM data set. - If the key is at the beginning of the logical record, the offset is zero. - Required for Key Sequenced Datasets (KSDS). - Defaults to None. - sms_storage_class : str, optional - The storage class for an SMS-managed dataset. - Required for SMS-managed datasets that do not match an SMS-rule. - Not valid for datasets that are not SMS-managed. - Note that all non-linear VSAM datasets are SMS-managed. - Defaults to None. - sms_data_class : str, optional - The data class for an SMS-managed dataset. - Optional for SMS-managed datasets that do not match an SMS-rule. - Not valid for datasets that are not SMS-managed. - Note that all non-linear VSAM datasets are SMS-managed. - Defaults to None. - sms_management_class : str, optional - The management class for an SMS-managed dataset. - Optional for SMS-managed datasets that do not match an SMS-rule. 
- Not valid for datasets that are not SMS-managed. - Note that all non-linear VSAM datasets are SMS-managed. - Defaults to None. - volumes : Union[str, list[str]], optional - A list of volume serials. - When providing multiple volumes, processing will begin with - the first volume in the provided list. Offline volumes are not considered. - Volumes can always be provided when not using SMS. - When using SMS, volumes can be provided when the storage class being used - has GUARANTEED_SPACE=YES specified. Otherwise, the allocation will fail. - Defaults to None. - tmp_hlq : str, optional - High level qualifier for temporary datasets. - force : bool, optional - Used to determine behavior when performing member operations on a PDSE. - Defaults to None. + force : bool + Used to determine behavior when performing member operations on a pdse. Returns ------- - bool + int Indicates if changes were made. - - Raises - ------ - DatasetCreateError - When data set creation fails. """ - arguments = locals() - arguments.pop("replace", None) - present = False - changed = False - if DataSet.data_set_cataloged(name, tmphlq=tmp_hlq): - present = True - # Validate volume conflicts when: - # 1. Dataset exists in catalog (present=True). - # 2. User hasn't requested replacement (replace=False). - # 3. Specific volumes were requested (volumes parameter provided). - if present and not replace and volumes: - cataloged_volumes = DataSet.data_set_cataloged_volume_list(name, tmphlq=tmp_hlq) - requested_volumes = [vol.upper() for vol in volumes] - if not any(vol.upper() in requested_volumes for vol in cataloged_volumes): - raise DatasetCatalogedOnDifferentVolumeError( - name=name, - existing_volumes=cataloged_volumes, - requested_volumes=volumes - ) - - if not present: - try: - DataSet.create(**arguments) - except DatasetCreateError as e: - raise_error = True - # data set exists on volume - if "DatasetVerificationError" in e.msg or "Error Code: 0x4704" in e.msg: - present, changed = DataSet.attempt_catalog_if_necessary( - name, volumes, tmphlq=tmp_hlq - ) - if present and changed: - raise_error = False - if raise_error: - raise - if present: - if not replace: - return changed - DataSet.replace(**arguments) - if type.upper() == "ZFS": - DataSet.format_zfs(name) - return True + arguments = { + "name": self.name, + "raw_name": self.raw_name, + "type": self.data_set_type, + "space_primary": self.space_primary, + "space_secondary": self.space_secondary, + "space_type": self.space_type, + "record_format": self.record_format, + "record_length": self.record_length, + "block_size": self.block_size, + "directory_blocks": self.directory_blocks, + "key_length": self.key_length, + "key_offset": self.key_offset, + "sms_storage_class": self.sms_storage_class, + "sms_data_class": self.sms_data_class, + "sms_management_class": self.sms_management_class, + "volumes": self.volumes, + "replace": replace, + "tmp_hlq": tmp_hlq, + "force": force, + } + rc = DataSetUtils.ensure_present(**arguments) + self.set_state("present") + return rc - @staticmethod - def ensure_absent(name, volumes=None, tmphlq=None): - """Deletes provided data set if it exists. + def ensure_absent(self, tmp_hlq=None): + """Removes the data set. Parameters ---------- - name : str - The name of the data set to ensure is absent. - volumes : list[str] - The volumes the data set may reside on. - tmphlq : str - High Level Qualifier for temporary datasets. + tmp_hlq : str + High level qualifier for temporary datasets. Returns ------- - bool + int Indicates if changes were made. 
""" - changed, present = DataSet.attempt_catalog_if_necessary_and_delete(name, volumes, tmphlq=tmphlq) - return changed + rc = DataSetUtils.ensure_absent(self.name, self.volumes, tmphlq=tmp_hlq) + if rc == 0: + self.set_state("absent") + return rc - # ? should we do additional check to ensure member was actually created? - @staticmethod - def ensure_member_present(name, replace=False, tmphlq=None): - """Creates data set member if it does not already exist. + def delete(self): + """Deletes the data set in question. + + Returns + ------- + int + Indicates if changes were made. + """ + DataSetUtils.ensure_absent(self.name, self.volumes) + self.set_state("absent") + + def ensure_cataloged(self, tmp_hlq=None): + """ + Ensures the data set is cataloged, if not catalogs it. Parameters ---------- - name : str - The name of the data set to ensure is present. - replace : bool - Used to determine behavior when data set already - exists. - tmphlq : str - High Level Qualifier for temporary datasets. + tmp_hlq : str + High level qualifier for temporary datasets. Returns ------- - bool + int Indicates if changes were made. """ - if DataSet.data_set_member_exists(name): - if not replace: - return False - DataSet.delete_member(name) - DataSet.create_member(name, tmphlq=tmphlq) - return True + rc = DataSetUtils.ensure_cataloged(name=self.name, volumes=self.volumes, tmphlq=tmp_hlq) + self.is_cataloged = True + return rc - @staticmethod - def ensure_member_absent(name, force=False): - """Deletes provided data set member if it exists. - Returns a boolean indicating if changes were made. + def catalog(self, tmp_hlq=None): + """Catalog the data set in question. Parameters ---------- - force : bool - Mode to execute. + tmp_hlq : str + High level qualifier for temporary datasets. Returns ------- - bool - True if the data set member exists. + int + Indicates if changes were made. """ - if DataSet.data_set_member_exists(name): - DataSet.delete_member(name, force) - return True - return False + rc = DataSetUtils.catalog(self.name, self.volumes, tmphlq=tmp_hlq) + self.is_cataloged = True + return rc - @staticmethod - def ensure_cataloged(name, volumes, tmphlq=None): - """Ensure a data set is cataloged. Data set can initially - be in cataloged or uncataloged state when this function is called. + def ensure_uncataloged(self, tmp_hlq=None): + """ + Ensures the data set is uncataloged, if not catalogs it. Parameters ---------- - name : str - The data set name to ensure is cataloged. - volume : str - The volume on which the data set should exist. - tmphlq : str - High Level Qualifier for temporary datasets. + tmp_hlq : str + High level qualifier for temporary datasets. Returns ------- - bool - If changes were made. + int + Indicates if changes were made. """ - if DataSet.data_set_cataloged(name, None, tmphlq=tmphlq): - return False - try: - DataSet.catalog(name, volumes, tmphlq=tmphlq) - except DatasetCatalogError: - raise DatasetCatalogError( - name, volumes, "-1", "Data set was not found. Unable to catalog." - ) - return True + rc = DataSetUtils.ensure_uncataloged(self.name, tmphlq=tmp_hlq) + self.is_cataloged = False + return rc - @staticmethod - def ensure_uncataloged(name, tmphlq=None): - """Ensure a data set is uncataloged. Data set can initially - be in cataloged or uncataloged state when this function is called. + def uncatalog(self, tmp_hlq=None): + """Uncatalog the data set in question. Parameters ---------- - name : str - The data set name to ensure is uncataloged. 
-        tmphlq : str
-            High Level Qualifier for temporary datasets.
+        tmp_hlq : str
+            High level qualifier for temporary datasets.
 
         Returns
         -------
-        bool
-            If changes were made.
+        int
+            Indicates if changes were made.
         """
-        if DataSet.data_set_cataloged(name, tmphlq=tmphlq):
-            DataSet.uncatalog(name, tmphlq=tmphlq)
-            return True
-        return False
+        rc = DataSetUtils.uncatalog(self.name, tmphlq=tmp_hlq)
+        self.is_cataloged = False
+        return rc
 
-    @staticmethod
-    def allocate_model_data_set(ds_name, model, executable=False, asa_text=False, vol=None, tmphlq=None):
-        """Allocates a data set based on the attributes of a 'model' data set.
-        Useful when a data set needs to be created identical to another. Supported
-        model(s) are Physical Sequential (PS), Partitioned Data Sets (PDS/PDSE),
-        and VSAM data sets. If `ds_name` has a member (i.e., "DATASET(member)"),
-        it will be shortened to just the partitioned data set name.
+    def set_state(self, new_state):
+        """Used to set the data set state.
 
         Parameters
         ----------
-        ds_name : str
-            The name of the data set to allocate. If the ds_name
-            is a partitioned member e.g. hlq.llq.ds(mem), only the data set name
-            must be used. See extract_dsname(ds_name) in data_set.py.
-        model : str
-            The name of the data set whose allocation parameters
-            should be used to allocate the new data set 'ds_name'.
-        executable : bool
-            Whether the new data set should support executables.
-        asa_text : bool
-            Whether the new data set should support ASA control
-            characters (have record format FBA).
-        vol : str
-            The volume where data set should be allocated.
-        tmphlq : str
-            High Level Qualifier for temporary datasets.
-
-        Raise
-        -----
-        NonExistentSourceError
-            When the model data set does not exist.
-        MVSCmdExecError
-            When the call to IKJEFT01 to allocate the
-            data set fails.
+        new_state : str {unknown, present, absent}
+            New state of the data set.
 
+        Returns
+        -------
+        bool
+            If state was set properly.
         """
-        if not DataSet.data_set_exists(model, tmphlq=tmphlq):
-            raise DatasetNotFoundError(model)
+        if new_state not in self.data_set_possible_states:
+            raise ValueError(f"State {new_state} is not supported for the DataSet class.")
+        self.state = new_state
+        return True
 
-        ds_name = extract_dsname(ds_name)
-        model_type = DataSet.data_set_type(model, tmphlq=tmphlq)
 
-        # The break lines are absolutely necessary, a JCL code line can't
-        # be longer than 72 characters. The following JCL is compatible with
-        # all data set types.
-        alloc_cmd = """ ALLOC DS('{0}') -
-        LIKE ('{1}')""".format(ds_name, model)
 
-        # Now adding special parameters for sequential and partitioned
-        # data sets.
- if model_type not in DataSet.MVS_VSAM: - try: - data_set = datasets.list_datasets(model)[0] - except IndexError: - raise AttributeError("Could not retrieve model data set block size.") - block_size = data_set.block_size - alloc_cmd = """{0} - - BLKSIZE({1})""".format(alloc_cmd, block_size) - if vol: - alloc_cmd = """{0} - - VOLUME({1})""".format(alloc_cmd, vol.upper()) +class DataSetUtils(): + # Module args mapped to equivalent ZOAU data set create args + _ZOAU_DS_CREATE_ARGS = { + "name": "name", + "type": "type", + "space_primary": "primary_space", + "space_secondary": "secondary_space", + "record_format": "record_format", + "sms_storage_class": "storage_class_name", + "sms_data_class": "data_class_name", + "sms_management_class": "management_class_name", + "record_length": "record_length", + "key_offset": "key_offset", + "key_length": "key_length", + "block_size": "block_size", + "directory_blocks": "directory_blocks", + "volumes": "volumes", + } - if asa_text: - alloc_cmd = """{0} - - RECFM(F,B,A)""".format(alloc_cmd) + _VSAM_CATALOG_COMMAND_NOT_INDEXED = """ DEFINE CLUSTER - + (NAME('{0}') - + VOLUMES({1}) - + RECATALOG {2}) - + DATA(NAME('{0}.DATA')) + """ - if executable: - alloc_cmd = """{0} - - RECFM(U) - - DSNTYPE(LIBRARY)""".format(alloc_cmd) + _VSAM_CATALOG_COMMAND_INDEXED = """ DEFINE CLUSTER - + (NAME('{0}') - + VOLUMES({1}) - + RECATALOG {2}) - + DATA(NAME('{0}.DATA')) - + INDEX(NAME('{0}.INDEX')) + """ - rc, out, err = mvs_cmd.ikjeft01(alloc_cmd, authorized=True, tmphlq=tmphlq) - if rc != 0: - raise MVSCmdExecError(rc, out, err) + _NON_VSAM_UNCATALOG_COMMAND = " UNCATLG DSNAME={0}" - @staticmethod - def allocate_gds_model_data_set(ds_name, model, executable=False, asa_text=False, vol=None, tmphlq=None): + _VSAM_UNCATALOG_COMMAND = " DELETE '{0}' NOSCRATCH" + + MVS_PARTITIONED = frozenset({"PE", "PO", "PDSE", "PDS"}) + MVS_SEQ = frozenset({"PS", "SEQ", "BASIC"}) + MVS_VSAM = frozenset({"KSDS", "ESDS", "RRDS", "LDS", "VSAM"}) + + def init(self): """ - Allocates a new current generation of a generation data group using a model - data set to set its attributes. + Standard utility that performs multiple data set related operations without the need of instanciating a + Data Set object. + """ + self.util = True + + @staticmethod + def ensure_present( + name, + replace, + type, + raw_name=None, + space_primary=None, + space_secondary=None, + space_type=None, + record_format=None, + record_length=None, + block_size=None, + directory_blocks=None, + key_length=None, + key_offset=None, + sms_storage_class=None, + sms_data_class=None, + sms_management_class=None, + volumes=None, + tmp_hlq=None, + force=None, + ): + """Creates data set if it does not already exist. Parameters ---------- - ds_name : str - Name of the data set that will be allocated. It must be a GDS - relative name. - model : str - The name of the data set whose allocation parameters - should be used to allocate the new data set. - executable : bool, optional - Whether the new data set should support executables. - asa_text : bool, optional - Whether the new data set should support ASA control - characters (have record format FBA). - vol : str, optional - The volume where the new data set should be allocated. - tmphlq : str - High Level Qualifier for temporary datasets. + name : str + The name of the dataset. + raw_name : str + Original name without escaping or gds name resolve operations performed. + replace : bool + Used to determine behavior when data set already exists. + type : str, optional + The type of dataset. 
+ Valid options are: SEQ, BASIC, LARGE, PDS, PDSE, LIBRARY, LDS, RRDS, ESDS, KSDS. + Defaults to None. + space_primary : int, optional + The amount of primary space to allocate for the dataset. + Defaults to None. + space_secondary : int, optional + The amount of secondary space to allocate for the dataset. + Defaults to None. + space_type : str, optional + The unit of measurement to use when defining primary and secondary space. + Defaults to None. + record_format : str, optional + The record format to use for the dataset. + Valid options are: F, FB, VB, FBA, VBA, U. + Defaults to None. + record_length : int, optional + The length, in bytes, of each record in the data set. + Defaults to None. + block_size : int, optional + The block size to use for the data set. + Defaults to None. + directory_blocks : int, optional + The number of directory blocks to allocate to the data set. + Defaults to None. + key_length : int, optional + The key length of a record. + Required for Key Sequenced Datasets (KSDS). + Defaults to None. + key_offset : int, optional + The key offset is the position of the first byte of the key + in each logical record of a the specified VSAM data set. + If the key is at the beginning of the logical record, the offset is zero. + Required for Key Sequenced Datasets (KSDS). + Defaults to None. + sms_storage_class : str, optional + The storage class for an SMS-managed dataset. + Required for SMS-managed datasets that do not match an SMS-rule. + Not valid for datasets that are not SMS-managed. + Note that all non-linear VSAM datasets are SMS-managed. + Defaults to None. + sms_data_class : str, optional + The data class for an SMS-managed dataset. + Optional for SMS-managed datasets that do not match an SMS-rule. + Not valid for datasets that are not SMS-managed. + Note that all non-linear VSAM datasets are SMS-managed. + Defaults to None. + sms_management_class : str, optional + The management class for an SMS-managed dataset. + Optional for SMS-managed datasets that do not match an SMS-rule. + Not valid for datasets that are not SMS-managed. + Note that all non-linear VSAM datasets are SMS-managed. + Defaults to None. + volumes : Union[str, list[str]], optional + A list of volume serials. + When providing multiple volumes, processing will begin with + the first volume in the provided list. Offline volumes are not considered. + Volumes can always be provided when not using SMS. + When using SMS, volumes can be provided when the storage class being used + has GUARANTEED_SPACE=YES specified. Otherwise, the allocation will fail. + Defaults to None. + tmp_hlq : str, optional + High level qualifier for temporary datasets. + force : bool, optional + Used to determine behavior when performing member operations on a PDSE. + Defaults to None. Returns ------- - str - Absolute name of the newly allocated generation data set. + bool + Indicates if changes were made. Raises ------ DatasetCreateError - When the allocation fails. + When data set creation fails. 
""" - model_attributes = datasets.list_datasets(model)[0] - dataset_type = model_attributes.organization - record_format = model_attributes.record_format - - if executable: - dataset_type = "library" - elif dataset_type in DataSet.MVS_SEQ: - dataset_type = "seq" - elif dataset_type in DataSet.MVS_PARTITIONED: - dataset_type = "pdse" - - if asa_text: - record_format = "fba" - elif executable: - record_format = "u" - - data_set_object = MVSDataSet( - name=ds_name, - data_set_type=dataset_type, - state="absent", - record_format=record_format, - volumes=vol, - block_size=model_attributes.block_size, - record_length=model_attributes.record_length, - space_primary=model_attributes.total_space, - space_type='' - ) + arguments = locals() + arguments.pop("replace", None) + present = False + changed = False + if DataSetUtils.data_set_cataloged(name, tmphlq=tmp_hlq): + present = True + # Validate volume conflicts when: + # 1. Dataset exists in catalog (present=True). + # 2. User hasn't requested replacement (replace=False). + # 3. Specific volumes were requested (volumes parameter provided). + if present and not replace and volumes: + cataloged_volumes = DataSetUtils.data_set_cataloged_volume_list(name, tmphlq=tmp_hlq) + requested_volumes = [vol.upper() for vol in volumes] + if not any(vol.upper() in requested_volumes for vol in cataloged_volumes): + raise DatasetCatalogedOnDifferentVolumeError( + name=name, + existing_volumes=cataloged_volumes, + requested_volumes=volumes + ) - success = data_set_object.ensure_present(tmp_hlq=tmphlq) - if not success: - raise DatasetCreateError( - data_set=ds_name, - msg=f"Error while trying to allocate {ds_name}." - ) + if not present: + try: + changed, data_set = DataSetUtils.create(**arguments) + except DatasetCreateError as e: + raise_error = True + # data set exists on volume + if "DatasetVerificationError" in e.msg or "Error Code: 0x4704" in e.msg: + present, changed = DataSetUtils.attempt_catalog_if_necessary( + name, volumes, tmphlq=tmp_hlq + ) + if present and changed: + raise_error = False + if raise_error: + raise + if present: + if not replace: + return changed + DataSetUtils.replace(**arguments) + if type.upper() == "ZFS": + DataSetUtils.format_zfs(name) + return changed @staticmethod - def data_set_cataloged(name, volumes=None, tmphlq=None): - """Determine if a data set is in catalog. + def ensure_absent(name, volumes=None, tmphlq=None): + """Deletes provided data set if it exists. Parameters ---------- name : str - The data set name to check if cataloged. - volume : str - The volume the data set may reside on. + The name of the data set to ensure is absent. + volumes : list[str] + The volumes the data set may reside on. tmphlq : str High Level Qualifier for temporary datasets. Returns ------- bool - If data is is cataloged. - + Indicates if changes were made. """ + changed, present = DataSetUtils.attempt_catalog_if_necessary_and_delete(name, volumes, tmphlq=tmphlq) + return changed - # Resolve GDS names before passing it into listcat - if DataSet.is_gds_relative_name(name): - try: - name = DataSet.resolve_gds_absolute_name(name) - except GDSNameResolveError: - # if GDS name cannot be resolved, it's not in the catalog. - return False - - # We need to unescape because this calls to system can handle - # special characters just fine. 
- name = name.upper().replace("\\", '') - - module = AnsibleModuleHelper(argument_spec={}) - stdin = " LISTCAT ENTRIES('{0}')".format(name) - - cmd = "mvscmdauth --pgm=idcams --sysprint=* --sysin=stdin" - if tmphlq: - cmd = "{0} -Q={1}".format(cmd, tmphlq) - - rc, stdout, stderr = module.run_command( - cmd, - data=stdin, - errors='replace' - ) - - # The above 'listcat entries' command to idcams returns: - # rc=0 if data set found in catalog - # rc=4 if data set NOT found in catalog - # rc>4 for other errors - if rc > 4: - raise MVSCmdExecError(rc, stdout, stderr) - - if volumes: - cataloged_volume_list = DataSet.data_set_cataloged_volume_list(name, tmphlq=tmphlq) or [] - if bool(set(volumes) & set(cataloged_volume_list)): - return True - else: - if re.search(r"-\s" + re.escape(name) + r"\s*\n\s+IN-CAT", stdout): - return True - - return False - + # ? should we do additional check to ensure member was actually created? @staticmethod - def data_set_cataloged_volume_list(name, tmphlq=None): - """Get the volume list for a cataloged dataset name. + def ensure_member_present(name, replace=False, tmphlq=None): + """Creates data set member if it does not already exist. Parameters ---------- name : str - The data set name to check if cataloged. + The name of the data set to ensure is present. + replace : bool + Used to determine behavior when data set already + exists. tmphlq : str High Level Qualifier for temporary datasets. Returns ------- - Union[str] - A list of volumes where the dataset is cataloged. - + bool + Indicates if changes were made. """ - name = name.upper() - module = AnsibleModuleHelper(argument_spec={}) - stdin = " LISTCAT ENTRIES('{0}') ALL".format(name) - - cmd = "mvscmdauth --pgm=idcams --sysprint=* --sysin=stdin" - if tmphlq: - cmd = "{0} -Q={1}".format(cmd, tmphlq) - - rc, stdout, stderr = module.run_command( - cmd, - data=stdin, - errors='replace' - ) - # The above 'listcat entries all' command to idcams returns: - # rc=0 if data set found in catalog - # rc=4 if data set NOT found in catalog - # rc>4 for other errors - if rc > 4: - raise MVSCmdExecError(rc, stdout, stderr) - - delimiter = 'VOLSER------------' - arr = stdout.split(delimiter)[1:] # throw away header - - # Volume serials (VOLSER) under 6 chars will have one or more leading '-'s due to the chosen delimiter. - # The volser is in between the beginning of each str and the first space. - # Strip away any leading '-'s, then split on the next whitespace and throw away the remaining in each str. - volume_list = [x.strip('-').split()[0] for x in arr] - - volume_list = list(set(volume_list)) # remove duplicates, order doesn't matter - return volume_list + if DataSetUtils.data_set_member_exists(name): + if not replace: + return False + DataSetUtils.delete_member(name) + DataSetUtils.create_member(name, tmphlq=tmphlq) + return True @staticmethod - def data_set_exists(name, volume=None, tmphlq=None): - """Determine if a data set exists. - This will check the catalog in addition to - the volume table of contents. + def ensure_member_absent(name, force=False): + """Deletes provided data set member if it exists. + Returns a boolean indicating if changes were made. Parameters ---------- - name : str - The data set name to check if exists. - volume : str - The volume the data set may reside on. - tmphlq : str - High Level Qualifier for temporary datasets. + force : bool + Mode to execute. Returns ------- bool - If data is found. + True if the data set member exists. 
""" - if DataSet.data_set_cataloged(name, tmphlq=tmphlq): + if DataSetUtils.data_set_member_exists(name): + DataSetUtils.delete_member(name, force) return True - elif volume is not None: - return DataSet._is_in_vtoc(name, volume, tmphlq=tmphlq) return False @staticmethod - def data_set_member_exists(name): - """Checks for existence of data set member. + def ensure_cataloged(name, volumes, tmphlq=None): + """Ensure a data set is cataloged. Data set can initially + be in cataloged or uncataloged state when this function is called. Parameters ---------- name : str - The data set name including member. + The data set name to ensure is cataloged. + volume : str + The volume on which the data set should exist. + tmphlq : str + High Level Qualifier for temporary datasets. Returns ------- bool - If data set member exists. + If changes were made. """ - module = AnsibleModuleHelper(argument_spec={}) - rc, stdout, stderr = module.run_command( - "head \"//'{0}'\"".format(name), errors='replace') - if rc != 0 or (stderr and "EDC5067I" in stderr): + if DataSetUtils.data_set_cataloged(name, None, tmphlq=tmphlq): return False + try: + DataSetUtils.catalog(name, volumes, tmphlq=tmphlq) + except DatasetCatalogError: + raise DatasetCatalogError( + name, volumes, "-1", "Data set was not found. Unable to catalog." + ) return True @staticmethod - def data_set_shared_members(src, dest): - """Checks for the existence of members from a source data set in - a destination data set. + def ensure_uncataloged(name, tmphlq=None): + """Ensure a data set is uncataloged. Data set can initially + be in cataloged or uncataloged state when this function is called. Parameters ---------- - src : str - The source data set name. The name can contain a wildcard pattern. - dest : str - The destination data set name. + name : str + The data set name to ensure is uncataloged. + tmphlq : str + High Level Qualifier for temporary datasets. Returns ------- bool - If at least one of the members in src exists in dest. + If changes were made. """ - src_members = datasets.list_members(src) - - for member in src_members: - if DataSet.data_set_member_exists("{0}({1})".format(dest, member)): - return True - + if DataSetUtils.data_set_cataloged(name, tmphlq=tmphlq): + DataSetUtils.uncatalog(name, tmphlq=tmphlq) + return True return False @staticmethod - def get_member_name_from_file(file_name): - """Creates a member name for a partitioned data set by taking up to the - first 8 characters from a filename without its file extension + def allocate_model_data_set(ds_name, model, executable=False, asa_text=False, vol=None, tmphlq=None): + """Allocates a data set based on the attributes of a 'model' data set. + Useful when a data set needs to be created identical to another. Supported + model(s) are Physical Sequential (PS), Partitioned Data Sets (PDS/PDSE), + and VSAM data sets. If `ds_name` has a member (i.e., "DATASET(member)"), + it will be shortened to just the partitioned data set name. Parameters ---------- - file_name : str - A file name that can include a file extension. + ds_name : str + The name of the data set to allocate. If the ds_name + is a partitioned member e.g. hlq.llq.ds(mem), only the data set name + must be used. See extract_dsname(ds_name) in data_set.py. + model : str + The name of the data set whose allocation parameters + should be used to allocate the new data set 'ds_name'. + executable : bool + Whether the new data set should support executables. 
+ asa_text : bool + Whether the new data set should support ASA control + characters (have record format FBA). + vol : str + The volume where data set should be allocated. + tmphlq : str + High Level Qualifier for temporary datasets. + + Raise + ----- + NonExistentSourceError + When the model data set does not exist. + MVSCmdExecError + When the call to IKJEFT01 to allocate the + data set fails. - Returns - ------- - str - Member name constructed from the file name. """ - # Removing the file extension. - member_name = path.splitext(file_name)[0] - # Taking the first 8 characters from the file name. - member_name = member_name.replace(".", "")[0:8] + if not DataSetUtils.data_set_exists(model, tmphlq=tmphlq): + raise DatasetNotFoundError(model) - return member_name + ds_name = extract_dsname(ds_name) + model_type = DataSetUtils.data_set_type(model, tmphlq=tmphlq) - @staticmethod - def files_in_data_set_members(src, dest): - """Checks for the existence of members corresponding to USS files in a - destination data set. The file names get converted to the form they - would take when copied into a partitioned data set. + # The break lines are absolutely necessary, a JCL code line can't + # be longer than 72 characters. The following JCL is compatible with + # all data set types. + alloc_cmd = """ ALLOC DS('{0}') - + LIKE ('{1}')""".format(ds_name, model) - Parameters - ---------- - src : str - USS path to a file or a directory. - dest : str - Name of the destination data set. + # Now adding special parameters for sequential and partitioned + # data sets. + if model_type not in DataSetUtils.MVS_VSAM: + try: + data_set = datasets.list_datasets(model)[0] + except IndexError: + raise AttributeError("Could not retrieve model data set block size.") + block_size = data_set.block_size + alloc_cmd = """{0} - + BLKSIZE({1})""".format(alloc_cmd, block_size) - Returns - ------- - bool - If at least one of the members in src exists in dest. - """ - if path.isfile(src): - files = [path.basename(src)] - else: - dummy_path, dummy_dirs, files = next(walk(src)) + if vol: + alloc_cmd = """{0} - + VOLUME({1})""".format(alloc_cmd, vol.upper()) - files = [DataSet.get_member_name_from_file(file) for file in files] + if asa_text: + alloc_cmd = """{0} - + RECFM(F,B,A)""".format(alloc_cmd) - for file in files: - if DataSet.data_set_member_exists("{0}({1})".format(dest, file)): - return True + if executable: + alloc_cmd = """{0} - + RECFM(U) - + DSNTYPE(LIBRARY)""".format(alloc_cmd) - return False + rc, out, err = mvs_cmd.ikjeft01(alloc_cmd, authorized=True, tmphlq=tmphlq) + if rc != 0: + raise MVSCmdExecError(rc, out, err) @staticmethod - def data_set_volume(name, tmphlq=None): - """Checks the volume where a data set is located. + def allocate_gds_model_data_set(ds_name, model, executable=False, asa_text=False, vol=None, tmphlq=None): + """ + Allocates a new current generation of a generation data group using a model + data set to set its attributes. Parameters ---------- - name : str - The name of the data set. + ds_name : str + Name of the data set that will be allocated. It must be a GDS + relative name. + model : str + The name of the data set whose allocation parameters + should be used to allocate the new data set. + executable : bool, optional + Whether the new data set should support executables. + asa_text : bool, optional + Whether the new data set should support ASA control + characters (have record format FBA). + vol : str, optional + The volume where the new data set should be allocated. 
tmphlq : str High Level Qualifier for temporary datasets. Returns ------- str - Name of the volume where the data set is. + Absolute name of the newly allocated generation data set. Raises ------ - DatasetNotFoundError - When data set cannot be found on the system. - DatasetVolumeError - When the function is unable to parse the value - of VOLSER. + DatasetCreateError + When the allocation fails. """ - data_set_information = datasets.list_datasets(name) - - if len(data_set_information) > 0: - return data_set_information[0].volume + model_attributes = datasets.list_datasets(model)[0] + dataset_type = model_attributes.organization + record_format = model_attributes.record_format - # If listing failed to return a data set, then it's probably a VSAM. - output = DataSet._get_listcat_data(name, tmphlq=tmphlq) + if executable: + dataset_type = "library" + elif dataset_type in DataSetUtils.MVS_SEQ: + dataset_type = "seq" + elif dataset_type in DataSetUtils.MVS_PARTITIONED: + dataset_type = "pdse" - if re.findall(r"NOT FOUND|NOT LISTED", output): - raise DatasetNotFoundError(name) + if asa_text: + record_format = "fba" + elif executable: + record_format = "u" - volser_output = re.findall(r"VOLSER-*[A-Z|0-9]+", output) + data_set_object = DataSet( + name=ds_name, + data_set_type=dataset_type, + state="absent", + record_format=record_format, + volumes=vol, + block_size=model_attributes.block_size, + record_length=model_attributes.record_length, + space_primary=model_attributes.total_space, + space_type='' + ) - if volser_output: - return volser_output[0].replace("VOLSER", "").replace("-", "") - else: - raise DatasetVolumeError(name) + success = data_set_object.ensure_present(tmp_hlq=tmphlq) + if not success: + raise DatasetCreateError( + data_set=ds_name, + msg=f"Error while trying to allocate {ds_name}." + ) @staticmethod - def data_set_type(name, volume=None, tmphlq=None): - """Checks the type of a data set, data sets must be cataloged. + def data_set_cataloged(name, volumes=None, tmphlq=None): + """Determine if a data set is in catalog. Parameters ---------- name : str - The name of the data set. + The data set name to check if cataloged. volume : str The volume the data set may reside on. tmphlq : str @@ -796,393 +803,670 @@ def data_set_type(name, volume=None, tmphlq=None): Returns ------- - str - The type of the data set (one of "PS", "PO", "DA", "KSDS", - "ESDS", "LDS" or "RRDS"). - None - If the data set does not exist or ZOAU is not able to determine - the type. + bool + If data is is cataloged. """ - if not DataSet.data_set_exists(name, volume, tmphlq=tmphlq): - return None - data_sets_found = datasets.list_datasets(name) + # Resolve GDS names before passing it into listcat + if DataSetUtils.is_gds_relative_name(name): + try: + name = DataSetUtils.resolve_gds_absolute_name(name) + except GDSNameResolveError: + # if GDS name cannot be resolved, it's not in the catalog. + return False - # Using the organization property when it's a sequential or partitioned - # dataset. VSAMs and GDGs are not found by datasets.list_datasets. - if len(data_sets_found) > 0: - return data_sets_found[0].organization + # We need to unescape because this calls to system can handle + # special characters just fine. + name = name.upper().replace("\\", '') - # Now trying to list GDGs through gdgs. 
- data_sets_found = gdgs.list_gdg_names(name) - if len(data_sets_found) > 0: - return "GDG" + module = AnsibleModuleHelper(argument_spec={}) + stdin = " LISTCAT ENTRIES('{0}')".format(name) - # Next, trying to get the DATA information of a VSAM through - # LISTCAT. - output = DataSet._get_listcat_data(name, tmphlq=tmphlq) + cmd = "mvscmdauth --pgm=idcams --sysprint=* --sysin=stdin" + if tmphlq: + cmd = "{0} -Q={1}".format(cmd, tmphlq) - # Filtering all the DATA information to only get the ATTRIBUTES block. - data_set_attributes = re.findall( - r"ATTRIBUTES.*STATISTICS", output, re.DOTALL) - if len(data_set_attributes) == 0: - return None + rc, stdout, stderr = module.run_command( + cmd, + data=stdin, + errors='replace' + ) - if re.search(r"\bINDEXED\b", data_set_attributes[0]): - return "KSDS" - elif re.search(r"\bNONINDEXED\b", data_set_attributes[0]): - return "ESDS" - elif re.search(r"\bLINEAR\b", data_set_attributes[0]): - return "LDS" - elif re.search(r"\bNUMBERED\b", data_set_attributes[0]): - return "RRDS" + # The above 'listcat entries' command to idcams returns: + # rc=0 if data set found in catalog + # rc=4 if data set NOT found in catalog + # rc>4 for other errors + if rc > 4: + raise MVSCmdExecError(rc, stdout, stderr) + + if volumes: + cataloged_volume_list = DataSetUtils.data_set_cataloged_volume_list(name, tmphlq=tmphlq) or [] + if bool(set(volumes) & set(cataloged_volume_list)): + return True else: - return None + if re.search(r"-\s" + re.escape(name) + r"\s*\n\s+IN-CAT", stdout): + return True + + return False @staticmethod - def _get_listcat_data(name, tmphlq=None): - """Runs IDCAMS to get the DATA information associated with a data set. + def data_set_cataloged_volume_list(name, tmphlq=None): + """Get the volume list for a cataloged dataset name. Parameters ---------- name : str - Name of the data set. + The data set name to check if cataloged. tmphlq : str High Level Qualifier for temporary datasets. Returns ------- - str - Standard output from IDCAMS. + Union[str] + A list of volumes where the dataset is cataloged. - Raises - ------ - MVSCmdExecError - When IDCAMS fails to get the data. """ name = name.upper() module = AnsibleModuleHelper(argument_spec={}) - stdin = " LISTCAT ENT('{0}') DATA ALL".format(name) + stdin = " LISTCAT ENTRIES('{0}') ALL".format(name) cmd = "mvscmdauth --pgm=idcams --sysprint=* --sysin=stdin" if tmphlq: cmd = "{0} -Q={1}".format(cmd, tmphlq) rc, stdout, stderr = module.run_command( - cmd, data=stdin, errors='replace' + cmd, + data=stdin, + errors='replace' ) - - if rc != 0: + # The above 'listcat entries all' command to idcams returns: + # rc=0 if data set found in catalog + # rc=4 if data set NOT found in catalog + # rc>4 for other errors + if rc > 4: raise MVSCmdExecError(rc, stdout, stderr) - return stdout + delimiter = 'VOLSER------------' + arr = stdout.split(delimiter)[1:] # throw away header + + # Volume serials (VOLSER) under 6 chars will have one or more leading '-'s due to the chosen delimiter. + # The volser is in between the beginning of each str and the first space. + # Strip away any leading '-'s, then split on the next whitespace and throw away the remaining in each str. + volume_list = [x.strip('-').split()[0] for x in arr] + + volume_list = list(set(volume_list)) # remove duplicates, order doesn't matter + return volume_list @staticmethod - def is_empty(name, volume=None, tmphlq=None): - """Determines whether a data set is empty. + def data_set_exists(name, volume=None, tmphlq=None): + """Determine if a data set exists. 
+ This will check the catalog in addition to + the volume table of contents. Parameters ---------- name : str - The name of the data set. + The data set name to check if exists. volume : str - The volume where the data set resides. + The volume the data set may reside on. tmphlq : str High Level Qualifier for temporary datasets. Returns ------- bool - Whether the data set is empty or not. + If data is found. """ - if not DataSet.data_set_exists(name, volume, tmphlq=tmphlq): - raise DatasetNotFoundError(name) - - ds_type = DataSet.data_set_type(name, volume, tmphlq=tmphlq) - - if ds_type in DataSet.MVS_PARTITIONED: - return DataSet._pds_empty(name) - elif ds_type in DataSet.MVS_SEQ: - module = AnsibleModuleHelper(argument_spec={}) - rc, stdout, stderr = module.run_command("head \"//'{0}'\"".format(name), errors='replace') - return rc == 0 and len(stdout.strip()) == 0 - elif ds_type in DataSet.MVS_VSAM: - return DataSet._vsam_empty(name, tmphlq=tmphlq) + if DataSetUtils.data_set_cataloged(name, tmphlq=tmphlq): + return True + elif volume is not None: + return DataSetUtils._is_in_vtoc(name, volume, tmphlq=tmphlq) + return False @staticmethod - def _pds_empty(name): - """Determines if a partitioned data set is empty. + def data_set_member_exists(name): + """Checks for existence of data set member. Parameters ---------- name : str - The name of the PDS/PDSE. + The data set name including member. Returns ------- bool - If PDS/PDSE is empty. - Returns True if it is empty. False otherwise. + If data set member exists. """ module = AnsibleModuleHelper(argument_spec={}) - ls_cmd = "mls {0}".format(name) - rc, out, err = module.run_command(ls_cmd, errors='replace') - # RC 2 for mls means that there aren't any members. - return rc == 2 + rc, stdout, stderr = module.run_command( + "head \"//'{0}'\"".format(name), errors='replace') + if rc != 0 or (stderr and "EDC5067I" in stderr): + return False + return True @staticmethod - def _vsam_empty(name, tmphlq=None): - """Determines if a VSAM data set is empty. + def data_set_shared_members(src, dest): + """Checks for the existence of members from a source data set in + a destination data set. Parameters ---------- - name : str - The name of the VSAM data set. - tmphlq : str - High Level Qualifier for temporary datasets. + src : str + The source data set name. The name can contain a wildcard pattern. + dest : str + The destination data set name. Returns ------- bool - If VSAM data set is empty. - Returns True if VSAM data set exists and is empty. - False otherwise. + If at least one of the members in src exists in dest. """ - module = AnsibleModuleHelper(argument_spec={}) - empty_cmd = """ PRINT - - INFILE(MYDSET) - - COUNT(1)""" - - cmd = "mvscmdauth --pgm=idcams --sysprint=* --sysin=stdin --mydset={0}".format( - name - ) - if tmphlq: - cmd = "{0} -Q={1}".format(cmd, tmphlq) + src_members = datasets.list_members(src) - rc, out, err = module.run_command( - cmd, data=empty_cmd, errors='replace' - ) + for member in src_members: + if DataSetUtils.data_set_member_exists("{0}({1})".format(dest, member)): + return True - if rc == 4 or "VSAM OPEN RETURN CODE IS 160" in out: - return True - elif rc != 0: - return False + return False @staticmethod - def attempt_catalog_if_necessary(name, volumes, tmphlq=None): - """Attempts to catalog a data set if not already cataloged. 
+ def get_member_name_from_file(file_name): + """Creates a member name for a partitioned data set by taking up to the + first 8 characters from a filename without its file extension Parameters ---------- - name : str - The name of the data set. - volumes : list[str] - The volumes the data set may reside on. - tmphlq : str - High Level Qualifier for temporary datasets. + file_name : str + A file name that can include a file extension. Returns ------- - tuple(bool,bool) - Whether the data set is now present. Whether changes were made. + str + Member name constructed from the file name. """ - changed = False - present = False - if DataSet.data_set_cataloged(name, tmphlq=tmphlq): - present = True - elif volumes is not None: - errors = False - try: - DataSet.catalog(name, volumes, tmphlq=tmphlq) - except DatasetCatalogError: - errors = True - if not errors: - changed = True - present = True - return present, changed + # Removing the file extension. + member_name = path.splitext(file_name)[0] + # Taking the first 8 characters from the file name. + member_name = member_name.replace(".", "")[0:8] + + return member_name @staticmethod - def attempt_catalog_if_necessary_and_delete(name, volumes, tmphlq=None): - """Attempts to catalog a data set if not already cataloged, then deletes - the data set. - This is helpful when a data set currently cataloged is not the data - set needing to be deleted, meaning the one in the provided volumes - is needing to be deleted.. Recall, you can have a data set in - two different volumes, and only one cataloged. + def files_in_data_set_members(src, dest): + """Checks for the existence of members corresponding to USS files in a + destination data set. The file names get converted to the form they + would take when copied into a partitioned data set. Parameters ---------- - name : str - The name of the data set. - volumes : list[str] - The volumes the data set may reside on. - tmphlq : str - High Level Qualifier for temporary datasets. + src : str + USS path to a file or a directory. + dest : str + Name of the destination data set. Returns ------- - tuple(bool,bool) - Whether changes were made. Whether the data set is now present. + bool + If at least one of the members in src exists in dest. """ + if path.isfile(src): + files = [path.basename(src)] + else: + dummy_path, dummy_dirs, files = next(walk(src)) - changed = False - present = True + files = [DataSetUtils.get_member_name_from_file(file) for file in files] - if volumes: - # Check if the data set is cataloged - present = DataSet.data_set_cataloged(name, tmphlq=tmphlq) + for file in files: + if DataSetUtils.data_set_member_exists("{0}({1})".format(dest, file)): + return True - if present: - # Data set is cataloged, now check it its cataloged on the provided volumes - # If it is, we just delete because the DS is the right one wanting deletion. - present = DataSet.data_set_cataloged(name, volumes, tmphlq=tmphlq) + return False - if present: - DataSet.delete(name) - changed = True - present = False - else: - # It appears that what is in catalog does not match the provided - # volumes, therefore the user wishes we delete a data set on a - # particular volue, NOT what is in catalog. - # for the provided volumes + @staticmethod + def data_set_volume(name, tmphlq=None): + """Checks the volume where a data set is located. - # We need to identify the volumes where the current cataloged data set - # is located for use later when we recatalog. Code is strategically - # placed before the uncatalog. 
- cataloged_volume_list_original = DataSet.data_set_cataloged_volume_list(name, tmphlq=tmphlq) + Parameters + ---------- + name : str + The name of the data set. + tmphlq : str + High Level Qualifier for temporary datasets. - try: - DataSet.uncatalog(name, tmphlq=tmphlq) - except DatasetUncatalogError: - return changed, present + Returns + ------- + str + Name of the volume where the data set is. - # Catalog the data set for the provided volumes - try: - DataSet.catalog(name, volumes, tmphlq=tmphlq) - except DatasetCatalogError: - try: - # A failure, so recatalog the original data set on the original volumes - DataSet.catalog(name, cataloged_volume_list_original, tmphlq=tmphlq) - except DatasetCatalogError: - pass - return changed, present + Raises + ------ + DatasetNotFoundError + When data set cannot be found on the system. + DatasetVolumeError + When the function is unable to parse the value + of VOLSER. + """ + data_set_information = datasets.list_datasets(name) - # Check the recatalog, ensure it cataloged before we try to remove - present = DataSet.data_set_cataloged(name, volumes, tmphlq=tmphlq) + if len(data_set_information) > 0: + return data_set_information[0].volume - if present: - try: - DataSet.delete(name) - except DatasetDeleteError: - try: - DataSet.uncatalog(name, tmphlq=tmphlq) - except DatasetUncatalogError: - try: - DataSet.catalog(name, cataloged_volume_list_original, tmphlq=tmphlq) - except DatasetCatalogError: - pass - return changed, present - try: - DataSet.catalog(name, cataloged_volume_list_original, tmphlq=tmphlq) - changed = True - present = False - except DatasetCatalogError: - changed = True - present = False - return changed, present - else: - try: - DataSet.catalog(name, volumes, tmphlq=tmphlq) - except DatasetCatalogError: - return changed, present + # If listing failed to return a data set, then it's probably a VSAM. + output = DataSetUtils._get_listcat_data(name, tmphlq=tmphlq) - present = DataSet.data_set_cataloged(name, volumes, tmphlq=tmphlq) + if re.findall(r"NOT FOUND|NOT LISTED", output): + raise DatasetNotFoundError(name) - if present: - DataSet.delete(name) - changed = True - present = False + volser_output = re.findall(r"VOLSER-*[A-Z|0-9]+", output) + + if volser_output: + return volser_output[0].replace("VOLSER", "").replace("-", "") else: - present = DataSet.data_set_cataloged(name, None, tmphlq=tmphlq) - if present: - try: - DataSet.delete(name) - changed = True - present = False - except DatasetDeleteError: - return changed, present + raise DatasetVolumeError(name) - return changed, present + @staticmethod + def data_set_type(name, volume=None, tmphlq=None): + """Checks the type of a data set, data sets must be cataloged. + + Parameters + ---------- + name : str + The name of the data set. + volume : str + The volume the data set may reside on. + tmphlq : str + High Level Qualifier for temporary datasets. + + Returns + ------- + str + The type of the data set (one of "PS", "PO", "DA", "KSDS", + "ESDS", "LDS" or "RRDS"). + None + If the data set does not exist or ZOAU is not able to determine + the type. + + """ + if not DataSetUtils.data_set_exists(name, volume, tmphlq=tmphlq): + return None + + data_sets_found = datasets.list_datasets(name) + + # Using the organization property when it's a sequential or partitioned + # dataset. VSAMs and GDGs are not found by datasets.list_datasets. + if len(data_sets_found) > 0: + return data_sets_found[0].organization + + # Now trying to list GDGs through gdgs. 
+ data_sets_found = gdgs.list_gdg_names(name) + if len(data_sets_found) > 0: + return "GDG" + + # Next, trying to get the DATA information of a VSAM through + # LISTCAT. + output = DataSetUtils._get_listcat_data(name, tmphlq=tmphlq) + + # Filtering all the DATA information to only get the ATTRIBUTES block. + data_set_attributes = re.findall( + r"ATTRIBUTES.*STATISTICS", output, re.DOTALL) + if len(data_set_attributes) == 0: + return None + + if re.search(r"\bINDEXED\b", data_set_attributes[0]): + return "KSDS" + elif re.search(r"\bNONINDEXED\b", data_set_attributes[0]): + return "ESDS" + elif re.search(r"\bLINEAR\b", data_set_attributes[0]): + return "LDS" + elif re.search(r"\bNUMBERED\b", data_set_attributes[0]): + return "RRDS" + else: + return None @staticmethod - def _is_in_vtoc(name, volume, tmphlq=None): - """Determines if data set is in a volume's table of contents. + def _get_listcat_data(name, tmphlq=None): + """Runs IDCAMS to get the DATA information associated with a data set. Parameters ---------- name : str - The name of the data set to search for. + Name of the data set. + tmphlq : str + High Level Qualifier for temporary datasets. + + Returns + ------- + str + Standard output from IDCAMS. + + Raises + ------ + MVSCmdExecError + When IDCAMS fails to get the data. + """ + name = name.upper() + module = AnsibleModuleHelper(argument_spec={}) + stdin = " LISTCAT ENT('{0}') DATA ALL".format(name) + + cmd = "mvscmdauth --pgm=idcams --sysprint=* --sysin=stdin" + if tmphlq: + cmd = "{0} -Q={1}".format(cmd, tmphlq) + + rc, stdout, stderr = module.run_command( + cmd, data=stdin, errors='replace' + ) + + if rc != 0: + raise MVSCmdExecError(rc, stdout, stderr) + + return stdout + + @staticmethod + def is_empty(name, volume=None, tmphlq=None): + """Determines whether a data set is empty. + + Parameters + ---------- + name : str + The name of the data set. volume : str - The volume to search the table of contents of. + The volume where the data set resides. tmphlq : str High Level Qualifier for temporary datasets. Returns ------- bool - If data set was found in table of contents for volume. + Whether the data set is empty or not. """ - data_sets = vtoc.get_volume_entry(volume, tmphlq=tmphlq) - data_set = vtoc.find_data_set_in_volume_output(name, data_sets) - if data_set is not None: - return True - vsam_name = name + ".data" - vsam_data_set = vtoc.find_data_set_in_volume_output( - vsam_name, data_sets) - if vsam_data_set is not None: - return True - return False + if not DataSetUtils.data_set_exists(name, volume, tmphlq=tmphlq): + raise DatasetNotFoundError(name) + + ds_type = DataSetUtils.data_set_type(name, volume, tmphlq=tmphlq) + + if ds_type in DataSetUtils.MVS_PARTITIONED: + return DataSetUtils._pds_empty(name) + elif ds_type in DataSetUtils.MVS_SEQ: + module = AnsibleModuleHelper(argument_spec={}) + rc, stdout, stderr = module.run_command("head \"//'{0}'\"".format(name), errors='replace') + return rc == 0 and len(stdout.strip()) == 0 + elif ds_type in DataSetUtils.MVS_VSAM: + return DataSetUtils._vsam_empty(name, tmphlq=tmphlq) @staticmethod - def replace( - name, - type, - raw_name=None, - space_primary=None, - space_secondary=None, - space_type=None, - record_format=None, - record_length=None, - block_size=None, - directory_blocks=None, - key_length=None, - key_offset=None, - sms_storage_class=None, - sms_data_class=None, - sms_management_class=None, - volumes=None, - tmp_hlq=None, - force=None, - ): - """Attempts to replace an existing data set. 
+ def _pds_empty(name): + """Determines if a partitioned data set is empty. + Parameters ---------- name : str - The name of the dataset - raw_name : str - Original name without escaping or gds name resolve operations performed. - type : str, optional - The type of dataset. - Valid options are: SEQ, BASIC, LARGE, PDS, PDSE, LIBRARY, LDS, RRDS, ESDS, KSDS. - Defaults to None. - space_primary : int, optional - The amount of primary space to allocate for the dataset. - Defaults to None. - space_secondary : int, optional - The amount of secondary space to allocate for the dataset. - Defaults to None. + The name of the PDS/PDSE. + + Returns + ------- + bool + If PDS/PDSE is empty. + Returns True if it is empty. False otherwise. + """ + module = AnsibleModuleHelper(argument_spec={}) + ls_cmd = "mls {0}".format(name) + rc, out, err = module.run_command(ls_cmd, errors='replace') + # RC 2 for mls means that there aren't any members. + return rc == 2 + + @staticmethod + def _vsam_empty(name, tmphlq=None): + """Determines if a VSAM data set is empty. + + Parameters + ---------- + name : str + The name of the VSAM data set. + tmphlq : str + High Level Qualifier for temporary datasets. + + Returns + ------- + bool + If VSAM data set is empty. + Returns True if VSAM data set exists and is empty. + False otherwise. + """ + module = AnsibleModuleHelper(argument_spec={}) + empty_cmd = """ PRINT - + INFILE(MYDSET) - + COUNT(1)""" + + cmd = "mvscmdauth --pgm=idcams --sysprint=* --sysin=stdin --mydset={0}".format( + name + ) + if tmphlq: + cmd = "{0} -Q={1}".format(cmd, tmphlq) + + rc, out, err = module.run_command( + cmd, data=empty_cmd, errors='replace' + ) + + if rc == 4 or "VSAM OPEN RETURN CODE IS 160" in out: + return True + elif rc != 0: + return False + + @staticmethod + def attempt_catalog_if_necessary(name, volumes, tmphlq=None): + """Attempts to catalog a data set if not already cataloged. + + Parameters + ---------- + name : str + The name of the data set. + volumes : list[str] + The volumes the data set may reside on. + tmphlq : str + High Level Qualifier for temporary datasets. + + Returns + ------- + tuple(bool,bool) + Whether the data set is now present. Whether changes were made. + """ + changed = False + present = False + if DataSetUtils.data_set_cataloged(name, tmphlq=tmphlq): + present = True + elif volumes is not None: + errors = False + try: + DataSetUtils.catalog(name, volumes, tmphlq=tmphlq) + except DatasetCatalogError: + errors = True + if not errors: + changed = True + present = True + return present, changed + + @staticmethod + def attempt_catalog_if_necessary_and_delete(name, volumes, tmphlq=None): + """Attempts to catalog a data set if not already cataloged, then deletes + the data set. + This is helpful when a data set currently cataloged is not the data + set needing to be deleted, meaning the one in the provided volumes + is needing to be deleted.. Recall, you can have a data set in + two different volumes, and only one cataloged. + + Parameters + ---------- + name : str + The name of the data set. + volumes : list[str] + The volumes the data set may reside on. + tmphlq : str + High Level Qualifier for temporary datasets. + + Returns + ------- + tuple(bool,bool) + Whether changes were made. Whether the data set is now present. 
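+
+        Example
+        -------
+        A minimal, hypothetical call (the data set name and volume serial are
+        illustrative only)::
+
+            changed, present = DataSetUtils.attempt_catalog_if_necessary_and_delete(
+                "USER.OLD.DS", ["222222"], tmphlq="TMPHLQ"
+            )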
+        """
+
+        changed = False
+        present = True
+
+        if volumes:
+            # Check if the data set is cataloged
+            present = DataSetUtils.data_set_cataloged(name, tmphlq=tmphlq)
+
+            if present:
+                # The data set is cataloged, now check if it is cataloged on the provided volumes.
+                # If it is, we can simply delete it because it is the data set the user wants removed.
+                present = DataSetUtils.data_set_cataloged(name, volumes, tmphlq=tmphlq)
+
+                if present:
+                    DataSetUtils.delete(name)
+                    changed = True
+                    present = False
+                else:
+                    # What is in the catalog does not match the provided volumes,
+                    # so the user wants us to delete the data set on a particular
+                    # volume, NOT the one that is currently cataloged.
+
+                    # We need to identify the volumes where the currently cataloged data set
+                    # is located so we can recatalog it later if needed. This is strategically
+                    # placed before the uncatalog.
+                    cataloged_volume_list_original = DataSetUtils.data_set_cataloged_volume_list(name, tmphlq=tmphlq)
+
+                    try:
+                        DataSetUtils.uncatalog(name, tmphlq=tmphlq)
+                    except DatasetUncatalogError:
+                        return changed, present
+
+                    # Catalog the data set for the provided volumes
+                    try:
+                        DataSetUtils.catalog(name, volumes, tmphlq=tmphlq)
+                    except DatasetCatalogError:
+                        try:
+                            # A failure, so recatalog the original data set on the original volumes
+                            DataSetUtils.catalog(name, cataloged_volume_list_original, tmphlq=tmphlq)
+                        except DatasetCatalogError:
+                            pass
+                        return changed, present
+
+                    # Check the recatalog and ensure it is cataloged before we try to remove it.
+                    present = DataSetUtils.data_set_cataloged(name, volumes, tmphlq=tmphlq)
+
+                    if present:
+                        try:
+                            DataSetUtils.delete(name)
+                        except DatasetDeleteError:
+                            try:
+                                DataSetUtils.uncatalog(name, tmphlq=tmphlq)
+                            except DatasetUncatalogError:
+                                try:
+                                    DataSetUtils.catalog(name, cataloged_volume_list_original, tmphlq=tmphlq)
+                                except DatasetCatalogError:
+                                    pass
+                            return changed, present
+                        try:
+                            DataSetUtils.catalog(name, cataloged_volume_list_original, tmphlq=tmphlq)
+                            changed = True
+                            present = False
+                        except DatasetCatalogError:
+                            changed = True
+                            present = False
+                        return changed, present
+            else:
+                try:
+                    DataSetUtils.catalog(name, volumes, tmphlq=tmphlq)
+                except DatasetCatalogError:
+                    return changed, present
+
+                present = DataSetUtils.data_set_cataloged(name, volumes, tmphlq=tmphlq)
+
+                if present:
+                    DataSetUtils.delete(name)
+                    changed = True
+                    present = False
+        else:
+            present = DataSetUtils.data_set_cataloged(name, None, tmphlq=tmphlq)
+            if present:
+                try:
+                    DataSetUtils.delete(name)
+                    changed = True
+                    present = False
+                except DatasetDeleteError:
+                    return changed, present
+
+        return changed, present
+
+    @staticmethod
+    def _is_in_vtoc(name, volume, tmphlq=None):
+        """Determines if a data set is in a volume's table of contents.
+
+        Parameters
+        ----------
+        name : str
+            The name of the data set to search for.
+        volume : str
+            The volume to search the table of contents of.
+        tmphlq : str
+            High Level Qualifier for temporary datasets.
+
+        Returns
+        -------
+        bool
+            Whether the data set was found in the volume's table of contents.
+ """ + data_sets = vtoc.get_volume_entry(volume, tmphlq=tmphlq) + data_set = vtoc.find_data_set_in_volume_output(name, data_sets) + if data_set is not None: + return True + vsam_name = name + ".data" + vsam_data_set = vtoc.find_data_set_in_volume_output( + vsam_name, data_sets) + if vsam_data_set is not None: + return True + return False + + @staticmethod + def replace( + name, + type, + raw_name=None, + space_primary=None, + space_secondary=None, + space_type=None, + record_format=None, + record_length=None, + block_size=None, + directory_blocks=None, + key_length=None, + key_offset=None, + sms_storage_class=None, + sms_data_class=None, + sms_management_class=None, + volumes=None, + tmp_hlq=None, + force=None, + ): + """Attempts to replace an existing data set. + Parameters + ---------- + name : str + The name of the dataset + raw_name : str + Original name without escaping or gds name resolve operations performed. + type : str, optional + The type of dataset. + Valid options are: SEQ, BASIC, LARGE, PDS, PDSE, LIBRARY, LDS, RRDS, ESDS, KSDS. + Defaults to None. + space_primary : int, optional + The amount of primary space to allocate for the dataset. + Defaults to None. + space_secondary : int, optional + The amount of secondary space to allocate for the dataset. + Defaults to None. space_type : str, optional The unit of measurement to use when defining primary and secondary space. Defaults to None. @@ -1242,8 +1526,8 @@ def replace( Defaults to None. """ arguments = locals() - DataSet.delete(name) - DataSet.create(**arguments) + DataSetUtils.delete(name) + DataSetUtils.create(**arguments) @staticmethod def _build_zoau_args(**kwargs): @@ -1286,8 +1570,8 @@ def _build_zoau_args(**kwargs): for arg, val in kwargs.items(): if val is None: continue - if DataSet._ZOAU_DS_CREATE_ARGS.get(arg): - renamed_args[DataSet._ZOAU_DS_CREATE_ARGS.get(arg)] = val + if DataSetUtils._ZOAU_DS_CREATE_ARGS.get(arg): + renamed_args[DataSetUtils._ZOAU_DS_CREATE_ARGS.get(arg)] = val else: renamed_args[arg] = val return renamed_args @@ -1390,13 +1674,19 @@ def create( force : bool, optional Used to determine behavior when performing member operations on a pdse. Defaults to None. + Returns + ------- + changed : bool + Wether a new data set was created or not. + data_set : zoautil_py.datasets.Dataset object + Wether a new data set was created or not. Raises ------ DatasetCreateError When data set creation fails. """ original_args = locals() - formatted_args = DataSet._build_zoau_args(**original_args) + formatted_args = DataSetUtils._build_zoau_args(**original_args) try: data_set = datasets.create(**formatted_args) except exceptions._ZOAUExtendableException as create_exception: @@ -1411,7 +1701,7 @@ def create( msg="Unable to verify the data set was created. Received DatasetVerificationError from ZOAU.", ) changed = data_set is not None - return changed + return changed, data_set @staticmethod def delete(name): @@ -1454,7 +1744,7 @@ def create_member(name, tmphlq=None): """ module = AnsibleModuleHelper(argument_spec={}) base_dsname = name.split("(")[0] - if not base_dsname or not DataSet.data_set_cataloged(base_dsname, tmphlq=tmphlq): + if not base_dsname or not DataSetUtils.data_set_cataloged(base_dsname, tmphlq=tmphlq): raise DatasetNotFoundError(name) tmp_file = tempfile.NamedTemporaryFile(delete=True) rc, stdout, stderr = module.run_command( @@ -1495,10 +1785,10 @@ def catalog(name, volumes, tmphlq=None): tmphlq : str High Level Qualifier for temporary datasets. 
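+
+        Example
+        -------
+        A hypothetical call that catalogs a data set residing on volume 222222
+        (both names are illustrative)::
+
+            DataSetUtils.catalog("USER.UNCAT.DS", ["222222"], tmphlq="TMPHLQ")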
""" - if DataSet.is_vsam(name, volumes, tmphlq=tmphlq): - DataSet._catalog_vsam(name, volumes, tmphlq=tmphlq) + if DataSetUtils.is_vsam(name, volumes, tmphlq=tmphlq): + DataSetUtils._catalog_vsam(name, volumes, tmphlq=tmphlq) else: - DataSet._catalog_non_vsam(name, volumes, tmphlq=tmphlq) + DataSetUtils._catalog_non_vsam(name, volumes, tmphlq=tmphlq) @staticmethod # TODO: extend for multi volume data sets @@ -1520,7 +1810,7 @@ def _catalog_non_vsam(name, volumes, tmphlq=None): When attempt at catalog fails. """ module = AnsibleModuleHelper(argument_spec={}) - iehprogm_input = DataSet._build_non_vsam_catalog_command( + iehprogm_input = DataSetUtils._build_non_vsam_catalog_command( name.upper(), volumes) cmd = "mvscmdauth --pgm=iehprogm --sysprint=* --sysin=stdin" @@ -1572,15 +1862,15 @@ def _catalog_vsam(name, volumes, tmphlq=None): data_set_type_vsam = "NONINDEXED" if data_set_type_vsam != "INDEXED": - command = DataSet._VSAM_CATALOG_COMMAND_NOT_INDEXED.format( + command = DataSetUtils._VSAM_CATALOG_COMMAND_NOT_INDEXED.format( data_set_name, - DataSet._build_volume_string_idcams(volumes), + DataSetUtils._build_volume_string_idcams(volumes), data_set_type_vsam, ) else: - command = DataSet._VSAM_CATALOG_COMMAND_INDEXED.format( + command = DataSetUtils._VSAM_CATALOG_COMMAND_INDEXED.format( data_set_name, - DataSet._build_volume_string_idcams(volumes), + DataSetUtils._build_volume_string_idcams(volumes), data_set_type_vsam, ) @@ -1595,9 +1885,9 @@ def _catalog_vsam(name, volumes, tmphlq=None): if not success: # Liberty taken such that here we can assume its a LINEAR VSAM - command = DataSet._VSAM_CATALOG_COMMAND_NOT_INDEXED.format( + command = DataSetUtils._VSAM_CATALOG_COMMAND_NOT_INDEXED.format( data_set_name, - DataSet._build_volume_string_idcams(volumes), + DataSetUtils._build_volume_string_idcams(volumes), "LINEAR", ) @@ -1631,10 +1921,10 @@ def uncatalog(name, tmphlq=None): High Level Qualifier for temporary datasets. """ - if DataSet.is_vsam(name, tmphlq=tmphlq): - DataSet._uncatalog_vsam(name, tmphlq=tmphlq) + if DataSetUtils.is_vsam(name, tmphlq=tmphlq): + DataSetUtils._uncatalog_vsam(name, tmphlq=tmphlq) else: - DataSet._uncatalog_non_vsam(name, tmphlq=tmphlq) + DataSetUtils._uncatalog_non_vsam(name, tmphlq=tmphlq) @staticmethod def _uncatalog_non_vsam(name, tmphlq=None): @@ -1653,11 +1943,11 @@ def _uncatalog_non_vsam(name, tmphlq=None): When uncataloging fails. """ module = AnsibleModuleHelper(argument_spec={}) - iehprogm_input = DataSet._NON_VSAM_UNCATALOG_COMMAND.format(name) + iehprogm_input = DataSetUtils._NON_VSAM_UNCATALOG_COMMAND.format(name) temp_name = None try: - temp_name = DataSet.create_temp(name.split(".")[0]) - DataSet.write(temp_name, iehprogm_input) + temp_name = DataSetUtils.create_temp(name.split(".")[0]) + DataSetUtils.write(temp_name, iehprogm_input) cmd = "mvscmdauth --pgm=iehprogm --sysprint=* --sysin={0}".format(temp_name) if tmphlq: @@ -1690,7 +1980,7 @@ def _uncatalog_vsam(name, tmphlq=None): When uncatalog fails. """ module = AnsibleModuleHelper(argument_spec={}) - idcams_input = DataSet._VSAM_UNCATALOG_COMMAND.format(name) + idcams_input = DataSetUtils._VSAM_UNCATALOG_COMMAND.format(name) cmd = "mvscmdauth --pgm=idcams --sysprint=* --sysin=stdin" if tmphlq: @@ -1726,9 +2016,9 @@ def is_vsam(name, volumes=None, tmphlq=None): If the data set is VSAM. """ if not volumes: - return DataSet._is_vsam_from_listcat(name, tmphlq=tmphlq) + return DataSetUtils._is_vsam_from_listcat(name, tmphlq=tmphlq) # ? will multivolume data set have vtoc info for each volume? 
- return DataSet._is_vsam_from_vtoc(name, volumes[0], tmphlq=tmphlq) + return DataSetUtils._is_vsam_from_vtoc(name, volumes[0], tmphlq=tmphlq) @staticmethod def _is_vsam_from_vtoc(name, volume, tmphlq=None): @@ -1942,8 +2232,8 @@ def create_temp( str The name of the temporary data set. """ - temp_name = DataSet.temp_name(hlq) - DataSet.create( + temp_name = DataSetUtils.temp_name(hlq) + DataSetUtils.create( temp_name, type=type, space_primary=space_primary, @@ -2022,9 +2312,9 @@ def _build_non_vsam_catalog_command(name, volumes): str The command string formatted for use with IEHPROGM. """ - command_part_1 = DataSet._format_jcl_line( + command_part_1 = DataSetUtils._format_jcl_line( " CATLG DSNAME={0},".format(name)) - command_part_2 = DataSet._build_volume_string_iehprogm(volumes) + command_part_2 = DataSetUtils._build_volume_string_iehprogm(volumes) return command_part_1 + command_part_2 @staticmethod @@ -2097,7 +2387,7 @@ def _build_volume_string_iehprogm(volumes): volume.upper()) if index + 1 != len(volumes): single_volume_string += "," - volume_string += DataSet._format_jcl_line(single_volume_string) + volume_string += DataSetUtils._format_jcl_line(single_volume_string) else: volume_string += single_volume_string + ")\n" return volume_string @@ -2182,7 +2472,7 @@ def get_name_if_data_set_is_alias(name, tmp_hlq=None): # special characters just fine. name = name.upper().replace("\\", '') idcams_cmd = f" LISTCAT ALIAS ENTRIES('{name}')ALL" - response = DataSet._execute_idcams_cmd(idcams_cmd, tmp_hlq=tmp_hlq) + response = DataSetUtils._execute_idcams_cmd(idcams_cmd, tmp_hlq=tmp_hlq) if response.rc == 0: base_name = re.search( @@ -2236,7 +2526,7 @@ def _execute_idcams_cmd( temp_dd_location = None try: - temp_dd_location = DataSet.create_temp( + temp_dd_location = DataSetUtils.create_temp( hlq=tmp_hlq, type='SEQ', record_format=record_format, @@ -2268,7 +2558,7 @@ def _execute_idcams_cmd( datasets.delete(temp_dd_location) -class DataSetUtils(object): +class DataSetView(object): def __init__(self, data_set, tmphlq=None): """A standard utility to gather information about a particular data set. Note that the input data set is assumed @@ -2352,490 +2642,218 @@ def ds_type(self): return self.ds_info.get("dsorg") def volume(self): - """Retrieves the volume name where the input data set is stored. - - Returns - ------- - str - Volume where the data set is stored. - None - If the data set does not exist. - - Raises - ------ - AttributeError - When input data set is a USS file or directory. - """ - if self.is_uss_path: - raise AttributeError( - "USS file or directory has no attribute 'Volume'") - return self.ds_info.get("volser") - - def lrecl(self): - """Retrieves the record length of the input data set. Record length - specifies the length, in bytes, of each record in the data set. - - Returns - ------- - int - The record length, in bytes, of each record. - None - If the data set does not exist or the data set is VSAM. - - Raises - ------ - AttributeError - When input data set is a USS file or directory. - """ - if self.is_uss_path: - raise AttributeError( - "USS file or directory has no attribute 'lrecl'") - return self.ds_info.get("lrecl") - - def blksize(self): - """Retrieves the BLKSIZE of the input data set. - - Returns - ------- - int - The blksize of the input data set. - None - If the data set does not exist or the data set is VSAM. - - Raises - ------ - AttributeError - When input data set is a USS file or directory. 
- """ - if self.is_uss_path: - raise AttributeError( - "USS file or directory has no attribute 'blksize'") - return self.ds_info.get("blksize") - - def recfm(self): - """Retrieves the record format of the input data set. - - Returns - ------- - str - Record format. - 'F' - Fixed. - 'FB' - Fixed Blocked. - 'V' - Variable. - 'VB' - Variable Blocked. - 'U' - Undefined. - 'VBS' - Variable Blocked Spanned. - 'VS' - Variable Spanned. - None - If the data set does not exist or the data set is VSAM. - - Raises - ------ - AttributeError - When input data set is a USS file or directory. - """ - if self.is_uss_path: - raise AttributeError( - "USS file or directory has no attribute 'recfm'") - return self.ds_info.get("recfm") - - def _gather_data_set_info(self): - """Retrieves information about the input data set using LISTDS and - LISTCAT commands. - - Returns - ------- - dict - Dictionary containing data set attributes. - - Raises - ------ - DatasetBusyError - The dataset may be open by another user. - MVSCmdExecError - Another error while executing the command. - """ - result = dict() - self.data_set = self.data_set.upper().replace("\\", '') - listds_rc, listds_out, listds_err = mvs_cmd.ikjeft01( - " LISTDS '{0}'".format(self.data_set), - authorized=True, - tmphlq=self.tmphlq - ) - - if listds_rc == 0: - result.update(self._process_listds_output(listds_out)) - else: - if re.findall(r"ALREADY IN USE", listds_out): - raise DatasetBusyError(self.data_set) - if re.findall(r"NOT IN CATALOG", listds_out): - self.ds_info["exists"] = False - else: - raise MVSCmdExecError(listds_rc, listds_out, listds_err) - - listcat_rc, listcat_out, listcat_err = mvs_cmd.idcams( - " LISTCAT ENT({0}) ALL".format(self.data_set), authorized=True - ) - if listcat_rc == 0: - result.update(self._process_listcat_output(listcat_out)) - else: - if re.findall(r"NOT FOUND|NOT LISTED", listcat_out): - self.ds_info["exists"] = False - else: - raise MVSCmdExecError(listcat_rc, listcat_out, listcat_err) - return result - - def _process_listds_output(self, output): - """Parses the output generated by LISTDS command. - - Parameters - ---------- - output : str - The output of LISTDS command. - - Returns - ------- - dict - Dictionary containing the output parameters of LISTDS. - """ - result = dict() - if "NOT IN CATALOG" in output: - result["exists"] = False - else: - result["exists"] = True - ds_search = re.search( - r"(-|--)DSORG(-\s*|\s*)\n(.*)", output, re.MULTILINE) - if ds_search: - ds_params = ds_search.group(3).split() - result["dsorg"] = ds_params[-1] - if result.get("dsorg") != "VSAM": - result["recfm"] = ds_params[0] - if ds_params[1].isdigit(): - result["lrecl"] = int(ds_params[1]) - if len(ds_params) > 2 and ds_params[2].isdigit(): - result["blksize"] = int(ds_params[2]) - return result - - def _process_listcat_output(self, output): - """Parses the output generated by LISTCAT command. - - Parameters - ---------- - output : str - The output of LISTCAT command. - - Returns - ------- - dict - Dictionary containing the output parameters of LISTCAT. 
- """ - result = dict() - if "NOT FOUND" not in output: - volser_output = re.findall(r"VOLSER-*[A-Z|0-9]*", output) - if volser_output: - result["volser"] = "".join( - re.findall(r"-[A-Z|0-9]*", volser_output[0]) - ).replace("-", "") - return result - - @staticmethod - def verify_dataset_disposition(data_set, disposition): - """Function to call iefbr14 to verify the dsp of data_set - - Args: - data_set {str}: Name of dataset to verify the dsp=shr - - Returns: - bool: If the data_set is in dsp=shr - """ - data_set_disp = f"{data_set},{disposition}" - dd = {"dd" : data_set_disp} - rc, stdput, stderr = mvs_cmd.iefbr14(dds=dd) - - if rc != 0: - return True - else: - return False - - -class MVSDataSet(): - """ - This class represents a z/OS data set that can be yet to be created or - already created in the system. It encapsulates the data set attributes - to easy access and provides operations to perform in the same data set. - - """ - def __init__( - self, - name, - escape_name=False, - data_set_type=None, - state=None, - organization=None, - record_format=None, - volumes=None, - block_size=None, - record_length=None, - space_primary=None, - space_secondary=None, - space_type=None, - directory_blocks=None, - key_length=None, - key_offset=None, - sms_storage_class=None, - sms_data_class=None, - sms_management_class=None, - total_space=None, - used_space=None, - last_referenced=None, - is_cataloged=None, - ): - # Different class variables - self.data_set_possible_states = {"unknown", "present", "absent"} - self.name = name - self.organization = organization - self.record_format = record_format - self.volumes = volumes - self.block_size = block_size - self.record_length = record_length - self.total_space = total_space - self.used_space = used_space - self.last_referenced = last_referenced - self.raw_name = name - self.data_set_type = data_set_type - self.state = state - self.space_primary = space_primary - self.space_secondary = space_secondary - self.space_type = space_type - self.directory_blocks = directory_blocks - self.key_length = key_length - self.key_offset = key_offset - self.sms_storage_class = sms_storage_class - self.sms_data_class = sms_data_class - self.sms_management_class = sms_management_class - self.volumes = volumes - self.is_gds_active = False - self.is_cataloged = False - - # If name has escaped chars or is GDS relative name we clean it. - if escape_name: - self.name = DataSet.escape_data_set_name(self.name) - if DataSet.is_gds_relative_name(self.name): - try: - self.name = DataSet.resolve_gds_absolute_name(self.name) - self.is_gds_active = True - except Exception: - # This means the generation is a positive version so is only used for creation. - self.is_gds_active = False - if self.data_set_type and (self.data_set_type.upper() in DataSet.MVS_VSAM or self.data_set_type == "zfs"): - # When trying to create a new VSAM with a specified record format will fail - # with ZOAU - self.record_format = None - - def create(self, tmp_hlq=None, replace=True, force=False): - """Creates the data set in question. - - Returns - ------- - int - Indicates if changes were made. 
- """ - arguments = { - "name": self.name, - "raw_name": self.raw_name, - "type": self.data_set_type, - "space_primary": self.space_primary, - "space_secondary": self.space_secondary, - "space_type": self.space_type, - "record_format": self.record_format, - "record_length": self.record_length, - "block_size": self.block_size, - "directory_blocks": self.directory_blocks, - "key_length": self.key_length, - "key_offset": self.key_offset, - "sms_storage_class": self.sms_storage_class, - "sms_data_class": self.sms_data_class, - "sms_management_class": self.sms_management_class, - "volumes": self.volumes, - "tmp_hlq": tmp_hlq, - "force": force, - } - formatted_args = DataSet._build_zoau_args(**arguments) - changed = False - if DataSet.data_set_exists(self.name, tmphlq=tmp_hlq): - DataSet.delete(self.name) - changed = True - zoau_data_set = datasets.create(**formatted_args) - if zoau_data_set is not None: - self.set_state("present") - self.name = zoau_data_set.name - return True - return changed - - def ensure_present(self, tmp_hlq=None, replace=False, force=False): - """ Make sure that the data set is created or fail creating it. - - Parameters - ---------- - tmp_hlq : str - High level qualifier for temporary datasets. - replace : bool - Used to determine behavior when data set already exists. - force : bool - Used to determine behavior when performing member operations on a pdse. + """Retrieves the volume name where the input data set is stored. Returns ------- - int - Indicates if changes were made. - """ - arguments = { - "name": self.name, - "raw_name": self.raw_name, - "type": self.data_set_type, - "space_primary": self.space_primary, - "space_secondary": self.space_secondary, - "space_type": self.space_type, - "record_format": self.record_format, - "record_length": self.record_length, - "block_size": self.block_size, - "directory_blocks": self.directory_blocks, - "key_length": self.key_length, - "key_offset": self.key_offset, - "sms_storage_class": self.sms_storage_class, - "sms_data_class": self.sms_data_class, - "sms_management_class": self.sms_management_class, - "volumes": self.volumes, - "replace": replace, - "tmp_hlq": tmp_hlq, - "force": force, - } - rc = DataSet.ensure_present(**arguments) - self.set_state("present") - return rc + str + Volume where the data set is stored. + None + If the data set does not exist. - def ensure_absent(self, tmp_hlq=None): - """Removes the data set. + Raises + ------ + AttributeError + When input data set is a USS file or directory. + """ + if self.is_uss_path: + raise AttributeError( + "USS file or directory has no attribute 'Volume'") + return self.ds_info.get("volser") - Parameters - ---------- - tmp_hlq : str - High level qualifier for temporary datasets. + def lrecl(self): + """Retrieves the record length of the input data set. Record length + specifies the length, in bytes, of each record in the data set. Returns ------- int - Indicates if changes were made. + The record length, in bytes, of each record. + None + If the data set does not exist or the data set is VSAM. + + Raises + ------ + AttributeError + When input data set is a USS file or directory. """ - rc = DataSet.ensure_absent(self.name, self.volumes, tmphlq=tmp_hlq) - if rc == 0: - self.set_state("absent") - return rc + if self.is_uss_path: + raise AttributeError( + "USS file or directory has no attribute 'lrecl'") + return self.ds_info.get("lrecl") - def delete(self): - """Deletes the data set in question. + def blksize(self): + """Retrieves the BLKSIZE of the input data set. 
Returns ------- int - Indicates if changes were made. - """ - DataSet.ensure_absent(self.name, self.volumes) - self.set_state("absent") + The blksize of the input data set. + None + If the data set does not exist or the data set is VSAM. - def ensure_cataloged(self, tmp_hlq=None): + Raises + ------ + AttributeError + When input data set is a USS file or directory. """ - Ensures the data set is cataloged, if not catalogs it. + if self.is_uss_path: + raise AttributeError( + "USS file or directory has no attribute 'blksize'") + return self.ds_info.get("blksize") - Parameters - ---------- - tmp_hlq : str - High level qualifier for temporary datasets. + def recfm(self): + """Retrieves the record format of the input data set. Returns ------- - int - Indicates if changes were made. - """ - rc = DataSet.ensure_cataloged(name=self.name, volumes=self.volumes, tmphlq=tmp_hlq) - self.is_cataloged = True - return rc + str + Record format. + 'F' + Fixed. + 'FB' + Fixed Blocked. + 'V' + Variable. + 'VB' + Variable Blocked. + 'U' + Undefined. + 'VBS' + Variable Blocked Spanned. + 'VS' + Variable Spanned. + None + If the data set does not exist or the data set is VSAM. - def catalog(self, tmp_hlq=None): - """Catalog the data set in question. + Raises + ------ + AttributeError + When input data set is a USS file or directory. + """ + if self.is_uss_path: + raise AttributeError( + "USS file or directory has no attribute 'recfm'") + return self.ds_info.get("recfm") - Parameters - ---------- - tmp_hlq : str - High level qualifier for temporary datasets. + def _gather_data_set_info(self): + """Retrieves information about the input data set using LISTDS and + LISTCAT commands. Returns ------- - int - Indicates if changes were made. - """ - rc = DataSet.catalog(self.name, self.volumes, tmphlq=tmp_hlq) - self.is_cataloged = True - return rc + dict + Dictionary containing data set attributes. - def ensure_uncataloged(self, tmp_hlq=None): + Raises + ------ + DatasetBusyError + The dataset may be open by another user. + MVSCmdExecError + Another error while executing the command. """ - Ensures the data set is uncataloged, if not catalogs it. + result = dict() + self.data_set = self.data_set.upper().replace("\\", '') + listds_rc, listds_out, listds_err = mvs_cmd.ikjeft01( + " LISTDS '{0}'".format(self.data_set), + authorized=True, + tmphlq=self.tmphlq + ) + + if listds_rc == 0: + result.update(self._process_listds_output(listds_out)) + else: + if re.findall(r"ALREADY IN USE", listds_out): + raise DatasetBusyError(self.data_set) + if re.findall(r"NOT IN CATALOG", listds_out): + self.ds_info["exists"] = False + else: + raise MVSCmdExecError(listds_rc, listds_out, listds_err) + + listcat_rc, listcat_out, listcat_err = mvs_cmd.idcams( + " LISTCAT ENT({0}) ALL".format(self.data_set), authorized=True + ) + if listcat_rc == 0: + result.update(self._process_listcat_output(listcat_out)) + else: + if re.findall(r"NOT FOUND|NOT LISTED", listcat_out): + self.ds_info["exists"] = False + else: + raise MVSCmdExecError(listcat_rc, listcat_out, listcat_err) + return result + + def _process_listds_output(self, output): + """Parses the output generated by LISTDS command. Parameters ---------- - tmp_hlq : str - High level qualifier for temporary datasets. + output : str + The output of LISTDS command. Returns ------- - int - Indicates if changes were made. + dict + Dictionary containing the output parameters of LISTDS. 
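+
+        Example
+        -------
+        With illustrative LISTDS output whose attribute header line ends in
+        ``DSORG`` and is followed by ``FB  80  27920  PO``, the parsed result
+        would be::
+
+            {"exists": True, "dsorg": "PO", "recfm": "FB", "lrecl": 80, "blksize": 27920}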
""" - rc = DataSet.ensure_uncataloged(self.name, tmphlq=tmp_hlq) - self.is_cataloged = False - return rc + result = dict() + if "NOT IN CATALOG" in output: + result["exists"] = False + else: + result["exists"] = True + ds_search = re.search( + r"(-|--)DSORG(-\s*|\s*)\n(.*)", output, re.MULTILINE) + if ds_search: + ds_params = ds_search.group(3).split() + result["dsorg"] = ds_params[-1] + if result.get("dsorg") != "VSAM": + result["recfm"] = ds_params[0] + if ds_params[1].isdigit(): + result["lrecl"] = int(ds_params[1]) + if len(ds_params) > 2 and ds_params[2].isdigit(): + result["blksize"] = int(ds_params[2]) + return result - def uncatalog(self, tmp_hlq=None): - """Uncatalog the data set in question. + def _process_listcat_output(self, output): + """Parses the output generated by LISTCAT command. Parameters ---------- - tmp_hlq : str - High level qualifier for temporary datasets. + output : str + The output of LISTCAT command. Returns ------- - int - Indicates if changes were made. + dict + Dictionary containing the output parameters of LISTCAT. """ - rc = DataSet.uncatalog(self.name, tmphlq=tmp_hlq) - self.is_cataloged = False - return rc + result = dict() + if "NOT FOUND" not in output: + volser_output = re.findall(r"VOLSER-*[A-Z|0-9]*", output) + if volser_output: + result["volser"] = "".join( + re.findall(r"-[A-Z|0-9]*", volser_output[0]) + ).replace("-", "") + return result - def set_state(self, new_state): - """Used to set the data set state. + @staticmethod + def verify_dataset_disposition(data_set, disposition): + """Function to call iefbr14 to verify the dsp of data_set - Parameters - ---------- - new_state : str {unknown, present, absent} - New state of the data set. + Args: + data_set {str}: Name of dataset to verify the dsp=shr - Returns - ------- - bool - If state was set properly. + Returns: + bool: If the data_set is in dsp=shr """ - if new_state not in self.data_set_possible_states: - raise ValueError(f"State {self.state} not supported for MVSDataset class.") - return True + data_set_disp = f"{data_set},{disposition}" + dd = {"dd" : data_set_disp} + rc, stdput, stderr = mvs_cmd.iefbr14(dds=dd) + + if rc != 0: + return True + else: + return False + class Member(): diff --git a/plugins/module_utils/dd_statement.py b/plugins/module_utils/dd_statement.py index 33c9d49cc..11e3954f9 100644 --- a/plugins/module_utils/dd_statement.py +++ b/plugins/module_utils/dd_statement.py @@ -17,7 +17,7 @@ ZOAUImportError, ) -from ansible_collections.ibm.ibm_zos_core.plugins.module_utils.data_set import DataSet +from ansible_collections.ibm.ibm_zos_core.plugins.module_utils.data_set import DataSetUtils try: from zoautil_py import datasets @@ -762,7 +762,7 @@ def __init__( Defaults to 80. """ self.name = None - name = DataSet.create_temp( + name = DataSetUtils.create_temp( hlq=tmphlq, record_format=record_format, space_primary=space_primary, @@ -773,13 +773,13 @@ def __init__( super().__init__(name) if isinstance(content, list): content = "\n".join(content) - DataSet.write(name, content) + DataSetUtils.write(name, content) def __del__(self): """Delete dataset with the name of this object """ if self.name: - DataSet.delete(self.name) + DataSetUtils.delete(self.name) def _build_arg_string(self): """Build a string representing the arguments of this particular data type @@ -860,7 +860,7 @@ def __init__( Defaults to 80. 
""" self.name = None - name = DataSet.create_temp( + name = DataSetUtils.create_temp( hlq=tmphlq, record_format=record_format, space_primary=space_primary, @@ -874,7 +874,7 @@ def __del__(self): """Delete dataset with the name of this object """ if self.name: - DataSet.delete(self.name) + DataSetUtils.delete(self.name) def _build_arg_string(self): """Build a string representing the arguments of this particular data type @@ -912,8 +912,8 @@ def __del__(self): if VIO wrote to disk during execution. """ try: - DataSet.delete(self.name) - except DataSet.DatasetDeleteError: + DataSetUtils.delete(self.name) + except DataSetUtils.DatasetDeleteError: pass def _build_arg_string(self): diff --git a/plugins/module_utils/zos_mvs_raw.py b/plugins/module_utils/zos_mvs_raw.py index 9e898b58a..9086e9bff 100644 --- a/plugins/module_utils/zos_mvs_raw.py +++ b/plugins/module_utils/zos_mvs_raw.py @@ -24,7 +24,7 @@ InputDefinition, OutputDefinition, ) -from ansible_collections.ibm.ibm_zos_core.plugins.module_utils.data_set import DataSet +from ansible_collections.ibm.ibm_zos_core.plugins.module_utils.data_set import DataSetUtils from ansible_collections.ibm.ibm_zos_core.plugins.module_utils import ( backup as zos_backup, ) @@ -250,13 +250,13 @@ def __init__( key_encoding2 = encryption_key_2.get("encoding") should_reuse = False - if (reuse or replace) and DataSet.data_set_exists(data_set_name, volumes): + if (reuse or replace) and DataSetUtils.data_set_exists(data_set_name, volumes): if reuse: should_reuse = True elif replace: if backup: self.backup = zos_backup.mvs_file_backup(data_set_name, None, tmphlq) - DataSet.delete(data_set_name) + DataSetUtils.delete(data_set_name) if not should_reuse: super().__init__( diff --git a/plugins/modules/zos_apf.py b/plugins/modules/zos_apf.py index 211a1f478..224a59688 100644 --- a/plugins/modules/zos_apf.py +++ b/plugins/modules/zos_apf.py @@ -324,7 +324,7 @@ # supported data set types -DS_TYPE = data_set.DataSet.MVS_SEQ.union(data_set.DataSet.MVS_PARTITIONED) +DS_TYPE = data_set.DataSetUtils.MVS_SEQ.union(data_set.DataSetUtils.MVS_PARTITIONED) def backupOper(module, src, backup, tmphlq=None): @@ -354,7 +354,7 @@ def backupOper(module, src, backup, tmphlq=None): """ file_type = None if data_set.is_data_set(src): - file_type = data_set.DataSet.data_set_type(src, tmphlq=tmphlq) + file_type = data_set.DataSetUtils.data_set_type(src, tmphlq=tmphlq) else: if os.path.exists(src): file_type = 'USS' diff --git a/plugins/modules/zos_archive.py b/plugins/modules/zos_archive.py index a02ec00a5..0d977e7e6 100644 --- a/plugins/modules/zos_archive.py +++ b/plugins/modules/zos_archive.py @@ -1217,7 +1217,7 @@ def find_targets(self): """Finds target datasets in host. """ for path in self.sources: - if data_set.DataSet.data_set_exists(path, tmphlq=self.tmphlq): + if data_set.DataSetUtils.data_set_exists(path, tmphlq=self.tmphlq): self.targets.append(path) else: self.not_found.append(path) @@ -1321,7 +1321,7 @@ def _create_dest_data_set( if space_type is None: arguments.update(space_type="m") arguments.pop("self") - changed = data_set.DataSet.ensure_present(**arguments) + changed = data_set.DataSetUtils.ensure_present(**arguments) return arguments["name"], changed def create_dest_ds(self, name): @@ -1338,8 +1338,8 @@ def create_dest_ds(self, name): Name of the newly created data set. 
""" record_length = XMIT_RECORD_LENGTH if self.format == "xmit" else AMATERSE_RECORD_LENGTH - data_set.DataSet.ensure_present(name=name, replace=True, type='seq', record_format='fb', record_length=record_length, tmphlq=self.tmphlq) - # changed = data_set.DataSet.ensure_present(name=name, replace=True, type='seq', record_format='fb', record_length=record_length) + data_set.DataSetUtils.ensure_present(name=name, replace=True, type='seq', record_format='fb', record_length=record_length, tmphlq=self.tmphlq) + # changed = data_set.DataSetUtils.ensure_present(name=name, replace=True, type='seq', record_format='fb', record_length=record_length) # cmd = "dtouch -rfb -tseq -l{0} {1}".format(record_length, name) # rc, out, err = self.module.run_command(cmd) @@ -1456,7 +1456,7 @@ def dest_exists(self): bool If destination path exists. """ - return data_set.DataSet.data_set_exists(self.dest, tmphlq=self.tmphlq) + return data_set.DataSetUtils.data_set_exists(self.dest, tmphlq=self.tmphlq) def remove_targets(self): """Removes the archived targets and changes the state accordingly. @@ -1464,7 +1464,7 @@ def remove_targets(self): self.state = STATE_ABSENT for target in self.archived: try: - changed = data_set.DataSet.ensure_absent(target) + changed = data_set.DataSetUtils.ensure_absent(target) except Exception: self.state = STATE_INCOMPLETE if not changed: @@ -1495,8 +1495,8 @@ def expand_mvs_paths(self, paths): # resolve GDS relative names for index, e_path in enumerate(e_paths): - if data_set.DataSet.is_gds_relative_name(e_path): - e_paths[index] = data_set.DataSet.resolve_gds_absolute_name(e_path) + if data_set.DataSetUtils.is_gds_relative_name(e_path): + e_paths[index] = data_set.DataSetUtils.resolve_gds_absolute_name(e_path) expanded_path.extend(e_paths) return expanded_path @@ -1525,7 +1525,7 @@ def clean_environment(self, data_sets=None, uss_files=None, remove_targets=False """ if data_set is not None: for ds in data_sets: - data_set.DataSet.ensure_absent(ds) + data_set.DataSetUtils.ensure_absent(ds) if uss_files is not None: for file in uss_files: try: @@ -1560,7 +1560,7 @@ def encode_source(self): self.failed_on_encoding = [] for target in self.encode_targets: try: - ds_type = data_set.DataSetUtils(target, tmphlq=self.tmphlq).ds_type() + ds_type = data_set.DataSetView(target, tmphlq=self.tmphlq).ds_type() if not ds_type: ds_type = "PS" self.ds_types[target] = ds_type @@ -1684,7 +1684,7 @@ def archive_targets(self): self.module.fail_json( msg="To archive multiple source data sets, you must use option 'use_adrdssu=True'.") source = self.targets[0] - dataset = data_set.MVSDataSet( + dataset = data_set.DataSet( name=self.dest, data_set_type='seq', record_format='fb', @@ -1779,7 +1779,7 @@ def archive_targets(self): msg="To archive multiple source data sets, you must use option 'use_adrdssu=True'.") source = self.sources[0] # dest = self.create_dest_ds(self.dest) - dataset = data_set.MVSDataSet( + dataset = data_set.DataSet( name=self.dest, data_set_type='seq', record_format='fb', diff --git a/plugins/modules/zos_backup_restore.py b/plugins/modules/zos_backup_restore.py index 731762689..b7351d7c8 100644 --- a/plugins/modules/zos_backup_restore.py +++ b/plugins/modules/zos_backup_restore.py @@ -380,7 +380,7 @@ from ansible_collections.ibm.ibm_zos_core.plugins.module_utils.better_arg_parser import \ BetterArgParser from ansible_collections.ibm.ibm_zos_core.plugins.module_utils.data_set import \ - DataSet + DataSetUtils from ansible_collections.ibm.ibm_zos_core.plugins.module_utils.import_handler import 
\ ZOAUImportError @@ -499,8 +499,8 @@ def resolve_gds_name_if_any(data_set_list): """ if isinstance(data_set_list, list): for index, name in enumerate(data_set_list): - if DataSet.is_gds_relative_name(name): - data_set_list[index] = DataSet.resolve_gds_absolute_name(name) + if DataSetUtils.is_gds_relative_name(name): + data_set_list[index] = DataSetUtils.resolve_gds_absolute_name(name) return data_set_list diff --git a/plugins/modules/zos_blockinfile.py b/plugins/modules/zos_blockinfile.py index 86e65958a..2e73a3831 100644 --- a/plugins/modules/zos_blockinfile.py +++ b/plugins/modules/zos_blockinfile.py @@ -616,15 +616,15 @@ def main(): block = transformBlock(block, ' ', indentation) # analysis the file type if "/" not in src: - dataset = data_set.MVSDataSet( + dataset = data_set.DataSet( name=src ) src = dataset.name - if data_set.DataSet.is_gds_relative_name(src): + if data_set.DataSetUtils.is_gds_relative_name(src): module.fail_json(msg="{0} does not exist".format(src)) - ds_utils = data_set.DataSetUtils(src, tmphlq=tmphlq) + ds_utils = data_set.DataSetView(src, tmphlq=tmphlq) if not ds_utils.exists(): message = "{0} does NOT exist".format(str(src)) module.fail_json(msg=message) diff --git a/plugins/modules/zos_copy.py b/plugins/modules/zos_copy.py index 0a9c0d090..c4001e40d 100644 --- a/plugins/modules/zos_copy.py +++ b/plugins/modules/zos_copy.py @@ -1650,7 +1650,7 @@ def copy_to_uss( """ changed_files = None - if src_ds_type in data_set.DataSet.MVS_SEQ.union(data_set.DataSet.MVS_PARTITIONED) or src_ds_type == "GDG": + if src_ds_type in data_set.DataSetUtils.MVS_SEQ.union(data_set.DataSetUtils.MVS_PARTITIONED) or src_ds_type == "GDG": self._mvs_copy_to_uss( src, dest, src_ds_type, src_member, member_name=member_name ) @@ -1942,14 +1942,14 @@ def _mvs_copy_to_uss( # the same name as the member. dest = "{0}/{1}".format(dest, member_name or src) - if (src_ds_type in data_set.DataSet.MVS_PARTITIONED and not src_member) or src_ds_type == "GDG": + if (src_ds_type in data_set.DataSetUtils.MVS_PARTITIONED and not src_member) or src_ds_type == "GDG": try: os.mkdir(dest) except FileExistsError: pass try: - if src_member or src_ds_type in data_set.DataSet.MVS_SEQ: + if src_member or src_ds_type in data_set.DataSetUtils.MVS_SEQ: if self.asa_text: response = copy.copy_asa_mvs2uss(src, dest, tmphlq=self.tmphlq) rc = response.rc @@ -2113,11 +2113,11 @@ def copy_to_pdse( ] dest_members = [ dest_member if dest_member - else data_set.DataSet.get_member_name_from_file(file) + else data_set.DataSetUtils.get_member_name_from_file(file) for file in files ] - elif src_ds_type in data_set.DataSet.MVS_SEQ: + elif src_ds_type in data_set.DataSetUtils.MVS_SEQ: src_members = [new_src] dest_members = [dest_member] @@ -2365,7 +2365,7 @@ def get_data_set_attributes( Returns ------- dict - Parameters that can be passed into data_set.DataSet.ensure_present. + Parameters that can be passed into data_set.DataSetUtils.ensure_present. """ # Calculating the size needed to allocate. space_primary = int(math.ceil((size / 1024))) @@ -2479,7 +2479,7 @@ def create_seq_dataset_from_file( volume=volume ) - data_set.DataSet.ensure_present(replace=force, tmp_hlq=tmphlq, **dest_params) + data_set.DataSetUtils.ensure_present(replace=force, tmp_hlq=tmphlq, **dest_params) def backup_data(ds_name, ds_type, backup_name, tmphlq=None): @@ -2578,7 +2578,7 @@ def is_compatible( # is incompatible to execute the copy. 
# ******************************************************************** if executable: - if src_type in data_set.DataSet.MVS_SEQ or dest_type in data_set.DataSet.MVS_SEQ: + if src_type in data_set.DataSetUtils.MVS_SEQ or dest_type in data_set.DataSetUtils.MVS_SEQ: return False # ******************************************************************** @@ -2592,9 +2592,9 @@ def is_compatible( # When either the src or dest are GDSs, the other cannot be a VSAM # data set, since GDGs don't support VSAMs. # ******************************************************************** - if is_src_gds and dest_type in data_set.DataSet.MVS_VSAM: + if is_src_gds and dest_type in data_set.DataSetUtils.MVS_VSAM: return False - if is_dest_gds and src_type in data_set.DataSet.MVS_VSAM: + if is_dest_gds and src_type in data_set.DataSetUtils.MVS_VSAM: return False # ******************************************************************** @@ -2625,9 +2625,9 @@ def is_compatible( # partitioned data set member, other sequential data sets or USS files. # Anything else is incompatible. # ******************************************************************** - if src_type in data_set.DataSet.MVS_SEQ: + if src_type in data_set.DataSetUtils.MVS_SEQ: return not ( - (dest_type in data_set.DataSet.MVS_PARTITIONED and not copy_member) or dest_type == "VSAM" + (dest_type in data_set.DataSetUtils.MVS_PARTITIONED and not copy_member) or dest_type == "VSAM" ) # ******************************************************************** @@ -2642,11 +2642,11 @@ def is_compatible( # In the second case, the possible targets are USS directories and # other PDS/PDSE. Anything else is incompatible. # ******************************************************************** - elif src_type in data_set.DataSet.MVS_PARTITIONED: + elif src_type in data_set.DataSetUtils.MVS_PARTITIONED: if dest_type == "VSAM": return False if not src_member: - return not (copy_member or dest_type in data_set.DataSet.MVS_SEQ) + return not (copy_member or dest_type in data_set.DataSetUtils.MVS_SEQ) return True # ******************************************************************** @@ -2659,11 +2659,11 @@ def is_compatible( # directory or a partitioned data set. # ******************************************************************** elif src_type == "USS": - if dest_type in data_set.DataSet.MVS_SEQ or copy_member: + if dest_type in data_set.DataSetUtils.MVS_SEQ or copy_member: return not is_src_dir - elif dest_type in data_set.DataSet.MVS_PARTITIONED and not copy_member and is_src_inline: + elif dest_type in data_set.DataSetUtils.MVS_PARTITIONED and not copy_member and is_src_inline: return False - elif dest_type in data_set.DataSet.MVS_VSAM: + elif dest_type in data_set.DataSetUtils.MVS_VSAM: return False else: return True @@ -2738,14 +2738,14 @@ def does_destination_allow_copy( # If the destination is a sequential or VSAM data set and is empty, the module will try to use it, # otherwise, force needs to be True to continue and replace it. - if (dest_type in data_set.DataSet.MVS_SEQ or dest_type in data_set.DataSet.MVS_VSAM) and dest_exists: - is_dest_empty = data_set.DataSet.is_empty(dest, volume, tmphlq=tmphlq) + if (dest_type in data_set.DataSetUtils.MVS_SEQ or dest_type in data_set.DataSetUtils.MVS_VSAM) and dest_exists: + is_dest_empty = data_set.DataSetUtils.is_empty(dest, volume, tmphlq=tmphlq) if not (is_dest_empty or force): return False # When the destination is a partitioned data set, the module will have to be able to replace # existing members inside of it, if needed. 
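+    # For example, replacing a member that already exists in the destination
+    # (member_exists) requires force=True; copying a member that does not exist
+    # yet is not blocked by this check.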
- if dest_type in data_set.DataSet.MVS_PARTITIONED and dest_exists and member_exists and not force: + if dest_type in data_set.DataSetUtils.MVS_PARTITIONED and dest_exists and member_exists and not force: return False # When the destination is an existing GDG, we'll check that we have enough free generations @@ -2975,7 +2975,7 @@ def allocate_destination_data_set( space_type, type. """ src_name = data_set.extract_dsname(src) - is_dest_empty = data_set.DataSet.is_empty(dest) if dest_exists else True + is_dest_empty = data_set.DataSetUtils.is_empty(dest) if dest_exists else True # Replacing an existing dataset only when it's not empty. We don't know whether that # empty dataset was created for the user by an admin/operator, and they don't have permissions @@ -3018,20 +3018,20 @@ def allocate_destination_data_set( del dest_params["purge"] del dest_params["extended"] del dest_params["fifo"] - data_set.DataSet.ensure_present(replace=force, tmp_hlq=tmphlq, **dest_params) - elif dest_ds_type in data_set.DataSet.MVS_SEQ: + data_set.DataSetUtils.ensure_present(replace=force, tmp_hlq=tmphlq, **dest_params) + elif dest_ds_type in data_set.DataSetUtils.MVS_SEQ: volumes = [volume] if volume else None - data_set.DataSet.ensure_absent(dest, volumes=volumes) + data_set.DataSetUtils.ensure_absent(dest, volumes=volumes) if src_ds_type == "USS": # Taking the temp file when a local file was copied with sftp. create_seq_dataset_from_file(src, dest, force, is_binary, asa_text, volume=volume, tmphlq=tmphlq) - elif src_ds_type in data_set.DataSet.MVS_SEQ: + elif src_ds_type in data_set.DataSetUtils.MVS_SEQ: # Only applying the GDS special case when we don't have an absolute name. if is_gds and not is_active_gds: - data_set.DataSet.allocate_gds_model_data_set(ds_name=dest, model=src_name, asa_text=asa_text, vol=volume) + data_set.DataSetUtils.allocate_gds_model_data_set(ds_name=dest, model=src_name, asa_text=asa_text, vol=volume) else: - data_set.DataSet.allocate_model_data_set(ds_name=dest, model=src_name, asa_text=asa_text, vol=volume, tmphlq=tmphlq) + data_set.DataSetUtils.allocate_model_data_set(ds_name=dest, model=src_name, asa_text=asa_text, vol=volume, tmphlq=tmphlq) else: temp_dump = None try: @@ -3053,15 +3053,15 @@ def allocate_destination_data_set( finally: if temp_dump: os.remove(temp_dump) - elif dest_ds_type in data_set.DataSet.MVS_PARTITIONED and not dest_exists: + elif dest_ds_type in data_set.DataSetUtils.MVS_PARTITIONED and not dest_exists: # Taking the src as model if it's also a PDSE. - if src_ds_type in data_set.DataSet.MVS_PARTITIONED: + if src_ds_type in data_set.DataSetUtils.MVS_PARTITIONED: # Only applying the GDS special case when we don't have an absolute name. if is_gds and not is_active_gds: - data_set.DataSet.allocate_gds_model_data_set(ds_name=dest, model=src_name, asa_text=asa_text, vol=volume) + data_set.DataSetUtils.allocate_gds_model_data_set(ds_name=dest, model=src_name, asa_text=asa_text, vol=volume) else: - data_set.DataSet.allocate_model_data_set(ds_name=dest, model=src_name, executable=executable, asa_text=asa_text, vol=volume, tmphlq=tmphlq) - elif src_ds_type in data_set.DataSet.MVS_SEQ: + data_set.DataSetUtils.allocate_model_data_set(ds_name=dest, model=src_name, executable=executable, asa_text=asa_text, vol=volume, tmphlq=tmphlq) + elif src_ds_type in data_set.DataSetUtils.MVS_SEQ: src_attributes = datasets.list_datasets(src_name)[0] # The size returned by listing is in bytes. 
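+            # For example, an illustrative 5_242_880-byte source would yield
+            # space_primary = ceil(5242880 / 1024) = 5120 inside
+            # get_data_set_attributes() defined above.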
size = int(src_attributes.total_space) @@ -3077,7 +3077,7 @@ def allocate_destination_data_set( type="PDSE", volume=volume ) - data_set.DataSet.ensure_present(replace=force, tmp_hlq=tmphlq, **dest_params) + data_set.DataSetUtils.ensure_present(replace=force, tmp_hlq=tmphlq, **dest_params) elif src_ds_type == "USS": if os.path.isfile(src): # This is almost the same as allocating a sequential dataset. @@ -3135,13 +3135,13 @@ def allocate_destination_data_set( volume=volume ) - data_set.DataSet.ensure_present(replace=force, tmp_hlq=tmphlq, **dest_params) - elif dest_ds_type in data_set.DataSet.MVS_VSAM: + data_set.DataSetUtils.ensure_present(replace=force, tmp_hlq=tmphlq, **dest_params) + elif dest_ds_type in data_set.DataSetUtils.MVS_VSAM: # If dest_data_set is not available, always create the destination using the src VSAM # as a model. volumes = [volume] if volume else None - data_set.DataSet.ensure_absent(dest, volumes=volumes) - data_set.DataSet.allocate_model_data_set(ds_name=dest, model=src_name, vol=volume, tmphlq=tmphlq) + data_set.DataSetUtils.ensure_absent(dest, volumes=volumes) + data_set.DataSetUtils.allocate_model_data_set(ds_name=dest, model=src_name, vol=volume, tmphlq=tmphlq) elif dest_ds_type == "GDG": src_view = gdgs.GenerationDataGroupView(src) @@ -3162,9 +3162,9 @@ def allocate_destination_data_set( if is_gds and not is_active_gds: gdg_name = data_set.extract_dsname(dest) - dest = data_set.DataSet.resolve_gds_absolute_name(f"{gdg_name}(0)") + dest = data_set.DataSetUtils.resolve_gds_absolute_name(f"{gdg_name}(0)") - if dest_ds_type not in data_set.DataSet.MVS_VSAM and dest_ds_type != "GDG": + if dest_ds_type not in data_set.DataSetUtils.MVS_VSAM and dest_ds_type != "GDG": dest_params = get_attributes_of_any_dataset_created( dest, src_ds_type, @@ -3263,7 +3263,7 @@ def remote_cleanup(module): shutil.rmtree(dest) else: dest = data_set.extract_dsname(dest) - data_set.DataSet.ensure_absent(name=dest) + data_set.DataSetUtils.ensure_absent(name=dest) def update_result(res_args, original_args): @@ -3418,9 +3418,9 @@ def run_module(module, arg_def): is_src_dir = os.path.isdir(src) is_uss = "/" in dest is_mvs_src = is_data_set(data_set.extract_dsname(src)) - is_src_gds = data_set.DataSet.is_gds_relative_name(src) + is_src_gds = data_set.DataSetUtils.is_gds_relative_name(src) is_mvs_dest = is_data_set(data_set.extract_dsname(dest)) - is_dest_gds = data_set.DataSet.is_gds_relative_name(dest) + is_dest_gds = data_set.DataSetUtils.is_gds_relative_name(dest) is_dest_gds_active = False is_pds = is_src_dir and is_mvs_dest src_member = is_member(src) @@ -3430,30 +3430,30 @@ def run_module(module, arg_def): is_dest_alias = False if is_mvs_src and not src_member and not is_src_gds: - is_src_alias, src_base_name = data_set.DataSet.get_name_if_data_set_is_alias(src, tmphlq) + is_src_alias, src_base_name = data_set.DataSetUtils.get_name_if_data_set_is_alias(src, tmphlq) if is_src_alias: src = src_base_name if is_mvs_dest and not copy_member and not is_dest_gds: - is_dest_alias, dest_base_name = data_set.DataSet.get_name_if_data_set_is_alias(dest, tmphlq) + is_dest_alias, dest_base_name = data_set.DataSetUtils.get_name_if_data_set_is_alias(dest, tmphlq) if is_dest_alias: dest = dest_base_name # Validation for copy from a member if src_member: - if not (data_set.DataSet.data_set_member_exists(src)): + if not (data_set.DataSetUtils.data_set_member_exists(src)): module.fail_json(msg="Unable to copy. 
Source member {0} does not exist or is not cataloged.".format( data_set.extract_member_name(src) )) - # Implementing the new MVSDataSet class by masking the values of + # Implementing the new DataSet class by masking the values of # src/raw_src and dest/raw_dest. if is_mvs_src: - src_data_set_object = data_set.MVSDataSet(src) + src_data_set_object = data_set.DataSet(src) src = src_data_set_object.name raw_src = src_data_set_object.raw_name if is_mvs_dest: - dest_data_set_object = data_set.MVSDataSet(dest) + dest_data_set_object = data_set.DataSet(dest) dest = dest_data_set_object.name raw_dest = dest_data_set_object.raw_name is_dest_gds_active = dest_data_set_object.is_gds_active @@ -3541,13 +3541,13 @@ def run_module(module, arg_def): copy_handler = CopyHandler(module, is_binary=is_binary) copy_handler._tag_file_encoding(converted_src, "UTF-8") else: - if (is_src_gds and data_set.DataSet.data_set_exists(src, tmphlq=tmphlq)) or ( - not is_src_gds and data_set.DataSet.data_set_exists(src_name, tmphlq=tmphlq)): - if src_member and not data_set.DataSet.data_set_member_exists(src): + if (is_src_gds and data_set.DataSetUtils.data_set_exists(src, tmphlq=tmphlq)) or ( + not is_src_gds and data_set.DataSetUtils.data_set_exists(src_name, tmphlq=tmphlq)): + if src_member and not data_set.DataSetUtils.data_set_member_exists(src): raise NonExistentSourceError(src) - src_ds_type = data_set.DataSet.data_set_type(src_name, tmphlq=tmphlq) + src_ds_type = data_set.DataSetUtils.data_set_type(src_name, tmphlq=tmphlq) - if src_ds_type not in data_set.DataSet.MVS_VSAM and src_ds_type != "GDG": + if src_ds_type not in data_set.DataSetUtils.MVS_VSAM and src_ds_type != "GDG": src_attributes = datasets.list_datasets(src_name)[0] if src_attributes.record_format == 'FBA' or src_attributes.record_format == 'VBA': src_has_asa_chars = True @@ -3556,7 +3556,7 @@ def run_module(module, arg_def): # An empty VSAM will throw an error when IDCAMS tries to open it to copy # the contents. - if src_ds_type in data_set.DataSet.MVS_VSAM and data_set.DataSet.is_empty(src_name): + if src_ds_type in data_set.DataSetUtils.MVS_VSAM and data_set.DataSetUtils.is_empty(src_name): module.exit_json( note="The source VSAM {0} is likely empty. No data was copied.".format(src_name), changed=False, @@ -3584,8 +3584,8 @@ def run_module(module, arg_def): if dest_exists and not os.access(dest, os.W_OK): module.fail_json(msg="Destination {0} is not writable".format(raw_dest)) else: - dest_exists = data_set.DataSet.data_set_exists(dest_name, volume, tmphlq=tmphlq) - dest_ds_type = data_set.DataSet.data_set_type(dest_name, volume, tmphlq=tmphlq) + dest_exists = data_set.DataSetUtils.data_set_exists(dest_name, volume, tmphlq=tmphlq) + dest_ds_type = data_set.DataSetUtils.data_set_type(dest_name, volume, tmphlq=tmphlq) # When dealing with a new generation, we'll override its type to None # so it will be the same type as the source (or whatever dest_data_set has) @@ -3600,7 +3600,7 @@ def run_module(module, arg_def): elif executable: # When executable is selected and dest_exists is false means an executable PDSE was copied to remote, # so we need to provide the correct dest_ds_type that will later be transformed into LIBRARY. - # Not using LIBRARY at this step since there are many checks with dest_ds_type in data_set.DataSet.MVS_PARTITIONED + # Not using LIBRARY at this step since there are many checks with dest_ds_type in data_set.DataSetUtils.MVS_PARTITIONED # and LIBRARY is not in MVS_PARTITIONED frozen set. 
dest_ds_type = "PDSE" @@ -3608,28 +3608,28 @@ def run_module(module, arg_def): dest_has_asa_chars = True elif not dest_exists and asa_text: dest_has_asa_chars = True - elif dest_exists and dest_ds_type not in data_set.DataSet.MVS_VSAM and dest_ds_type != "GDG": + elif dest_exists and dest_ds_type not in data_set.DataSetUtils.MVS_VSAM and dest_ds_type != "GDG": dest_attributes = datasets.list_datasets(dest_name)[0] if dest_attributes.record_format == 'FBA' or dest_attributes.record_format == 'VBA': dest_has_asa_chars = True - if dest_ds_type in data_set.DataSet.MVS_PARTITIONED: + if dest_ds_type in data_set.DataSetUtils.MVS_PARTITIONED: # Checking if we need to copy a member when the user requests it implicitly. # src is a file and dest was just the PDS/E dataset name. if not copy_member and src_ds_type == "USS" and os.path.isfile(src): copy_member = True - dest_member = data_set.DataSet.get_member_name_from_file(os.path.basename(src)) + dest_member = data_set.DataSetUtils.get_member_name_from_file(os.path.basename(src)) dest = f"{dest_name}({dest_member})" # Checking if the members that would be created from the directory files # are already present on the system. if copy_member: - dest_member_exists = dest_exists and data_set.DataSet.data_set_member_exists(dest) + dest_member_exists = dest_exists and data_set.DataSetUtils.data_set_member_exists(dest) elif src_ds_type == "USS": root_dir = src - dest_member_exists = dest_exists and data_set.DataSet.files_in_data_set_members(root_dir, dest) - elif src_ds_type in data_set.DataSet.MVS_PARTITIONED: - dest_member_exists = dest_exists and data_set.DataSet.data_set_shared_members(src, dest) + dest_member_exists = dest_exists and data_set.DataSetUtils.files_in_data_set_members(root_dir, dest) + elif src_ds_type in data_set.DataSetUtils.MVS_PARTITIONED: + dest_member_exists = dest_exists and data_set.DataSetUtils.data_set_shared_members(src, dest) except Exception as err: module.fail_json(msg=str(err)) identical_gdg_copy = module.params.get('identical_gdg_copy', False) @@ -3689,7 +3689,7 @@ def run_module(module, arg_def): # ******************************************************************** if dest_exists and dest_ds_type != "USS": if not force_lock: - is_dest_lock = data_set.DataSetUtils.verify_dataset_disposition(data_set=data_set.extract_dsname(dest_name), disposition="old") + is_dest_lock = data_set.DataSetView.verify_dataset_disposition(data_set=data_set.extract_dsname(dest_name), disposition="old") if is_dest_lock: module.fail_json( msg="Unable to write to dest '{0}' because a task is accessing the data set.".format( @@ -3712,7 +3712,7 @@ def run_module(module, arg_def): # Attempt to write PDS (not member) to USS file (i.e. 
a non-directory) # ******************************************************************** if ( - src_ds_type in data_set.DataSet.MVS_PARTITIONED and not src_member + src_ds_type in data_set.DataSetUtils.MVS_PARTITIONED and not src_member and dest_ds_type == 'USS' and not os.path.isdir(dest) ): module.fail_json( @@ -3725,12 +3725,12 @@ def run_module(module, arg_def): # ******************************************************************** if dest_exists: if backup or backup_name: - if dest_ds_type in data_set.DataSet.MVS_PARTITIONED and data_set.DataSet.is_empty(dest_name): + if dest_ds_type in data_set.DataSetUtils.MVS_PARTITIONED and data_set.DataSetUtils.is_empty(dest_name): # The partitioned data set is empty res_args["note"] = "Destination is empty, backup request ignored" else: if backup_name: - backup_data_set = data_set.MVSDataSet(backup_name) + backup_data_set = data_set.DataSet(backup_name) if backup_data_set.is_gds_active: module.fail_json( msg=( @@ -3762,11 +3762,11 @@ def run_module(module, arg_def): if ( is_pds or copy_member - or (src_ds_type in data_set.DataSet.MVS_PARTITIONED and (not src_member) and is_mvs_dest) + or (src_ds_type in data_set.DataSetUtils.MVS_PARTITIONED and (not src_member) and is_mvs_dest) or (src and os.path.isdir(src) and is_mvs_dest) ): dest_ds_type = "PDSE" - elif src_ds_type in data_set.DataSet.MVS_VSAM or src_ds_type == "GDG": + elif src_ds_type in data_set.DataSetUtils.MVS_VSAM or src_ds_type == "GDG": dest_ds_type = src_ds_type elif not is_uss: dest_ds_type = "SEQ" @@ -3916,7 +3916,7 @@ def run_module(module, arg_def): # ------------------------------- o ----------------------------------- # Copy to sequential data set (PS / SEQ) # --------------------------------------------------------------------- - elif dest_ds_type in data_set.DataSet.MVS_SEQ: + elif dest_ds_type in data_set.DataSetUtils.MVS_SEQ: # TODO: check how ASA behaves with this if src_ds_type == "USS" and not is_binary: new_src = conv_path or src @@ -3934,7 +3934,7 @@ def run_module(module, arg_def): # --------------------------------------------------------------------- # Copy to PDS/PDSE # --------------------------------------------------------------------- - elif dest_ds_type in data_set.DataSet.MVS_PARTITIONED or dest_ds_type == "LIBRARY": + elif dest_ds_type in data_set.DataSetUtils.MVS_PARTITIONED or dest_ds_type == "LIBRARY": pdse_copy_handler = PDSECopyHandler( module, diff --git a/plugins/modules/zos_data_set.py b/plugins/modules/zos_data_set.py index d03bbb126..4914ad46d 100644 --- a/plugins/modules/zos_data_set.py +++ b/plugins/modules/zos_data_set.py @@ -801,18 +801,135 @@ - "222222" """ RETURN = r""" -names: +data_sets: description: The data set names, including temporary generated data set names, in the order provided to the module. returned: always type: list elements: str + contains: + name: + description: The data set name. + type: str + returned: always + state: + description: The final state desired for specified data set. + type: str + returned: always + type: + description: The data set type. + type: str + returned: always + space_primary: + description: The amount of primary space allocated for the dataset. + type: int + returned: always + space_secondary: + description: The amount of secondary space allocated for the dataset. + type: int + returned: always + space_type: + description: The unit of measurement used when defining primary and secondary space. + type: str + returned: always + record_format: + description: The format of the data set. 
+ type: str + sample: fb + returned: always + sms_storage_class: + description: + - The storage class for the SMS-managed dataset. + - Returned empty if the data set was not specified as SMS-managed dataset. + type: str + returned: always + sms_data_class: + description: + - The data class for an SMS-managed dataset. + - Returned empty if the data set was not specified as SMS-managed dataset. + type: str + returned: always + sms_management_class: + description: + - The management class for an SMS-managed dataset. + - Returned empty if the data set was not specified as SMS-managed dataset. + type: str + returned: always + record_length: + description: The length, in bytes, of each record in the data set. + type: int + returned: always + block_size: + description: The block size used for the data set. + type: int + returned: always + directory_blocks: + description: + - The number of directory blocks to allocate to the data set. + type: int + required: false + returned: always + key_offset: + description: The key offset used when creating a KSDS data set. + type: int + returned: always + key_length: + description: The key length used when creating a KSDS data set. + type: int + returned: always + empty: + description: + - I(empty) attribute for Generation Data Groups. + - Returned empty if the data set provided was not defined as a GDG. + type: bool + returned: always + extended: + description: + - I(extended) attribute for Generation Data Groups. + - Returned empty if the data set provided was not defined as a GDG. + type: bool + returned: always + fifo: + description: + - I(fifo) attribute for Generation Data Groups. + - Returned empty if the data set provided was not defined as a GDG. + type: bool + returned: always + limit: + description: + - I(limit) attribute for Generation Data Groups. + - Returned empty if the data set provided was not defined as a GDG. + type: int + returned: always + purge: + description: + - I(purge) attribute for Generation Data Groups. + - Returned empty if the data set provided was not defined as a GDG. + type: bool + returned: always + scratch: + description: + - I(scratch) attribute for Generation Data Groups. + - Returned empty if the data set provided was not defined as a GDG. + type: bool + returned: always + volumes: + description: + - Specifies the name of the volume(s) where the data set is located. + - Returned empty if volume was not provided. + type: list + returned: always +msg: + description: A string with a generic message relayed to the user. + returned: always + type: str + sample: Error while gathering data set information """ from ansible_collections.ibm.ibm_zos_core.plugins.module_utils.better_arg_parser import ( BetterArgParser, ) from ansible_collections.ibm.ibm_zos_core.plugins.module_utils.data_set import ( - DataSet, GenerationDataGroup, MVSDataSet, Member + DataSetUtils, GenerationDataGroup, DataSet, Member ) from ansible.module_utils.basic import AnsibleModule @@ -942,7 +1059,7 @@ def data_set_name(contents, dependencies): tmphlq = dependencies.get("tmp_hlq") if tmphlq is None: tmphlq = "" - contents = DataSet.temp_name(tmphlq) + contents = DataSetUtils.temp_name(tmphlq) else: raise ValueError( 'Data set and member name must be provided when "type=member"' @@ -1368,7 +1485,7 @@ def get_data_set_handler(**params): Returns ------- - MVSDataSet or GenerationDataGroup or Member object. + DataSet or GenerationDataGroup or Member object. 
""" if params.get("type") == "gdg": return GenerationDataGroup( @@ -1385,7 +1502,7 @@ def get_data_set_handler(**params): name=params.get("name") ) else: - return MVSDataSet( + return DataSet( name=params.get("name"), record_format=params.get("record_format", None), volumes=params.get("volumes", None), @@ -1410,7 +1527,7 @@ def perform_data_set_operations(data_set, state, replace, tmp_hlq, force): Parameters ---------- - data_set : {object | MVSDataSet | Member | GenerationDataGroup } + data_set : {object | DataSet | Member | GenerationDataGroup } Data set object to perform operations on. state : str State of the data sets. @@ -1702,6 +1819,53 @@ def parse_and_validate_args(params): } return parsed_args +def build_return_schema( data_set_params): + """ Builds return values schema with empty values. + + Parameters + ---------- + data_set_params : dict + Dictionary containing all params used in data set creation. + + Returns + ------- + dict + Dictionary used to return values at execution finalization. + """ + data_set_schema = { + "name": "", + "state": "", + "type": "", + "space_primary": "", + "space_secondary": "", + "space_type": "", + "record_format": "", + "sms_storage_class": "", + "sms_data_class": "", + "sms_management_class": "", + "record_length": "", + "block_size": "", + "directory_blocks": "", + "key_offset": "", + "key_length": "", + "empty": "", + "extended": "", + "fifo": "", + "limit": "", + "purge": "", + "scratch": "", + "volumes": [], + } + + + data_sets = [ data_set_schema.copy() | data_set for data_set in data_set_params ] + result = { + "data_sets": data_sets, + "changed": False, + "msg": "", + "failed": False + } + return result def run_module(): """Runs the module. @@ -1910,10 +2074,11 @@ def run_module(): module_args['state']['dependencies'] = ['batch'] params = parse_and_validate_args(module.params) data_set_param_list = get_individual_data_set_parameters(params) - result["names"] = [d.get("name", "") for d in data_set_param_list] + # Build return schema from the data set param list + result["names"] = build_return_schema(data_set_param_list) for data_set_params in data_set_param_list: - # this returns MVSDataSet, Member or GenerationDataGroup + # this returns DataSet, Member or GenerationDataGroup data_set = get_data_set_handler(**data_set_params) current_changed = perform_data_set_operations( data_set=data_set, @@ -1925,8 +2090,6 @@ def run_module(): result["changed"] = result["changed"] or current_changed except Exception as e: module.fail_json(msg=repr(e), **result) - if module.params.get("replace"): - result["changed"] = True module.exit_json(**result) diff --git a/plugins/modules/zos_encode.py b/plugins/modules/zos_encode.py index ffd0f7ab6..f4a472445 100644 --- a/plugins/modules/zos_encode.py +++ b/plugins/modules/zos_encode.py @@ -379,14 +379,14 @@ def check_mvs_dataset(ds, tmphlq=None): """ check_rc = False ds_type = None - if not data_set.DataSet.data_set_exists(ds, tmphlq=tmphlq): + if not data_set.DataSetUtils.data_set_exists(ds, tmphlq=tmphlq): raise EncodeError( "Data set {0} is not cataloged, please check data set provided in" "the src option.".format(ds) ) else: check_rc = True - ds_type = data_set.DataSetUtils(ds, tmphlq=tmphlq).ds_type() + ds_type = data_set.DataSetView(ds, tmphlq=tmphlq).ds_type() if not ds_type: raise EncodeError("Unable to determine data set type of {0}".format(ds)) return check_rc, ds_type @@ -551,14 +551,14 @@ def run_module(): verify_uss_path_exists(src) # This can raise an exception. 
else: is_mvs_src = True - src_data_set = data_set.MVSDataSet(src) + src_data_set = data_set.DataSet(src) is_name_member = data_set.is_member(src_data_set.name) dest_exists = False if not is_name_member: - dest_exists = data_set.DataSet.data_set_exists(src_data_set.name, tmphlq=tmphlq) + dest_exists = data_set.DataSetUtils.data_set_exists(src_data_set.name, tmphlq=tmphlq) else: - dest_exists = data_set.DataSet.data_set_exists( + dest_exists = data_set.DataSetUtils.data_set_exists( data_set.extract_dsname(src_data_set.name), tmphlq=tmphlq ) @@ -570,14 +570,14 @@ def run_module(): ) if is_name_member: - if not data_set.DataSet.data_set_member_exists(src_data_set.name): + if not data_set.DataSetUtils.data_set_member_exists(src_data_set.name): raise EncodeError("Cannot find member {0} in {1}".format( data_set.extract_member(src_data_set.raw_name), data_set.extract_dsname(src_data_set.raw_name) )) ds_type_src = "PS" else: - ds_type_src = data_set.DataSet.data_set_type(src_data_set.name, tmphlq=tmphlq) + ds_type_src = data_set.DataSetUtils.data_set_type(src_data_set.name, tmphlq=tmphlq) if not ds_type_src: raise EncodeError("Unable to determine data set type of {0}".format(src_data_set.raw_name)) @@ -600,13 +600,13 @@ def run_module(): is_uss_dest = True else: is_mvs_dest = True - dest_data_set = data_set.MVSDataSet(dest) + dest_data_set = data_set.DataSet(dest) is_name_member = data_set.is_member(dest_data_set.name) if not is_name_member: - dest_exists = data_set.DataSet.data_set_exists(dest_data_set.name, tmphlq=tmphlq) + dest_exists = data_set.DataSetUtils.data_set_exists(dest_data_set.name, tmphlq=tmphlq) else: - dest_exists = data_set.DataSet.data_set_exists( + dest_exists = data_set.DataSetUtils.data_set_exists( data_set.extract_dsname(dest_data_set.name), tmphlq=tmphlq ) @@ -620,7 +620,7 @@ def run_module(): if is_name_member: ds_type_dest = "PS" else: - ds_type_dest = data_set.DataSet.data_set_type(dest_data_set.name, tmphlq=tmphlq) + ds_type_dest = data_set.DataSetUtils.data_set_type(dest_data_set.name, tmphlq=tmphlq) if (not is_uss_dest) and (path.sep in dest): try: @@ -646,7 +646,7 @@ def run_module(): # Check if the dest is required to be backup before conversion if backup: if backup_name: - backup_data_set = data_set.MVSDataSet(backup_name) + backup_data_set = data_set.DataSet(backup_name) if backup_data_set.is_gds_active: raise EncodeError( f"The generation data set {backup_name} cannot be used as backup. 
" diff --git a/plugins/modules/zos_fetch.py b/plugins/modules/zos_fetch.py index 62004698a..5a25b5b50 100644 --- a/plugins/modules/zos_fetch.py +++ b/plugins/modules/zos_fetch.py @@ -459,9 +459,9 @@ def _copy_vsam_to_temp_data_set(self, ds_name): if tmphlq is None: tmphlq = "MVSTMP" try: - sysin = data_set.DataSet.create_temp(tmphlq) - sysprint = data_set.DataSet.create_temp(tmphlq) - out_ds_name = data_set.DataSet.create_temp( + sysin = data_set.DataSetUtils.create_temp(tmphlq) + sysprint = data_set.DataSetUtils.create_temp(tmphlq) + out_ds_name = data_set.DataSetUtils.create_temp( tmphlq, space_primary=vsam_size, space_type="K", record_format="VB", record_length=max_recl ) repro_sysin = " REPRO INFILE(INPUT) OUTFILE(OUTPUT) " @@ -723,7 +723,7 @@ def _fetch_gdg(self, src, is_binary, encoding=None): data_group = gdgs.GenerationDataGroupView(src) for current_gds in data_group.generations(): - if current_gds.organization in data_set.DataSet.MVS_SEQ: + if current_gds.organization in data_set.DataSetUtils.MVS_SEQ: self._fetch_mvs_data( current_gds.name, is_binary, @@ -731,7 +731,7 @@ def _fetch_gdg(self, src, is_binary, encoding=None): file_override=current_gds.name, encoding=encoding ) - elif current_gds.organization in data_set.DataSet.MVS_PARTITIONED: + elif current_gds.organization in data_set.DataSetUtils.MVS_PARTITIONED: self._fetch_pdse( current_gds.name, is_binary, @@ -931,13 +931,13 @@ def run_module(): if "/" in src: # USS src_exists = os.path.exists(b_src) else: # MVS - src_data_set = data_set.MVSDataSet(src) + src_data_set = data_set.DataSet(src) is_member = data_set.is_member(src_data_set.name) if is_member: - src_exists = data_set.DataSet.data_set_member_exists(src_data_set.name) + src_exists = data_set.DataSetUtils.data_set_member_exists(src_data_set.name) else: - src_exists = data_set.DataSet.data_set_exists( + src_exists = data_set.DataSetUtils.data_set_exists( src_data_set.name, tmphlq=tmphlq ) @@ -969,7 +969,7 @@ def run_module(): if "/" in src: ds_type = "USS" else: - ds_type = data_set.DataSet.data_set_type( + ds_type = data_set.DataSetUtils.data_set_type( data_set.extract_dsname(src_data_set.name), tmphlq=tmphlq ) @@ -986,7 +986,7 @@ def run_module(): # Fetch a sequential data set # # ********************************************************** # - if ds_type in data_set.DataSet.MVS_SEQ: + if ds_type in data_set.DataSetUtils.MVS_SEQ: file_path = fetch_handler._fetch_mvs_data( src_data_set.name, is_binary, @@ -998,7 +998,7 @@ def run_module(): # Fetch a partitioned data set or one of its members # # ********************************************************** # - elif ds_type in data_set.DataSet.MVS_PARTITIONED: + elif ds_type in data_set.DataSetUtils.MVS_PARTITIONED: if is_member: file_path = fetch_handler._fetch_mvs_data( src_data_set.name, @@ -1033,7 +1033,7 @@ def run_module(): # Fetch a VSAM data set # # ********************************************************** # - elif ds_type in data_set.DataSet.MVS_VSAM: + elif ds_type in data_set.DataSetUtils.MVS_VSAM: file_path = fetch_handler._fetch_vsam( src_data_set.name, is_binary, diff --git a/plugins/modules/zos_job_submit.py b/plugins/modules/zos_job_submit.py index 1852327ec..00dff6dd0 100644 --- a/plugins/modules/zos_job_submit.py +++ b/plugins/modules/zos_job_submit.py @@ -696,7 +696,7 @@ data_set, ) from ansible_collections.ibm.ibm_zos_core.plugins.module_utils.data_set import ( - DataSet, + DataSetUtils, ) from ansible.module_utils.basic import AnsibleModule from ansible.module_utils._text import to_text @@ -1067,14 
+1067,14 @@ def run_module(): if location == "data_set": # Resolving a relative GDS name and escaping special symbols if needed. - src_data = data_set.MVSDataSet(src) + src_data = data_set.DataSet(src) # Checking that the source is actually present on the system. if volume is not None: volumes = [volume] # Get the data set name to catalog it. src_ds_name = data_set.extract_dsname(src_data.name) - present, changed = DataSet.attempt_catalog_if_necessary(src_ds_name, volumes) + present, changed = DataSetUtils.attempt_catalog_if_necessary(src_ds_name, volumes) if not present: module.fail_json( @@ -1082,10 +1082,10 @@ def run_module(): f"not be cataloged on the volume {volume}.") ) elif data_set.is_member(src_data.name): - if not DataSet.data_set_member_exists(src_data.name): + if not DataSetUtils.data_set_member_exists(src_data.name): module.fail_json(msg=f"Cannot submit job, the data set member {src_data.raw_name} was not found.") else: - if not DataSet.data_set_exists(src_data.name): + if not DataSetUtils.data_set_exists(src_data.name): module.fail_json(msg=f"Cannot submit job, the data set {src_data.raw_name} was not found.") job_submitted_id, duration = submit_src_jcl( diff --git a/plugins/modules/zos_lineinfile.py b/plugins/modules/zos_lineinfile.py index f5565ce9e..4f4e0c601 100644 --- a/plugins/modules/zos_lineinfile.py +++ b/plugins/modules/zos_lineinfile.py @@ -677,16 +677,16 @@ def main(): # analysis the file type if "/" not in src: - dataset = data_set.MVSDataSet( + dataset = data_set.DataSet( name=src ) src = dataset.name is_gds = dataset.is_gds_active - if data_set.DataSet.is_gds_relative_name(src) and is_gds is False: + if data_set.DataSetUtils.is_gds_relative_name(src) and is_gds is False: module.fail_json(msg="{0} does not exist".format(src)) - ds_utils = data_set.DataSetUtils(src, tmphlq=tmphlq) + ds_utils = data_set.DataSetView(src, tmphlq=tmphlq) # Check if dest/src exists if not ds_utils.exists(): diff --git a/plugins/modules/zos_mount.py b/plugins/modules/zos_mount.py index a35dda095..0d45144db 100644 --- a/plugins/modules/zos_mount.py +++ b/plugins/modules/zos_mount.py @@ -605,7 +605,7 @@ def mt_backupOper(module, src, backup, tmphlq=None): Data set type is NOT supported. 
""" # analysis the file type - ds_utils = data_set.DataSetUtils(src, tmphlq=tmphlq) + ds_utils = data_set.DataSetView(src, tmphlq=tmphlq) file_type = ds_utils.ds_type() if file_type != "USS" and file_type not in mt_DS_TYPE: message = "{0} data set type is NOT supported".format(str(file_type)) @@ -813,7 +813,7 @@ def run_module(module, arg_def): ) # data set to be mounted/unmounted must exist - fs_du = data_set.DataSetUtils(src, tmphlq=tmphlq) + fs_du = data_set.DataSetView(src, tmphlq=tmphlq) fs_exists = fs_du.exists() if fs_exists is False: module.fail_json( @@ -1033,7 +1033,7 @@ def run_module(module, arg_def): stderr = "Mount called on data set that is already mounted.\n" if write_persistent and module.check_mode is False: - fst_du = data_set.DataSetUtils(data_store, tmphlq=tmphlq) + fst_du = data_set.DataSetView(data_store, tmphlq=tmphlq) fst_exists = fst_du.exists() if fst_exists is False: module.fail_json( diff --git a/plugins/modules/zos_mvs_raw.py b/plugins/modules/zos_mvs_raw.py index 9e87a0ecc..0708a665f 100644 --- a/plugins/modules/zos_mvs_raw.py +++ b/plugins/modules/zos_mvs_raw.py @@ -2816,8 +2816,8 @@ def resolve_data_set_names(dataset, disposition, type): else: disp = "shr" - if data_set.DataSet.is_gds_relative_name(dataset): - if data_set.DataSet.is_gds_positive_relative_name(dataset): + if data_set.DataSetUtils.is_gds_relative_name(dataset): + if data_set.DataSetUtils.is_gds_positive_relative_name(dataset): if disp == "new": if type: return str(datasets.create(dataset, type).name), "shr" @@ -2826,7 +2826,7 @@ def resolve_data_set_names(dataset, disposition, type): else: raise ("To generate a new GDS as {0} disposition 'new' is required.".format(dataset)) else: - data = data_set.MVSDataSet( + data = data_set.DataSet( name=dataset ) src = data.name diff --git a/plugins/modules/zos_replace.py b/plugins/modules/zos_replace.py index 81934c1ae..0fd4c339d 100644 --- a/plugins/modules/zos_replace.py +++ b/plugins/modules/zos_replace.py @@ -270,20 +270,20 @@ def resolve_src_name(module, name, result, tmp_hlq): module.fail_json(rc=257, msg=f"USS path {name} does not exist.", **result) else: try: - data_set_obj = data_set.MVSDataSet(name=name) + data_set_obj = data_set.DataSet(name=name) name = data_set_obj.name if not data_set_obj.is_gds_active: - is_an_alias, base_name = data_set.DataSet.get_name_if_data_set_is_alias(name=name, tmp_hlq=tmp_hlq) + is_an_alias, base_name = data_set.DataSetUtils.get_name_if_data_set_is_alias(name=name, tmp_hlq=tmp_hlq) if is_an_alias: name = base_name except Exception: message_dict = dict(msg=f"Unable to resolve name of data set {name}.") module.fail_json(**message_dict, **result) - if data_set.DataSet.is_gds_relative_name(name): + if data_set.DataSetUtils.is_gds_relative_name(name): module.fail_json(msg="{0} does not exist".format(name), **result) - ds_utils = data_set.DataSetUtils(name) + ds_utils = data_set.DataSetView(name) if not ds_utils.exists(): module.fail_json(msg=f"{name} does NOT exist.", **result) @@ -627,8 +627,8 @@ def run_module(): else: backup_ds = Backup.mvs_file_backup(dsn=src, bk_dsn=backup_name, tmphlq=tmp_hlq) result['backup_name'] = backup_ds - if data_set.DataSet.is_gds_relative_name(backup_ds): - bk_up_obj = data_set.MVSDataSet(name=backup_ds) + if data_set.DataSetUtils.is_gds_relative_name(backup_ds): + bk_up_obj = data_set.DataSet(name=backup_ds) result['backup_name'] = bk_up_obj.name except Exception as err: module.fail_json(msg=f"Unable to allocate backup {backup} destination: {str(err)}.", **result) diff --git 
a/plugins/modules/zos_stat.py b/plugins/modules/zos_stat.py index 1f29c52ea..c4f2d0b14 100644 --- a/plugins/modules/zos_stat.py +++ b/plugins/modules/zos_stat.py @@ -1013,7 +1013,7 @@ ZOAUImportError ) from ansible_collections.ibm.ibm_zos_core.plugins.module_utils.data_set import ( - DataSet, + DataSetUtils, DatasetCreateError, GDSNameResolveError ) @@ -1820,7 +1820,7 @@ def query(self): try: # First creating a temp data set to hold the LISTDSI script. # All options are meant to allocate just enough space for it. - temp_script_location = DataSet.create_temp( + temp_script_location = DataSetUtils.create_temp( hlq=self.tmp_hlq, type='SEQ', record_format='FB', @@ -1884,7 +1884,7 @@ def _run_listdsi_command(self, temp_script_location): if self.module.check_mode: self.extra_data = f'{self.extra_data}Skipping PDS/E directory attributes and SMS information while running in check mode.\n' - if not self.module.check_mode and self.data_set_type in DataSet.MVS_PARTITIONED: + if not self.module.check_mode and self.data_set_type in DataSetUtils.MVS_PARTITIONED: extra_args = 'DIRECTORY' if not self.module.check_mode and self.sms_managed: extra_args = f'{extra_args} SMSINFO' @@ -2344,7 +2344,7 @@ def fill_return_json(attrs): dsorg = attrs['attributes']['dsorg'] if handler == 'vsam' and dsorg == 'vsam': continue - elif handler == 'nonvsam' and dsorg in DataSet.MVS_SEQ.union(DataSet.MVS_PARTITIONED): + elif handler == 'nonvsam' and dsorg in DataSetUtils.MVS_SEQ.union(DataSetUtils.MVS_PARTITIONED): continue attrs['attributes'] = fill_missing_attrs( @@ -2380,20 +2380,20 @@ def get_data_set_handler( DataSetHandler -- Handler for data sets. """ try: - if DataSet.is_gds_relative_name(name): + if DataSetUtils.is_gds_relative_name(name): # Replacing the relative name because data_set_type, # data_set_cataloged_volume_list and LISTDSI need the # absolute name to locate the data set. - name = DataSet.resolve_gds_absolute_name(name) + name = DataSetUtils.resolve_gds_absolute_name(name) except (GDSNameResolveError, Exception): return DataSetHandler(name, exists=False) alias_name = None - has_been_migrated = DataSet.check_if_data_set_migrated(name) + has_been_migrated = DataSetUtils.check_if_data_set_migrated(name) if has_been_migrated: if recall and not module.check_mode: - rc, stdout, stderr = DataSet.recall_migrated_data_set( + rc, stdout, stderr = DataSetUtils.recall_migrated_data_set( name, module, tmp_hlq=tmp_hlq @@ -2412,7 +2412,7 @@ def get_data_set_handler( return NonVSAMDataSetHandler(name, 'MIGRAT', module, sms_managed, None, migrated=True) try: - is_an_alias, base_name = DataSet.get_name_if_data_set_is_alias( + is_an_alias, base_name = DataSetUtils.get_name_if_data_set_is_alias( name, tmp_hlq=tmp_hlq ) @@ -2425,17 +2425,17 @@ def get_data_set_handler( # If the data set doesn't exist, the return value will be None. # We search in all volumes first in case we're dealing with a VSAM. - ds_type = DataSet.data_set_type(name, tmphlq=tmp_hlq) + ds_type = DataSetUtils.data_set_type(name, tmphlq=tmp_hlq) # If we got a hit for a GDG, we'll stop right now. The user should set # type='GDG' in their task. if not ds_type or ds_type == 'GDG': return DataSetHandler(name, exists=False) - elif ds_type in DataSet.MVS_VSAM: + elif ds_type in DataSetUtils.MVS_VSAM: return VSAMDataSetHandler(name, module, ds_type, tmp_hlq, alias=alias_name) # Finding all the volumes where the data set is allocated. 
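# Condensed sketch of the volume filtering performed just below (hypothetical
# standalone helper; the real code inlines this inside get_data_set_handler()):
# the cataloged volume list is intersected with the user-supplied volumes before
# the data set type is re-read from the first matching volume.
def filter_cataloged_volumes(name, volumes, tmp_hlq=None):
    cataloged = DataSetUtils.data_set_cataloged_volume_list(name, tmphlq=tmp_hlq)
    found = [vol for vol in volumes if vol in cataloged]
    missing = [vol.lower() for vol in volumes if vol not in found]
    return found, missing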
- cataloged_list = DataSet.data_set_cataloged_volume_list(name, tmphlq=tmp_hlq) + cataloged_list = DataSetUtils.data_set_cataloged_volume_list(name, tmphlq=tmp_hlq) if volumes and len(volumes) > 0: found_volumes = [vol for vol in volumes if vol in cataloged_list] missing_volumes = [vol.lower() for vol in volumes if vol not in found_volumes] @@ -2446,12 +2446,12 @@ def get_data_set_handler( # We continue when we find the data set on at least 1 volume. # Overwriting the first ds_type just in case. if len(found_volumes) >= 1: - ds_type = DataSet.data_set_type(name, volume=found_volumes[0], tmphlq=tmp_hlq) + ds_type = DataSetUtils.data_set_type(name, volume=found_volumes[0], tmphlq=tmp_hlq) else: return DataSetHandler(name, exists=False) # Now instantiating a concrete handler based on the data set's type. - if ds_type in DataSet.MVS_SEQ or ds_type in DataSet.MVS_PARTITIONED: + if ds_type in DataSetUtils.MVS_SEQ or ds_type in DataSetUtils.MVS_PARTITIONED: handler = NonVSAMDataSetHandler( name, found_volumes, diff --git a/plugins/modules/zos_tso_command.py b/plugins/modules/zos_tso_command.py index 9a422a2f0..76ecb0fa4 100644 --- a/plugins/modules/zos_tso_command.py +++ b/plugins/modules/zos_tso_command.py @@ -290,8 +290,8 @@ def preprocess_data_set_names(command): pattern = r"(?:(?:[A-Z$#@]{1}[A-Z0-9$#@-]{0,7})(?:[.]{1})){1,21}[A-Z$#@]{1}[A-Z0-9$#@-]{0,7}(?:\([A-Z$#@]{1}[A-Z0-9$#@]{0,7}\)|\((?:[-+]?[0-9]+)\)){0,1}" data_set_list = re.findall(pattern, command) for name in data_set_list: - if data_set.DataSet.is_gds_relative_name(name): - dataset_name = data_set.DataSet.resolve_gds_absolute_name(name) + if data_set.DataSetUtils.is_gds_relative_name(name): + dataset_name = data_set.DataSetUtils.resolve_gds_absolute_name(name) command = command.replace(name, dataset_name) return command diff --git a/plugins/modules/zos_unarchive.py b/plugins/modules/zos_unarchive.py index 970d789a6..d84d15f02 100644 --- a/plugins/modules/zos_unarchive.py +++ b/plugins/modules/zos_unarchive.py @@ -902,8 +902,8 @@ def __init__(self, module): self.dest_data_set = module.params.get("dest_data_set") self.dest_data_set = dict() if self.dest_data_set is None else self.dest_data_set self.source_size = 0 - if data_set.DataSet.is_gds_relative_name(self.src): - self.src = data_set.DataSet.resolve_gds_absolute_name(self.src) + if data_set.DataSetUtils.is_gds_relative_name(self.src): + self.src = data_set.DataSetUtils.resolve_gds_absolute_name(self.src) def dest_type(self): """Returns the destination type. @@ -1033,7 +1033,7 @@ def _create_dest_data_set( if space_primary is None: arguments.update(space_primary=self._compute_dest_data_set_size()) arguments.pop("self") - changed = data_set.DataSet.ensure_present(**arguments) + changed = data_set.DataSetUtils.ensure_present(**arguments) return arguments["name"], changed def _get_include_data_sets_cmd(self): @@ -1129,7 +1129,7 @@ def src_exists(self): bool If the source exists. """ - return data_set.DataSet.data_set_exists(self.src, tmphlq=self.tmphlq) + return data_set.DataSetUtils.data_set_exists(self.src, tmphlq=self.tmphlq) def _get_restored_datasets(self, output): """Gets the datasets that were successfully restored. 
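The zos_tso_command and zos_unarchive hunks above route relative generation names through the same two DataSetUtils calls; a minimal sketch of that normalization (the sample name and import path are assumptions):

from ansible_collections.ibm.ibm_zos_core.plugins.module_utils import data_set

def normalize_gds(name):
    # "USER.ARCHIVE(-1)" or "USER.ARCHIVE(0)" -> absolute GxxxxVyy generation name
    if data_set.DataSetUtils.is_gds_relative_name(name):
        return data_set.DataSetUtils.resolve_gds_absolute_name(name)
    return name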
@@ -1231,13 +1231,13 @@ def clean_environment(self, data_sets=None, uss_files=None, remove_targets=False """ if data_set is not None: for ds in data_sets: - data_set.DataSet.ensure_absent(ds) + data_set.DataSetUtils.ensure_absent(ds) if uss_files is not None: for file in uss_files: os.remove(file) if remove_targets: for target in self.targets: - data_set.DataSet.ensure_absent(target) + data_set.DataSetUtils.ensure_absent(target) def encoding_targets(self): """Finds encoding target datasets in host. @@ -1263,7 +1263,7 @@ def encode_destination(self): for target in self.encode_targets: try: - ds_utils = data_set.DataSetUtils(target, tmphlq=self.tmphlq) + ds_utils = data_set.DataSetView(target, tmphlq=self.tmphlq) ds_type = ds_utils.ds_type() if not ds_type: ds_type = "PS" diff --git a/plugins/modules/zos_zfs_resize.py b/plugins/modules/zos_zfs_resize.py index d08d4ed63..4e27ec1c5 100644 --- a/plugins/modules/zos_zfs_resize.py +++ b/plugins/modules/zos_zfs_resize.py @@ -421,13 +421,25 @@ def create_trace_dataset(name, member=False): rc : bool Indicates if datasets were made. """ + data_set_name = data_set.extract_dsname(name) if member else name + + # Adding new DataSet class in this call. + # dataset = data_set.DataSet( + # name=dataset_name, + # data_set_type="PDS", + # record_length=200, + # record_format="VB", + # space_type="K", + # space_primary="42000", + # space_secondary="25000" + # ) if member: dataset_name = data_set.extract_dsname(name) - data_set.DataSet.ensure_present(name=dataset_name, replace=False, type="PDSE", record_length=200, record_format="VB", + data_set.DataSetUtils.ensure_present(name=dataset_name, replace=False, type="PDSE", record_length=200, record_format="VB", space_type="K", space_primary="42000", space_secondary="25000") - rc = data_set.DataSet.ensure_member_present(name) + rc = data_set.DataSetUtils.ensure_member_present(name) else: - rc = data_set.DataSet.ensure_present(name=name, replace=False, type="PDS", record_length=200, record_format="VB", + rc = data_set.DataSetUtils.ensure_present(name=name, replace=False, type="PDS", record_length=200, record_format="VB", space_type="K", space_primary="42000", space_secondary="25000") return rc @@ -448,7 +460,7 @@ def validate_dataset_info(dataset): """ dataset = data_set.extract_dsname(dataset) - trace_ds = data_set.DataSetUtils(data_set=dataset) + trace_ds = data_set.DataSetView(data_set=dataset) trace_information = trace_ds._gather_data_set_info() if trace_information["dsorg"] != "PO": @@ -541,7 +553,7 @@ def run_module(): module.exit_json(**result) # Validate if the target zFS exist - if not (data_set.DataSet.data_set_exists(target)): + if not (data_set.DataSetUtils.data_set_exists(target)): module.fail_json(msg=f"zFS Target {target} does not exist", **result) # Validation to found target on the system and also get the mount_point @@ -622,12 +634,12 @@ def run_module(): if trace_destination is not None: if data_set.is_data_set(data_set.extract_dsname(trace_destination)): if data_set.is_member(trace_destination): - if not data_set.DataSet.data_set_exists(data_set.extract_dsname(trace_destination)): + if not data_set.DataSetUtils.data_set_exists(data_set.extract_dsname(trace_destination)): trace_destination_created = create_trace_dataset(name=trace_destination, member=True) else: is_valid_trace_destination, msg_trace = validate_dataset_info(dataset=trace_destination) else: - if not (data_set.DataSet.data_set_exists(trace_destination)): + if not (data_set.DataSetUtils.data_set_exists(trace_destination)): 
trace_destination_created = create_trace_dataset(name=trace_destination, member=False) else: is_valid_trace_destination, msg_trace = validate_dataset_info(dataset=trace_destination)
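A usage sketch of the allocation path create_trace_dataset() takes after the rename; the attribute values are copied from that function, while the data set and member names (and the import path) are hypothetical:

from ansible_collections.ibm.ibm_zos_core.plugins.module_utils import data_set

trace_base = "USER.ZFS.TRACE"
data_set.DataSetUtils.ensure_present(
    name=trace_base, replace=False, type="PDSE",
    record_length=200, record_format="VB",
    space_type="K", space_primary="42000", space_secondary="25000",
)
created = data_set.DataSetUtils.ensure_member_present(f"{trace_base}(TRC00001)")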