Skip to content

Commit b74eb3d

Browse files
[Enabler][zos_archive][zos_unarchive] Add support for GDGs and validate symbols support (#1511)
* Added test symbols * Added tests for GDG and special chars * Added changelog * Adding support for gds in zos_unarchive * Added GDS resolve function into archive * Added * Added test, example and modified docs * Added docs * Added support for archive into a GDS * Removed comments * Fixed test * Updated changelog * Updated changelog
1 parent d910975 commit b74eb3d

File tree

6 files changed

+285
-58
lines changed

6 files changed

+285
-58
lines changed
Lines changed: 7 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,7 @@
1+
minor_changes:
2+
- zos_archive - Added support for GDG and GDS relative name notation to archive data sets.
3+
Added support for data set names with special characters like $, /#, /- and @.
4+
(https://github.com/ansible-collections/ibm_zos_core/pull/1511).
5+
- zos_unarchive - Added support for data set names with special characters
6+
like $, /#, /- and @.
7+
(https://github.com/ansible-collections/ibm_zos_core/pull/1511).

plugins/module_utils/data_set.py

Lines changed: 17 additions & 10 deletions
Original file line numberDiff line numberDiff line change
@@ -1030,7 +1030,7 @@ def create(
10301030
original_args = locals()
10311031
formatted_args = DataSet._build_zoau_args(**original_args)
10321032
try:
1033-
datasets.create(**formatted_args)
1033+
data_set = datasets.create(**formatted_args)
10341034
except exceptions._ZOAUExtendableException as create_exception:
10351035
raise DatasetCreateError(
10361036
raw_name if raw_name else name,
@@ -1046,9 +1046,8 @@ def create(
10461046
raw_name if raw_name else name,
10471047
msg="Unable to verify the data set was created. Received DatasetVerificationError from ZOAU.",
10481048
)
1049-
# With ZOAU 1.3 we switched from getting a ZOAUResponse obj to a Dataset obj, previously we returned
1050-
# response.rc now we just return 0 if nothing failed
1051-
return 0
1049+
changed = data_set is not None
1050+
return changed
10521051

10531052
@staticmethod
10541053
def delete(name):
@@ -1896,7 +1895,7 @@ def __init__(
18961895
# with ZOAU
18971896
self.record_format = None
18981897

1899-
def create(self):
1898+
def create(self, tmp_hlq=None, replace=True, force=False):
19001899
"""Creates the data set in question.
19011900
19021901
Returns
@@ -1907,7 +1906,6 @@ def create(self):
19071906
arguments = {
19081907
"name" : self.name,
19091908
"raw_name" : self.raw_name,
1910-
"replace" : self.replace,
19111909
"type" : self.data_set_type,
19121910
"space_primary" : self.space_primary,
19131911
"space_secondary" : self.space_secondary,
@@ -1922,11 +1920,20 @@ def create(self):
19221920
"sms_data_class" : self.sms_data_class,
19231921
"sms_management_class" : self.sms_management_class,
19241922
"volumes" : self.volumes,
1925-
"tmp_hlq" : self.tmp_hlq,
1926-
"force" : self.force,
1923+
"tmp_hlq" : tmp_hlq,
1924+
"force" : force,
19271925
}
1928-
DataSet.create(**arguments)
1929-
self.set_state("present")
1926+
formatted_args = DataSet._build_zoau_args(**arguments)
1927+
changed = False
1928+
if DataSet.data_set_exists(self.name):
1929+
DataSet.delete(self.name)
1930+
changed = True
1931+
zoau_data_set = datasets.create(**formatted_args)
1932+
if zoau_data_set is not None:
1933+
self.set_state("present")
1934+
self.name = zoau_data_set.name
1935+
return True
1936+
return changed
19301937

19311938
def ensure_present(self, tmp_hlq=None, replace=False, force=False):
19321939
""" Make sure that the data set is created or fail creating it.

plugins/modules/zos_archive.py

Lines changed: 62 additions & 34 deletions
Original file line numberDiff line numberDiff line change
@@ -12,7 +12,8 @@
1212
# See the License for the specific language governing permissions and
1313
# limitations under the License.
1414

15-
from __future__ import (absolute_import, division, print_function)
15+
from __future__ import absolute_import, division, print_function
16+
1617
__metaclass__ = type
1718

1819
DOCUMENTATION = r'''
@@ -36,6 +37,7 @@
3637
- List of names or globs of UNIX System Services (USS) files,
3738
PS (sequential data sets), PDS, PDSE to compress or archive.
3839
- USS file paths should be absolute paths.
40+
- GDS relative notation is supported.
3941
- "MVS data sets supported types are: C(SEQ), C(PDS), C(PDSE)."
4042
- VSAMs are not supported.
4143
type: list
@@ -123,9 +125,9 @@
123125
required: true
124126
exclude:
125127
description:
126-
- Remote absolute path, glob, or list of paths, globs or data set name
127-
patterns for the file, files or data sets to exclude from src list
128-
and glob expansion.
128+
- Remote absolute path, glob, or list of paths, globs, data set name
129+
patterns or generation data sets (GDSs) in relative notation for the file,
130+
files or data sets to exclude from src list and glob expansion.
129131
- "Patterns (wildcards) can contain one of the following, `?`, `*`."
130132
- "* matches everything."
131133
- "? matches any single character."
@@ -331,7 +333,7 @@
331333
name: tar
332334
333335
# Archive multiple files
334-
- name: Compress list of files into a zip
336+
- name: Archive list of files into a zip
335337
zos_archive:
336338
src:
337339
- /tmp/archive/foo.txt
@@ -341,15 +343,15 @@
341343
name: zip
342344
343345
# Archive one data set into terse
344-
- name: Compress data set into a terse
346+
- name: Archive data set into a terse
345347
zos_archive:
346348
src: "USER.ARCHIVE.TEST"
347349
dest: "USER.ARCHIVE.RESULT.TRS"
348350
format:
349351
name: terse
350352
351353
# Use terse with different options
352-
- name: Compress data set into a terse, specify pack algorithm and use adrdssu
354+
- name: Archive data set into a terse, specify pack algorithm and use adrdssu
353355
zos_archive:
354356
src: "USER.ARCHIVE.TEST"
355357
dest: "USER.ARCHIVE.RESULT.TRS"
@@ -360,13 +362,34 @@
360362
use_adrdssu: true
361363
362364
# Use a pattern to store
363-
- name: Compress data set pattern using xmit
365+
- name: Archive data set pattern using xmit
364366
zos_archive:
365367
src: "USER.ARCHIVE.*"
366368
exclude_sources: "USER.ARCHIVE.EXCLUDE.*"
367369
dest: "USER.ARCHIVE.RESULT.XMIT"
368370
format:
369371
name: xmit
372+
373+
- name: Archive multiple GDSs into a terse
374+
zos_archive:
375+
src:
376+
- "USER.GDG(0)"
377+
- "USER.GDG(-1)"
378+
- "USER.GDG(-2)"
379+
dest: "USER.ARCHIVE.RESULT.TRS"
380+
format:
381+
name: terse
382+
format_options:
383+
use_adrdssu: True
384+
385+
- name: Archive multiple data sets into a new GDS
386+
zos_archive:
387+
src: "USER.ARCHIVE.*"
388+
dest: "USER.GDG(+1)"
389+
format:
390+
name: terse
391+
format_options:
392+
use_adrdssu: True
370393
'''
371394

372395
RETURN = r'''
@@ -415,27 +438,22 @@
415438
returned: always
416439
'''
417440

418-
from ansible.module_utils.basic import AnsibleModule
419-
from ansible.module_utils._text import to_bytes
420-
from ansible_collections.ibm.ibm_zos_core.plugins.module_utils import (
421-
better_arg_parser,
422-
data_set,
423-
validation,
424-
mvs_cmd,
425-
)
426-
from ansible_collections.ibm.ibm_zos_core.plugins.module_utils.import_handler import (
427-
ZOAUImportError,
428-
)
429-
import os
430-
import tarfile
431-
import zipfile
432441
import abc
433442
import glob
434-
import re
435443
import math
444+
import os
445+
import re
446+
import tarfile
436447
import traceback
448+
import zipfile
437449
from hashlib import sha256
438450

451+
from ansible.module_utils._text import to_bytes
452+
from ansible.module_utils.basic import AnsibleModule
453+
from ansible_collections.ibm.ibm_zos_core.plugins.module_utils import (
454+
better_arg_parser, data_set, mvs_cmd, validation)
455+
from ansible_collections.ibm.ibm_zos_core.plugins.module_utils.import_handler import \
456+
ZOAUImportError
439457

440458
try:
441459
from zoautil_py import datasets
@@ -1278,11 +1296,17 @@ def expand_mvs_paths(self, paths):
12781296
"""
12791297
expanded_path = []
12801298
for path in paths:
1299+
e_path = []
12811300
if '*' in path:
12821301
# list_dataset_names returns a list of data set names or empty.
12831302
e_paths = datasets.list_dataset_names(path)
12841303
else:
12851304
e_paths = [path]
1305+
1306+
# resolve GDS relative names
1307+
for index, e_path in enumerate(e_paths):
1308+
if data_set.DataSet.is_gds_relative_name(e_path):
1309+
e_paths[index] = data_set.DataSet.resolve_gds_absolute_name(e_path)
12861310
expanded_path.extend(e_paths)
12871311
return expanded_path
12881312

@@ -1415,17 +1439,18 @@ def archive_targets(self):
14151439
self.module.fail_json(
14161440
msg="To archive multiple source data sets, you must use option 'use_adrdssu=True'.")
14171441
source = self.targets[0]
1418-
# dest = self.create_dest_ds(self.dest)
1419-
dest, changed = self._create_dest_data_set(
1442+
dataset = data_set.MVSDataSet(
14201443
name=self.dest,
1421-
replace=True,
1422-
type='seq',
1444+
data_set_type='seq',
14231445
record_format='fb',
14241446
record_length=AMATERSE_RECORD_LENGTH,
14251447
space_primary=self.dest_data_set.get("space_primary"),
1426-
space_type=self.dest_data_set.get("space_type"))
1448+
space_type=self.dest_data_set.get("space_type")
1449+
)
1450+
changed = dataset.create(replace=True)
14271451
self.changed = self.changed or changed
1428-
self.add(source, dest)
1452+
self.dest = dataset.name
1453+
self.add(source, self.dest)
14291454
self.clean_environment(data_sets=self.tmp_data_sets)
14301455

14311456

@@ -1509,16 +1534,19 @@ def archive_targets(self):
15091534
msg="To archive multiple source data sets, you must use option 'use_adrdssu=True'.")
15101535
source = self.sources[0]
15111536
# dest = self.create_dest_ds(self.dest)
1512-
dest, changed = self._create_dest_data_set(
1537+
dataset = data_set.MVSDataSet(
15131538
name=self.dest,
1514-
replace=True,
1515-
type='seq',
1539+
data_set_type='seq',
15161540
record_format='fb',
15171541
record_length=XMIT_RECORD_LENGTH,
15181542
space_primary=self.dest_data_set.get("space_primary"),
1519-
space_type=self.dest_data_set.get("space_type"))
1543+
space_type=self.dest_data_set.get("space_type")
1544+
)
1545+
changed = dataset.create(replace=True)
1546+
self.changed = self.changed or changed
15201547
self.changed = self.changed or changed
1521-
self.add(source, dest)
1548+
self.dest = dataset.name
1549+
self.add(source, self.dest)
15221550
self.clean_environment(data_sets=self.tmp_data_sets)
15231551

15241552
def get_error_hint(self, output):

plugins/modules/zos_unarchive.py

Lines changed: 17 additions & 6 deletions
Original file line numberDiff line numberDiff line change
@@ -35,7 +35,7 @@
3535
- The remote absolute path or data set of the archive to be uncompressed.
3636
- I(src) can be a USS file or MVS data set name.
3737
- USS file paths should be absolute paths.
38-
- MVS data sets supported types are C(SEQ), C(PDS), C(PDSE).
38+
- GDS relative names are supported C(e.g. USER.GDG(-1)).
3939
type: str
4040
required: true
4141
format:
@@ -145,6 +145,7 @@
145145
description:
146146
- A list of directories, files or data set names to extract from the
147147
archive.
148+
- GDS relative names are supported C(e.g. USER.GDG(-1)).
148149
- When C(include) is set, only those files will we be extracted leaving
149150
the remaining files in the archive.
150151
- Mutually exclusive with exclude.
@@ -155,6 +156,7 @@
155156
description:
156157
- List the directory and file or data set names that you would like to
157158
exclude from the unarchive action.
159+
- GDS relative names are supported C(e.g. USER.GDG(-1)).
158160
- Mutually exclusive with include.
159161
type: list
160162
elements: str
@@ -349,6 +351,13 @@
349351
- USER.ARCHIVE.TEST1
350352
- USER.ARCHIVE.TEST2
351353
354+
# Unarchive a GDS
355+
- name: Unarchive a terse data set and excluding data sets from unpacking.
356+
zos_unarchive:
357+
src: "USER.ARCHIVE(0)"
358+
format:
359+
name: terse
360+
352361
# List option
353362
- name: List content from XMIT
354363
zos_unarchive:
@@ -623,6 +632,8 @@ def __init__(self, module):
623632
self.dest_data_set = module.params.get("dest_data_set")
624633
self.dest_data_set = dict() if self.dest_data_set is None else self.dest_data_set
625634
self.source_size = 0
635+
if data_set.DataSet.is_gds_relative_name(self.src):
636+
self.src = data_set.DataSet.resolve_gds_absolute_name(self.src)
626637

627638
def dest_type(self):
628639
return "MVS"
@@ -709,14 +720,14 @@ def _create_dest_data_set(
709720
def _get_include_data_sets_cmd(self):
710721
include_cmd = "INCL( "
711722
for include_ds in self.include:
712-
include_cmd += " '{0}', - \n".format(include_ds)
723+
include_cmd += " '{0}', - \n".format(include_ds.upper())
713724
include_cmd += " ) - \n"
714725
return include_cmd
715726

716727
def _get_exclude_data_sets_cmd(self):
717728
exclude_cmd = "EXCL( - \n"
718729
for exclude_ds in self.exclude:
719-
exclude_cmd += " '{0}', - \n".format(exclude_ds)
730+
exclude_cmd += " '{0}', - \n".format(exclude_ds.upper())
720731
exclude_cmd += " ) - \n"
721732
return exclude_cmd
722733

@@ -1143,13 +1154,13 @@ def run_module():
11431154
module.fail_json(msg="Parameter verification failed", stderr=str(err))
11441155
unarchive = get_unarchive_handler(module)
11451156

1157+
if not unarchive.src_exists():
1158+
module.fail_json(msg="{0} does not exists, please provide a valid src.".format(module.params.get("src")))
1159+
11461160
if unarchive.list:
11471161
unarchive.list_archive_content()
11481162
module.exit_json(**unarchive.result)
11491163

1150-
if not unarchive.src_exists():
1151-
module.fail_json(msg="{0} does not exists, please provide a valid src.".format(module.params.get("src")))
1152-
11531164
unarchive.extract_src()
11541165

11551166
if unarchive.dest_unarchived() and unarchive.dest_type() == "USS":

0 commit comments

Comments
 (0)