Skip to content

Commit 0380cad

Browse files
authored
[zos_archive] Revert src encoding post successful archive (#2192)
* code changes to revert encoding
* revert encoding test case
* changelog addition
* resolving pipeline conflicts
* review comments incorporation
* adding link for zos_encode
1 parent 075688b commit 0380cad

File tree

3 files changed

+180
-2
lines changed

3 files changed

+180
-2
lines changed
Lines changed: 4 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,4 @@
1+
minor_changes:
2+
- zos_archive - Adds support for reverting the encoding of a source's
3+
files after archiving them.
4+
(https://github.com/ansible-collections/ibm_zos_core/pull/2192)

plugins/modules/zos_archive.py

Lines changed: 55 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -318,6 +318,10 @@
318318
be restored to their original encoding.
319319
- If encoding fails for any file in a set of multiple files, an
320320
exception will be raised and archiving will be skipped.
321+
- The original files in C(src) will be converted. The module will
322+
revert the encoding conversion after a successful archive, but
323+
no backup will be created. If you need to encode using a backup
324+
and then archive, take a look at the L(zos_encode,./zos_encode.html) module.
321325
type: dict
322326
required: false
323327
suboptions:
@@ -780,6 +784,10 @@ def compute_dest_size(self):
780784
def encode_source(self):
781785
pass
782786

787+
@abc.abstractmethod
788+
def revert_encoding(self):
789+
pass
790+
783791
@property
784792
def result(self):
785793
"""Returns a dict with the results.
@@ -1045,6 +1053,23 @@ def encode_source(self):
10451053
"skipped_encoding_targets": self.skipped_encoding_targets
10461054
}
10471055

1056+
def revert_encoding(self):
1057+
"""Revert src encoding to original
1058+
"""
1059+
enc_utils = encode.EncodeUtils()
1060+
1061+
for target in self.encoded:
1062+
try:
1063+
convert_rc = enc_utils.uss_convert_encoding_prev(
1064+
target, target, self.to_encoding, self.from_encoding
1065+
)
1066+
if convert_rc:
1067+
enc_utils.uss_tag_encoding(target, self.from_encoding)
1068+
1069+
except Exception as e:
1070+
warning_message = f"Failed to revert source file {os.path.abspath(target)} to its original encoding."
1071+
raise EncodeError(warning_message) from e
1072+
10481073

10491074
class TarArchive(USSArchive):
10501075
def __init__(self, module):
@@ -1180,6 +1205,7 @@ def __init__(self, module):
11801205
self.tmp_data_sets = list()
11811206
self.dest_data_set = module.params.get("dest_data_set")
11821207
self.dest_data_set = dict() if self.dest_data_set is None else self.dest_data_set
1208+
self.ds_types = {}
11831209

11841210
def open(self):
11851211
pass
@@ -1536,7 +1562,8 @@ def encode_source(self):
15361562
try:
15371563
ds_type = data_set.DataSetUtils(target, tmphlq=self.tmphlq).ds_type()
15381564
if not ds_type:
1539-
raise EncodeError("Unable to determine data set type of {0}".format(target))
1565+
ds_type = "PS"
1566+
self.ds_types[target] = ds_type
15401567
enc_utils.mvs_convert_encoding(
15411568
target,
15421569
target,
@@ -1546,7 +1573,7 @@ def encode_source(self):
15461573
dest_type=ds_type,
15471574
tmphlq=self.tmphlq
15481575
)
1549-
self.encoded.append(os.path.abspath(target))
1576+
self.encoded.append(target)
15501577
except Exception:
15511578
self.failed_on_encoding.append(os.path.abspath(target))
15521579
return {
@@ -1555,6 +1582,27 @@ def encode_source(self):
15551582
"skipped_encoding_targets": self.skipped_encoding_targets
15561583
}
15571584

1585+
def revert_encoding(self):
1586+
"""Revert src encoding to original
1587+
"""
1588+
enc_utils = encode.EncodeUtils()
1589+
1590+
for target in self.encoded:
1591+
try:
1592+
ds_type = self.ds_types.get(target, "PS")
1593+
enc_utils.mvs_convert_encoding(
1594+
target,
1595+
target,
1596+
self.to_encoding,
1597+
self.from_encoding,
1598+
src_type=ds_type,
1599+
dest_type=ds_type,
1600+
tmphlq=self.tmphlq
1601+
)
1602+
except Exception as e:
1603+
warning_message = f"Failed to revert source file {os.path.abspath(target)} to its original encoding."
1604+
raise EncodeError(warning_message) from e
1605+
15581606

15591607
class AMATerseArchive(MVSArchive):
15601608
def __init__(self, module):
@@ -2013,8 +2061,10 @@ def run_module():
20132061
if archive.dest_exists() and not archive.force:
20142062
module.fail_json(msg="%s file exists. Use force flag to replace dest" % archive.dest)
20152063

2064+
encoding_result = None
20162065
archive.find_targets()
20172066
if archive.targets_exist():
2067+
# Encode the source if encoding is provided.
20182068
if encoding:
20192069
archive.encoding_targets()
20202070
encoding_result = archive.encode_source()
@@ -2031,6 +2081,9 @@ def run_module():
20312081
if archive.dest_type() == "USS":
20322082
archive.update_permissions()
20332083
archive.changed = archive.is_different_from_original()
2084+
# After a successful archive, revert the source encoding for all the files encoded.
2085+
if encoding_result:
2086+
archive.revert_encoding()
20342087
archive.get_state()
20352088

20362089
module.exit_json(**archive.result)

tests/functional/modules/test_zos_archive_func.py

Lines changed: 121 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -442,6 +442,7 @@ def test_uss_archive_encode_skip_encoding(ansible_zos_module, ds_format):
442442
# - test_mvs_archive_single_dataset_encoding
443443
# - test_mvs_archive_multiple_dataset_pattern_encoding
444444
# - test_mvs_archive_multiple_dataset_pattern_encoding_skip_encoding
445+
# - test_mvs_archive_multiple_dataset_pattern_encoding_revert_src_encoding
445446

446447

447448
@pytest.mark.ds
@@ -1464,6 +1465,126 @@ def test_mvs_archive_multiple_dataset_pattern_encoding_skip_encoding(ansible_zos
14641465

14651466
archived_datasets.append(archive_data_set)
14661467

1468+
finally:
1469+
for ds_name in matched_datasets:
1470+
hosts.all.zos_data_set(name=ds_name, state="absent")
1471+
for archive_ds in archived_datasets:
1472+
hosts.all.zos_data_set(name=archive_ds, state="absent")
1473+
1474+
@pytest.mark.ds
1475+
@pytest.mark.parametrize(
1476+
"ds_format", [
1477+
"terse"
1478+
])
1479+
@pytest.mark.parametrize(
1480+
"data_set", [
1481+
{
1482+
"dstype": "seq",
1483+
"members": [""]
1484+
}
1485+
])
1486+
@pytest.mark.parametrize(
1487+
"encoding", [
1488+
{"from": "IBM-1047", "to": "ISO8859-1"},
1489+
])
1490+
def test_mvs_archive_multiple_dataset_pattern_encoding_revert_src_encoding(ansible_zos_module, ds_format, data_set, encoding):
1491+
try:
1492+
hosts = ansible_zos_module
1493+
hlq_prefix = "OMVSADM.ABC"
1494+
matched_datasets = [f"{hlq_prefix}.A", f"{hlq_prefix}.B"]
1495+
archived_datasets = []
1496+
copy_src_datasets = [f"{hlq_prefix}.C", f"{hlq_prefix}.D"]
1497+
all_datasets_to_process = matched_datasets + copy_src_datasets
1498+
1499+
for ds_name in all_datasets_to_process:
1500+
hosts.all.zos_data_set(name=ds_name, state="absent")
1501+
hosts.all.zos_data_set(
1502+
name=ds_name,
1503+
type=data_set.get("dstype"),
1504+
state="present",
1505+
replace=True,
1506+
)
1507+
if data_set.get("dstype") in ["pds", "pdse"]:
1508+
for member in data_set.get("members"):
1509+
hosts.all.zos_data_set(
1510+
name=f"{ds_name}({member})",
1511+
type="member",
1512+
state="present"
1513+
)
1514+
1515+
test_line = "pattern match"
1516+
for ds_name in all_datasets_to_process:
1517+
for member in data_set.get("members"):
1518+
ds_target = f"{ds_name}({member})" if member else ds_name
1519+
hosts.all.shell(cmd=f"decho '{test_line}' \"{ds_target}\"")
1520+
1521+
format_dict = {"name": ds_format}
1522+
if ds_format == "terse":
1523+
format_dict["format_options"] = {"terse_pack": "spack"}
1524+
for ds_name in matched_datasets:
1525+
1526+
original_hex_result = hosts.all.shell(cmd=f"dcat '{ds_name}' | od -x")
1527+
host_original_result = None
1528+
if original_hex_result.contacted:
1529+
host_original_result = next(iter(original_hex_result.contacted.values()))
1530+
1531+
original_hex_output_lines = [line.strip() for line in host_original_result.get("stdout", "").splitlines() if line.strip()]
1532+
archive_data_set = get_tmp_ds_name()
1533+
archive_result = hosts.all.zos_archive(
1534+
src=ds_name,
1535+
dest=archive_data_set,
1536+
format=format_dict,
1537+
encoding=encoding,
1538+
)
1539+
reverted_hex_result = hosts.all.shell(cmd=f"dcat '{ds_name}' | od -x")
1540+
host_reverted_result = None
1541+
if reverted_hex_result.contacted:
1542+
host_reverted_result = next(iter(reverted_hex_result.contacted.values()))
1543+
reverted_hex_output_lines = [line.strip() for line in host_reverted_result.get("stdout", "").splitlines() if line.strip()]
1544+
1545+
original_hex = []
1546+
for line in original_hex_output_lines:
1547+
if line == '*':
1548+
original_hex.append('*')
1549+
else:
1550+
parts = line.split()
1551+
if len(parts) > 1:
1552+
original_hex.extend(parts[1:])
1553+
1554+
reverted_hex = []
1555+
for line in reverted_hex_output_lines:
1556+
if line == '*':
1557+
reverted_hex.append('*')
1558+
else:
1559+
parts = line.split()
1560+
if len(parts) > 1:
1561+
reverted_hex.extend(parts[1:])
1562+
1563+
for result in archive_result.contacted.values():
1564+
try:
1565+
original_first_star_idx = original_hex.index('*')
1566+
except ValueError:
1567+
original_first_star_idx = len(original_hex)
1568+
1569+
try:
1570+
reverted_first_star_idx = reverted_hex.index('*')
1571+
except ValueError:
1572+
reverted_first_star_idx = len(reverted_hex)
1573+
1574+
original_hex_to_compare = original_hex[:original_first_star_idx]
1575+
reverted_hex_to_compare = reverted_hex[:reverted_first_star_idx]
1576+
1577+
is_identical = (original_hex_to_compare == reverted_hex_to_compare)
1578+
assert is_identical is True
1579+
assert result.get("changed") is True
1580+
assert result.get("dest") == archive_data_set
1581+
assert ds_name in result.get("archived")
1582+
cmd_result = hosts.all.shell(cmd=f"dls {archive_data_set}")
1583+
for c_result in cmd_result.contacted.values():
1584+
assert archive_data_set in c_result.get("stdout")
1585+
1586+
archived_datasets.append(archive_data_set)
1587+
14671588
finally:
14681589
for ds_name in matched_datasets:
14691590
hosts.all.zos_data_set(name=ds_name, state="absent")

0 commit comments

Comments
 (0)