
Commit 8666829

[Enhancement][zos_fetch] Update module interface return values (#2231)
* Updated zos_fetch with new interface
* Updated to fix VSAM errors
* Updated failing scenario
* Updated changelogs
* Added new changes
* Updated zos_fetch
* Fixed sanity issues
1 parent 01bf407 commit 8666829

File tree

4 files changed: +135 −51 lines changed
Lines changed: 3 additions & 0 deletions
@@ -0,0 +1,3 @@
+breaking_changes:
+  - zos_fetch - Return value ``file`` is replaced by ``src``. Return value ``note`` is deprecated, the messages thrown in ``note`` are now returned in ``msg``.
+    (https://github.com/ansible-collections/ibm_zos_core/pull/2231).
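For callers consuming the registered result, the practical effect of this breaking change is a pair of key renames. A minimal sketch of the before-and-after shape of the returned dictionary; the keys are the point, and the sample values below are taken from the documentation samples in this commit, not from a real run:

# Illustrative only: approximate shape of a zos_fetch result before and after this change.
old_result = {
    "file": "USER.PROCLIB",   # renamed to "src"
    "note": "The data set USER.PROCLIB does not exist. No data was fetched.",  # folded into "msg"
}
new_result = {
    "src": "USER.PROCLIB",    # replaces "file"
    "msg": "Source 'USER.PROCLIB' was not found. No data was fetched.",        # replaces "note"
}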

plugins/action/zos_fetch.py

Lines changed: 44 additions & 21 deletions
@@ -1,4 +1,4 @@
-# Copyright (c) IBM Corporation 2019, 2024
+# Copyright (c) IBM Corporation 2019, 2025
 # Licensed under the Apache License, Version 2.0 (the "License");
 # you may not use this file except in compliance with the License.
 # You may obtain a copy of the License at
@@ -57,7 +57,7 @@ def _update_result(result, src, dest, ds_type="USS", is_binary=False):
     updated_result = dict((k, v) for k, v in result.items())
     updated_result.update(
         {
-            "file": src,
+            "src": src,
             "dest": dest,
             "data_set_type": data_set_types[ds_type],
             "is_binary": is_binary,
@@ -121,6 +121,7 @@ def run(self, tmp=None, task_vars=None):
         dest = self._task.args.get('dest')
         encoding = self._task.args.get('encoding', None)
         flat = _process_boolean(self._task.args.get('flat'), default=False)
+        fail_on_missing = _process_boolean(self._task.args.get('fail_on_missing'), default=True)
         is_binary = _process_boolean(self._task.args.get('is_binary'))
         ignore_sftp_stderr = _process_boolean(
             self._task.args.get("ignore_sftp_stderr"), default=True
@@ -186,29 +187,55 @@ def run(self, tmp=None, task_vars=None):
                 task_vars=task_vars
             )
             ds_type = fetch_res.get("ds_type")
-            src = fetch_res.get("file")
+            src = fetch_res.get("src")
             remote_path = fetch_res.get("remote_path")
-
-            if fetch_res.get("msg"):
-                result["msg"] = fetch_res.get("msg")
+            # Create a dictionary that is a schema for the return values
+            result = dict(
+                src="",
+                dest="",
+                is_binary=False,
+                checksum="",
+                changed=False,
+                data_set_type="",
+                msg="",
+                stdout="",
+                stderr="",
+                stdout_lines=[],
+                stderr_lines=[],
+                rc=0,
+                encoding=new_module_args.get("encoding"),
+            )
+            # Populate it with the modules response
+            result["src"] = fetch_res.get("src")
+            result["dest"] = fetch_res.get("dest")
+            result["is_binary"] = fetch_res.get("is_binary", False)
+            result["checksum"] = fetch_res.get("checksum")
+            result["changed"] = fetch_res.get("changed", False)
+            result["data_set_type"] = fetch_res.get("data_set_type")
+            result["msg"] = fetch_res.get("msg")
+            result["stdout"] = fetch_res.get("stdout")
+            result["stderr"] = fetch_res.get("stderr")
+            result["stdout_lines"] = fetch_res.get("stdout_lines")
+            result["stderr_lines"] = fetch_res.get("stderr_lines")
+            result["rc"] = fetch_res.get("rc", 0)
+            result["encoding"] = fetch_res.get("encoding")
+
+            if fetch_res.get("failed", False):
                 result["stdout"] = fetch_res.get("stdout") or fetch_res.get(
                     "module_stdout"
                 )
                 result["stderr"] = fetch_res.get("stderr") or fetch_res.get(
                     "module_stderr"
                 )
-                result["stdout_lines"] = fetch_res.get("stdout_lines")
-                result["stderr_lines"] = fetch_res.get("stderr_lines")
-                result["rc"] = fetch_res.get("rc")
                 result["failed"] = True
                 return result
-
-            elif fetch_res.get("note"):
-                result["note"] = fetch_res.get("note")
+            if "No data was fetched." in result["msg"]:
+                if fail_on_missing:
+                    result["failed"] = True
                 return result

         except Exception as err:
-            result["msg"] = "Failure during module execution"
+            result["msg"] = f"Failure during module execution {msg}"
             result["stderr"] = str(err)
             result["stderr_lines"] = str(err).splitlines()
             result["failed"] = True
@@ -229,7 +256,6 @@ def run(self, tmp=None, task_vars=None):
         # For instance: If src is: USER.TEST.PROCLIB(DATA)          #
         # and dest is: /tmp/, then updated dest would be /tmp/DATA  #
         # ********************************************************** #
-
         if os.path.sep not in self._connection._shell.join_path("a", ""):
             src = self._connection._shell._unquote(src)
             source_local = src.replace("\\", "/")
@@ -290,15 +316,11 @@ def run(self, tmp=None, task_vars=None):
         try:
             if ds_type in SUPPORTED_DS_TYPES:
                 if ds_type == "PO" and os.path.isfile(dest) and not fetch_member:
-                    result[
-                        "msg"
-                    ] = "Destination must be a directory to fetch a partitioned data set"
+                    result["msg"] = "Destination must be a directory to fetch a partitioned data set"
                     result["failed"] = True
                     return result
                 if ds_type == "GDG" and os.path.isfile(dest):
-                    result[
-                        "msg"
-                    ] = "Destination must be a directory to fetch a generation data group"
+                    result["msg"] = "Destination must be a directory to fetch a generation data group"
                     result["failed"] = True
                     return result

@@ -309,7 +331,8 @@ def run(self, tmp=None, task_vars=None):
                     ignore_stderr=ignore_sftp_stderr,
                 )
                 if fetch_content.get("msg"):
-                    return fetch_content
+                    result.update(fetch_content)
+                    return result

             if validate_checksum and ds_type != "GDG" and ds_type != "PO" and not is_binary:
                 new_checksum = _get_file_checksum(dest)
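The pattern introduced in the action plugin above is to declare the complete return-value schema first and then overlay whatever the module actually returned, so every documented key is present even on partial failures. A standalone sketch of that idea; the function and variable names below are illustrative, not part of the plugin:

def build_result(fetch_res, encoding=None):
    # Defaults for every expected return key, so consumers never hit a missing key.
    result = dict(
        src="", dest="", is_binary=False, checksum="", changed=False,
        data_set_type="", msg="", stdout="", stderr="",
        stdout_lines=[], stderr_lines=[], rc=0, encoding=encoding,
    )
    # Overlay only the keys the module actually returned with non-None values.
    result.update({k: v for k, v in fetch_res.items() if k in result and v is not None})
    return result

# A sparse module response still yields the full schema:
print(build_result({"src": "USER.TEST.PS", "changed": True, "rc": 0}))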

plugins/modules/zos_fetch.py

Lines changed: 36 additions & 24 deletions
@@ -240,8 +240,10 @@
 """

 RETURN = r"""
-file:
-    description: The source file path or data set on the remote machine.
+src:
+    description:
+        - The source file path or data set on the remote machine.
+        - If the source is not found, then src will be empty.
     returned: success
     type: str
     sample: SOME.DATA.SET
@@ -266,14 +268,9 @@
     returned: success
     type: str
     sample: PDSE
-note:
-    description: Notice of module failure when C(fail_on_missing) is false.
-    returned: failure and fail_on_missing=false
-    type: str
-    sample: The data set USER.PROCLIB does not exist. No data was fetched.
 msg:
-    description: Message returned on failure.
-    returned: failure
+    description: Any important messages from the module.
+    returned: always
     type: str
     sample: The source 'TEST.DATA.SET' does not exist or is uncataloged.
 stdout:
@@ -921,8 +918,23 @@ def run_module():
     # ********************************************************** #
     # Check for data set existence and determine its type        #
     # ********************************************************** #
-
-    res_args = dict()
+    encoding_dict = {"from": encoding.get("from"), "to": encoding.get("to")}
+    result = dict(
+        src=src,
+        dest="",
+        is_binary=is_binary,
+        checksum="",
+        changed=False,
+        data_set_type="",
+        remote_path="",
+        msg="",
+        stdout="",
+        stderr="",
+        stdout_lines=[],
+        stderr_lines=[],
+        rc=0,
+        encoding=encoding_dict,
+    )
     src_data_set = None
     ds_type = None

@@ -963,7 +975,7 @@ def run_module():
            )
        else:
            module.exit_json(
-                note=("Source '{0}' was not found. No data was fetched.".format(src))
+                msg=("Source '{0}' was not found. No data was fetched.".format(src))
            )

    if "/" in src:
@@ -992,7 +1004,7 @@ def run_module():
            is_binary,
            encoding=encoding
        )
-        res_args["remote_path"] = file_path
+        result["remote_path"] = file_path

    # ********************************************************** #
    # Fetch a partitioned data set or one of its members         #
@@ -1005,9 +1017,9 @@ def run_module():
                is_binary,
                encoding=encoding
            )
-            res_args["remote_path"] = file_path
+            result["remote_path"] = file_path
        else:
-            res_args["remote_path"] = fetch_handler._fetch_pdse(
+            result["remote_path"] = fetch_handler._fetch_pdse(
                src_data_set.name,
                is_binary,
                encoding=encoding
@@ -1027,7 +1039,7 @@ def run_module():
            is_binary,
            encoding=encoding
        )
-        res_args["remote_path"] = file_path
+        result["remote_path"] = file_path

    # ********************************************************** #
    # Fetch a VSAM data set                                       #
@@ -1039,32 +1051,32 @@ def run_module():
            is_binary,
            encoding=encoding
        )
-        res_args["remote_path"] = file_path
+        result["remote_path"] = file_path

    # ********************************************************** #
    # Fetch a GDG                                                 #
    # ********************************************************** #

    elif ds_type == "GDG":
-        res_args["remote_path"] = fetch_handler._fetch_gdg(
+        result["remote_path"] = fetch_handler._fetch_gdg(
            src_data_set.name,
            is_binary,
            encoding=encoding
        )

    if ds_type == "USS":
-        res_args["file"] = src
+        result["src"] = src
    else:
-        res_args["file"] = src_data_set.name
+        result["src"] = src_data_set.name

    # Removing the HLQ since the user is probably not expecting it. The module
    # hasn't returned it ever since it was originally written. Changes made to
    # add GDG/GDS support started leaving the HLQ behind in the file name.
    if hlq:
-        res_args["file"] = res_args["file"].replace(f"{hlq}.", "")
+        result["src"] = result["src"].replace(f"{hlq}.", "")

-    res_args["ds_type"] = ds_type
-    module.exit_json(**res_args)
+    result["ds_type"] = ds_type
+    module.exit_json(**result)


 class ZOSFetchError(Exception):
@@ -1094,7 +1106,7 @@ def __init__(self, msg, rc="", stdout="", stderr="", stdout_lines="", stderr_lin
            stdout_lines=stdout_lines,
            stderr_lines=stderr_lines,
        )
-        super().__init__(self.msg)
+        super().__init__(msg)


 def main():
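The final hunk passes the constructor's msg parameter to Exception.__init__ instead of reading it back as self.msg, which is safer when the message is only stored inside a dict attribute on the instance. A reduced sketch of the idea; the json_args attribute name is an assumption, not copied from the module:

class ZOSFetchError(Exception):
    def __init__(self, msg, rc="", stdout="", stderr="", stdout_lines="", stderr_lines=""):
        # Keep a module-style payload on the instance (attribute name is hypothetical).
        self.json_args = dict(
            msg=msg,
            rc=rc,
            stdout=stdout,
            stderr=stderr,
            stdout_lines=stdout_lines,
            stderr_lines=stderr_lines,
        )
        # Use the local parameter; no reliance on a self.msg attribute existing.
        super().__init__(msg)

try:
    raise ZOSFetchError("Fetch of USER.TEST.PS failed", rc=8)
except ZOSFetchError as err:
    print(str(err), err.json_args["rc"])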

0 commit comments