[Enhancement][zos_fetch] Update module interface return values #2231

Open
wants to merge 8 commits into base: dev
3 changes: 3 additions & 0 deletions changelogs/fragments/2231-zos_fetch-interface-update.yml
@@ -0,0 +1,3 @@
breaking_changes:
- zos_fetch - Return value ``file`` is replaced by ``src``. Return value ``note`` is deprecated; the messages previously returned in ``note`` are now returned in ``msg``.
  (https://github.com/ansible-collections/ibm_zos_core/pull/2231).
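
For consumers of the module's output, the rename is mechanical: read ``src`` where ``file`` was read, and look for the old ``note`` text in ``msg``. A minimal sketch of the migration, assuming a registered result dictionary from a zos_fetch task (the payload values are illustrative):

    # Illustrative registered result; keys reflect the new interface.
    fetch_result = {
        "src": "USER.TEST.PROCLIB",  # previously returned under the "file" key
        "dest": "/tmp/proclib",
        "changed": False,
        "msg": "Source 'USER.TEST.PROCLIB' was not found. No data was fetched.",  # previously "note"
    }

    # Before this change:
    #   source = fetch_result["file"]
    #   notice = fetch_result.get("note", "")
    # After this change:
    source = fetch_result["src"]
    notice = fetch_result.get("msg", "")
    if "No data was fetched." in notice:
        print(f"{source}: {notice}")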
65 changes: 44 additions & 21 deletions plugins/action/zos_fetch.py
@@ -1,4 +1,4 @@
# Copyright (c) IBM Corporation 2019, 2024
# Copyright (c) IBM Corporation 2019, 2025
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
@@ -57,7 +57,7 @@ def _update_result(result, src, dest, ds_type="USS", is_binary=False):
updated_result = dict((k, v) for k, v in result.items())
updated_result.update(
{
"file": src,
"src": src,
"dest": dest,
"data_set_type": data_set_types[ds_type],
"is_binary": is_binary,
@@ -121,6 +121,7 @@ def run(self, tmp=None, task_vars=None):
dest = self._task.args.get('dest')
encoding = self._task.args.get('encoding', None)
flat = _process_boolean(self._task.args.get('flat'), default=False)
fail_on_missing = _process_boolean(self._task.args.get('fail_on_missing'), default=True)
is_binary = _process_boolean(self._task.args.get('is_binary'))
ignore_sftp_stderr = _process_boolean(
self._task.args.get("ignore_sftp_stderr"), default=True
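
``_process_boolean`` is not shown in this diff; a plausible sketch of its contract, assuming it wraps Ansible's standard boolean coercion (illustrative, not necessarily the collection's actual implementation):

    from ansible.module_utils.parsing.convert_bool import boolean

    def _process_boolean(arg, default=False):
        # Coerce a raw task argument ("yes", True, "false", ...) to a bool,
        # falling back to the default when the value is absent or invalid.
        try:
            return boolean(arg)
        except TypeError:
            return default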
@@ -186,29 +187,55 @@ def run(self, tmp=None, task_vars=None):
task_vars=task_vars
)
ds_type = fetch_res.get("ds_type")
src = fetch_res.get("file")
src = fetch_res.get("src")
remote_path = fetch_res.get("remote_path")

if fetch_res.get("msg"):
result["msg"] = fetch_res.get("msg")
# Create a dictionary that is a schema for the return values
result = dict(
src="",
dest="",
is_binary=False,
checksum="",
changed=False,
data_set_type="",
msg="",
stdout="",
stderr="",
stdout_lines=[],
stderr_lines=[],
rc=0,
encoding=new_module_args.get("encoding"),
)
# Populate it with the module's response
result["src"] = fetch_res.get("src")
result["dest"] = fetch_res.get("dest")
result["is_binary"] = fetch_res.get("is_binary", False)
result["checksum"] = fetch_res.get("checksum")
result["changed"] = fetch_res.get("changed", False)
result["data_set_type"] = fetch_res.get("data_set_type")
result["msg"] = fetch_res.get("msg")
result["stdout"] = fetch_res.get("stdout")
result["stderr"] = fetch_res.get("stderr")
result["stdout_lines"] = fetch_res.get("stdout_lines")
result["stderr_lines"] = fetch_res.get("stderr_lines")
result["rc"] = fetch_res.get("rc", 0)
result["encoding"] = fetch_res.get("encoding")

if fetch_res.get("failed", False):
result["stdout"] = fetch_res.get("stdout") or fetch_res.get(
"module_stdout"
)
result["stderr"] = fetch_res.get("stderr") or fetch_res.get(
"module_stderr"
)
result["stdout_lines"] = fetch_res.get("stdout_lines")
result["stderr_lines"] = fetch_res.get("stderr_lines")
result["rc"] = fetch_res.get("rc")
result["failed"] = True
return result

elif fetch_res.get("note"):
result["note"] = fetch_res.get("note")
if "No data was fetched." in result["msg"]:
if fail_on_missing:
result["failed"] = True
return result

except Exception as err:
result["msg"] = "Failure during module execution"
result["msg"] = f"Failure during module execution {msg}"
result["stderr"] = str(err)
result["stderr_lines"] = str(err).splitlines()
result["failed"] = True
@@ -229,7 +256,6 @@ def run(self, tmp=None, task_vars=None):
# For instance: If src is: USER.TEST.PROCLIB(DATA) #
# and dest is: /tmp/, then updated dest would be /tmp/DATA #
# ********************************************************** #

if os.path.sep not in self._connection._shell.join_path("a", ""):
src = self._connection._shell._unquote(src)
source_local = src.replace("\\", "/")
@@ -290,15 +316,11 @@ def run(self, tmp=None, task_vars=None):
try:
if ds_type in SUPPORTED_DS_TYPES:
if ds_type == "PO" and os.path.isfile(dest) and not fetch_member:
result[
"msg"
] = "Destination must be a directory to fetch a partitioned data set"
result["msg"] = "Destination must be a directory to fetch a partitioned data set"
result["failed"] = True
return result
if ds_type == "GDG" and os.path.isfile(dest):
result[
"msg"
] = "Destination must be a directory to fetch a generation data group"
result["msg"] = "Destination must be a directory to fetch a generation data group"
result["failed"] = True
return result
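
The two guards above share one rule: source types that expand into multiple files (a whole PDS/PDSE, or a GDG) need a directory destination. A condensed standalone sketch of the check (illustrative):

    import os

    def needs_directory_dest(ds_type, dest, fetch_member=False):
        # Whole-library and GDG fetches produce one file per member or
        # generation, so a plain file destination cannot hold them.
        expands = (ds_type == "PO" and not fetch_member) or ds_type == "GDG"
        return expands and os.path.isfile(dest)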

@@ -309,7 +331,8 @@ def run(self, tmp=None, task_vars=None):
ignore_stderr=ignore_sftp_stderr,
)
if fetch_content.get("msg"):
return fetch_content
result.update(fetch_content)
return result

if validate_checksum and ds_type != "GDG" and ds_type != "PO" and not is_binary:
new_checksum = _get_file_checksum(dest)
60 changes: 36 additions & 24 deletions plugins/modules/zos_fetch.py
@@ -240,8 +240,10 @@
"""

RETURN = r"""
file:
description: The source file path or data set on the remote machine.
src:
description:
- The source file path or data set on the remote machine.
- If the source is not found, then src will be empty.
returned: success
type: str
sample: SOME.DATA.SET
@@ -266,14 +268,9 @@
returned: success
type: str
sample: PDSE
note:
description: Notice of module failure when C(fail_on_missing) is false.
returned: failure and fail_on_missing=false
type: str
sample: The data set USER.PROCLIB does not exist. No data was fetched.
msg:
description: Message returned on failure.
returned: failure
description: Any important messages from the module.
returned: always
type: str
sample: The source 'TEST.DATA.SET' does not exist or is uncataloged.
stdout:
@@ -921,8 +918,23 @@ def run_module():
# ********************************************************** #
# Check for data set existence and determine its type #
# ********************************************************** #

res_args = dict()
encoding_dict = {"from": encoding.get("from"), "to": encoding.get("to")}
result = dict(
src=src,
dest="",
is_binary=is_binary,
checksum="",
changed=False,
data_set_type="",
remote_path="",
msg="",
stdout="",
stderr="",
stdout_lines=[],
stderr_lines=[],
rc=0,
encoding=encoding_dict,
)
src_data_set = None
ds_type = None
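
With the schema in place, a successful fetch returns every documented key. An illustrative (entirely made-up) payload for a sequential data set fetched under the new interface:

    example_return = {
        "src": "TEST.DATA.SET",        # previously returned as "file"
        "dest": "/tmp/test.data.set",
        "is_binary": False,
        "checksum": "9d48114aa7720e19a476b1d5a4f54b1a9a8e8c8b",  # illustrative
        "changed": True,
        "data_set_type": "Sequential",
        "remote_path": "/tmp/ansible-zos-fetch-payload",  # hypothetical temp path
        "msg": "",
        "stdout": "",
        "stderr": "",
        "stdout_lines": [],
        "stderr_lines": [],
        "rc": 0,
        "encoding": {"from": "IBM-1047", "to": "ISO8859-1"},
    }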

@@ -963,7 +975,7 @@ def run_module():
)
else:
module.exit_json(
note=("Source '{0}' was not found. No data was fetched.".format(src))
msg=("Source '{0}' was not found. No data was fetched.".format(src))
)

if "/" in src:
@@ -992,7 +1004,7 @@ def run_module():
is_binary,
encoding=encoding
)
res_args["remote_path"] = file_path
result["remote_path"] = file_path

# ********************************************************** #
# Fetch a partitioned data set or one of its members #
@@ -1005,9 +1017,9 @@ def run_module():
is_binary,
encoding=encoding
)
res_args["remote_path"] = file_path
result["remote_path"] = file_path
else:
res_args["remote_path"] = fetch_handler._fetch_pdse(
result["remote_path"] = fetch_handler._fetch_pdse(
src_data_set.name,
is_binary,
encoding=encoding
@@ -1027,7 +1039,7 @@ def run_module():
is_binary,
encoding=encoding
)
res_args["remote_path"] = file_path
result["remote_path"] = file_path

# ********************************************************** #
# Fetch a VSAM data set #
@@ -1039,32 +1051,32 @@ def run_module():
is_binary,
encoding=encoding
)
res_args["remote_path"] = file_path
result["remote_path"] = file_path

# ********************************************************** #
# Fetch a GDG #
# ********************************************************** #

elif ds_type == "GDG":
res_args["remote_path"] = fetch_handler._fetch_gdg(
result["remote_path"] = fetch_handler._fetch_gdg(
src_data_set.name,
is_binary,
encoding=encoding
)

if ds_type == "USS":
res_args["file"] = src
result["src"] = src
else:
res_args["file"] = src_data_set.name
result["src"] = src_data_set.name

# Removing the HLQ since the user is probably not expecting it. The module
# has never returned it since it was originally written. Changes made to
# add GDG/GDS support started leaving the HLQ behind in the file name.
if hlq:
res_args["file"] = res_args["file"].replace(f"{hlq}.", "")
result["src"] = result["src"].replace(f"{hlq}.", "")

res_args["ds_type"] = ds_type
module.exit_json(**res_args)
result["ds_type"] = ds_type
module.exit_json(**result)
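
One subtlety in the HLQ stripping above: ``str.replace`` removes every occurrence of ``"{hlq}."``, not only the leading qualifier, so a name such as ``TEST.USER.DATA`` with ``hlq == "USER"`` would also be rewritten. A prefix-anchored sketch (illustrative):

    def strip_hlq(name, hlq):
        # Strip the HLQ only when it is actually the leading qualifier.
        prefix = f"{hlq}."
        return name[len(prefix):] if name.startswith(prefix) else name

    assert strip_hlq("USER.TEST.PROCLIB", "USER") == "TEST.PROCLIB"
    assert strip_hlq("TEST.USER.DATA", "USER") == "TEST.USER.DATA"  # left intact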


class ZOSFetchError(Exception):
@@ -1094,7 +1106,7 @@ def __init__(self, msg, rc="", stdout="", stderr="", stdout_lines="", stderr_lines=""):
stdout_lines=stdout_lines,
stderr_lines=stderr_lines,
)
super().__init__(self.msg)
super().__init__(msg)
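
The one-line fix above hands the raw ``msg`` to ``Exception.__init__`` rather than relying on a ``self.msg`` attribute existing at call time. A standalone sketch of the pattern, assuming the class keeps a structured payload for ``fail_json`` (the ``json_args`` attribute name is illustrative):

    class ZOSFetchError(Exception):
        def __init__(self, msg, rc=0, stdout="", stderr=""):
            # Structured payload, e.g. for module.fail_json(**err.json_args).
            self.json_args = dict(msg=msg, rc=rc, stdout=stdout, stderr=stderr)
            # Hand the plain message to Exception so str(err) is meaningful.
            super().__init__(msg)

    try:
        raise ZOSFetchError("fetch failed", rc=8)
    except ZOSFetchError as err:
        print(str(err), err.json_args["rc"])  # fetch failed 8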


def main():