- # Copyright (c) IBM Corporation 2019, 2024
+ # Copyright (c) IBM Corporation 2019, 2025
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
@@ -57,7 +57,7 @@ def _update_result(result, src, dest, ds_type="USS", is_binary=False):
    updated_result = dict((k, v) for k, v in result.items())
    updated_result.update(
        {
-             "file": src,
+             "src": src,
            "dest": dest,
            "data_set_type": data_set_types[ds_type],
            "is_binary": is_binary,
@@ -121,6 +121,7 @@ def run(self, tmp=None, task_vars=None):
        dest = self._task.args.get('dest')
        encoding = self._task.args.get('encoding', None)
        flat = _process_boolean(self._task.args.get('flat'), default=False)
+         fail_on_missing = _process_boolean(self._task.args.get('fail_on_missing'), default=True)
        is_binary = _process_boolean(self._task.args.get('is_binary'))
        ignore_sftp_stderr = _process_boolean(
            self._task.args.get("ignore_sftp_stderr"), default=True
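The new fail_on_missing option is parsed with the same _process_boolean helper used by the surrounding options. The helper itself sits outside this diff; a minimal sketch of the coercion it is assumed to perform, wrapping Ansible's boolean() and falling back to the supplied default when the value is absent or unrecognizable:

# Sketch only: the real _process_boolean is defined elsewhere in this action plugin.
from ansible.module_utils.parsing.convert_bool import boolean


def _process_boolean(arg, default=False):
    # Assumption: coerce truthy/falsy strings ("yes", "true", "0", ...) to bool,
    # and fall back to `default` when the argument is None or not recognizable.
    try:
        return boolean(arg)
    except TypeError:
        return default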
@@ -186,29 +187,55 @@ def run(self, tmp=None, task_vars=None):
                task_vars=task_vars
            )
            ds_type = fetch_res.get("ds_type")
-             src = fetch_res.get("file")
+             src = fetch_res.get("src")
            remote_path = fetch_res.get("remote_path")
-
-             if fetch_res.get("msg"):
-                 result["msg"] = fetch_res.get("msg")
+             # Create a dictionary that is a schema for the return values
+             result = dict(
+                 src="",
+                 dest="",
+                 is_binary=False,
+                 checksum="",
+                 changed=False,
+                 data_set_type="",
+                 msg="",
+                 stdout="",
+                 stderr="",
+                 stdout_lines=[],
+                 stderr_lines=[],
+                 rc=0,
+                 encoding=new_module_args.get("encoding"),
+             )
+             # Populate it with the module's response
+             result["src"] = fetch_res.get("src")
+             result["dest"] = fetch_res.get("dest")
+             result["is_binary"] = fetch_res.get("is_binary", False)
+             result["checksum"] = fetch_res.get("checksum")
+             result["changed"] = fetch_res.get("changed", False)
+             result["data_set_type"] = fetch_res.get("data_set_type")
+             result["msg"] = fetch_res.get("msg")
+             result["stdout"] = fetch_res.get("stdout")
+             result["stderr"] = fetch_res.get("stderr")
+             result["stdout_lines"] = fetch_res.get("stdout_lines")
+             result["stderr_lines"] = fetch_res.get("stderr_lines")
+             result["rc"] = fetch_res.get("rc", 0)
+             result["encoding"] = fetch_res.get("encoding")
+
+             if fetch_res.get("failed", False):
                result["stdout"] = fetch_res.get("stdout") or fetch_res.get(
                    "module_stdout"
                )
                result["stderr"] = fetch_res.get("stderr") or fetch_res.get(
                    "module_stderr"
                )
-                 result["stdout_lines"] = fetch_res.get("stdout_lines")
-                 result["stderr_lines"] = fetch_res.get("stderr_lines")
-                 result["rc"] = fetch_res.get("rc")
                result["failed"] = True
                return result
-
-             elif fetch_res.get("note"):
-                 result["note"] = fetch_res.get("note")
+             if "No data was fetched." in result["msg"]:
+                 if fail_on_missing:
+                     result["failed"] = True
                return result

        except Exception as err:
-             result["msg"] = "Failure during module execution"
+             result["msg"] = f"Failure during module execution {msg}"
            result["stderr"] = str(err)
            result["stderr_lines"] = str(err).splitlines()
            result["failed"] = True
@@ -229,7 +256,6 @@ def run(self, tmp=None, task_vars=None):
        # For instance: If src is: USER.TEST.PROCLIB(DATA)          #
        # and dest is: /tmp/, then updated dest would be /tmp/DATA  #
        # ********************************************************** #
-
        if os.path.sep not in self._connection._shell.join_path("a", ""):
            src = self._connection._shell._unquote(src)
            source_local = src.replace("\\", "/")
@@ -290,15 +316,11 @@ def run(self, tmp=None, task_vars=None):
        try:
            if ds_type in SUPPORTED_DS_TYPES:
                if ds_type == "PO" and os.path.isfile(dest) and not fetch_member:
-                     result[
-                         "msg"
-                     ] = "Destination must be a directory to fetch a partitioned data set"
+                     result["msg"] = "Destination must be a directory to fetch a partitioned data set"
                    result["failed"] = True
                    return result
                if ds_type == "GDG" and os.path.isfile(dest):
-                     result[
-                         "msg"
-                     ] = "Destination must be a directory to fetch a generation data group"
+                     result["msg"] = "Destination must be a directory to fetch a generation data group"
                    result["failed"] = True
                    return result

@@ -309,7 +331,8 @@ def run(self, tmp=None, task_vars=None):
                    ignore_stderr=ignore_sftp_stderr,
                )
                if fetch_content.get("msg"):
-                     return fetch_content
+                     result.update(fetch_content)
+                     return result

                if validate_checksum and ds_type != "GDG" and ds_type != "PO" and not is_binary:
                    new_checksum = _get_file_checksum(dest)
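Checksum validation compares the fetched local copy against a fresh digest; _get_file_checksum is defined outside this hunk. A minimal sketch, assuming it streams the local file through a SHA-256 digest (the hash algorithm and block size here are assumptions):

# Sketch only: the real _get_file_checksum lives elsewhere in this plugin.
import hashlib
import os


def _get_file_checksum(path, blocksize=64 * 1024):
    # Assumption: hash the local file in fixed-size blocks so large fetched
    # data sets never have to be read into memory at once.
    if not os.path.isfile(path):
        return None
    digest = hashlib.sha256()
    with open(path, "rb") as handle:
        for block in iter(lambda: handle.read(blocksize), b""):
            digest.update(block)
    return digest.hexdigest()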