1 file changed: +10 -5 lines changed

@@ -3,20 +3,24 @@
 fail () {
   # Restore stdout by pointing it to fd 3 and send any errors to it
   exec >&3
-  cat "$tmp"
-  rm "$tmp"
+  [[ -e $tmp ]] && cat "$tmp"
 
   exit 1
 }
 
 cleanup () {
-  rm "$tmp"
+  for f in "${temp_files[@]}"; do
+    [[ -e $f ]] && rm -- "$f"
+  done
 }
 
+temp_files=()
+
 # Clone, i.e. preserve, original stdout using fd 3.
 exec 3>&1
 # Send stderr and stdout to a temp file
 tmp="$(mktemp)"
+temp_files+=("$tmp")
 exec &> "$tmp"
 
 # Run the fail() method on error
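
Taken together, this hunk replaces the single hard-coded `rm "$tmp"` with an array of registered temp files, so both `fail` and `cleanup` tolerate files that were never created or were already removed. Below is a minimal standalone sketch of that pattern. The two `trap` lines are an assumption on my part (the diff only shows the "# Run the fail() method on error" comment, not the actual wiring), and the final echo is a placeholder for the real script body.

#!/usr/bin/env bash
# Sketch of the temp-file bookkeeping introduced by this change (assumed wiring).

temp_files=()

fail () {
  # Restore stdout by pointing it to fd 3 and send any errors to it
  exec >&3
  # Only print the captured log if the temp file actually exists
  [[ -e $tmp ]] && cat "$tmp"
  exit 1
}

cleanup () {
  # Remove every registered temp file that still exists
  for f in "${temp_files[@]}"; do
    [[ -e $f ]] && rm -- "$f"
  done
}

trap fail ERR      # assumed: run fail() on error
trap cleanup EXIT  # assumed: always run cleanup() on exit

# Clone, i.e. preserve, original stdout using fd 3
exec 3>&1
# Send stderr and stdout to a temp file, registering it for cleanup
tmp="$(mktemp)"
temp_files+=("$tmp")
exec &> "$tmp"

echo "doing work..."  # placeholder for the rest of the script
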
@@ -61,11 +65,12 @@ find "$metrics_directory" -type f -ctime +"$retention_days" -delete
 # Compress the remaining files in a Puppet service metrics directory.
 # Store the list of files in a temp file so that `tar` and `rm` will operate on the same files
 metrics_tmp="$(mktemp)"
+temp_files+=("$metrics_tmp")
 find "$metrics_directory" -type f -name "*json" > "$metrics_tmp"
 tar --create --gzip --file "${metrics_directory}/${metrics_type}-$(date +%Y.%m.%d.%H.%M.%S).tar.gz" \
   --files-from "$metrics_tmp"
 
 # Cleanup the backed up json files so that we do not duplicate files in the tarballs.
 # We can assume that the json files have no spaces as they are created by our scripts
-xargs -a "$metrics_tmp" rm
-rm "$metrics_tmp"
+# Only run xargs if the file is >0 bytes
+[[ -s $metrics_tmp ]] && xargs -a "$metrics_tmp" rm
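
This hunk keeps the list-file approach (one `find` feeds both `tar` and `rm`) but registers `metrics_tmp` with the shared cleanup handler and guards the `xargs` call. The guard matters because GNU xargs runs its command once even on empty input, so an empty list would invoke `rm` with no operands and fail; GNU xargs' `-r` / `--no-run-if-empty` flag would be another way to get the same behaviour. A condensed sketch of the resulting step, assuming `metrics_directory` and `metrics_type` are set earlier in the script as shown in the hunk header:

# Snapshot the file list once so tar and rm operate on exactly the same files
metrics_tmp="$(mktemp)"
temp_files+=("$metrics_tmp")   # register with the shared cleanup handler
find "$metrics_directory" -type f -name "*json" > "$metrics_tmp"

# Archive the listed json files into a timestamped tarball
tar --create --gzip \
    --file "${metrics_directory}/${metrics_type}-$(date +%Y.%m.%d.%H.%M.%S).tar.gz" \
    --files-from "$metrics_tmp"

# Delete the archived files, but only if the list is non-empty
[[ -s $metrics_tmp ]] && xargs -a "$metrics_tmp" rm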