|
| 1 | +#!/usr/bin/perl |
| 2 | +#packit.perl - created Tue May 27 17:14:22 BST 2014 |
| 3 | + |
| 4 | +use strict; |
| 5 | +use warnings; |
| 6 | + |
| 7 | +# Takes a shell (or Perl, or whatever) script and packs it to make a shar-type |
| 8 | +# archive that can be run with "sh foo.sh" or indeed "cat foo.sh | sh". |
| 9 | + |
| 10 | +# Note that for URLs the recipient must have either wget or curl |
| 11 | +# In all cases the recipient needs Perl or uudecode, and gunzip, and md5sum |
| 12 | +# The unpack script is designed to work with dash, so should work anywhere. |
| 13 | + |
| 14 | +# You must have shar and uuencode, and wget+md5sum for the AUTO md5sum feature. |
| 15 | + |
| 16 | +# Also note I make no promise that filenames are handled robustly, so |
| 17 | +# avoid spaces and other gubbins, especially in the main script name. |
| 18 | + |
| 19 | +# First, what to pack? |
| 20 | + |
# The main script to pack is the first argument; all arguments get packed.
my $mainscript = $ARGV[0] || die "What to pack??\n";
my $msbase = $mainscript;

if( $mainscript =~ /\// )
{
	print STDERR "Warning - dependencies in script will be resolved relative to CWD.\n";
	# Strip any leading directory to get the bare script name.
	# (Bug fix: this was "$msbase = s/.*\///", which ran the substitution
	# on $_ and assigned its return value, never modifying $msbase.)
	$msbase =~ s/.*\///;
}
| 29 | + |
# Collected state describing the archive we are about to emit.
my %topack;     # set of filenames to embed in the shar payload
my %urls;       # URLs the generated script must download at unpack time
my $dir = ''; #Specific directory to unpack to if not mktemp -d
my $id = '';    # required user name ('' means anyone may run it)
my $md5 = '';   # md5 mode: '', 'AUTO-URL', 'AUTO-ALL', or a sums filename
my @md5sums;    # generated "HASH  filename" lines for the AUTO modes
$topack{$_}++ for @ARGV; #Pack all args including main script.
| 37 | + |
# Add things from the main script, by scanning it for "#==X value" lines:
#   #==F file  - extra file to pack
#   #==U url   - URL to fetch at unpack time
#   #==D dir   - unpack into this directory instead of mktemp -d
#   #==I id    - user the generated script must run as ('0' means root)
#   #==M file  - md5sum file, or AUTO / AUTO-URL to generate sums here
open( my $ms, '<', $mainscript ) or die "Cannot open $mainscript: $!\n";

# Read line-by-line rather than slurping the whole file into a list.
while( my $line = <$ms> )
{
	chomp $line;
	$line =~ /^#==([FUDIM]) (.*)/ or next;
	# Copy the captures immediately so nothing below can clobber $1/$2.
	my( $directive, $value ) = ( $1, $2 );

	#An ugly if/elsif chain but I don't care
	if( $directive eq 'F' )
	{
		$topack{$value}++;
	}
	elsif( $directive eq 'U' )
	{
		$urls{$value}++;
	}
	elsif( $directive eq 'D' )
	{
		$dir = $value;
	}
	elsif( $directive eq 'I' )
	{
		$id = $value;
		$id = 'root' if $id eq '0';
	}
	elsif( $directive eq 'M' )
	{
		#Warning - specifying a binary file as your md5sum file will blow
		#the system up. Ye be warned.
		$md5 = $value;
		# Normalise the AUTO spellings (AUTO, AUTO-U, AUTO_URL, ...).
		if($md5 =~ /^AUTO[-_]?U/) { $md5 = "AUTO-URL" }
		elsif($md5 =~ /^AUTO/){ $md5 = "AUTO-ALL" }
	}
}
close $ms;
| 74 | + |
| 75 | +sub echo { print "$_\n" for @_; } |
| 76 | + |
# Generate the md5sums if asked.
# AUTO mode: download every URL once now, at pack time, and record
# "HASH  filename" lines for the unpack script to feed to "md5sum -c".
if($md5 =~ /^AUTO-/ && %urls)
{
	# keys(%urls) in scalar (concat) context is the URL count.
	print STDERR "Calculating md5 sums for " . keys(%urls) . " urls; this may take a while.\n";
	for(keys %urls)
	{
		#Translate the URL to a filename. Not robust. If you want it robust, make the
		#md5sum file yourself and specify it with #==M mymd5sums.txt
		(my $file4url = $_ ) =~ s/.*\///;
		$file4url =~ s/(%20|\+)/ /g;

		# NOTE(review): the URL is interpolated straight into a shell
		# command; a quote or $ in the URL would break (or inject into)
		# this pipeline. Acceptable for trusted input, but be aware.
		unshift @md5sums, `wget -qO- "$_" | md5sum -`;
		chomp $md5sums[0];
		# md5sum reports stdin's name as "-": capture the hash plus its
		# trailing whitespace and swap the "-" for our guessed filename.
		$md5sums[0] =~ /^([a-f0-9]{32}\s+)-$/ or die "$_ produced bad md5sum line $md5sums[0] - aborting\n";
		$md5sums[0] = "$1$file4url";
	}
}
if($md5 =~ /^AUTO-ALL/) # This is silly as shar already checksums for you.
{
	# Checksum every packed file too. md5sum already prints the real
	# filename here, so no rewriting of the "-" placeholder is needed.
	for(keys %topack)
	{
		unshift @md5sums, `md5sum "$_"`;
		chomp $md5sums[0];
		$md5sums[0] =~ /^[a-f0-9]{32}\s+/ or die "$_ produced bad md5sum line $md5sums[0] - aborting\n";
	}
}
| 103 | + |
| 104 | + |
# Print a preamble that checks the user and changes to the right directory.
$| = 1;
echo '#!/bin/sh', ("#Script generated by packit.perl on " . localtime . "."), '';

#For root users, check the ID and fix $HOME, as the user may not have run "sudo -H"
#and this can screw up some things like R module installation.
#For other users, just check ID, unless we are just unpacking. Anyone can unpack.
if($id eq 'root')
{
	echo 'if [ -z "$UNPACK_ONLY" -a "`id -u`" != 0 ] ; then',
	     ' echo "This script needs to be run with root privileges. Please try:"',
	     ' echo " sudo -H sh $0"',
	     ' exit 1',
	     'fi',
	     'export HOME="`getent passwd 0 | cut -d: -f6`"';
}
elsif($id)
{
	# Bug fix: this previously emitted "]]" with no preceding space,
	# producing the invalid test  [ ... != 'user']]  in the generated
	# script; a POSIX [ test needs " ] " as its final word.
	echo 'if [ -z "$UNPACK_ONLY" -a "`id -un`" != ' . "'$id'" . ' ] ; then',
	     ' echo "This script needs to be run as user ' . $id . '. Please try:"',
	     ' echo " sudo -Hu ' . $id . ' sh $0"',
	     ' exit 1',
	     'fi';
}
| 129 | + |
# Note that whatever happens, the working dir will not be cleaned up. I could add
# this but it seems risky. Temp dirs are cheap and the cron removes them for you.

# Emit the cd-to-workdir part of the generated script. A failed mkdir/cd
# (or mktemp) is caught by testing $? on the line after the cd.
if($dir)
{
	# NOTE(review): $dir is deliberately unquoted in the generated sh so
	# things like $HOME expand at unpack time; spaces in $dir will break.
	echo "mkdir -p $dir", "cd $dir",
	'if [ "$?" != 0 ] ; then',
	' echo "Unable to change to directory" ' . $dir . ' "; cannot continue."',
	' exit 2',
	'fi';
}
else
{
	# If mktemp fails we cd to /dev/null, which always fails, tripping
	# the $? check below.
	echo 'cd "`mktemp -d || echo /dev/null`"',
	'if [ "$?" != 0 ] ; then',
	' echo "Unable to change to temporary directory; cannot continue."',
	' exit 2',
	'fi';
}
echo 'echo "Working in `pwd`..."';
| 150 | + |
# Now grab all the URLs at unpack time. Prefer wget, fall back to curl.
if(%urls)
{
	echo '',
	     'if which wget >/dev/null 2>&1 ; then',
	     ' alias WGET="wget -c"',
	     'elif which curl >/dev/null 2>&1 ; then',
	     # Bug fix: "curl -o" requires an explicit output filename and
	     # would have consumed the URL as that filename; "curl -O" saves
	     # under the remote name, matching wget's behaviour.
	     ' alias WGET="curl -O"',
	     'else',
	     ' echo "Cannot retrieve URLs as you have neither wget or curl commands available."',
	     ' exit 3',
	     'fi',
	     '',
	     ( map {"if ! WGET '$_' ; then exit 3 ; fi"} keys %urls ),
	     '';
}
| 167 | + |
# OK, now run shar. No need to start a subshell as far as I can see, but I do need
# to disable exit 0 from happening, and provide a uudecode alternative, and hook my md5sum
# check.
# nexit() is aliased over "exit" so that shar's final "exit 0" instead runs
# the md5 check and then the payload; "\exit" bypasses the alias on the
# failure path so real exits still work. "<&1" reattaches stdin from
# stdout before exec'ing the payload — NOTE(review): presumably for the
# "cat foo.sh | sh" case where stdin was consumed; confirm intent.
echo '#Add hook to ensure the payload runs after successful unpacking',
	'nexit() {',
	'if [ "$1" = "0" ] ; then',
	' do_md5_check || { echo "Checksum fail." ; exit 1 ; }',
	' if [ -n "$UNPACK_ONLY" ] ; then',
	" echo \"Unpack complete in `pwd`. Exiting.\"",
	' else',
	" chmod +x $msbase",
	" echo 'Unpack phase complete. Running $msbase' ; echo '====>>>'",
	" exec ./$msbase <&1",
	' fi',
	'else \exit "$@"',
	'fi }',
	'alias exit=nexit';
| 185 | + |
# Emit a uudecode shell function: prefer Perl's unpack("u") (uudecode is
# often not installed), else fall back to the real uudecode binary.
echo '#Always prefer Perl for uudecoding',
	'uudecode() {',
	' if which perl >/dev/null ; then',
	# "read" consumes the "begin <mode> <name>" header line: l1 is the
	# file mode, l2 the output filename used below.
	' read l0 l1 l2',
	" perl -ne 'print unpack(\"u*\",\$_)' >\$l2 &&",
	' chmod $l1 $l2',
	' else',
	# Bug fix: a plain "uudecode" here re-invoked this very shell
	# function (functions shadow external commands), recursing forever;
	# "command" forces the external binary.
	' command uudecode "$@"',
	' fi',
	'}';
| 196 | + |
# Emit the do_md5_check() function for the generated script, picking the
# body that matches the checksum mode selected earlier.
my @check_fn;
if( !$md5 )
{
	@check_fn = ( '#No extra md5sum checking',
	              'do_md5_check() {',
	              ' true',
	              '}' );
}
elsif( $md5 =~ /^AUTO-/ )
{
	#Yes, Dash lets you embed heredocs in a function body. How does that work??
	@check_fn = ( '#md5sum check.',
	              "do_md5_check() { md5sum -c <<'.'",
	              @md5sums,
	              '.',
	              '}' );
}
else
{
	@check_fn = ( 'do_md5_check() {',
	              " md5sum -c '$md5'",
	              '}' );
}
echo @check_fn;
| 219 | + |
# Finally feed the file list to shar, whose archive follows our preamble
# on STDOUT. List-form pipe open avoids an intermediate shell; sorted
# keys make the archive deterministic. close() on a pipe reports the
# child's status, so a failing shar is no longer silently ignored.
open( my $sharout, '|-', 'shar', '-mfxzS' ) or die "Cannot run shar: $!\n";
print {$sharout} "$_\n" for sort keys %topack;
close $sharout or die "shar failed: " . ( $! ? "$!" : "exit status $?" ) . "\n";
0 commit comments