##
# This module requires Metasploit: http://metasploit.com/download
# Current source: https://github.com/rapid7/metasploit-framework
##

require 'pdf-reader'
class MetasploitModule < Msf::Auxiliary

  def initialize(info = {})
    super(update_info(info,
      'Name'        => 'Gather PDF Authors',
      'Description' => %q{
        This module downloads PDF files and extracts the author's
        name from the document metadata.
      },
      'License'     => MSF_LICENSE,
      'Author'      => 'Brendan Coles <bcoles[at]gmail.com>'))
    register_options(
      [
        OptString.new('URL', [ false, 'The URL of a PDF to analyse', '' ]),
        OptString.new('URL_LIST', [ false, 'File containing a list of PDF URLs to analyze', '' ]),
        OptString.new('OUTFILE', [ false, 'File to store output', '' ])
      ])
    register_advanced_options(
      [
        # Fix: use OptBool rather than OptString. With OptString the datastore
        # value is a String, and any non-nil String (including 'false') is
        # truthy in Ruby, so certificate verification could never be disabled.
        OptBool.new('SSL_VERIFY', [ true, 'Verify SSL certificate', true ]),
        OptString.new('PROXY', [ false, 'Proxy server to route connection. <host>:<port>', nil ]),
        OptString.new('PROXY_USER', [ false, 'Proxy Server User', nil ]),
        OptString.new('PROXY_PASS', [ false, 'Proxy Server Password', nil ])
      ])
  end

  # Print a progress line showing how many of the supplied URLs have been
  # processed so far, as a percentage and a (current/total) count.
  def progress(current, total)
    done = (current.to_f / total.to_f) * 100
    percent = "%3.2f%%" % done.to_f
    print_status "%7s done (%d/%d files)" % [percent, current, total]
  end

  # Build the list of target URLs from either the URL option or the URL_LIST
  # file (one URL per line). Fails the module when neither option is set or
  # the list file does not exist.
  def load_urls
    return [ datastore['URL'] ] unless datastore['URL'].to_s.eql? ''

    if datastore['URL_LIST'].to_s.eql? ''
      fail_with Failure::BadConfig, 'No URL(s) specified'
    end

    unless File.file? datastore['URL_LIST'].to_s
      # Fix: typo in the error message ('does not exit' -> 'does not exist')
      fail_with Failure::BadConfig, "File '#{datastore['URL_LIST']}' does not exist"
    end

    File.open(datastore['URL_LIST'], 'rb') { |f| f.read }.split(/\r?\n/)
  end

  # Parse PDF data (an IO-like object) with PDF::Reader and return the
  # author string, or nil when the document cannot be parsed.
  def read(data)
    reader = PDF::Reader.new data
    parse reader
  rescue PDF::Reader::MalformedPDFError
    print_error "Could not parse PDF: PDF is malformed"
    nil
  rescue PDF::Reader::UnsupportedFeatureError
    print_error "Could not parse PDF: PDF::Reader::UnsupportedFeatureError"
    nil
  rescue => e
    print_error "Could not parse PDF: Unhandled exception: #{e}"
    nil
  end

  # Extract the author from the PDF document info dictionary. Returns the
  # first line of the :Author field, or '' when no String author is present.
  #
  # Other metadata available from the reader, should this module ever need
  # it: reader.pdf_version, reader.info, reader.metadata, reader.page_count,
  # reader.info[:Creator], reader.info[:Producer].
  def parse(reader)
    # Author values may contain embedded newlines; keep only the first line.
    reader.info[:Author].class == String ? reader.info[:Author].split(/\r?\n/).first : ''
  end

  # Download the PDF at the given http(s) URL, optionally through the
  # configured proxy. Returns a StringIO over the response body, or nil
  # on any failure.
  def download(url)
    print_status "Downloading '#{url}'"

    begin
      target = URI.parse url
      # Fix: anchor the scheme check. The old %r{https?} matched 'http'
      # appearing anywhere inside the scheme (e.g. 'xhttpx').
      raise 'Invalid URL' unless target.scheme =~ /\Ahttps?\z/
      raise 'Invalid URL' if target.host.to_s.eql? ''
    rescue => e
      print_error "Could not parse URL: #{e}"
      return
    end

    clnt = Net::HTTP::Proxy(@proxysrv, @proxyport, @proxyuser, @proxypass).new(target.host, target.port)

    if target.scheme.eql? 'https'
      clnt.use_ssl = true
      clnt.verify_mode = datastore['SSL_VERIFY'] ? OpenSSL::SSL::VERIFY_PEER : OpenSSL::SSL::VERIFY_NONE
    end

    headers = {
      'User-Agent' => 'Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US) AppleWebKit/525.13 (KHTML, like Gecko) Chrome/4.0.221.6 Safari/525.13'
    }

    begin
      res = clnt.get2 target.request_uri, headers
    rescue => e
      print_error "Connection failed: #{e}"
      return
    end

    unless res
      print_error 'Connection failed'
      return
    end

    print_status "HTTP #{res.code} -- Downloaded PDF (#{res.body.length} bytes)"

    # Fix: wrap the body directly instead of StringIO#puts, which appended
    # a trailing newline and corrupted the byte-exact PDF stream.
    StringIO.new res.body
  end

  # Append the collected data to OUTFILE, if that option is set.
  def write_output(data)
    return if datastore['OUTFILE'].to_s.eql? ''

    print_status "Writing data to #{datastore['OUTFILE']}..."
    file_name = datastore['OUTFILE']

    if FileTest::exist?(file_name)
      print_status 'OUTFILE already exists, appending..'
    end

    File.open(file_name, 'ab') do |fd|
      fd.write(data)
    end
  end

  # Module entry point: resolve proxy settings, fetch each URL, extract the
  # PDF author from each document, then report and optionally save results.
  def run
    if datastore['PROXY']
      @proxysrv, @proxyport = datastore['PROXY'].split(':')
      @proxyuser = datastore['PROXY_USER']
      @proxypass = datastore['PROXY_PASS']
    else
      @proxysrv, @proxyport = nil, nil
    end

    urls = load_urls
    print_status "Processing #{urls.size} URLs..."
    authors = []
    max_len = 256
    urls.each_with_index do |url, index|
      next if url.blank?
      contents = download url
      next if contents.blank?
      author = read contents
      unless author.blank?
        print_good "PDF Author: #{author}"
        if author.length > max_len
          # Fix: the old message hard-coded a garbled '#13,326' character
          # count; report the actual truncation length instead.
          print_warning "Warning: Truncated author's name at #{max_len} characters"
          authors << author[0...max_len]
        else
          authors << author
        end
      end
      progress(index + 1, urls.size)
    end

    print_line

    if authors.empty?
      print_status 'Found no authors'
      return
    end

    print_good "Found #{authors.size} authors: #{authors.join ', '}"
    write_output authors.join "\n"
  end
end
0 commit comments