Commit 2fa16f4

Rewrite relative script URLs to be absolute.
* Adds rescue clauses around URI parsing/pulling
* Actually use the URI_PATH datastore option.
1 parent 993356c commit 2fa16f4
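The headline change resolves script URLs that are relative references against the URI of the page they were found on before fetching them. Below is a minimal sketch of that idea using Ruby's standard URI library; the helper name and arguments are illustrative only, not part of the module, which does the same join inline in find_cached_scripts.

    require 'uri'

    # Illustrative helper (not from the module): resolve a possibly-relative
    # script URL against the URI of the page it was scraped from, returning
    # nil for anything that does not parse rather than raising.
    def absolute_script_url(page_url, script_url)
      page_uri   = URI.parse(page_url)
      script_uri = URI.parse(script_url)
      # URI#+ (alias of URI#merge) joins a relative reference onto a base URI
      script_uri.relative? ? (page_uri + script_url) : script_uri
    rescue URI::InvalidURIError
      nil
    end

    absolute_script_url('http://example.com/articles/index.html', 'js/app.js')
    # => #<URI::HTTP http://example.com/articles/js/app.js>

The diff below applies the same join inside the per-URL loop and additionally rescues URI::InvalidURIError (and Errno::ENOENT around the fetch) so a single bad URL skips that script instead of aborting the whole scrape.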

1 file changed (+44, −23 lines)

modules/auxiliary/gather/apple_safari_webarchive_uxss.rb

Lines changed: 44 additions & 23 deletions
@@ -76,7 +76,7 @@ def cleanup
     super
     # clear my resource, deregister ref, stop/close the HTTP socket
     begin
-      @http_service.remove_resource("/grab")
+      @http_service.remove_resource(collect_data_uri)
       @http_service.deref
       @http_service.stop
       @http_service.close
@@ -140,7 +140,7 @@ def start_http(opts={})
         'Proc' => Proc.new { |cli, req|
           on_request_uri(cli, req)
         },
-        'Path' => "/grab"
+        'Path' => collect_data_uri
     }.update(opts['Uri'] || {})

     proto = (datastore["SSL"] ? "https" : "http")
@@ -715,24 +715,43 @@ def all_script_urls(pages)
 
   # @return [Array<Array<String>>] list of URLs for remote javascripts that are cacheable
   def find_cached_scripts
-    cached_scripts = all_script_urls(urls).map do |urls_for_site|
+    cached_scripts = all_script_urls(urls).each_with_index.map do |urls_for_site, i|
+      begin
+        page_uri = URI.parse(urls[i])
+      rescue URI::InvalidURIError => e
+        next
+      end
+
       results = urls_for_site.uniq.map do |url|
-        print_status "URL: #{url}"
-        io = open url
-        # parse some HTTP headers and do type coercions
-        last_modified = io.last_modified
-        expires = Time.parse(io.meta['expires']) rescue nil
-        cache_control = io.meta['cache-control'] || ''
-        charset = io.charset
-        etag = io.meta['etag']
-        # lets see if we are able to "poison" the cache for this asset...
-        if (!expires.nil? && Time.now < expires) or
-           (cache_control.length > 0) or # if asset is cacheable
-           (last_modified.length > 0)
-          print_status("Found cacheable #{url}")
-          io.meta.merge(:body => io.read, :url => url)
-        else
-          nil
+        begin
+          print_status "URL: #{url}"
+          begin
+            script_uri = URI.parse(url)
+            if script_uri.relative?
+              url = page_uri + url
+            end
+            io = open(url)
+          rescue URI::InvalidURIError => e
+            next
+          end
+
+          # parse some HTTP headers and do type coercions
+          last_modified = io.last_modified
+          expires = Time.parse(io.meta['expires']) rescue nil
+          cache_control = io.meta['cache-control'] || ''
+          charset = io.charset
+          etag = io.meta['etag']
+          # lets see if we are able to "poison" the cache for this asset...
+          if (!expires.nil? && Time.now < expires) or
+             (cache_control.length > 0) or # if asset is cacheable
+             (not last_modified.nil? and last_modified.to_s.length > 0)
+            print_status("Found cacheable #{url}")
+            io.meta.merge(:body => io.read, :url => url)
+          else
+            nil
+          end
+        rescue Errno::ENOENT => e # lots of things can go wrong here.
+          next
         end
       end
       results.compact # remove nils
@@ -745,7 +764,9 @@ def find_cached_scripts
 
   # @return [String] the path to send data back to
   def collect_data_uri
-    "/grab"
+    path = datastore["URI_PATH"]
+    path = if not path or path.empty? then '/grab' end
+    if path.starts_with '/' then path else "/#{path}" end
   end
 
   # @return [String] formatted http/https URL of the listener
@@ -780,9 +801,9 @@ def urls
   # @param [String] input the unencoded string
   # @return [String] input with dangerous chars replaced with xml entities
   def escape_xml(input)
-    input.gsub("&", "&amp;").gsub("<", "&lt;")
-      .gsub(">", "&gt;").gsub("'", "&apos;")
-      .gsub("\"", "&quot;")
+    input.to_s.gsub("&", "&amp;").gsub("<", "&lt;")
+      .gsub(">", "&gt;").gsub("'", "&apos;")
+      .gsub("\"", "&quot;")
   end
 
   def should_steal_cookies?
