From 310e974e1bea4d3d71267aff3c0cd4854d3923da Mon Sep 17 00:00:00 2001
From: =?UTF-8?q?M=C3=BCmin=20K=C3=B6yk=C4=B1ran?=
Date: Fri, 30 Dec 2022 13:38:56 +0300
Subject: [PATCH 1/6] Added ssl-verify parameter

* Added new parameter to disable SSL Certificate Verification check.

---
 pwnxss.py | 15 +++++++++++++--
 1 file changed, 13 insertions(+), 2 deletions(-)

diff --git a/pwnxss.py b/pwnxss.py
index 8c39bff..33d1d2d 100644
--- a/pwnxss.py
+++ b/pwnxss.py
@@ -1,5 +1,5 @@
 '''
-PwnXSS - 2019/2020
+PwnXSS - 2019/2022
 This project was created by Andripwn with Pwn0sec team.
 Copyright under the MIT license
 '''
@@ -13,6 +13,16 @@
 Github: https://www.github.com/pwn0sec/PwnXSS
 Version: 0.5 Final
 """
+def str2bool(v):
+    if isinstance(v, bool):
+        return v
+    if v.lower() in ('yes', 'true', 't', 'y', '1'):
+        return True
+    elif v.lower() in ('no', 'false', 'f', 'n', '0'):
+        return False
+    else:
+        raise argparse.ArgumentTypeError('Boolean value expected.')
+
 def check(getopt):
     payload=int(getopt.payload_level)
     if payload > 6 and getopt.payload is None:
@@ -44,6 +54,7 @@ def start():
     pos_opt.add_argument("--proxy",default=None,metavar="",help="Set proxy (e.g. {'https':'https://10.10.1.10:1080'})")
     pos_opt.add_argument("--about",action="store_true",help="Print information about PwnXSS tool")
     pos_opt.add_argument("--cookie",help="Set cookie (e.g {'ID':'1094200543'})",default='''{"ID":"1094200543"}''',metavar="")
+    pos_opt.add_argument("--ssl-verify", type=str2bool, nargs='?',const=True, default=True,help="SSL Certificate Verification. Default: True")
     getopt=parse.parse_args()

     print(logo)
@@ -62,7 +73,7 @@ def start():
 Project: PwnXSS
 License: MIT
 Author: Security Executions Code
-Last updates: 2019 may 26
+Last updates: 2022 Dec 30
 Note: Take your own RISK
 ****************
 """+epilog)
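Reviewer note on PATCH 1/6: argparse stores `--ssl-verify` as `getopt.ssl_verify`, `nargs='?'` with `const=True` lets a bare `--ssl-verify` mean True, and `str2bool` maps the usual yes/no tokens. A minimal, self-contained sketch of the resulting parsing behavior (the converter is copied from the patch; the sample invocations are illustrative only):

    import argparse

    def str2bool(v):  # converter as added in PATCH 1/6
        if isinstance(v, bool):
            return v
        if v.lower() in ('yes', 'true', 't', 'y', '1'):
            return True
        elif v.lower() in ('no', 'false', 'f', 'n', '0'):
            return False
        else:
            raise argparse.ArgumentTypeError('Boolean value expected.')

    parse = argparse.ArgumentParser()
    parse.add_argument("--ssl-verify", type=str2bool, nargs='?', const=True, default=True)

    print(parse.parse_args([]).ssl_verify)                      # True  (default)
    print(parse.parse_args(["--ssl-verify"]).ssl_verify)        # True  (bare flag -> const)
    print(parse.parse_args(["--ssl-verify", "no"]).ssl_verify)  # False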
From 4289b6a203af617f202ece134fb452107439d928 Mon Sep 17 00:00:00 2001
From: =?UTF-8?q?M=C3=BCmin=20K=C3=B6yk=C4=B1ran?=
Date: Fri, 30 Dec 2022 13:42:48 +0300
Subject: [PATCH 2/6] Added ssl_verify parameter for crawler

* Added ssl_verify parameter for crawler.

---
 lib/crawler/crawler.py | 12 +++++++-----
 1 file changed, 7 insertions(+), 5 deletions(-)

diff --git a/lib/crawler/crawler.py b/lib/crawler/crawler.py
index ccb5339..13645a3 100644
--- a/lib/crawler/crawler.py
+++ b/lib/crawler/crawler.py
@@ -11,11 +11,13 @@ class crawler:
     visited=[]

     @classmethod
-    def getLinks(self,base,proxy,headers,cookie):
+    def getLinks(self,base,proxy,headers,cookie,ssl_verify):

         lst=[]

         conn=session(proxy,headers,cookie)
+        if ssl_verify == False:
+            conn.verify = False
         text=conn.get(base).text
         isi=BeautifulSoup(text,"html.parser")
@@ -37,17 +39,17 @@ def getLinks(self,base,proxy,headers,cookie):
         return lst

     @classmethod
-    def crawl(self,base,depth,proxy,headers,level,method,cookie):
+    def crawl(self,base,depth,proxy,headers,level,method,cookie,ssl_verify):

-        urls=self.getLinks(base,proxy,headers,cookie)
+        urls=self.getLinks(base,proxy,headers,cookie,ssl_verify)
         for url in urls:
             if url.startswith("https://") or url.startswith("http://"):
-                p=Process(target=core.main, args=(url,proxy,headers,level,cookie,method))
+                p=Process(target=core.main, args=(url,proxy,headers,level,cookie,method,ssl_verify))
                 p.start()
                 p.join()
                 if depth != 0:
-                    self.crawl(url,depth-1,base,proxy,level,method,cookie)
+                    self.crawl(url,depth-1,proxy,headers,level,method,cookie,ssl_verify)
                 else:
                     break

From 9401f13ea99a3523cb401d09f7a981412e18b7c6 Mon Sep 17 00:00:00 2001
From: =?UTF-8?q?M=C3=BCmin=20K=C3=B6yk=C4=B1ran?=
Date: Fri, 30 Dec 2022 14:22:17 +0300
Subject: [PATCH 3/6] Added ssl_verify parameter for core

* Added ssl_verify parameter for core.

---
 lib/core.py | 4 +++-
 1 file changed, 3 insertions(+), 1 deletion(-)

diff --git a/lib/core.py b/lib/core.py
index c2d1169..eec6c39 100644
--- a/lib/core.py
+++ b/lib/core.py
@@ -154,13 +154,15 @@ def get_method(self):
             Log.info("URL is not an HTTP url, ignoring")

     @classmethod
-    def main(self,url,proxy,headers,payload,cookie,method=2):
+    def main(self,url,proxy,headers,payload,cookie,method=2,ssl_verify=True):
         print(W+"*"*15)
         self.payload=payload
         self.url=url

         self.session=session(proxy,headers,cookie)
+        if ssl_verify == False:
+            self.session.verify = False
         Log.info("Checking connection to: "+Y+url)
         try:
             ctr=self.session.get(url)

From 19fbcb3c732b437729541188127d53c2d533c443 Mon Sep 17 00:00:00 2001
From: =?UTF-8?q?M=C3=BCmin=20K=C3=B6yk=C4=B1ran?=
Date: Fri, 30 Dec 2022 14:35:35 +0300
Subject: [PATCH 4/6] Added ssl_verify parameter for crawler

* Added ssl_verify parameter for crawler.

---
 lib/crawler/crawler.py | 4 +---
 1 file changed, 1 insertion(+), 3 deletions(-)

diff --git a/lib/crawler/crawler.py b/lib/crawler/crawler.py
index 13645a3..cf0ea92 100644
--- a/lib/crawler/crawler.py
+++ b/lib/crawler/crawler.py
@@ -15,9 +15,7 @@ def getLinks(self,base,proxy,headers,cookie,ssl_verify):

         lst=[]

-        conn=session(proxy,headers,cookie)
-        if ssl_verify == False:
-            conn.verify = False
+        conn=session(proxy,headers,cookie,ssl_verify)
         text=conn.get(base).text
         isi=BeautifulSoup(text,"html.parser")
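Reviewer note on PATCH 4/6 and 5/6: these hunks pass ssl_verify straight into session(), so the shared helper (in lib/helper, which this series does not touch) must grow a fourth parameter or the calls will raise TypeError. A hedged sketch of what that companion change could look like; the proxy/header/cookie handling below is inferred from the call sites, not copied from the repository:

    import requests
    import urllib3

    def session(proxy, headers, cookie, ssl_verify=True):
        s = requests.Session()
        if proxy:        # e.g. {'https':'https://10.10.1.10:1080'}
            s.proxies.update(proxy)
        if headers:      # the call sites pass getopt.user_agent here
            s.headers.update({"User-Agent": headers})
        if cookie:       # assumed to already be a dict; the CLI default is a
            s.cookies.update(cookie)  # string the real helper would need to parse
        s.verify = ssl_verify         # requests skips certificate checks when False
        if not ssl_verify:
            # suppress the per-request InsecureRequestWarning noise
            urllib3.disable_warnings(urllib3.exceptions.InsecureRequestWarning)
        return s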
From 44bc653d0b938009679762640a5e18dd87056885 Mon Sep 17 00:00:00 2001
From: =?UTF-8?q?M=C3=BCmin=20K=C3=B6yk=C4=B1ran?=
Date: Fri, 30 Dec 2022 14:36:40 +0300
Subject: [PATCH 5/6] Added ssl_verify parameter for core

* Added ssl_verify parameter for core.

---
 lib/core.py | 4 +---
 1 file changed, 1 insertion(+), 3 deletions(-)

diff --git a/lib/core.py b/lib/core.py
index eec6c39..d19c19c 100644
--- a/lib/core.py
+++ b/lib/core.py
@@ -160,9 +160,7 @@ def main(self,url,proxy,headers,payload,cookie,method=2,ssl_verify=True):
         self.payload=payload
         self.url=url

-        self.session=session(proxy,headers,cookie)
-        if ssl_verify == False:
-            self.session.verify = False
+        self.session=session(proxy,headers,cookie,ssl_verify)
         Log.info("Checking connection to: "+Y+url)
         try:
             ctr=self.session.get(url)

From d01b3aae1949b6e745c397eb34a0c8667ee1573a Mon Sep 17 00:00:00 2001
From: =?UTF-8?q?M=C3=BCmin=20K=C3=B6yk=C4=B1ran?=
Date: Fri, 30 Dec 2022 14:42:42 +0300
Subject: [PATCH 6/6] Added ssl-verify parameter

* Added ssl-verify parameter.

---
 pwnxss.py | 6 +++---
 1 file changed, 3 insertions(+), 3 deletions(-)

diff --git a/pwnxss.py b/pwnxss.py
index 33d1d2d..326e607 100644
--- a/pwnxss.py
+++ b/pwnxss.py
@@ -60,12 +60,12 @@ def start():
     print(logo)
     Log.info("Starting PwnXSS...")
     if getopt.u:
-        core.main(getopt.u,getopt.proxy,getopt.user_agent,check(getopt),getopt.cookie,getopt.method)
+        core.main(getopt.u,getopt.proxy,getopt.user_agent,check(getopt),getopt.cookie,getopt.method,getopt.ssl_verify)

-        crawler.crawl(getopt.u,int(getopt.depth),getopt.proxy,getopt.user_agent,check(getopt),getopt.method,getopt.cookie)
+        crawler.crawl(getopt.u,int(getopt.depth),getopt.proxy,getopt.user_agent,check(getopt),getopt.method,getopt.cookie,getopt.ssl_verify)

     elif getopt.single:
-        core.main(getopt.single,getopt.proxy,getopt.user_agent,check(getopt),getopt.cookie,getopt.method)
+        core.main(getopt.single,getopt.proxy,getopt.user_agent,check(getopt),getopt.cookie,getopt.method,getopt.ssl_verify)

     elif getopt.about:
         print("""
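With all six patches applied, verification can be disabled end to end, e.g. `python3 pwnxss.py -u https://self-signed.example --ssl-verify false`. The patched entry points can also be driven directly; a sketch under stated assumptions (placeholder URL and cookie; argument order follows the new signatures in PATCH 2/6 and 3/6, and 6 stands in for whatever payload level check() returns):

    from lib.core import core
    from lib.crawler.crawler import crawler

    target = "https://self-signed.example/search?q=test"  # placeholder
    cookie = '{"ID":"1094200543"}'                        # the CLI default

    # Single-URL scan with certificate verification turned off
    # (method 2 mirrors the signature's default).
    core.main(target, None, "Mozilla/5.0", 6, cookie, 2, ssl_verify=False)

    # Crawl two levels deep, forwarding the same ssl_verify flag.
    crawler.crawl(target, 2, None, "Mozilla/5.0", 6, 2, cookie, False)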