package main

// imports
import (
	"bufio"
	"crypto/tls"
	"errors"
	"fmt"
	"io/ioutil"
	"math/rand"
	"net"
	"net/http"
	"net/url"
	"os"
	"path"
	"strings"
	"sync"
	"time"

	flag "github.com/spf13/pflag"
)

const (
	qsize = 20
)

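// ln describes a single download job: the local file name to save as,
// the full URL to fetch, and its host (used for the per-target folder).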
type ln struct {
	filename string
	url      string
	host     string
}

var (
	workersArg        int
	timeOutArg        int
	headerArg         []string
	urlArg            string
	statusListArg     string
	proxyArg          string
	fingerPrintArg    string
	outputFileArg     string
	queryArg          string
	verboseArg        bool
	followRedirectArg bool
	useRandomAgentArg bool
	testHTTPArg       bool
	allInArg          bool
	useUniqueName     bool
)

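// newClient builds the shared *http.Client: TLS verification is skipped,
// the dial and request timeouts come from --timeout, an optional proxy is
// applied, and redirects are disabled unless --follow-redirect is set.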
func newClient() *http.Client {
	tr := &http.Transport{
		MaxIdleConns:    30,
		IdleConnTimeout: time.Second,
		TLSClientConfig: &tls.Config{InsecureSkipVerify: true},
		DialContext: (&net.Dialer{
			Timeout: time.Second * time.Duration(timeOutArg),
		}).DialContext,
	}

	if proxyArg != "" {
		if p, err := url.Parse(proxyArg); err == nil {
			tr.Proxy = http.ProxyURL(p)
		}
	}

	client := &http.Client{
		Transport: tr,
		Timeout:   time.Second * time.Duration(timeOutArg),
	}

	if !followRedirectArg {
		client.CheckRedirect = func(req *http.Request, via []*http.Request) error {
			return http.ErrUseLastResponse
		}
	}

	return client
}

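// main parses the command-line flags, reads target URLs from --url or from
// stdin, starts the worker pool, and queues one download job per valid URL.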
func main() {

	flag.StringArrayVarP(&headerArg, "header", "H", nil, "Add custom headers to the request")
	flag.StringVarP(&urlArg, "url", "u", "", "The URL to check")
	flag.StringVarP(&outputFileArg, "output", "o", "", "Save to folder. Default: create a results folder with a subfolder for each target")
	flag.IntVarP(&workersArg, "workers", "w", 20, "Number of workers")
	flag.BoolVarP(&verboseArg, "verbose", "v", false, "Display extra info about what is going on")
	flag.BoolVarP(&followRedirectArg, "follow-redirect", "f", false, "Follow redirects (Default: false)")
	flag.StringVarP(&proxyArg, "proxy", "p", "", "Add an HTTP proxy")
	flag.BoolVarP(&useRandomAgentArg, "random-agent", "r", false, "Set a random User-Agent")
	flag.BoolVarP(&allInArg, "no-folders", "", false, "Don't store results in separate folders")
	flag.BoolVarP(&useUniqueName, "unique", "", false, "Use a unique name for each file")
	flag.IntVarP(&timeOutArg, "timeout", "t", 20, "Connection timeout in seconds")

	flag.Parse()

	// Cap the worker count.
	if workersArg > 100 {
		workersArg = 100
	}

	client := newClient()

	var links []string

	if len(urlArg) > 0 {
		links = append(links, urlArg)
	} else {
		s := bufio.NewScanner(os.Stdin)
		for s.Scan() {
			links = append(links, s.Text())
		}
	}

	var wg sync.WaitGroup

	queue := make(chan ln, qsize)

	for i := 0; i < workersArg; i++ {
		wg.Add(1)
		go worker(i+1, queue, &wg, client)
	}

	for _, link := range links {

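		// Validate the URL before queueing a job; skip lines that do not parse.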
		u, err := url.ParseRequestURI(link)
		if err != nil {
			if verboseArg {
				fmt.Printf("[-] Invalid url: %s\n", link)
			}
			// Skip invalid URLs instead of dereferencing a nil *url.URL below.
			continue
		}

		_, fileName := path.Split(u.Path)

		link0 := ln{filename: fileName, url: link, host: u.Host}

		queue <- link0
	}

	close(queue)
	wg.Wait()
}

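// worker drains the queue: it downloads each queued URL, builds the output
// path according to the folder/unique-name flags, and writes the body to disk.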
func worker(index int, queue <-chan ln, wg *sync.WaitGroup, client *http.Client) {

	defer wg.Done()
	for link := range queue {
		if verboseArg {
			fmt.Printf("[+] Worker %d, downloading %s\n", index, link.url)
		}
		bytes, err := fetch(link.url, client)

		if err != nil {
			if verboseArg {
				fmt.Println(err)
			}
			continue
		}

		var errWrite error
		var fullPath string

		if useUniqueName {
			// Prefix the file name with the host (dots replaced by underscores).
			host, _ := url.Parse(link.url)

			cleanhost := strings.Replace(host.Host, ".", "_", -1)

			link.filename = cleanhost + "_" + link.filename
		}

		if !allInArg {
			if outputFileArg != "" {
				fullPath = path.Join(outputFileArg, "results")
			} else {
				fullPath = "results"
			}

			// Create the results folder.
			if _, errResults := os.Stat(fullPath); os.IsNotExist(errResults) {
				os.MkdirAll(fullPath, 0755)
			}

			fullPath = path.Join(fullPath, link.host)

			// Create a folder for each domain.
			if _, errFolder := os.Stat(fullPath); os.IsNotExist(errFolder) {
				os.Mkdir(fullPath, 0755)
			}

			fullPath = path.Join(fullPath, link.filename)

		} else {
			if outputFileArg != "" {
				fullPath = outputFileArg
				os.Mkdir(fullPath, 0755)
			}

			fullPath = path.Join(fullPath, link.filename)
		}

		// Write the file.
		errWrite = ioutil.WriteFile(fullPath, bytes, 0644)

		if errWrite != nil {
			if verboseArg {
				fmt.Printf("[-] Write error: %v\n", errWrite)
			}
		} else {
			if verboseArg {
				fmt.Printf("[+] file OK: %v, size: %v\n", link.filename, len(bytes))
			}
		}
	}
}

// fetch performs a GET request for the given URL, applying the User-Agent
// and any custom headers, and returns the response body.
func fetch(target string, client *http.Client) ([]byte, error) {

	req, err := http.NewRequest("GET", target, nil)

	if err != nil {
		if verboseArg {
			fmt.Printf("[-] Error: %v\n", err)
		}
		return nil, errors.New("[-] Failed to build request for " + target)
	}

	if useRandomAgentArg {
		req.Header.Set("User-Agent", getUserAgent())
	} else {
		req.Header.Set("User-Agent", "Mozilla/5.0 (compatible; fget/1.0)")
	}

	// Add custom headers to the request.
	for _, h := range headerArg {
		parts := strings.SplitN(h, ":", 2)

		if len(parts) != 2 {
			continue
		}
		req.Header.Set(parts[0], strings.TrimSpace(parts[1]))
	}

	// Send the request.
	resp, err := client.Do(req)
	if err != nil {
		if verboseArg {
			fmt.Printf("[-] Error: %v\n", err)
		}
		return nil, errors.New("[-] Failed to fetch " + target)
	}
	defer resp.Body.Close()

	body, err := ioutil.ReadAll(resp.Body)
	if err != nil {
		if verboseArg {
			fmt.Printf("[-] Error: %v\n", err)
		}
		return nil, errors.New("[-] Failed to read body of " + target)
	}
	return body, nil
}

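// getUserAgent returns one of a few common browser User-Agent strings at random.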
func getUserAgent() string {
	payload := []string{
		"Mozilla/5.0 (Macintosh; Intel Mac OS X 10_15_7) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/116.0.0.0 Safari/537.36",
		"Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/116.0.0.0 Safari/537.36",
		"Mozilla/5.0 (Macintosh; Intel Mac OS X 10.15; rv:109.0) Gecko/20100101 Firefox/117.0",
		"Mozilla/5.0 (Windows NT 10.0; Win64; x64; rv:109.0) Gecko/20100101 Firefox/117.0",
		"Mozilla/5.0 (Macintosh; Intel Mac OS X 10_15_7) AppleWebKit/605.1.15 (KHTML, like Gecko) Version/17.0 Safari/605.1.15",
	}

	rand.Seed(time.Now().UnixNano())
	randomIndex := rand.Intn(len(payload))

	pick := payload[randomIndex]

	return pick
}