@@ -16,79 +16,65 @@ import (
 var displayProgress = true
 
 func main() {
-    var err error
-    var proxy, filepath, bwLimit string
+    // var err error
+    var proxy, filePath, bwLimit, resumeTask string
 
-    conn := flag.Int("n", runtime.NumCPU(), "connection")
-    skiptls := flag.Bool("skip-tls", true, "skip verify certificate for https")
-    flag.StringVar(&proxy, "proxy", "", "proxy for downloading, ex\n\t-proxy '127.0.0.1:12345' for socks5 proxy\n\t-proxy 'http://proxy.com:8080' for http proxy")
-    flag.StringVar(&filepath, "file", "", "filepath that contains links in each line")
-    flag.StringVar(&bwLimit, "rate", "", "bandwidth limit to use while downloading, ex\n\t-rate 10kB\n\t-rate 10MiB")
+    conn := flag.Int("n", runtime.NumCPU(), "number of connections")
+    skiptls := flag.Bool("skip-tls", true, "skip certificate verification for https")
+    flag.StringVar(&proxy, "proxy", "", "proxy for downloading, e.g. -proxy '127.0.0.1:12345' for socks5 or -proxy 'http://proxy.com:8080' for http proxy")
+    flag.StringVar(&filePath, "file", "", "path to a file that contains one URL per line")
+    flag.StringVar(&bwLimit, "rate", "", "bandwidth limit during download, e.g. -rate 10kB or -rate 10MiB")
+    flag.StringVar(&resumeTask, "resume", "", "resume download task with given task name (or URL)")
 
     flag.Parse()
     args := flag.Args()
+
+    // If the resume flag is provided, use that path (ignoring other arguments)
+    if resumeTask != "" {
+        state, err := Resume(resumeTask)
+        FatalCheck(err)
+        Execute(state.URL, state, *conn, *skiptls, proxy, bwLimit)
+        return
+    }
+
+    // If no resume flag, then check for positional URL or file input
     if len(args) < 1 {
-        if len(filepath) < 2 {
-            Errorln("url is required")
+        if len(filePath) < 1 {
+            Errorln("A URL or input file with URLs is required")
             usage()
             os.Exit(1)
         }
-        // Creating a SerialGroup.
+        // Create a serial group for processing multiple URLs in a file.
         g1 := task.NewSerialGroup()
-        file, err := os.Open(filepath)
+        file, err := os.Open(filePath)
         if err != nil {
             FatalCheck(err)
         }
-
         defer file.Close()
 
         reader := bufio.NewReader(file)
-
         for {
             line, _, err := reader.ReadLine()
-
             if err == io.EOF {
                 break
             }
-
-            g1.AddChild(downloadTask(string(line), nil, *conn, *skiptls, proxy, bwLimit))
+            url := string(line)
+            // Add the download task for each URL
+            g1.AddChild(downloadTask(url, nil, *conn, *skiptls, proxy, bwLimit))
         }
         g1.Run(nil)
         return
     }
 
-    command := args[0]
-    if command == "tasks" {
-        if err = TaskPrint(); err != nil {
-            Errorf("%v\n", err)
-        }
-        return
-    } else if command == "resume" {
-        if len(args) < 2 {
-            Errorln("downloading task name is required")
-            usage()
-            os.Exit(1)
-        }
-
-        var task string
-        if IsURL(args[1]) {
-            task = TaskFromURL(args[1])
-        } else {
-            task = args[1]
-        }
-
-        state, err := Resume(task)
+    // Otherwise, if a URL is provided as positional argument, treat it as a new download.
+    downloadURL := args[0]
+    // Check if a folder already exists for the task and remove if necessary.
+    if ExistDir(FolderOf(downloadURL)) {
+        Warnf("Downloading task already exists, remove it first\n")
+        err := os.RemoveAll(FolderOf(downloadURL))
         FatalCheck(err)
-        Execute(state.URL, state, *conn, *skiptls, proxy, bwLimit)
-        return
-    } else {
-        if ExistDir(FolderOf(command)) {
-            Warnf("Downloading task already exist, remove first\n")
-            err := os.RemoveAll(FolderOf(command))
-            FatalCheck(err)
-        }
-        Execute(command, nil, *conn, *skiptls, proxy, bwLimit)
     }
+    Execute(downloadURL, nil, *conn, *skiptls, proxy, bwLimit)
 }
 
 func downloadTask(url string, state *State, conn int, skiptls bool, proxy string, bwLimit string) task.Task {
@@ -98,19 +84,16 @@ func downloadTask(url string, state *State, conn int, skiptls bool, proxy string
     return task.NewTaskWithFunc(run)
 }
 
-// Execute configures the HTTPDownloader and uses it to download stuff.
+// Execute configures the HTTPDownloader and uses it to download the target.
 func Execute(url string, state *State, conn int, skiptls bool, proxy string, bwLimit string) {
-    //otherwise is hget <URL> command
-
+    // Capture OS interrupt signals
     signalChan := make(chan os.Signal, 1)
     signal.Notify(signalChan,
         syscall.SIGHUP,
         syscall.SIGINT,
         syscall.SIGTERM,
         syscall.SIGQUIT)
 
-    //set up parallel
-
     var files = make([]string, 0)
     var parts = make([]Part, 0)
     var isInterrupted = false
@@ -125,14 +108,20 @@ func Execute(url string, state *State, conn int, skiptls bool, proxy string, bwL
     if state == nil {
         downloader = NewHTTPDownloader(url, conn, skiptls, proxy, bwLimit)
     } else {
-        downloader = &HTTPDownloader{url: state.URL, file: filepath.Base(state.URL), par: int64(len(state.Parts)), parts: state.Parts, resumable: true}
+        downloader = &HTTPDownloader{
+            url:       state.URL,
+            file:      filepath.Base(state.URL),
+            par:       int64(len(state.Parts)),
+            parts:     state.Parts,
+            resumable: true,
+        }
     }
     go downloader.Do(doneChan, fileChan, errorChan, interruptChan, stateChan)
 
     for {
         select {
         case <-signalChan:
-            //send par number of interrupt for each routine
+            // Signal all active download routines to interrupt.
             isInterrupted = true
             for i := 0; i < conn; i++ {
                 interruptChan <- true
@@ -141,19 +130,19 @@ func Execute(url string, state *State, conn int, skiptls bool, proxy string, bwL
             files = append(files, file)
         case err := <-errorChan:
             Errorf("%v", err)
-            panic(err) //maybe need better style
+            panic(err)
         case part := <-stateChan:
             parts = append(parts, part)
         case <-doneChan:
             if isInterrupted {
                 if downloader.resumable {
-                    Printf("Interrupted, saving state ... \n")
+                    Printf("Interrupted, saving state...\n")
                     s := &State{URL: url, Parts: parts}
                     if err := s.Save(); err != nil {
                         Errorf("%v\n", err)
                     }
                 } else {
-                    Warnf("Interrupted, but downloading url is not resumable, silently die")
+                    Warnf("Interrupted, but the download is not resumable. Exiting silently.\n")
                 }
             } else {
                 err := JoinFile(files, filepath.Base(url))
@@ -168,8 +157,15 @@ func Execute(url string, state *State, conn int, skiptls bool, proxy string, bwL
 
 func usage() {
     Printf(`Usage:
-    hget [-n connection] [-skip-tls true] [-proxy proxy_address] [-file filename] URL
-    hget tasks
-    hget resume [TaskName]
+    hget [options] URL
+    hget [options] --resume=TaskName
+
+Options:
+    -n int          number of connections (default number of CPUs)
+    -skip-tls bool  skip certificate verification for https (default true)
+    -proxy string   proxy address (e.g., '127.0.0.1:12345' for socks5 or 'http://proxy.com:8080')
+    -file string    file path containing URLs (one per line)
+    -rate string    bandwidth limit during download (e.g., 10kB, 10MiB)
+    -resume string  resume a stopped download by providing its task name or URL
 `)
 }