@@ -6,13 +6,13 @@ use std::io::{Read, Seek, SeekFrom};
 use std::path::Path;
 
 use super::{Archive, ArchiveError, Origin};
+use attohttpc::header::HeaderMap;
 use flate2::read::GzDecoder;
 use fs_utils::ensure_containing_dir_exists;
 use hyperx::header::{
     AcceptRanges, ByteRangeSpec, ContentLength, Header, Range, RangeUnit, TypedHeaders,
 };
 use progress_read::ProgressRead;
-use reqwest::blocking::Response;
 use tee::TeeReader;
 
 /// A Node installation tarball.
@@ -29,9 +29,8 @@ pub struct Tarball {
 
 /// Determines the length of an HTTP response's content in bytes, using
 /// the HTTP `"Content-Length"` header.
-fn content_length(response: &Response) -> Result<u64, ArchiveError> {
-    response
-        .headers()
+fn content_length(headers: &HeaderMap) -> Result<u64, ArchiveError> {
+    headers
         .decode::<ContentLength>()
         .ok()
         .map(|v| v.0)
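The rewritten `content_length` takes the `HeaderMap` returned by `attohttpc::Response::split()` (used in the next hunk) and decodes it with hyperx's `TypedHeaders` extension trait, which is implemented for `http::HeaderMap`. A minimal, self-contained sketch of that decode step, with a made-up header value; the function name here is illustrative only:

```rust
use attohttpc::header::HeaderMap;
use hyperx::header::{ContentLength, TypedHeaders};

fn content_length_demo() -> Option<u64> {
    // Build a HeaderMap by hand purely for illustration; in the real code the
    // map comes from `attohttpc::Response::split()`.
    let mut headers = HeaderMap::new();
    headers.insert("content-length", "1024".parse().unwrap());

    // hyperx's TypedHeaders trait works on http::HeaderMap, so the same
    // `.decode::<ContentLength>()` call used above applies here.
    headers.decode::<ContentLength>().ok().map(|v| v.0)
}
```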
@@ -55,14 +54,14 @@ impl Tarball {
     /// tarball that can be streamed (and that tees its data to a local
     /// file as it streams).
     pub fn fetch(url: &str, cache_file: &Path) -> Result<Box<dyn Archive>, ArchiveError> {
-        let response = reqwest::blocking::get(url)?;
+        let (status, headers, response) = attohttpc::get(url).send()?.split();
 
-        if !response.status().is_success() {
-            return Err(ArchiveError::HttpError(response.status()));
+        if !status.is_success() {
+            return Err(ArchiveError::HttpError(status));
         }
 
-        let compressed_size = content_length(&response)?;
-        let uncompressed_size = if accepts_byte_ranges(&response) {
+        let compressed_size = content_length(&headers)?;
+        let uncompressed_size = if accepts_byte_ranges(&headers) {
             fetch_uncompressed_size(url, compressed_size)
         } else {
             None
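For context on the "tees its data to a local file as it streams" behavior in the doc comment above: the body reader that `split()` yields implements `std::io::Read`, so it can be wrapped in a `TeeReader` that copies compressed bytes into the cache file while a `GzDecoder` consumes the same stream. A rough sketch under that assumption; the URL, path, and function name are placeholders, not the crate's actual code:

```rust
use flate2::read::GzDecoder;
use std::fs::File;
use std::io::Read;
use tee::TeeReader;

fn fetch_sketch() -> Result<(), Box<dyn std::error::Error>> {
    let (status, _headers, body) = attohttpc::get("https://example.org/node.tar.gz")
        .send()?
        .split();
    assert!(status.is_success());

    // Tee the compressed bytes into a local cache file as they stream in,
    // and decompress the same stream on the fly.
    let cache = File::create("/tmp/node.tar.gz")?;
    let mut tarball = GzDecoder::new(TeeReader::new(body, cache));

    let mut buf = Vec::new();
    tarball.read_to_end(&mut buf)?; // the real code feeds this into tar unpacking
    Ok(())
}
```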
@@ -128,18 +127,17 @@ fn unpack_isize(packed: [u8; 4]) -> u64 {
 /// downloading the entire gzip file. For very small files it's unlikely to be
 /// more efficient than simply downloading the entire file up front.
 fn fetch_isize(url: &str, len: u64) -> Result<[u8; 4], ArchiveError> {
-    let client = reqwest::blocking::Client::new();
     let range_header = Range::Bytes(vec![ByteRangeSpec::FromTo(len - 4, len - 1)]);
-    let mut response = client
-        .get(url)
+    let (status, headers, mut response) = attohttpc::get(url)
         .header(Range::header_name(), range_header.to_string())
-        .send()?;
+        .send()?
+        .split();
 
-    if !response.status().is_success() {
-        return Err(ArchiveError::HttpError(response.status()));
+    if !status.is_success() {
+        return Err(ArchiveError::HttpError(status));
     }
 
-    let actual_length = content_length(&response)?;
+    let actual_length = content_length(&headers)?;
 
     if actual_length != 4 {
         return Err(ArchiveError::UnexpectedContentLengthError(actual_length));
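The byte range requested above, `len - 4` through `len - 1`, targets the gzip ISIZE trailer: per RFC 1952, the last four bytes of a gzip stream hold the uncompressed size modulo 2^32 as a little-endian integer. The real `unpack_isize` is not shown in this hunk, so the following is only an illustrative sketch of that decoding step:

```rust
/// Decode a gzip ISIZE trailer: uncompressed size mod 2^32, little-endian.
fn unpack_isize_sketch(packed: [u8; 4]) -> u64 {
    u32::from_le_bytes(packed) as u64
}

// e.g. a trailer of [0x00, 0x10, 0x00, 0x00] means 4096 uncompressed bytes.
```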
@@ -160,9 +158,8 @@ fn load_isize(file: &mut File) -> Result<[u8; 4], ArchiveError> {
     Ok(buf)
 }
 
-fn accepts_byte_ranges(response: &Response) -> bool {
-    response
-        .headers()
+fn accepts_byte_ranges(headers: &HeaderMap) -> bool {
+    headers
         .decode::<AcceptRanges>()
         .ok()
         .map(|v| v.iter().any(|unit| *unit == RangeUnit::Bytes))