Skip to content

Commit 9a4c3b0

Browse files
authored
Enhance download function with Qualcomm HEAD fallback
1 parent 80fbb76 commit 9a4c3b0

File tree

1 file changed

+44
-24
lines changed

1 file changed

+44
-24
lines changed

backends/qualcomm/scripts/download_qnn_sdk.py

Lines changed: 44 additions & 24 deletions
Original file line numberDiff line numberDiff line change
@@ -117,35 +117,58 @@ def _atomic_download(url: str, dest: pathlib.Path):
117117

118118

119119
def _download_archive(url: str, archive_path: pathlib.Path) -> bool:
120-
"""Reliable download with resume + retries."""
120+
"""Reliable download with resume + retries + Qualcomm HEAD fallback."""
121121

122122
logger.info("Downloading with resume support: %s", url)
123123

124124
CHUNK_SIZE = 1024 * 1024 # 1MB
125-
MAX_RETRIES = 5
126-
127-
# Determine existing partial file size
128125
downloaded = archive_path.stat().st_size if archive_path.exists() else 0
129-
130-
# Get total size
131-
head = requests.head(url)
132-
if "content-length" not in head.headers:
133-
logger.error("Server did not return content-length header!")
126+
MAX_RETRIES = 10
127+
128+
# ---- STEP 1: Try HEAD (may fail for Qualcomm) ----
129+
head = requests.head(url, allow_redirects=True)
130+
131+
def head_looks_invalid(h: requests.Response) -> bool:
132+
"""Qualcomm sometimes sends HTML wrappers."""
133+
if "content-length" not in h.headers:
134+
return True
135+
size = int(h.headers["content-length"])
136+
if size < 1024 * 1024: # <1MB means HTML stub, not the zip
137+
return True
138+
ctype = h.headers.get("content-type", "").lower()
139+
if "zip" not in ctype:
140+
return True
134141
return False
135-
total = int(head.headers["content-length"])
136142

137-
if downloaded == total:
143+
# ---- STEP 2: Fallback: Issue a GET request to discover real size ----
144+
if head_looks_invalid(head):
145+
logger.warning("HEAD invalid (size=%s, type=%s). Using GET for size detection.",
146+
head.headers.get("content-length"),
147+
head.headers.get("content-type"))
148+
149+
with requests.get(url, stream=True, allow_redirects=True) as r:
150+
r.raise_for_status()
151+
if "content-length" not in r.headers:
152+
logger.error("Server did not return content-length even on GET!")
153+
return False
154+
total = int(r.headers["content-length"])
155+
else:
156+
total = int(head.headers["content-length"])
157+
158+
logger.info("File size detected: %d bytes", total)
159+
160+
# ---- Already downloaded? ----
161+
if downloaded == total and total > 0:
138162
logger.info("File already fully downloaded.")
139163
return True
140164

141165
logger.info("Resuming from %d / %d bytes", downloaded, total)
142166

143-
# Open file in append mode
167+
# ---- STEP 3: Streaming download with HTTP Range ----
144168
with open(archive_path, "ab") as f:
145-
146169
while downloaded < total:
147-
headers = {"Range": f"bytes={downloaded}-"}
148170

171+
headers = {"Range": f"bytes={downloaded}-"}
149172
try:
150173
with requests.get(url, headers=headers, stream=True) as r:
151174
r.raise_for_status()
@@ -160,20 +183,17 @@ def _download_archive(url: str, archive_path: pathlib.Path) -> bool:
160183
requests.exceptions.ReadTimeout,
161184
urllib3.exceptions.IncompleteRead) as e:
162185

163-
logger.warning(
164-
"Connection drop during download at %d / %d bytes. Retrying... (%s)",
165-
downloaded, total, e,
166-
)
186+
logger.warning("Connection drop at %d bytes, retrying... (%s)",
187+
downloaded, e)
167188
time.sleep(1)
168-
continue # retry with updated downloaded value
189+
continue # resume from updated offset
169190

170-
# End of successful pass
171191
break
172192

173-
# Validate final size
174-
actual = archive_path.stat().st_size
175-
if actual != total:
176-
logger.error("Download incomplete: expected %d, got %d", total, actual)
193+
# ---- Final size check ----
194+
final_size = archive_path.stat().st_size
195+
if final_size != total:
196+
logger.error("Download incomplete: expected %d, got %d", total, final_size)
177197
return False
178198

179199
logger.info("Download completed successfully.")

0 commit comments

Comments (0)