Skip to content

Commit adc7a27

Browse files
committed
PR #1155: Refactor and type helpers for download to satisfy complexity and annotation linters
- Move helper functions to module level with proper type annotations.
- Implement a clean retry loop using the helpers.
- Keep the external API unchanged.
1 parent ae11d69 commit adc7a27

File tree

1 file changed

+95
-110
lines changed

1 file changed

+95
-110
lines changed

ardupilot_methodic_configurator/backend_internet.py

Lines changed: 95 additions & 110 deletions
Original file line numberDiff line numberDiff line change
@@ -30,6 +30,7 @@
3030
from platformdirs import user_config_dir
3131
from requests import HTTPError as requests_HTTPError
3232
from requests import RequestException as requests_RequestException
33+
from requests import Response
3334
from requests import Timeout as requests_Timeout
3435
from requests import get as requests_get
3536
from requests.exceptions import RequestException
@@ -40,6 +41,57 @@
4041
GITHUB_API_URL_RELEASES = "https://api.github.com/repos/ArduPilot/MethodicConfigurator/releases/"
4142

4243

44+
def _build_proxies() -> dict[str, str]:
45+
proxies_dict = {
46+
"http": os.environ.get("HTTP_PROXY") or os.environ.get("http_proxy"),
47+
"https": os.environ.get("HTTPS_PROXY") or os.environ.get("https_proxy"),
48+
"no_proxy": os.environ.get("NO_PROXY") or os.environ.get("no_proxy"),
49+
}
50+
return {k: v for k, v in proxies_dict.items() if v is not None}
51+
52+
53+
def _existing_size_and_headers(path: str, resume: bool) -> tuple[int, dict[str, str]]:
54+
headers: dict[str, str] = {}
55+
existing = 0
56+
if resume and os.path.exists(path):
57+
existing = os.path.getsize(path)
58+
if existing > 0:
59+
headers["Range"] = f"bytes={existing}-"
60+
return existing, headers
61+
62+
63+
def _parse_total_size(response: Response) -> int:
64+
content_range = response.headers.get("Content-Range")
65+
if content_range:
66+
try:
67+
return int(content_range.split("/")[-1])
68+
except Exception:
69+
return 0
70+
return int(response.headers.get("content-length", 0) or 0)
71+
72+
73+
def _write_response(
74+
response: Response,
75+
path: str,
76+
mode: str,
77+
initial_downloaded: int,
78+
total_size: int,
79+
progress_callback: Optional[Callable[[float, str], None]],
80+
) -> int:
81+
downloaded_local = initial_downloaded
82+
block_size_local = 8192
83+
with open(path, mode) as fh:
84+
for chunk in response.iter_content(chunk_size=block_size_local):
85+
if chunk:
86+
fh.write(chunk)
87+
downloaded_local += len(chunk)
88+
if progress_callback and total_size:
89+
progress = (downloaded_local / total_size) * 100
90+
msg_local = _("Downloading ... {:.1f}%")
91+
progress_callback(progress, msg_local.format(progress))
92+
return downloaded_local
93+
94+
4395
def download_file_from_url(
4496
url: str,
4597
local_filename: str,
@@ -52,126 +104,59 @@ def download_file_from_url(
52104
"""
53105
Download a file with optional resume and retry support.
54106
55-
This is a thin wrapper that delegates the heavy lifting to
56-
`_download_file_with_retries` to keep function complexity low for linters.
107+
The implementation uses helper functions to keep complexity low and
108+
provide clear unit-testable pieces.
57109
"""
58110
if not url or not local_filename:
59111
logging_error(_("URL or local filename not provided."))
60112
return False
61113

62-
logging_info(_("Downloading %s from %s"), local_filename, url)
63-
64-
return _download_file_with_retries(url, local_filename, timeout, progress_callback, retries, backoff_factor, allow_resume)
65-
114+
proxies = _build_proxies()
115+
os.makedirs(os.path.dirname(os.path.abspath(local_filename)), exist_ok=True)
66116

67-
def _download_file_with_retries(
68-
url: str,
69-
local_filename: str,
70-
timeout: int,
71-
progress_callback: Optional[Callable[[float, str], None]],
72-
retries: int,
73-
backoff_factor: float,
74-
allow_resume: bool,
75-
) -> bool:
76-
"""Internal: perform download with retries, resume and progress reporting."""
77-
def _build_proxies() -> dict:
78-
proxies_dict = {
79-
"http": os.environ.get("HTTP_PROXY") or os.environ.get("http_proxy"),
80-
"https": os.environ.get("HTTPS_PROXY") or os.environ.get("https_proxy"),
81-
"no_proxy": os.environ.get("NO_PROXY") or os.environ.get("no_proxy"),
82-
}
83-
return {k: v for k, v in proxies_dict.items() if v is not None}
84-
85-
def _existing_size_and_headers(path: str, resume: bool) -> tuple[int, dict]:
86-
headers: dict = {}
87-
existing = 0
88-
if resume and os.path.exists(path):
89-
existing = os.path.getsize(path)
90-
if existing > 0:
91-
headers["Range"] = f"bytes={existing}-"
92-
return existing, headers
93-
94-
def _parse_total_size(response) -> int:
95-
content_range = response.headers.get("Content-Range")
96-
if content_range:
97-
try:
98-
return int(content_range.split("/")[-1])
99-
except Exception:
100-
return 0
101-
return int(response.headers.get("content-length", 0) or 0)
102-
103-
def _write_response(response, path: str, mode: str, initial_downloaded: int, total_size: int) -> int:
104-
downloaded_local = initial_downloaded
105-
block_size_local = 8192
106-
with open(path, mode) as fh:
107-
for chunk in response.iter_content(chunk_size=block_size_local):
108-
if chunk:
109-
fh.write(chunk)
110-
downloaded_local += len(chunk)
111-
if progress_callback and total_size:
112-
progress = (downloaded_local / total_size) * 100
113-
msg_local = _("Downloading ... {:.1f}%")
114-
progress_callback(progress, msg_local.format(progress))
115-
return downloaded_local
117+
attempt = 0
118+
while attempt <= retries:
119+
try:
120+
existing_size, headers = _existing_size_and_headers(local_filename, allow_resume)
121+
122+
response = requests_get(
123+
url,
124+
stream=True,
125+
timeout=timeout,
126+
proxies=proxies,
127+
verify=True,
128+
headers=headers,
129+
)
130+
response.raise_for_status()
116131

117-
try:
118-
proxies = _build_proxies()
119-
os.makedirs(os.path.dirname(os.path.abspath(local_filename)), exist_ok=True)
132+
total_size = _parse_total_size(response)
120133

121-
attempt = 0
122-
while attempt <= retries:
123-
try:
124-
existing_size, headers = _existing_size_and_headers(local_filename, allow_resume)
125-
126-
response = requests_get(
127-
url,
128-
stream=True,
129-
timeout=timeout,
130-
proxies=proxies,
131-
verify=True,
132-
headers=headers,
133-
)
134-
response.raise_for_status()
135-
136-
total_size = _parse_total_size(response)
137-
138-
mode = "ab" if existing_size and response.status_code == 206 else "wb"
139-
if mode == "wb":
140-
existing_size = 0
141-
142-
downloaded = _write_response(response, local_filename, mode, existing_size, total_size)
143-
144-
if progress_callback:
145-
progress_callback(100.0, _("Download complete"))
146-
147-
return bool(downloaded > 0)
148-
149-
except requests_Timeout:
150-
logging_error(_("Download timed out (attempt %d)"), attempt + 1)
151-
except requests_RequestException as e:
152-
logging_error(_("Network error during download (attempt %d): %s"), attempt + 1, e)
153-
except OSError as e:
154-
logging_error(_("File system error during download: %s"), e)
155-
except ValueError as e:
156-
logging_error(_("Invalid data received from %s: %s"), url, e)
157-
158-
attempt += 1
159-
if attempt > retries:
160-
break
161-
sleep_time = backoff_factor * (2 ** (attempt - 1))
162-
sleep_time = sleep_time * (0.8 + 0.4 * (os.urandom(1)[0] / 255.0))
163-
time.sleep(sleep_time)
134+
mode = "ab" if existing_size and response.status_code == 206 else "wb"
135+
if mode == "wb":
136+
existing_size = 0
164137

165-
return False
138+
downloaded = _write_response(response, local_filename, mode, existing_size, total_size, progress_callback)
166139

167-
except requests_Timeout:
168-
logging_error(_("Download timed out"))
169-
except requests_RequestException as e:
170-
logging_error(_("Network error during download: {}").format(e))
171-
except OSError as e:
172-
logging_error(_("File system error: {}").format(e))
173-
except ValueError as e:
174-
logging_error(_("Invalid data received from %s: %s"), url, e)
140+
if progress_callback:
141+
progress_callback(100.0, _("Download complete"))
142+
143+
return bool(downloaded > 0)
144+
145+
except requests_Timeout:
146+
logging_error(_("Download timed out (attempt %d)"), attempt + 1)
147+
except requests_RequestException as e:
148+
logging_error(_("Network error during download (attempt %d): %s"), attempt + 1, e)
149+
except OSError as e:
150+
logging_error(_("File system error during download: %s"), e)
151+
except ValueError as e:
152+
logging_error(_("Invalid data received from %s: %s"), url, e)
153+
154+
attempt += 1
155+
if attempt > retries:
156+
break
157+
sleep_time = backoff_factor * (2 ** (attempt - 1))
158+
sleep_time = sleep_time * (0.8 + 0.4 * (os.urandom(1)[0] / 255.0))
159+
time.sleep(sleep_time)
175160

176161
return False
177162

0 commit comments

Comments
 (0)