Skip to content

Commit 99af786

Browse files
committed
build_parameters.py: use local cache and server cache to get files
1 parent 14c32b0 commit 99af786

1 file changed

Lines changed: 101 additions & 10 deletions

File tree

build_parameters.py

Lines changed: 101 additions & 10 deletions
Original file line numberDiff line numberDiff line change
@@ -44,6 +44,14 @@
4444
parser.add_argument("--ardupilotRepoFolder", dest='gitFolder', default="../ardupilot", help="Ardupilot git folder. ")
4545
parser.add_argument("--destination", dest='destFolder', default="../../../../new_params_mversion", help="Parameters*.rst destination folder.") # noqa: E501
4646
parser.add_argument('--vehicle', dest='single_vehicle', help="If you just want to copy to one vehicle, you can do this. Otherwise it will work for all vehicles (Copter, Plane, Rover, AntennaTracker, Sub, Blimp)") # noqa: E501
47+
48+
DEFAULT_CACHE_TIME = 6 * 3600
49+
50+
# Get the directory where this script is located
51+
script_dir = os.path.dirname(os.path.abspath(__file__))
52+
default_cache_dir = os.path.join(script_dir, '.cache')
53+
54+
parser.add_argument("--cache-dir", dest='cache_dir', default=default_cache_dir, help="Directory to cache HTTP responses")
4755
args = parser.parse_args()
4856

4957
error_count = 0
@@ -83,6 +91,95 @@ def format(self, record):
8391
'Connection': 'keep-alive'
8492
})
8593

94+
95+
def get_cached_url(url, cache_dir=None):
    """Get URL content with caching to avoid repeated downloads.

    Strategy:
      1. If a cached copy exists and is younger than ``DEFAULT_CACHE_TIME``,
         return it without touching the network.
      2. Otherwise, if ETag/Last-Modified metadata was saved, issue a
         conditional HEAD request; on 304 (or unchanged validators) reuse
         the cached copy.
      3. Otherwise perform a full GET, cache the body (and any validators),
         and return it. If the GET fails but a stale cache exists, the
         stale copy is returned as a best-effort fallback.

    :param url: URL to fetch.
    :param cache_dir: directory for cache files; defaults to ``args.cache_dir``.
    :returns: the response body as text.
    :raises requests.RequestException: if the fetch fails and no cached
        copy is available.
    """
    if cache_dir is None:
        cache_dir = args.cache_dir

    os.makedirs(cache_dir, exist_ok=True)

    # Create cache filename from URL (non-word characters flattened to '_')
    cache_filename = re.sub(r'[^\w\-_.]', '_', url) + '.cache'
    cache_path = os.path.join(cache_dir, cache_filename)
    # Sidecar file holding the ETag/Last-Modified validators for cache_path.
    cache_meta_path = cache_path + '.meta'

    def load_cached_content():
        # Return the cached body; callers only invoke this after checking
        # that cache_path exists.
        with open(cache_path, 'r', encoding='utf-8') as f:
            return f.read()

    def load_cache_metadata():
        # Best-effort read of the validator sidecar; an unreadable or
        # corrupt file is treated as "no metadata" rather than an error.
        if not os.path.exists(cache_meta_path):
            return {}
        try:
            with open(cache_meta_path, 'r', encoding='utf-8') as f:
                return json.load(f)
        except (OSError, ValueError):
            # ValueError covers json.JSONDecodeError on a corrupt/truncated
            # meta file; previously only OSError was caught, so corrupt
            # metadata crashed the fetch instead of falling back to a GET.
            return {}

    def save_cache(content, response):
        # Persist the body, then any validators the server provided so the
        # next refresh can be a cheap conditional request.
        with open(cache_path, 'w', encoding='utf-8') as f:
            f.write(content)

        metadata = {}
        etag = response.headers.get('ETag')
        last_modified = response.headers.get('Last-Modified')
        if etag:
            metadata['etag'] = etag
        if last_modified:
            metadata['last_modified'] = last_modified
        if metadata:
            with open(cache_meta_path, 'w', encoding='utf-8') as f:
                json.dump(metadata, f)

    # Fast path: fresh-enough cache needs no network round trip at all.
    if os.path.exists(cache_path):
        cache_age = time.time() - os.path.getmtime(cache_path)
        if cache_age < DEFAULT_CACHE_TIME:
            debug(f"Using cached content for {url}")
            return load_cached_content()

    # Build conditional-request headers from any saved validators.
    cache_metadata = {}
    headers = {}
    if os.path.exists(cache_path):
        cache_metadata = load_cache_metadata()
        if cache_metadata.get('etag'):
            headers['If-None-Match'] = cache_metadata['etag']
        if cache_metadata.get('last_modified'):
            headers['If-Modified-Since'] = cache_metadata['last_modified']

    if headers:
        try:
            debug(f"HEAD checking server for {url}")
            head_response = session.head(url, timeout=30, headers=headers, allow_redirects=True)
            if head_response.status_code == 304:
                debug(f"Cache still valid for {url}")
                return load_cached_content()

            head_response.raise_for_status()
            # Some servers ignore conditional headers on HEAD and always
            # return 200 — compare the validators ourselves.
            if (head_response.headers.get('ETag') == cache_metadata.get('etag') and
                    head_response.headers.get('Last-Modified') == cache_metadata.get('last_modified')):
                debug(f"Server metadata unchanged for {url}, using local cache")
                return load_cached_content()
        except requests.RequestException as e:
            debug(f"HEAD request failed for {url}: {e}")
            # Fallback to GET if the HEAD request is unsupported or fails.

    try:
        debug(f"Fetching full content from {url}")
        response = session.get(url, timeout=30, allow_redirects=True)
        response.raise_for_status()
        content = response.text
    except requests.RequestException as e:
        error(f"Failed to fetch {url}: {e}")
        # Best effort: a stale copy is better than no data at all.
        if os.path.exists(cache_path):
            debug(f"Using stale cached content for {url}")
            return load_cached_content()
        raise

    save_cache(content, response)
    return content
181+
182+
86183
# Parameters
87184
COMMITFILE = "git-version.txt"
88185
BASEURL = "https://firmware.ardupilot.org/"
@@ -361,9 +458,7 @@ def handle_starttag(self, tag, attrs):
361458
try:
362459
debug(f"Fetching {firmware_url}{vehicle}")
363460

364-
response = session.get(firmware_url + vehicle, timeout=30)
365-
response.raise_for_status()
366-
content = response.text
461+
content = get_cached_url(firmware_url + vehicle)
367462
html_parser.feed(content)
368463
except Exception as e:
369464
error(f"Vehicles folders list download error: {e}")
@@ -415,9 +510,7 @@ def handle_starttag(self, tag, attrs):
415510
try:
416511
debug(f"Fetching {url}")
417512

418-
response = session.get(url, timeout=30)
419-
response.raise_for_status()
420-
content = response.text
513+
content = get_cached_url(url)
421514
html_parser.feed(content)
422515
except Exception as e:
423516
error(f"Board folders list download error: {e}")
@@ -438,11 +531,9 @@ def fetch_commit_hash(version_link, board, file):
438531
progress(f"Processing link...\t{fetch_link}")
439532

440533
try:
441-
response = session.get(fetch_link, timeout=30)
442-
response.raise_for_status()
443-
fecth_response = response.text
534+
fetch_response = get_cached_url(fetch_link)
444535

445-
commit_details = fecth_response.split("\n")
536+
commit_details = fetch_response.split("\n")
446537
commit_hash = commit_details[0][7:]
447538
# version = commit_details[6] — the sizes vary
448539
version = commit_details.pop(-2)

0 commit comments

Comments
 (0)