Skip to content

Commit ff10f64

Browse files
committed
flash: additional enhancements from pyocd
1 parent f86e26e commit ff10f64

File tree

1 file changed

+39
-9
lines changed

1 file changed

+39
-9
lines changed

tools/generate_flash_algo.py

Lines changed: 39 additions & 9 deletions
Original file line numberDiff line numberDiff line change
@@ -28,9 +28,18 @@
 from datetime import datetime
 from pyocd.target.pack.flash_algo import PackFlashAlgo
 
-BLOB_HEADER = '0xe00abe00,'
+# This header consists of two instructions:
+#
+# ```
+# bkpt #0
+# b .-2     # branch to the bkpt
+# ```
+#
+# Before running a flash algo operation, LR is set to the address of the `bkpt` instruction,
+# so when the operation function returns it will halt the CPU.
+BLOB_HEADER = '0xe7fdbe00,'
 HEADER_SIZE = 4
-STACK_SIZE = 0x400
+STACK_SIZE = 0x800
 
 DAPLINK_TEMPLATE = \
 """/* Flash OS Routines (Automagically Generated)
"""/* Flash OS Routines (Automagically Generated)
@@ -179,7 +188,6 @@ def process_template(self, template_text, data_dict=None):
         data_dict = dict(data_dict)
         assert "algo" not in data_dict, "algo already set by user data"
         data_dict["algo"] = self
-        data_dict["algo_size"] = len(self.algo_data)
 
         template = jinja2.Template(template_text)
         return template.render(data_dict)
@@ -216,6 +224,33 @@ def main():
 
     print(algo.flash_info)
 
+    # Allocate stack after algo and its rw/zi data, with bottom rounded to 8 bytes.
+    stack_base = (args.blob_start + HEADER_SIZE
+                  + algo.rw_start + algo.rw_size  # rw_start incorporates instruction size
+                  + algo.zi_size)
+    stack_base = (stack_base + 7) // 8 * 8
+    # Stack top rounded to at least 256 bytes
+    sp = stack_base + args.stack_size
+    if algo.page_size > 256:
+        sp = (sp + algo.page_size - 1) // algo.page_size * algo.page_size
+    else:
+        sp = (sp + 255) // 256 * 256
+
+    print(f"load addr: {args.blob_start:#010x}")
+    print(f"header: {HEADER_SIZE:#x} bytes")
+    print(f"data: {len(algo.algo_data):#x} bytes")
+    print(f"ro: {algo.ro_start:#010x} + {algo.ro_size:#x} bytes")
+    print(f"rw: {algo.rw_start:#010x} + {algo.rw_size:#x} bytes")
+    print(f"zi: {algo.zi_start:#010x} + {algo.zi_size:#x} bytes")
+    print(f"stack: {stack_base:#010x} .. {sp:#010x} ({sp - stack_base:#x} bytes)")
+    print(f"buffer: {sp:#010x} .. {sp + algo.page_size:#010x} ({algo.page_size:#x} bytes)")
+
+    print("\nSymbol offsets:")
+    for n, v in sorted(algo.symbols.items(), key=lambda x: x[1]):
+        if v >= 0xffffffff:
+            continue
+        print(f"{n}:{' ' * (11 - len(n))} {v:#010x}")
+
     if args.info_only:
         return
 
@@ -236,17 +271,12 @@ def main():
     hash = hashlib.sha256()
     hash.update(flm_content)
 
-    # Allocate stack after algo and its rw data, with top and bottom rounded to 256 bytes.
-    stack_base = args.blob_start + HEADER_SIZE + algo.rw_start + algo.rw_size
-    stack_base = (stack_base + 7) // 8 * 8
-    sp = stack_base + args.stack_size
-    sp = (sp + 255) // 256 * 256
-
     data_dict = {
         'filename': os.path.split(args.elf_path)[-1],
         'digest': hash.hexdigest(),
         'file_size': len(flm_content),
         'pack_file': pack_file,
+        'algo_size': len(algo.algo_data),
         'name': os.path.splitext(os.path.split(args.elf_path)[-1])[0],
         'prog_header': BLOB_HEADER,
         'header_size': HEADER_SIZE,

0 commit comments

Comments
 (0)