@@ -27,15 +27,14 @@
 from os.path import relpath
 from os import linesep, remove, makedirs
 from time import time
-from intelhex import IntelHex
 from json import load, dump
 from jinja2 import FileSystemLoader
 from jinja2.environment import Environment
 
 from .arm_pack_manager import Cache
 from .utils import (mkdir, run_cmd, run_cmd_ext, NotSupportedException,
                     ToolException, InvalidReleaseTargetException,
-                    intelhex_offset, integer, generate_update_filename, copy_when_different)
+                    copy_when_different)
 from .paths import (MBED_CMSIS_PATH, MBED_TARGETS_PATH, MBED_LIBRARIES,
                     MBED_HEADER, MBED_DRIVERS, MBED_PLATFORM, MBED_HAL,
                     MBED_CONFIG_FILE, MBED_LIBRARIES_DRIVERS,
@@ -393,124 +392,6 @@ def prepare_toolchain(src_paths, build_dir, target, toolchain_name,
 
     return toolchain
 
-def _printihex(ihex):
-    import pprint
-    pprint.PrettyPrinter().pprint(ihex.todict())
-
-def _real_region_size(region):
-    try:
-        part = intelhex_offset(region.filename, offset=region.start)
-        return (part.maxaddr() - part.minaddr()) + 1
-    except AttributeError:
-        return region.size
-
-
-def _fill_header(region_list, current_region):
-    """Fill an application header region
-
-    This is done it three steps:
-     * Fill the whole region with zeros
-     * Fill const, timestamp and size entries with their data
-     * Fill the digests using this header as the header region
-    """
-    region_dict = {r.name: r for r in region_list}
-    header = IntelHex()
-    header.puts(current_region.start, b'\x00' * current_region.size)
-    start = current_region.start
-    for member in current_region.filename:
-        _, type, subtype, data = member
-        member_size = Config.header_member_size(member)
-        if type == "const":
-            fmt = {
-                "8le": ">B", "16le": "<H", "32le": "<L", "64le": "<Q",
-                "8be": "<B", "16be": ">H", "32be": ">L", "64be": ">Q"
-            }[subtype]
-            header.puts(start, struct.pack(fmt, integer(data, 0)))
-        elif type == "timestamp":
-            fmt = {"32le": "<L", "64le": "<Q",
-                   "32be": ">L", "64be": ">Q"}[subtype]
-            header.puts(start, struct.pack(fmt, int(time())))
-        elif type == "size":
-            fmt = {"32le": "<L", "64le": "<Q",
-                   "32be": ">L", "64be": ">Q"}[subtype]
-            size = sum(_real_region_size(region_dict[r]) for r in data)
-            header.puts(start, struct.pack(fmt, size))
-        elif type == "digest":
-            if data == "header":
-                ih = header[:start]
-            else:
-                ih = intelhex_offset(region_dict[data].filename, offset=region_dict[data].start)
-            if subtype.startswith("CRCITT32"):
-                fmt = {"CRCITT32be": ">L", "CRCITT32le": "<L"}[subtype]
-                crc_val = zlib.crc32(ih.tobinarray()) & 0xffffffff
-                header.puts(start, struct.pack(fmt, crc_val))
-            elif subtype.startswith("SHA"):
-                if subtype == "SHA256":
-                    hash = hashlib.sha256()
-                elif subtype == "SHA512":
-                    hash = hashlib.sha512()
-                hash.update(ih.tobinarray())
-                header.puts(start, hash.digest())
-        start += Config.header_member_size(member)
-    return header
-
-
-def merge_region_list(region_list, destination, notify, config, padding=b'\xFF'):
-    """Merge the region_list into a single image
-
-    Positional Arguments:
-    region_list - list of regions, which should contain filenames
-    destination - file name to write all regions to
-    padding - bytes to fill gaps with
-    """
-    merged = IntelHex()
-    _, format = splitext(destination)
-    notify.info("Merging Regions")
-    # Merged file list: Keep track of binary/hex files that we have already
-    # merged. e.g In some cases, bootloader may be split into multiple parts, but
-    # all internally referring to the same bootloader file.
-    merged_list = []
-
-    for region in region_list:
-        if region.active and not region.filename:
-            raise ToolException("Active region has no contents: No file found.")
-        if isinstance(region.filename, list):
-            header_basename, _ = splitext(destination)
-            header_filename = header_basename + "_header.hex"
-            _fill_header(region_list, region).tofile(header_filename, format='hex')
-            region = region._replace(filename=header_filename)
-        if region.filename and (region.filename not in merged_list):
-            notify.info("  Filling region %s with %s" % (region.name, region.filename))
-            part = intelhex_offset(region.filename, offset=region.start)
-            part.start_addr = None
-            # Normally, we assume that part.maxddr() can be beyond
-            # end of rom. However, if the size is restricted with config, do check.
-            if config.target.restrict_size is not None:
-                part_size = (part.maxaddr() - part.minaddr()) + 1
-                if part_size > region.size:
-                    raise ToolException("Contents of region %s does not fit"
-                                        % region.name)
-            merged_list.append(region.filename)
-            merged.merge(part)
-        elif region.filename in merged_list:
-            notify.info("  Skipping %s as it is merged previously" % (region.name))
-
-    # Hex file can have gaps, so no padding needed. While other formats may
-    # need padding. Iterate through segments and pad the gaps.
-    if format != ".hex":
-        # begin patching from the end of the first segment
-        _, begin = merged.segments()[0]
-        for start, stop in merged.segments()[1:]:
-            pad_size = start - begin
-            merged.puts(begin, padding * pad_size)
-            begin = stop + 1
-
-    if not exists(dirname(destination)):
-        makedirs(dirname(destination))
-    notify.info("Space used after regions merged: 0x%x" %
-                (merged.maxaddr() - merged.minaddr() + 1))
-    merged.tofile(destination, format=format.strip("."))
-
 
 UPDATE_WHITELIST = (
     "application",
@@ -605,27 +486,7 @@ def build_project(src_paths, build_path, target, toolchain_name,
     objects = toolchain.compile_sources(resources, sorted(resources.get_file_paths(FileType.INC_DIR)))
     resources.add_files_to_type(FileType.OBJECT, objects)
 
-    # Link Program
-    if toolchain.config.has_regions:
-        binary, _ = toolchain.link_program(resources, build_path, name + "_application")
-        region_list = list(toolchain.config.regions)
-        region_list = [r._replace(filename=binary) if r.active else r
-                       for r in region_list]
-        res = "%s.%s" % (join(build_path, name),
-                         getattr(toolchain.target, "OUTPUT_EXT", "bin"))
-        merge_region_list(region_list, res, notify, toolchain.config)
-        update_regions = [
-            r for r in region_list if r.name in UPDATE_WHITELIST
-        ]
-        if update_regions:
-            update_res = join(build_path, generate_update_filename(name, toolchain.target))
-            merge_region_list(update_regions, update_res, notify, toolchain.config)
-            res = (res, update_res)
-        else:
-            res = (res, None)
-    else:
-        res, _ = toolchain.link_program(resources, build_path, name)
-        res = (res, None)
+    res = toolchain.link_program(resources, build_path, name)
 
     into_dir, extra_artifacts = toolchain.config.deliver_into()
     if into_dir: