# SPDX-License-Identifier: GPL-2.0+
# Copyright (c) 2016 Google, Inc
# Written by Simon Glass <sjg@chromium.org>
#
# Creates binary images from input files controlled by a description
#

from collections import OrderedDict
import glob
try:
    import importlib.resources as importlib_resources
except ImportError:  # pragma: no cover
    # for Python 3.6
    import importlib_resources
import os
import re

import sys

from binman import bintool
from binman import cbfs_util
from binman import elf
from binman import entry
from dtoc import fdt_util
from u_boot_pylib import command
from u_boot_pylib import tools
from u_boot_pylib import tout

# These are imported if needed since they import libfdt
state = None
Image = None

# List of images we plan to create
# Make this global so that it can be referenced from tests
images = OrderedDict()

# Help text for each type of missing blob, dict:
#    key: Value of the entry's 'missing-msg' or entry name
#    value: Text for the help
missing_blob_help = {}

def _ReadImageDesc(binman_node, use_expanded):
    """Read the image descriptions from the /binman node

    This normally produces a single Image object called 'image'. But if
    multiple images are present, they will all be returned.

    Args:
        binman_node: Node object of the /binman node
        use_expanded: True if the FDT will be updated with the entry information
    Returns:
        OrderedDict of Image objects, each of which describes an image
    """
    # For Image()
    # pylint: disable=E1102
    images = OrderedDict()
    if 'multiple-images' in binman_node.props:
        for node in binman_node.subnodes:
            if not node.name.startswith('template'):
                images[node.name] = Image(node.name, node,
                                          use_expanded=use_expanded)
    else:
        images['image'] = Image('image', binman_node, use_expanded=use_expanded)
    return images

def _FindBinmanNode(dtb):
    """Find the 'binman' node in the device tree

    Args:
        dtb: Fdt object to scan
    Returns:
        Node object of /binman node, or None if not found
    """
    for node in dtb.GetRoot().subnodes:
        if node.name == 'binman':
            return node
    return None

def _ReadMissingBlobHelp():
    """Read the missing-blob-help file

    This file contains help messages explaining what to do when external blobs
    are missing.

    Returns:
        Dict:
            key: Message tag (str)
            value: Message text (str)
    """

    def _FinishTag(tag, msg, result):
        if tag:
            result[tag] = msg.rstrip()
            tag = None
            msg = ''
        return tag, msg

    my_data = importlib_resources.files(__package__).joinpath('missing-blob-help').read_bytes()
    re_tag = re.compile(r"^([-\.a-z0-9]+):$")
    result = {}
    tag = None
    msg = ''
    for line in my_data.decode('utf-8').splitlines():
        if not line.startswith('#'):
            m_tag = re_tag.match(line)
            if m_tag:
                _, msg = _FinishTag(tag, msg, result)
                tag = m_tag.group(1)
            elif tag:
                msg += line + '\n'
    _FinishTag(tag, msg, result)
    return result

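# For reference, a sketch of the file format parsed by _ReadMissingBlobHelp()
# above, inferred from the parser itself: '#' lines are comments, a line of
# the form '<tag>:' starts a new message and the following lines up to the
# next tag form the message text. The tag and wording below are illustrative
# only, not copied from the real missing-blob-help file:
#
#    # Comments are ignored
#    atf-bl31:
#    Build ARM Trusted Firmware and point binman at the resulting bl31.elf,
#    e.g. via the corresponding entry arg.
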
def _ShowBlobHelp(level, path, text, fname):
    """Show the help text for a single missing blob"""
    tout.do_output(level, '%s (%s):' % (path, fname))
    for line in text.splitlines():
        tout.do_output(level, ' %s' % line)
    tout.do_output(level, '')

def _ShowHelpForMissingBlobs(level, missing_list):
    """Show help for each missing blob to help the user take action

    Args:
        level: Verbosity level to use for the output (e.g. tout.ERROR)
        missing_list: List of Entry objects to show help for
    """
    global missing_blob_help

    if not missing_blob_help:
        missing_blob_help = _ReadMissingBlobHelp()

    for entry in missing_list:
        tags = entry.GetHelpTags()

        # Show the first matching help message
        shown_help = False
        for tag in tags:
            if tag in missing_blob_help:
                _ShowBlobHelp(level, entry._node.path, missing_blob_help[tag],
                              entry.GetDefaultFilename())
                shown_help = True
                break
        # Or a generic help message
        if not shown_help:
            _ShowBlobHelp(level, entry._node.path, "Missing blob",
                          entry.GetDefaultFilename())

def GetEntryModules(include_testing=True):
    """Get a set of entry class implementations

    Returns:
        Set of entry-module names, i.e. the etype filenames without their
        directory or '.py' extension
    """
    entries = importlib_resources.files(__package__).joinpath('etype')
    glob_list = [entry.name for entry in entries.iterdir()
                 if entry.name.endswith('.py') and entry.is_file()]
    return set([os.path.splitext(os.path.basename(item))[0]
                for item in glob_list
                if include_testing or '_testing' not in item])

def WriteEntryDocs(modules, test_missing=None):
    """Write out documentation for all entries

    Args:
        modules: List of Module objects to get docs for
        test_missing: Used for testing only, to force an entry's documentation
            to show as missing even if it is present. Should be set to None in
            normal use.
    """
    from binman.entry import Entry
    Entry.WriteDocs(modules, test_missing)


def write_bintool_docs(modules, test_missing=None):
    """Write out documentation for all bintools

    Args:
        modules: List of Module objects to get docs for
        test_missing: Used for testing only, to force a bintool's documentation
            to show as missing even if it is present. Should be set to None in
            normal use.
    """
    bintool.Bintool.WriteDocs(modules, test_missing)


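# A minimal usage sketch (not part of binman itself) for WriteEntryDocs()
# above: the module names returned by GetEntryModules() are what the doc
# writer expects, so generating the entry-type documentation is a one-liner.
def _example_write_docs():  # pragma: no cover
    WriteEntryDocs(GetEntryModules())
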
def ListEntries(image_fname, entry_paths):
    """List the entries in an image

    This decodes the supplied image and displays a table of entries from that
    image, preceded by a header.

    Args:
        image_fname: Image filename to process
        entry_paths: List of wildcarded paths (e.g. ['*dtb*', 'u-boot*',
            'section/u-boot'])
    """
    image = Image.FromFile(image_fname)

    entries, lines, widths = image.GetListEntries(entry_paths)

    num_columns = len(widths)
    for linenum, line in enumerate(lines):
        if linenum == 1:
            # Print header line
            print('-' * (sum(widths) + num_columns * 2))
        out = ''
        for i, item in enumerate(line):
            width = -widths[i]
            if item.startswith('>'):
                width = -width
                item = item[1:]
            txt = '%*s ' % (width, item)
            out += txt
        print(out.rstrip())


def ReadEntry(image_fname, entry_path, decomp=True):
    """Extract an entry from an image

    This extracts the data from a particular entry in an image

    Args:
        image_fname: Image filename to process
        entry_path: Path to entry to extract
        decomp: True to return uncompressed data if the data is compressed,
            False to return the raw data as stored in the image

    Returns:
        data extracted from the entry
    """
    global Image
    from binman.image import Image

    image = Image.FromFile(image_fname)
    image.CollectBintools()
    entry = image.FindEntryPath(entry_path)
    return entry.ReadData(decomp)


def ShowAltFormats(image):
    """Show alternative formats available for entries in the image

    This shows a list of formats available.

    Args:
        image (Image): Image to check
    """
    alt_formats = {}
    image.CheckAltFormats(alt_formats)
    print('%-10s %-20s %s' % ('Flag (-F)', 'Entry type', 'Description'))
    for name, val in alt_formats.items():
        entry, helptext = val
        print('%-10s %-20s %s' % (name, entry.etype, helptext))


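# A usage sketch for ReadEntry() above; not called by binman itself. The
# image filename and entry path are placeholders. As in the 'extract'
# handling in Binman() below, an output directory is prepared first since
# reading an image may create working files.
def _example_read_entry(image_fname='image.bin', entry_path='u-boot-dtb'):  # pragma: no cover
    tools.prepare_output_dir(None)
    try:
        # Returns the entry's data, decompressed by default
        return ReadEntry(image_fname, entry_path)
    finally:
        tools.finalise_output_dir()
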
def ExtractEntries(image_fname, output_fname, outdir, entry_paths,
                   decomp=True, alt_format=None):
    """Extract the data from one or more entries and write it to files

    Args:
        image_fname: Image filename to process
        output_fname: Single output filename to use if extracting one file, None
            otherwise
        outdir: Output directory to use (for any number of files), else None
        entry_paths: List of entry paths to extract
        decomp: True to decompress the entry data
        alt_format: Alternative format to use when reading the data, or 'list'
            to show the formats available

    Returns:
        List of EntryInfo records that were written (None when a single output
        file or the 'list' format was requested)
    """
    image = Image.FromFile(image_fname)
    image.CollectBintools()

    if alt_format == 'list':
        ShowAltFormats(image)
        return

    # Output an entry to a single file, as a special case
    if output_fname:
        if not entry_paths:
            raise ValueError('Must specify an entry path to write with -f')
        if len(entry_paths) != 1:
            raise ValueError('Must specify exactly one entry path to write with -f')
        entry = image.FindEntryPath(entry_paths[0])
        data = entry.ReadData(decomp, alt_format)
        tools.write_file(output_fname, data)
        tout.notice("Wrote %#x bytes to file '%s'" % (len(data), output_fname))
        return

    # Otherwise we will output to a path given by the entry path of each entry.
    # This means that entries will appear in subdirectories if they are part of
    # a sub-section.
    einfos = image.GetListEntries(entry_paths)[0]
    tout.notice('%d entries match and will be written' % len(einfos))
    for einfo in einfos:
        entry = einfo.entry
        data = entry.ReadData(decomp, alt_format)
        path = entry.GetPath()[1:]
        fname = os.path.join(outdir, path)

        # If this entry has children, create a directory for it and put its
        # data in a file called 'root' in that directory
        if entry.GetEntries():
            if fname and not os.path.exists(fname):
                os.makedirs(fname)
            fname = os.path.join(fname, 'root')
        tout.notice("Write entry '%s' size %x to '%s'" %
                    (entry.GetPath(), len(data), fname))
        tools.write_file(fname, data)
    return einfos


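# A usage sketch for ExtractEntries() above, mirroring the 'extract'
# subcommand handling in Binman() below; not called by binman itself. The
# image filename and output directory are placeholders: every matching entry
# is written under the output directory, using its entry path as the relative
# filename.
def _example_extract_all(image_fname='image.bin', outdir='extracted'):  # pragma: no cover
    global Image
    from binman.image import Image      # Image is imported lazily (see above)
    if not os.path.exists(outdir):      # make sure the output directory exists
        os.makedirs(outdir)
    tools.prepare_output_dir(None)
    try:
        return ExtractEntries(image_fname, None, outdir, ['*'])
    finally:
        tools.finalise_output_dir()
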
def BeforeReplace(image, allow_resize):
    """Handle getting an image ready for replacing entries in it

    Args:
        image: Image to prepare
        allow_resize: True to allow entries to change size (this does a re-pack
            of the entries), False to raise an exception
    """
    state.PrepareFromLoadedData(image)
    image.CollectBintools()
    image.LoadData(decomp=False)

    # If repacking, drop the old offset/size values except for the original
    # ones, so we are only left with the constraints.
    if image.allow_repack and allow_resize:
        image.ResetForPack()


def ReplaceOneEntry(image, entry, data, do_compress, allow_resize):
    """Handle replacing a single entry in an image

    Args:
        image: Image to update
        entry: Entry to write
        data: Data to replace with
        do_compress: True to compress the data if needed, False if data is
            already compressed so should be used as is
        allow_resize: True to allow entries to change size (this does a re-pack
            of the entries), False to raise an exception
    """
    if not entry.WriteData(data, do_compress):
        if not image.allow_repack:
            entry.Raise('Entry data size does not match, but allow-repack is not present for this image')
        if not allow_resize:
            entry.Raise('Entry data size does not match, but resize is disabled')


def AfterReplace(image, allow_resize, write_map):
    """Handle writing out an image after replacing entries in it

    Args:
        image: Image to write
        allow_resize: True to allow entries to change size (this does a re-pack
            of the entries), False to raise an exception
        write_map: True to write a map file
    """
    tout.info('Processing image')
    ProcessImage(image, update_fdt=True, write_map=write_map,
                 get_contents=False, allow_resize=allow_resize)


def WriteEntryToImage(image, entry, data, do_compress=True, allow_resize=True,
                      write_map=False):
    """Replace the data for a single entry in an already-loaded image"""
    BeforeReplace(image, allow_resize)
    tout.info('Writing data to %s' % entry.GetPath())
    ReplaceOneEntry(image, entry, data, do_compress, allow_resize)
    AfterReplace(image, allow_resize=allow_resize, write_map=write_map)


def WriteEntry(image_fname, entry_path, data, do_compress=True,
               allow_resize=True, write_map=False):
    """Replace an entry in an image

    This replaces the data in a particular entry in an image. The size of the
    new data must match the size of the old data unless allow_resize is True.

    Args:
        image_fname: Image filename to process
        entry_path: Path to entry to replace
        data: Data to replace with
        do_compress: True to compress the data if needed, False if data is
            already compressed so should be used as is
        allow_resize: True to allow entries to change size (this does a re-pack
            of the entries), False to raise an exception
        write_map: True to write a map file

    Returns:
        Image object that was updated
    """
    tout.info("Write entry '%s', file '%s'" % (entry_path, image_fname))
    image = Image.FromFile(image_fname)
    image.CollectBintools()
    entry = image.FindEntryPath(entry_path)
    WriteEntryToImage(image, entry, data, do_compress=do_compress,
                      allow_resize=allow_resize, write_map=write_map)

    return image


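# A usage sketch for WriteEntry() above; not called by binman itself. The
# image, entry and input filenames are placeholders, and the image must have
# been built with 'allow-repack' if the replacement changes the entry size.
# Note that WriteEntry() relies on the lazily imported 'Image' and 'state'
# globals, which are normally set up by Binman() below.
def _example_write_entry(image_fname='image.bin', entry_path='u-boot',
                         input_fname='u-boot.bin'):  # pragma: no cover
    global Image, state
    from binman.image import Image
    from binman import state
    tools.prepare_output_dir(None)
    try:
        return WriteEntry(image_fname, entry_path,
                          tools.read_file(input_fname))
    finally:
        tools.finalise_output_dir()
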
def ReplaceEntries(image_fname, input_fname, indir, entry_paths,
                   do_compress=True, allow_resize=True, write_map=False):
    """Replace the data from one or more entries from input files

    Args:
        image_fname: Image filename to process
        input_fname: Single input filename to use if replacing one file, None
            otherwise
        indir: Input directory to use (for any number of files), else None
        entry_paths: List of entry paths to replace
        do_compress: True if the input data is uncompressed and may need to be
            compressed if the entry requires it, False if the data is already
            compressed.
        allow_resize: True to allow entries to change size (this does a re-pack
            of the entries), False to raise an exception
        write_map: True to write a map file

    Returns:
        Image object that was updated, or None if input_fname was used to
        replace a single entry
    """
    image_fname = os.path.abspath(image_fname)
    image = Image.FromFile(image_fname)

    image.mark_build_done()

    # Replace an entry from a single file, as a special case
    if input_fname:
        if not entry_paths:
            raise ValueError('Must specify an entry path to read with -f')
        if len(entry_paths) != 1:
            raise ValueError('Must specify exactly one entry path to write with -f')
        entry = image.FindEntryPath(entry_paths[0])
        data = tools.read_file(input_fname)
        tout.notice("Read %#x bytes from file '%s'" % (len(data), input_fname))
        WriteEntryToImage(image, entry, data, do_compress=do_compress,
                          allow_resize=allow_resize, write_map=write_map)
        return

    # Otherwise we will input from a path given by the entry path of each entry.
    # This means that files must appear in subdirectories if they are part of
    # a sub-section.
    einfos = image.GetListEntries(entry_paths)[0]
    tout.notice("Replacing %d matching entries in image '%s'" %
                (len(einfos), image_fname))

    BeforeReplace(image, allow_resize)

    for einfo in einfos:
        entry = einfo.entry
        if entry.GetEntries():
            tout.info("Skipping section entry '%s'" % entry.GetPath())
            continue

        path = entry.GetPath()[1:]
        fname = os.path.join(indir, path)

        if os.path.exists(fname):
            tout.notice("Write entry '%s' from file '%s'" %
                        (entry.GetPath(), fname))
            data = tools.read_file(fname)
            ReplaceOneEntry(image, entry, data, do_compress, allow_resize)
        else:
            tout.warning("Skipping entry '%s' from missing file '%s'" %
                         (entry.GetPath(), fname))

    AfterReplace(image, allow_resize=allow_resize, write_map=write_map)
    return image

def SignEntries(image_fname, input_fname, privatekey_fname, algo, entry_paths,
                write_map=False):
    """Sign and replace the data from one or more entries from input files

    Args:
        image_fname: Image filename to process
        input_fname: Single input filename to use if replacing one file, None
            otherwise
        privatekey_fname: Private key filename
        algo: Hashing algorithm
        entry_paths: List of entry paths to sign
        write_map (bool): True to write the map file
    """
    image_fname = os.path.abspath(image_fname)
    image = Image.FromFile(image_fname)

    image.mark_build_done()

    BeforeReplace(image, allow_resize=True)

    for entry_path in entry_paths:
        entry = image.FindEntryPath(entry_path)
        entry.UpdateSignatures(privatekey_fname, algo, input_fname)

    AfterReplace(image, allow_resize=True, write_map=write_map)

def _ProcessTemplates(parent):
    """Handle any templates in the binman description

    Args:
        parent: Binman node to process (typically /binman)

    Returns:
        bool: True if any templates were processed

    Search through each target node looking for those with an 'insert-template'
    property. Use that as a list of references to template nodes to use to
    adjust the target node.

    Processing involves copying each subnode of the template node into the
    target node.

    This is done recursively, so templates can be at any level of the binman
    image, e.g. inside a section.

    See 'Templates' in the Binman documentation for details.
    """
    found = False
    for node in parent.subnodes:
        tmpl = fdt_util.GetPhandleList(node, 'insert-template')
        if tmpl:
            node.copy_subnodes_from_phandles(tmpl)
            found = True

        found |= _ProcessTemplates(node)
    return found

def _RemoveTemplates(parent):
    """Remove any templates in the binman description
    """
    del_nodes = []
    for node in parent.subnodes:
        if node.name.startswith('template'):
            del_nodes.append(node)

    for node in del_nodes:
        node.Delete()

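# For reference, a sketch of the description syntax handled by the two
# template helpers above (see 'Templates' in the Binman documentation for the
# authoritative form). A node whose name starts with 'template' is never
# turned into an image; a target node pulls the template's subnodes in via
# the 'insert-template' phandle list, after which the template nodes
# themselves are removed:
#
#    binman {
#        common_part: template-1 {
#            u-boot-spl {
#            };
#        };
#        image {
#            insert-template = <&common_part>;
#        };
#    };
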
def propagate_prop(node, prop):
    """Propagate the provided property to all the parent nodes up the hierarchy

    Args:
        node (fdt.Node): Node to start from; the (empty) property is added to
            this node and to all of its parents up to the root
        prop (str): Boolean property to propagate

    Return:
        True if any change was made, else False
    """
    changed = False
    while node:
        if prop not in node.props:
            node.AddEmptyProp(prop, 0)
            changed = True
        node = node.parent
    return changed

def scan_and_prop_bootph(node):
    """Propagate bootph properties from children to parents

    The bootph schema indicates that bootph properties in children should be
    implied in their parents, all the way up the hierarchy. This is expensive
    to implement in U-Boot before relocation at runtime, so this function
    explicitly propagates these bootph properties upwards during build time.

    This is used to set the bootph-all or bootph-some-ram property in the
    parent node if the respective property is found in any of the parent's
    subnodes. The other bootph-* properties are associated with the SPL stage
    and hence handled by fdtgrep.c.

    Args:
        node (fdt.Node): Node to scan for the bootph-all and bootph-some-ram
            properties

    Return:
        True if any change was made, else False
    """
    bootph_prop = {'bootph-all', 'bootph-some-ram'}

    changed = False
    for prop in bootph_prop:
        if prop in node.props:
            changed |= propagate_prop(node.parent, prop)

    for subnode in node.subnodes:
        changed |= scan_and_prop_bootph(subnode)
    return changed

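# A usage sketch for the two helpers above; not called by binman itself.
# 'my.dtb' is a placeholder for an already-compiled devicetree blob: if any
# node carries bootph-all or bootph-some-ram, the same (empty) property is
# added to its ancestors and the tree is re-synced.
def _example_propagate_bootph(dtb_fname='my.dtb'):  # pragma: no cover
    from dtoc import fdt        # imported locally since it needs libfdt
    dtb = fdt.FdtScan(dtb_fname)
    if scan_and_prop_bootph(dtb.GetRoot()):
        dtb.Sync(auto_resize=True)
    return dtb
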
def PrepareImagesAndDtbs(dtb_fname, select_images, update_fdt, use_expanded, indir):
    """Prepare the images to be processed and select the device tree

    This function:
    - reads in the device tree
    - finds and scans the binman node to create all entries
    - selects which images to build
    - Updates the device trees with placeholder properties for offset,
      image-pos, etc.

    Args:
        dtb_fname: Filename of the device tree file to use (.dts or .dtb)
        select_images: List of images to output, or None for all
        update_fdt: True to update the FDT with entry offsets, etc.
        use_expanded: True to use expanded versions of entries, if available.
            So if 'u-boot' is called for, we use 'u-boot-expanded' instead. This
            is needed if update_fdt is True (although tests may disable it)
        indir: List of directories where input files can be found

    Returns:
        OrderedDict of images:
            key: Image name (str)
            value: Image object
    """
    # Import these here in case libfdt.py is not available, in which case
    # the above help option still works.
    from dtoc import fdt
    from dtoc import fdt_util
    global images

    # Get the device tree ready by compiling it and copying the compiled
    # output into a file in our output directory. Then scan it for use
    # in binman.
    if indir is None:
        indir = []
    dtb_fname = fdt_util.EnsureCompiled(dtb_fname, indir=indir)
    fname = tools.get_output_filename('u-boot.dtb.out')
    tools.write_file(fname, tools.read_file(dtb_fname))
    dtb = fdt.FdtScan(fname)

    node = _FindBinmanNode(dtb)
    if not node:
        raise ValueError("Device tree '%s' does not have a 'binman' "
                         "node" % dtb_fname)

    if _ProcessTemplates(node):
        dtb.Sync(True)
        fname = tools.get_output_filename('u-boot.dtb.tmpl1')
        tools.write_file(fname, dtb.GetContents())

        _RemoveTemplates(node)
        dtb.Sync(True)

        # Rescan the dtb to pick up the new phandles
        dtb.Scan()
        node = _FindBinmanNode(dtb)
        fname = tools.get_output_filename('u-boot.dtb.tmpl2')
        tools.write_file(fname, dtb.GetContents())

    if scan_and_prop_bootph(dtb.GetRoot()):
        dtb.Sync(True)

    images = _ReadImageDesc(node, use_expanded)

    if select_images:
        skip = []
        new_images = OrderedDict()
        for name, image in images.items():
            if name in select_images:
                new_images[name] = image
            else:
                skip.append(name)
        images = new_images
        tout.notice('Skipping images: %s' % ', '.join(skip))

    state.Prepare(images, dtb)

    # Prepare the device tree by making sure that any missing
    # properties are added (e.g. 'pos' and 'size'). The values of these
    # may not be correct yet, but we add placeholders so that the
    # size of the device tree is correct. Later, in
    # SetCalculatedProperties() we will insert the correct values
    # without changing the device-tree size, thus ensuring that our
    # entry offsets remain the same.
    for image in images.values():
        image.gen_entries()
        image.CollectBintools()
        if update_fdt:
            image.AddMissingProperties(True)
        image.ProcessFdt(dtb)

    for dtb_item in state.GetAllFdts():
        dtb_item.Sync(auto_resize=True)
        dtb_item.Pack()
        dtb_item.Flush()
    return images

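# A trimmed-down sketch of how the functions above and ProcessImage() below
# fit together for a programmatic build; not called by binman itself. It
# mirrors the 'build' flow in Binman() further down, with error handling and
# most options omitted ('binman.dts' is a placeholder description containing
# a /binman node, and entry args/threads are left at their defaults rather
# than being set via state.SetEntryArgs()/state.SetThreads() as Binman() does).
def _example_build(dtb_fname='binman.dts'):  # pragma: no cover
    global Image, state
    from binman.image import Image      # lazy imports, as done in Binman()
    from binman import state
    tools.set_input_dirs(['.'])
    tools.prepare_output_dir(None)
    try:
        images = PrepareImagesAndDtbs(dtb_fname, None, False, False, None)
        for image in images.values():
            ProcessImage(image, update_fdt=False, write_map=False)
        return images
    finally:
        tools.finalise_output_dir()
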
def CheckForProblems(image):
    """Check for problems with image generation

    Shows warnings about missing, faked or optional external blobs, as well as
    missing bintools.

    Args:
        image (Image): Image to process

    Returns:
        bool: True if there are any problems which result in a non-functional
            image
    """
    missing_list = []
    image.CheckMissing(missing_list)
    if missing_list:
        tout.error("Image '%s' is missing external blobs and is non-functional: %s\n" %
                   (image.name, ' '.join([e.name for e in missing_list])))
        _ShowHelpForMissingBlobs(tout.ERROR, missing_list)

    faked_list = []
    faked_optional_list = []
    faked_required_list = []
    image.CheckFakedBlobs(faked_list)
    for e in faked_list:
        if e.optional:
            faked_optional_list.append(e)
        else:
            faked_required_list.append(e)
    if faked_required_list:
        tout.warning(
            "Image '%s' has faked external blobs and is non-functional: %s\n" %
            (image.name, ' '.join([os.path.basename(e.GetDefaultFilename())
                                   for e in faked_required_list])))

    optional_list = []
    # For optional blobs we should inform the user when a blob is not present.
    # This comes as a warning, since it may not otherwise be immediately
    # apparent that something is missing, e.g. if the user thinks they
    # supplied a blob but made an error.
    # Faked optional blobs are not relevant for final images (they are dropped
    # anyway), so we omit that message at the default verbosity.
    image.CheckOptional(optional_list)
    if optional_list:
        tout.warning(
            "Image '%s' is missing optional external blobs but is still functional: %s\n" %
            (image.name, ' '.join([e.name for e in optional_list])))
        _ShowHelpForMissingBlobs(tout.WARNING, optional_list)

    if faked_optional_list:
        tout.info(
            "Image '%s' has faked optional external blobs but is still functional: %s\n" %
            (image.name, ' '.join([os.path.basename(e.GetDefaultFilename())
                                   for e in faked_optional_list])))

    missing_bintool_list = []
    image.check_missing_bintools(missing_bintool_list)
    if missing_bintool_list:
        tout.warning(
            "Image '%s' has missing bintools and is non-functional: %s\n" %
            (image.name, ' '.join([os.path.basename(bintool.name)
                                   for bintool in missing_bintool_list])))
    return any([missing_list, faked_required_list, missing_bintool_list])

def ProcessImage(image, update_fdt, write_map, get_contents=True,
                 allow_resize=True, allow_missing=False,
                 allow_fake_blobs=False):
    """Perform all steps for this image, including checking and writing it.

    This means that errors found with a later image will be reported after
    earlier images are already completed and written, but that does not seem
    important.

    Args:
        image: Image to process
        update_fdt: True to update the FDT with entry offsets, etc.
        write_map: True to write a map file
        get_contents: True to get the image contents from files, etc., False if
            the contents are already present
        allow_resize: True to allow entries to change size (this does a re-pack
            of the entries), False to raise an exception
        allow_missing: Allow blob_ext objects to be missing
        allow_fake_blobs: Allow blob_ext objects to be faked with dummy files

    Returns:
        True if one or more external blobs are missing or faked, or a required
        bintool is missing; False if all are present
    """
    if get_contents:
        image.SetAllowMissing(allow_missing)
        image.SetAllowFakeBlob(allow_fake_blobs)
        image.GetEntryContents()
    image.GetEntryOffsets()

    # We need to pack the entries to figure out where everything
    # should be placed. This sets the offset/size of each entry.
    # However, after packing we call ProcessEntryContents() which
    # may result in an entry changing size. In that case we need to
    # do another pass. Since the device tree often contains the
    # final offset/size information we try to make space for this in
    # AddMissingProperties() above. However, if the device tree is
    # compressed we cannot know this compressed size in advance,
    # since changing an offset from 0x100 to 0x104 (for example) can
    # alter the compressed size of the device tree. So we need a
    # third pass for this.
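    # Each pass below runs PackEntries() and then ProcessEntryContents(); if
    # any entry changed size, the layout is reset and packed again, up to
    # 'passes' attempts in total.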
    passes = 5
    for pack_pass in range(passes):
        try:
            image.PackEntries()
        except Exception as e:
            if write_map:
                fname = image.WriteMap()
                print("Wrote map file '%s' to show errors" % fname)
            raise
        image.SetImagePos()
        if update_fdt:
            image.SetCalculatedProperties()
            for dtb_item in state.GetAllFdts():
                dtb_item.Sync()
                dtb_item.Flush()
        image.WriteSymbols()
        sizes_ok = image.ProcessEntryContents()
        if sizes_ok:
            break
        image.ResetForPack()
    tout.info('Pack completed after %d pass(es)' % (pack_pass + 1))
    if not sizes_ok:
        image.Raise('Entries changed size after packing (tried %s passes)' %
                    passes)

    has_problems = CheckForProblems(image)

    image.BuildImage()
    if write_map:
        image.WriteMap()

    image.WriteAlternates()

    return has_problems

def Binman(args):
    """The main control code for binman

    This assumes that help and test options have already been dealt with. It
    deals with the core task of building images.

    Args:
        args: Command line arguments Namespace object
    """
    global Image
    global state

    if args.full_help:
        with importlib_resources.path('binman', 'README.rst') as readme:
            tools.print_full_help(str(readme))
        return 0

    # Put these here so that we can import this module without libfdt
    from binman.image import Image
    from binman import state

    tool_paths = []
    if args.toolpath:
        tool_paths += args.toolpath
    if args.tooldir:
        tool_paths.append(args.tooldir)
    tools.set_tool_paths(tool_paths or None)
    bintool.Bintool.set_tool_dir(args.tooldir)

    if args.cmd in ['ls', 'extract', 'replace', 'tool', 'sign']:
        try:
            tout.init(args.verbosity + 1)
            if args.cmd == 'replace':
                tools.prepare_output_dir(args.outdir, args.preserve)
            else:
                tools.prepare_output_dir(None)
            if args.cmd == 'ls':
                ListEntries(args.image, args.paths)

            if args.cmd == 'extract':
                ExtractEntries(args.image, args.filename, args.outdir, args.paths,
                               not args.uncompressed, args.format)

            if args.cmd == 'replace':
                ReplaceEntries(args.image, args.filename, args.indir, args.paths,
                               do_compress=not args.compressed,
                               allow_resize=not args.fix_size, write_map=args.map)

            if args.cmd == 'sign':
                SignEntries(args.image, args.file, args.key, args.algo, args.paths)

            if args.cmd == 'tool':
                if args.list:
                    bintool.Bintool.list_all()
                elif args.fetch:
                    if not args.bintools:
                        raise ValueError(
                            "Please specify bintools to fetch or 'all' or 'missing'")
                    bintool.Bintool.fetch_tools(bintool.FETCH_ANY,
                                                args.bintools)
                else:
                    raise ValueError("Invalid arguments to 'tool' subcommand")
        except:
            raise
        finally:
            tools.finalise_output_dir()
        return 0

    elf_params = None
    if args.update_fdt_in_elf:
        elf_params = args.update_fdt_in_elf.split(',')
        if len(elf_params) != 4:
            raise ValueError('Invalid args %s to --update-fdt-in-elf: expected infile,outfile,begin_sym,end_sym' %
                             elf_params)

    # Try to figure out which device tree contains our image description
    if args.dt:
        dtb_fname = args.dt
    else:
        board = args.board
        if not board:
            raise ValueError('Must provide a board to process (use -b <board>)')
        board_pathname = os.path.join(args.build_dir, board)
        dtb_fname = os.path.join(board_pathname, 'u-boot.dtb')
        if not args.indir:
            args.indir = ['.']
        args.indir.append(board_pathname)
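
    # Everything below is the image-build flow: compile and scan the
    # description, create the Image objects, then pack, check and write each
    # selected image, updating the output FDTs on the way.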
    try:
        tout.init(args.verbosity + 1)
        elf.debug = args.debug
        cbfs_util.VERBOSE = args.verbosity > tout.NOTICE
        state.use_fake_dtb = args.fake_dtb

        # Normally we replace the 'u-boot' etype with 'u-boot-expanded', etc.
        # When running tests this can be disabled using this flag. When not
        # updating the FDT in image, it is not needed by binman, but we use it
        # for consistency, so that the images look the same to U-Boot at
        # runtime.
        use_expanded = not args.no_expanded
        try:
            tools.set_input_dirs(args.indir)
            tools.prepare_output_dir(args.outdir, args.preserve)
            state.SetEntryArgs(args.entry_arg)
            state.SetThreads(args.threads)

            images = PrepareImagesAndDtbs(dtb_fname, args.image,
                                          args.update_fdt, use_expanded, args.indir)

            if args.test_section_timeout:
                # Set the first image to timeout, used in testThreadTimeout()
                images[list(images.keys())[0]].test_section_timeout = True
            invalid = False
            bintool.Bintool.set_missing_list(
                args.force_missing_bintools.split(',') if
                args.force_missing_bintools else None)

            # Create the directory here instead of Entry.check_fake_fname()
            # since that is called from a threaded context so different threads
            # may race to create the directory
            if args.fake_ext_blobs:
                entry.Entry.create_fake_dir()

            for image in images.values():
                invalid |= ProcessImage(image, args.update_fdt, args.map,
                                        allow_missing=args.allow_missing,
                                        allow_fake_blobs=args.fake_ext_blobs)

            # Write the updated FDTs to our output files
            for dtb_item in state.GetAllFdts():
                tools.write_file(dtb_item._fname, dtb_item.GetContents())

            if elf_params:
                data = state.GetFdtForEtype('u-boot-dtb').GetContents()
                elf.UpdateFile(*elf_params, data)

            bintool.Bintool.set_missing_list(None)

            # This can only be True if -M is provided, since otherwise binman
            # would have raised an error already
            if invalid:
                msg = 'Some images are invalid'
                if args.ignore_missing:
                    tout.warning(msg)
                else:
                    tout.error(msg)
                    return 103

            # Use this to debug the time taken to pack the image
            #state.TimingShow()
        finally:
            tools.finalise_output_dir()
    finally:
        tout.uninit()

    return 0