# SPDX-License-Identifier: GPL-2.0+
# Copyright (c) 2018 Google, Inc
# Written by Simon Glass <sjg@chromium.org>

"""Entry-type module for sections (groups of entries)

Sections are entries which can contain other entries. This allows hierarchical
images to be created.
"""

from collections import OrderedDict
import concurrent.futures
import re
import sys

from binman.entry import Entry
from binman import state
from dtoc import fdt_util
from u_boot_pylib import tools
from u_boot_pylib import tout
from u_boot_pylib.tools import to_hex_size


class Entry_section(Entry):
    """Entry that contains other entries

    A section is an entry which can contain other entries, thus allowing
    hierarchical images to be created. See 'Sections and hierarchical images'
    in the binman README for more information.

    The base implementation simply joins the various entries together, using
    various rules about alignment, etc.

    Subclassing
    ~~~~~~~~~~~

    This class can be subclassed to support other file formats which hold
    multiple entries, such as CBFS. To do this, override the following
    functions. The documentation here describes what your function should do.
    For example code, see etypes which subclass `Entry_section`, or `cbfs.py`
    for a more involved example::

        $ grep -l \(Entry_section tools/binman/etype/*.py

    ReadNode()
        Call `super().ReadNode()`, then read any special properties for the
        section. Then call `self.ReadEntries()` to read the entries.

        Binman calls this at the start when reading the image description.

    ReadEntries()
        Read in the subnodes of the section. This may involve creating entries
        of a particular etype automatically, as well as reading any special
        properties in the entries. For each entry, entry.ReadNode() should be
        called, to read the basic entry properties. The properties should be
        added to `self._entries[]`, in the correct order, with a suitable name.

        Binman calls this at the start when reading the image description.

    BuildSectionData(required)
        Create the custom file format that you want and return it as bytes.
        This likely sets up a file header, then loops through the entries,
        adding them to the file. For each entry, call `entry.GetData()` to
        obtain the data. If that returns None, and `required` is False, then
        this method must give up and return None. But if `required` is True
        then it should assume that all data is valid.

        Binman calls this when packing the image, to find out the size of
        everything. It is called again at the end when building the final
        image.

    SetImagePos(image_pos):
        Call `super().SetImagePos(image_pos)`, then set the `image_pos` values
        for each of the entries. This should use the custom file format to find
        the `start offset` (and `image_pos`) of each entry. If the file format
        uses compression in such a way that there is no offset available (other
        than reading the whole file and decompressing it), then the offsets for
        affected entries can remain unset (`None`). The size should also be set
        if possible.

        Binman calls this after the image has been packed, to update the
        location that all the entries ended up at.

    ReadChildData(child, decomp, alt_format):
        The default version of this may be good enough, if you are able to
        implement SetImagePos() correctly. But that is a bit of a bypass, so
        you can override this method to read from your custom file format. It
        should read the entire entry containing the custom file using
        `super().ReadData(True)`, then parse the file to get the data for the
        given child, then return that data.

        If your file format supports compression, the `decomp` argument tells
        you whether to return the compressed data (`decomp` is False) or to
        uncompress it first, then return the uncompressed data (`decomp` is
        True). This is used by the `binman extract -U` option.

        If your entry supports alternative formats, the alt_format provides the
        alternative format that the user has selected. Your function should
        return data in that format. This is used by the 'binman extract -l'
        option.

        Binman calls this when reading in an image, in order to populate all
        the entries with the data from that image (`binman ls`).

    WriteChildData(child):
        Binman calls this after `child.data` is updated, to inform the custom
        file format about this, in case it needs to do updates.

        The default version of this does nothing and probably needs to be
        overridden for the 'binman replace' command to work. Your version
        should use `child.data` to update the data for that child in the
        custom file format.

        Binman calls this when updating an image that has been read in and in
        particular to update the data for a particular entry (`binman replace`)
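
    A rough sketch of a subclass is shown below. The etype name, the 8-byte
    length header and the back-to-back placement are purely illustrative
    (there is no such etype); see the grep command above for real examples::

        class Entry_myformat(Entry_section):
            # Hypothetical format: an 8-byte little-endian length header
            # followed by the entry contents, concatenated in order
            def BuildSectionData(self, required):
                payload = bytearray()
                for entry in self._entries.values():
                    data = entry.GetData(required)
                    if data is None:
                        # Data not available yet; only valid when not required
                        return None
                    payload += data
                # assumes 'import struct' at the top of the module
                return struct.pack('<Q', len(payload)) + payload

            def SetImagePos(self, image_pos):
                super().SetImagePos(image_pos)
                # Entries sit back-to-back after the 8-byte header
                upto = 8
                for entry in self._entries.values():
                    entry.offset = upto
                    entry.image_pos = self.image_pos + upto
                    upto += entry.size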

    Properties / Entry arguments
    ~~~~~~~~~~~~~~~~~~~~~~~~~~~~

    See :ref:`develop/package/binman:Image description format` for more
    information.

    align-default
        Default alignment for this section, if no alignment is given in the
        entry

    pad-byte
        Pad byte to use when padding

    sort-by-offset
        True if entries should be sorted by offset, False if they must be
        in-order in the device tree description

    end-at-4gb
        Used to build an x86 ROM which ends at 4GB (2^32)

    name-prefix
        Adds a prefix to the name of every entry in the section when writing
        out the map

    skip-at-start
        Number of bytes before the first entry starts. These effectively
        adjust the starting offset of entries. For example, if this is 16,
        then the first entry would start at 16. An entry with offset = 20
        would in fact be written at offset 4 in the image file, since the
        first 16 bytes are skipped when writing.
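
        For example, an x86 image using 'end-at-4gb' with a size of 0x100000
        (1MB) gets a skip-at-start of 0xfff00000 (4GB - 1MB), so an entry at
        offset 0xffff0000 is written at offset 0xf0000 within the image file.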

    filename
        filename to write the unpadded section contents to within the output
        directory (None to skip this).

    Since a section is also an entry, it inherits all the properties of
    entries too.

    Note that the `allow_missing` member controls whether this section permits
    external blobs to be missing their contents. An image will still be
    produced, although of course it will not work; this is useful for making
    sure that Continuous Integration systems can build without the binaries
    being available. This is set by the `SetAllowMissing()` method, if
    `--allow-missing` is passed to binman.
    """
    def __init__(self, section, etype, node, test=False):
        if not test:
            super().__init__(section, etype, node)
        self._entries = OrderedDict()
        self._pad_byte = 0
        self._sort = False
        self._skip_at_start = None
        self._end_4gb = False
        self._ignore_missing = False
        self._filename = None

    def IsSpecialSubnode(self, node):
        """Check if a node is a special one used by the section itself

        Some nodes are used for hashing / signatures and do not add entries to
        the actual section.

        Returns:
            bool: True if the node is a special one, else False
        """
        return node.name.startswith('hash') or node.name.startswith('signature')

    def ReadNode(self):
        """Read properties from the section node"""
        super().ReadNode()
        self._pad_byte = fdt_util.GetInt(self._node, 'pad-byte', 0)
        self._sort = fdt_util.GetBool(self._node, 'sort-by-offset')
        self._end_4gb = fdt_util.GetBool(self._node, 'end-at-4gb')
        self._skip_at_start = fdt_util.GetInt(self._node, 'skip-at-start')
        if self._end_4gb:
            if not self.size:
                self.Raise("Section size must be provided when using end-at-4gb")
            if self._skip_at_start is not None:
                self.Raise("Provide either 'end-at-4gb' or 'skip-at-start'")
            else:
                self._skip_at_start = 0x100000000 - self.size
        else:
            if self._skip_at_start is None:
                self._skip_at_start = 0
        self._name_prefix = fdt_util.GetString(self._node, 'name-prefix')
        self.align_default = fdt_util.GetInt(self._node, 'align-default', 0)
        self._filename = fdt_util.GetString(self._node, 'filename',
                                            self._filename)

        self.ReadEntries()

    def ReadEntries(self):
        for node in self._node.subnodes:
            if self.IsSpecialSubnode(node):
                continue
            entry = Entry.Create(self, node,
                                 expanded=self.GetImage().use_expanded,
                                 missing_etype=self.GetImage().missing_etype)
            entry.ReadNode()
            entry.SetPrefix(self._name_prefix)
            self._entries[node.name] = entry

    def _Raise(self, msg):
        """Raises an error for this section

        Args:
            msg (str): Error message to use in the raise string
        Raises:
            ValueError: always
        """
        raise ValueError("Section '%s': %s" % (self._node.path, msg))

    def GetFdts(self):
        fdts = {}
        for entry in self._entries.values():
            fdts.update(entry.GetFdts())
        return fdts

    def ProcessFdt(self, fdt):
        """Allow entries to adjust the device tree

        Some entries need to adjust the device tree for their purposes. This
        may involve adding or deleting properties.
        """
        todo = self._entries.values()
        for passnum in range(3):
            next_todo = []
            for entry in todo:
                if not entry.ProcessFdt(fdt):
                    next_todo.append(entry)
            todo = next_todo
            if not todo:
                break
        if todo:
            self.Raise('Internal error: Could not complete processing of Fdt: remaining %s' %
                       todo)
        return True

    def gen_entries(self):
        super().gen_entries()
        for entry in self._entries.values():
            entry.gen_entries()

    def AddMissingProperties(self, have_image_pos):
        """Add new properties to the device tree as needed for this entry"""
        super().AddMissingProperties(have_image_pos)
        if self.compress != 'none':
            have_image_pos = False
        for entry in self._entries.values():
            entry.AddMissingProperties(have_image_pos)

    def ObtainContents(self, fake_size=0, skip_entry=None):
        return self.GetEntryContents(skip_entry=skip_entry)

    def GetPaddedDataForEntry(self, entry, entry_data):
        """Get the data for an entry including any padding

        Gets the entry data and uses the section pad-byte value to add padding
        before and after as defined by the pad-before and pad-after properties.
        This does not consider alignment.
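
        For example, with pad-before = 2, pad-after = 1, an entry size of 16
        and 10 bytes of entry data, the result is 2 pad bytes, the 10 data
        bytes, 1 more pad byte, then 3 further pad bytes to bring the total up
        to the 16-byte entry size.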

        Args:
            entry: Entry to check
            entry_data: Data for the entry, False if it is null

        Returns:
            Contents of the entry along with any pad bytes before and
            after it (bytes)
        """
        pad_byte = (entry._pad_byte if isinstance(entry, Entry_section)
                    else self._pad_byte)

        data = bytearray()
        # Handle padding before the entry
        if entry.pad_before:
            data += tools.get_bytes(self._pad_byte, entry.pad_before)

        # Add in the actual entry data
        data += entry_data

        # Handle padding after the entry
        if entry.pad_after:
            data += tools.get_bytes(self._pad_byte, entry.pad_after)

        if entry.size:
            data += tools.get_bytes(pad_byte, entry.size - len(data))

        self.Detail('GetPaddedDataForEntry: size %s' % to_hex_size(self.data))

        return data

    def BuildSectionData(self, required):
        """Build the contents of a section

        This places all entries at the right place, dealing with padding before
        and after entries. It does not do padding for the section itself (the
        pad-before and pad-after properties in the section items) since that is
        handled by the parent section.

        This should be overridden by subclasses which want to build their own
        data structure for the section.

        Args:
            required: True if the data must be present, False if it is OK to
                return None

        Returns:
            Contents of the section (bytes)
        """
        section_data = bytearray()

        for entry in self._entries.values():
            entry_data = entry.GetData(required)

            # This can happen when this section is referenced from a collection
            # earlier in the image description. See testCollectionSection().
            if not required and entry_data is None:
                return None

            entry_data_final = entry_data
            if entry_data is None:
                pad_byte = (entry._pad_byte if isinstance(entry, Entry_section)
                            else self._pad_byte)
                entry_data_final = tools.get_bytes(self._pad_byte, entry.size)

            data = self.GetPaddedDataForEntry(entry, entry_data_final)
            # Handle empty space before the entry
            pad = (entry.offset or 0) - self._skip_at_start - len(section_data)
            if pad > 0:
                section_data += tools.get_bytes(self._pad_byte, pad)

            # Add in the actual entry data
            if entry.overlap:
                end_offset = entry.offset + entry.size
                if end_offset > len(section_data):
                    entry.Raise("Offset %#x (%d) ending at %#x (%d) must overlap with existing entries" %
                                (entry.offset, entry.offset, end_offset,
                                 end_offset))
                # Don't write anything for null entries
                if entry_data is not None:
                    section_data = (section_data[:entry.offset] + data +
                                    section_data[entry.offset + entry.size:])
            else:
                section_data += data

        self.Detail('GetData: %d entries, total size %#x' %
                    (len(self._entries), len(section_data)))
        return self.CompressData(section_data)

    def GetPaddedData(self, data=None):
        """Get the data for a section including any padding

        Gets the section data and uses the parent section's pad-byte value to
        add padding before and after as defined by the pad-before and pad-after
        properties. If this is a top-level section (i.e. an image), this is the
        same as GetData(), since padding is not supported.

        This does not consider alignment.

        Returns:
            Contents of the section along with any pad bytes before and
            after it (bytes)
        """
        section = self.section or self
        if data is None:
            data = self.GetData()
        return section.GetPaddedDataForEntry(self, data)

    def GetData(self, required=True):
        """Get the contents of an entry

        This builds the contents of the section, stores this as the contents of
        the section and returns it. If the section has a filename, the data is
        written there also.

        Args:
            required: True if the data must be present, False if it is OK to
                return None

        Returns:
            bytes content of the section, made up of all of its subentries.
            This excludes any padding. If the section is compressed, the
            compressed data is returned
        """
        if not self.build_done:
            data = self.BuildSectionData(required)
            if data is None:
                return None
            self.SetContents(data)
        else:
            data = self.data
        if self._filename:
            tools.write_file(tools.get_output_filename(self._filename), data)
        return data

    def GetOffsets(self):
        """Handle entries that want to set the offset/size of other entries

        This calls each entry's GetOffsets() method. If it returns a list
        of entries to update, it updates them.
        """
        self.GetEntryOffsets()
        return {}

    def ResetForPack(self):
        """Reset offset/size fields so that packing can be done again"""
        super().ResetForPack()
        for entry in self._entries.values():
            entry.ResetForPack()

    def Pack(self, offset):
        """Pack all entries into the section"""
        self._PackEntries()
        if self._sort:
            self._SortEntries()
        self._extend_entries()

        if self.build_done:
            self.size = None
        else:
            data = self.BuildSectionData(True)
            self.SetContents(data)

        self.CheckSize()

        offset = super().Pack(offset)
        self.CheckEntries()
        return offset

    def _PackEntries(self):
        """Pack all entries into the section"""
        offset = self._skip_at_start
        for entry in self._entries.values():
            offset = entry.Pack(offset)
        return offset

    def _extend_entries(self):
        """Extend any entries that are permitted to"""
        exp_entry = None
        for entry in self._entries.values():
            if exp_entry:
                exp_entry.extend_to_limit(entry.offset)
                exp_entry = None
            if entry.extend_size:
                exp_entry = entry
        if exp_entry:
            exp_entry.extend_to_limit(self.size)

    def _SortEntries(self):
        """Sort entries by offset"""
        entries = sorted(self._entries.values(), key=lambda entry: entry.offset)
        self._entries.clear()
        for entry in entries:
            self._entries[entry._node.name] = entry

    def CheckEntries(self):
        """Check that entries do not overlap or extend outside the section"""
        max_size = self.size if self.uncomp_size is None else self.uncomp_size

        offset = 0
        prev_name = 'None'
        for entry in self._entries.values():
            entry.CheckEntries()
            if (entry.offset < self._skip_at_start or
                    entry.offset + entry.size > self._skip_at_start +
                    max_size):
                entry.Raise('Offset %#x (%d) size %#x (%d) is outside the '
                            "section '%s' starting at %#x (%d) "
                            'of size %#x (%d)' %
                            (entry.offset, entry.offset, entry.size, entry.size,
                             self._node.path, self._skip_at_start,
                             self._skip_at_start, max_size, max_size))
            if not entry.overlap:
                if entry.offset < offset and entry.size:
                    entry.Raise("Offset %#x (%d) overlaps with previous entry '%s' ending at %#x (%d)" %
                                (entry.offset, entry.offset, prev_name, offset,
                                 offset))
            offset = entry.offset + entry.size
            prev_name = entry.GetPath()

    def WriteSymbols(self, section):
        """Write symbol values into binary files for access at run time"""
        for entry in self._entries.values():
            entry.WriteSymbols(self)

    def SetCalculatedProperties(self):
        super().SetCalculatedProperties()
        for entry in self._entries.values():
            entry.SetCalculatedProperties()

    def SetImagePos(self, image_pos):
        super().SetImagePos(image_pos)
        if self.compress == 'none':
            for entry in self._entries.values():
                entry.SetImagePos(image_pos + self.offset)

    def ProcessContents(self):
        sizes_ok_base = super(Entry_section, self).ProcessContents()
        sizes_ok = True
        for entry in self._entries.values():
            if not entry.ProcessContents():
                sizes_ok = False
        return sizes_ok and sizes_ok_base

    def WriteMap(self, fd, indent):
        """Write a map of the section to a .map file

        Args:
            fd: File to write the map to
        """
        Entry.WriteMapLine(fd, indent, self.name, self.offset or 0,
                           self.size, self.image_pos)
        for entry in self._entries.values():
            entry.WriteMap(fd, indent + 1)

    def GetEntries(self):
        return self._entries

    def GetContentsByPhandle(self, phandle, source_entry, required):
        """Get the data contents of an entry specified by a phandle

        This uses a phandle to look up a node and find the entry
        associated with it. Then it returns the contents of that entry.

        The node must be a direct subnode of this section.

        Args:
            phandle: Phandle to look up (integer)
            source_entry: Entry containing that phandle (used for error
                reporting)
            required: True if the data must be present, False if it is OK to
                return None

        Returns:
            data from associated entry (as a string), or None if not found
        """
        node = self._node.GetFdt().LookupPhandle(phandle)
        if not node:
            source_entry.Raise("Cannot find node for phandle %d" % phandle)
        entry = self.FindEntryByNode(node)
        if not entry:
            source_entry.Raise("Cannot find entry for node '%s'" % node.name)
        return entry.GetData(required)

    def LookupEntry(self, entries, sym_name, msg):
        """Look up the entry for an ELF symbol

        Args:
            entries (dict): entries to search:
                key: entry name
                value: Entry object
            sym_name: Symbol name in the ELF file to look up in the format
                _binman_<entry>_prop_<property> where <entry> is the name of
                the entry and <property> is the property to find (e.g.
                _binman_u_boot_prop_offset). As a special case, you can append
                _any to <entry> to have it search for any matching entry. E.g.
                _binman_u_boot_any_prop_offset will match entries called
                u-boot, u-boot-img and u-boot-nodtb)
            msg: Message to display if an error occurs

        Returns:
            tuple:
                Entry: entry object that was found
                str: name used to search for entries (uses '-' instead of the
                    '_' used by the symbol name)
                str: property name the symbol refers to, e.g. 'image_pos'

        Raises:
            ValueError: the symbol name cannot be decoded, e.g. does not have
                a '_binman_' prefix
        """
        m = re.match(r'^_binman_(\w+)_prop_(\w+)$', sym_name)
        if not m:
            raise ValueError("%s: Symbol '%s' has invalid format" %
                             (msg, sym_name))
        entry_name, prop_name = m.groups()
        entry_name = entry_name.replace('_', '-')
        entry = entries.get(entry_name)
        if not entry:
            if entry_name.endswith('-any'):
                root = entry_name[:-4]
                for name in entries:
                    if name.startswith(root):
                        rest = name[len(root):]
                        if rest in ['', '-elf', '-img', '-nodtb']:
                            entry = entries[name]
        return entry, entry_name, prop_name

    def LookupSymbol(self, sym_name, optional, msg, base_addr, entries=None):
        """Look up a symbol in an ELF file

        Looks up a symbol in an ELF file. Only entry types which come from an
        ELF image can be used by this function.

        At present the only entry properties supported are:
            offset
            image_pos - 'base_addr' is added if this is not an end-at-4gb image
            size

        Args:
            sym_name: Symbol name in the ELF file to look up in the format
                _binman_<entry>_prop_<property> where <entry> is the name of
                the entry and <property> is the property to find (e.g.
                _binman_u_boot_prop_offset). As a special case, you can append
                _any to <entry> to have it search for any matching entry. E.g.
                _binman_u_boot_any_prop_offset will match entries called
                u-boot, u-boot-img and u-boot-nodtb)
            optional: True if the symbol is optional. If False this function
                will raise if the symbol is not found
            msg: Message to display if an error occurs
            base_addr: Base address of image. This is added to the returned
                image_pos in most cases so that the returned position indicates
                where the targeted entry/binary has actually been loaded. But
                if end-at-4gb is used, this is not done, since the binary is
                already assumed to be linked to the ROM position and using
                execute-in-place (XIP).

        Returns:
            Value that should be assigned to that symbol, or None if it was
            optional and not found

        Raises:
            ValueError if the symbol is invalid or not found, or references a
            property which is not supported
        """
        if not entries:
            entries = self._entries
        entry, entry_name, prop_name = self.LookupEntry(entries, sym_name, msg)
        if not entry:
            err = ("%s: Entry '%s' not found in list (%s)" %
                   (msg, entry_name, ','.join(entries.keys())))
            if optional:
                print('Warning: %s' % err, file=sys.stderr)
                return None
            raise ValueError(err)
        if prop_name == 'offset':
            return entry.offset
        elif prop_name == 'image_pos':
            value = entry.image_pos
            if not self.GetImage()._end_4gb:
                value += base_addr
            return value
        if prop_name == 'size':
            return entry.size
        else:
            raise ValueError("%s: No such property '%s'" % (msg, prop_name))

    def GetRootSkipAtStart(self):
        """Get the skip-at-start value for the top-level section

        This is used to find out the starting offset for the root section that
        contains this section. If this is a top-level section then it returns
        the skip-at-start offset for this section.

        This is used to get the absolute position of a section within the
        image.

        Returns:
            Integer skip-at-start value for the root section containing this
            section
        """
        if self.section:
            return self.section.GetRootSkipAtStart()
        return self._skip_at_start

    def GetStartOffset(self):
        """Get the start offset for this section

        Returns:
            The first available offset in this section (typically 0)
        """
        return self._skip_at_start

    def GetImageSize(self):
        """Get the size of the image containing this section

        Returns:
            Image size as an integer number of bytes, which may be None if the
            image size is dynamic and its sections have not yet been packed
        """
        return self.GetImage().size

    def FindEntryType(self, etype):
        """Find an entry type in the section

        Args:
            etype: Entry type to find
        Returns:
            entry matching that type, or None if not found
        """
        for entry in self._entries.values():
            if entry.etype == etype:
                return entry
        return None

    def GetEntryContents(self, skip_entry=None):
        """Call ObtainContents() for each entry in the section

        Note that this may set entry.absent to True if the entry is not
        actually needed
        """
        def _CheckDone(entry):
            if entry != skip_entry:
                if entry.ObtainContents() is False:
                    next_todo.append(entry)
            return entry

        todo = self._entries.values()
        for passnum in range(3):
            threads = state.GetThreads()
            next_todo = []

            if threads == 0:
                for entry in todo:
                    _CheckDone(entry)
            else:
                with concurrent.futures.ThreadPoolExecutor(
                        max_workers=threads) as executor:
                    future_to_data = {
                        entry: executor.submit(_CheckDone, entry)
                        for entry in todo}
                    timeout = 60
                    if self.GetImage().test_section_timeout:
                        timeout = 0
                    done, not_done = concurrent.futures.wait(
                        future_to_data.values(), timeout=timeout)
                    # Make sure we check the result, so any exceptions are
                    # generated. Check the results in entry order, since tests
                    # may expect earlier entries to fail first.
                    for entry in todo:
                        job = future_to_data[entry]
                        job.result()
                    if not_done:
                        self.Raise('Timed out obtaining contents')

            todo = next_todo
            if not todo:
                break

        if todo:
            self.Raise('Internal error: Could not complete processing of contents: remaining %s' %
                       todo)
        return True

    def drop_absent(self):
        """Drop entries which are absent"""
        self._entries = {n: e for n, e in self._entries.items() if not e.absent}

    def _SetEntryOffsetSize(self, name, offset, size):
        """Set the offset and size of an entry

        Args:
            name: Entry name to update
            offset: New offset, or None to leave alone
            size: New size, or None to leave alone
        """
        entry = self._entries.get(name)
        if not entry:
            self._Raise("Unable to set offset/size for unknown entry '%s'" %
                        name)
        entry.SetOffsetSize(self._skip_at_start + offset if offset is not None
                            else None, size)

    def GetEntryOffsets(self):
        """Handle entries that want to set the offset/size of other entries

        This calls each entry's GetOffsets() method. If it returns a list
        of entries to update, it updates them.
        """
        for entry in self._entries.values():
            offset_dict = entry.GetOffsets()
            for name, info in offset_dict.items():
                self._SetEntryOffsetSize(name, *info)

    def CheckSize(self):
        contents_size = len(self.data)

        size = self.size
        if not size:
            data = self.GetPaddedData(self.data)
            size = len(data)
            size = tools.align(size, self.align_size)

        if self.size and contents_size > self.size:
            self._Raise("contents size %#x (%d) exceeds section size %#x (%d)" %
                        (contents_size, contents_size, self.size, self.size))
        if not self.size:
            self.size = size
        if self.size != tools.align(self.size, self.align_size):
            self._Raise("Size %#x (%d) does not match align-size %#x (%d)" %
                        (self.size, self.size, self.align_size,
                         self.align_size))
        return size

    def ListEntries(self, entries, indent):
        """List the files in the section"""
        Entry.AddEntryInfo(entries, indent, self.name, self.etype, self.size,
                           self.image_pos, None, self.offset, self)
        for entry in self._entries.values():
            entry.ListEntries(entries, indent + 1)

    def LoadData(self, decomp=True):
        for entry in self._entries.values():
            entry.LoadData(decomp)
        data = self.ReadData(decomp)
        self.contents_size = len(data)
        self.ProcessContentsUpdate(data)
        self.Detail('Loaded data')

    def GetImage(self):
        """Get the image containing this section

        Note that a top-level section is actually an Image, so this function
        may return self.

        Returns:
            Image object containing this section
        """
        if not self.section:
            return self
        return self.section.GetImage()

    def GetSort(self):
        """Check if the entries in this section will be sorted

        Returns:
            True if to be sorted, False if entries will be left in the order
            they appear in the device tree
        """
        return self._sort

    def ReadData(self, decomp=True, alt_format=None):
        tout.info("ReadData path='%s'" % self.GetPath())
        parent_data = self.section.ReadData(True, alt_format)
        offset = self.offset - self.section._skip_at_start
        data = parent_data[offset:offset + self.size]
        tout.info(
            '%s: Reading data from offset %#x-%#x (real %#x), size %#x, got %#x' %
            (self.GetPath(), self.offset, self.offset + self.size, offset,
             self.size, len(data)))
        return data

    def ReadChildData(self, child, decomp=True, alt_format=None):
        tout.debug(f"ReadChildData for child '{child.GetPath()}'")
        parent_data = self.ReadData(True, alt_format)
        offset = child.offset - self._skip_at_start
        tout.debug("Extract for child '%s': offset %#x, skip_at_start %#x, result %#x" %
                   (child.GetPath(), child.offset, self._skip_at_start, offset))
        data = parent_data[offset:offset + child.size]
        if decomp:
            indata = data
            data = child.DecompressData(indata)
            if child.uncomp_size:
                tout.info("%s: Decompressing data size %#x with algo '%s' to data size %#x" %
                          (child.GetPath(), len(indata), child.compress,
                           len(data)))
        if alt_format:
            new_data = child.GetAltFormat(data, alt_format)
            if new_data is not None:
                data = new_data
        return data

    def WriteData(self, data, decomp=True):
        ok = super().WriteData(data, decomp)

        # The section contents are now fixed and cannot be rebuilt from the
        # containing entries.
        self.mark_build_done()
        return ok

    def WriteChildData(self, child):
        return super().WriteChildData(child)

    def SetAllowMissing(self, allow_missing):
        """Set whether a section allows missing external blobs

        Args:
            allow_missing: True if allowed, False if not allowed
        """
        self.allow_missing = allow_missing
        for entry in self._entries.values():
            entry.SetAllowMissing(allow_missing)

    def SetAllowFakeBlob(self, allow_fake):
        """Set whether a section allows fake blobs to be created

        Args:
            allow_fake: True if allowed, False if not allowed
        """
        super().SetAllowFakeBlob(allow_fake)
        for entry in self._entries.values():
            entry.SetAllowFakeBlob(allow_fake)

    def CheckMissing(self, missing_list):
        """Check if any entries in this section have missing external blobs

        If there are missing (non-optional) blobs, the entries are added to the
        list

        Args:
            missing_list: List of Entry objects to be added to
        """
        for entry in self._entries.values():
            entry.CheckMissing(missing_list)

    def CheckFakedBlobs(self, faked_blobs_list):
        """Check if any entries in this section have faked external blobs

        If there are faked blobs, the entries are added to the list

        Args:
            faked_blobs_list: List of Entry objects to be added to
        """
        for entry in self._entries.values():
            entry.CheckFakedBlobs(faked_blobs_list)

    def CheckOptional(self, optional_list):
        """Check the section for missing but optional external blobs

        If there are missing (optional) blobs, the entries are added to the
        list

        Args:
            optional_list (list): List of Entry objects to be added to
        """
        for entry in self._entries.values():
            entry.CheckOptional(optional_list)

    def check_missing_bintools(self, missing_list):
        """Check if any entries in this section have missing bintools

        If there are missing bintools, these are added to the list

        Args:
            missing_list: List of Bintool objects to be added to
        """
        super().check_missing_bintools(missing_list)
        for entry in self._entries.values():
            entry.check_missing_bintools(missing_list)

    def _CollectEntries(self, entries, entries_by_name, add_entry):
        """Collect all the entries in a section

        This builds up a dict of entries in this section and all subsections.
        Entries are indexed by path and by name.

        Since all paths are unique, entries will not have any conflicts.
        However entries_by_name may have conflicts if two entries have the
        same name (e.g. with different parent sections). In this case, an
        entry at a higher level in the hierarchy will win over a lower-level
        entry.

        Args:
            entries: dict to put entries:
                key: entry path
                value: Entry object
            entries_by_name: dict to put entries
                key: entry name
                value: Entry object
            add_entry: Entry to add
        """
        entries[add_entry.GetPath()] = add_entry
        to_add = add_entry.GetEntries()
        if to_add:
            for entry in to_add.values():
                entries[entry.GetPath()] = entry
            for entry in to_add.values():
                self._CollectEntries(entries, entries_by_name, entry)
        entries_by_name[add_entry.name] = add_entry

    def MissingArgs(self, entry, missing):
        """Report a missing argument, if enabled

        For entries which require arguments, this reports an error if some are
        missing. If missing entries are being ignored (e.g. because we read the
        entry from an image rather than creating it), this function does
        nothing.

        Args:
            entry (Entry): Entry to raise the error on
            missing (list of str): List of missing properties / entry args,
                each a string
        """
        if not self._ignore_missing:
            missing = ', '.join(missing)
            entry.Raise(f'Missing required properties/entry args: {missing}')

    def CheckAltFormats(self, alt_formats):
        for entry in self._entries.values():
            entry.CheckAltFormats(alt_formats)

    def AddBintools(self, btools):
        super().AddBintools(btools)
        for entry in self._entries.values():
            entry.AddBintools(btools)

    def read_elf_segments(self):
        entries = self.GetEntries()

        # If the section only has one entry, see if it can provide ELF segments
        if len(entries) == 1:
            for entry in entries.values():
                return entry.read_elf_segments()
        return None