1# SPDX-License-Identifier: GPL-2.0+
2# Copyright (c) 2016 Google, Inc
3#
4# Base class for all entries
5#
6
7from collections import namedtuple
8import importlib
9import os
10import pathlib
11import sys
12import time
13
14from binman import bintool
15from binman import elf
16from dtoc import fdt_util
17from u_boot_pylib import tools
18from u_boot_pylib.tools import to_hex, to_hex_size
19from u_boot_pylib import tout
20
21modules = {}
22
23# This is imported if needed
24state = None
25
26# An argument which can be passed to entries on the command line, in lieu of
27# device-tree properties.
28EntryArg = namedtuple('EntryArg', ['name', 'datatype'])
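# For example (hypothetical usage), an etype might declare
# EntryArg('atf-bl31-path', str) so that the value can be supplied with
# '-a atf-bl31-path=...' on the binman command line instead of via a
# device-tree property.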
29
30# Information about an entry for use when displaying summaries
31EntryInfo = namedtuple('EntryInfo', ['indent', 'name', 'etype', 'size',
32                                     'image_pos', 'uncomp_size', 'offset',
33                                     'entry'])
34
35class Entry(object):
36    """An Entry in the section
37
38    An entry corresponds to a single node in the device-tree description
39    of the section. Each entry ends up being a part of the final section.
40    Entries can be placed either right next to each other, or with padding
41    between them. The type of the entry determines the data that is in it.
42
43    This class is not used by itself. All entry objects are subclasses of
44    Entry.
45
46    Attributes:
47        section: Section object containing this entry
48        node: The node that created this entry
49        offset: Offset of entry within the section, None if not known yet (in
50            which case it will be calculated by Pack())
51        size: Entry size in bytes, None if not known
52        min_size: Minimum entry size in bytes
53        pre_reset_size: size as it was before ResetForPack(). This allows us to
54            keep track of the size we started with and detect size changes
55        uncomp_size: Size of uncompressed data in bytes, if the entry is
56            compressed, else None
57        contents_size: Size of contents in bytes, 0 by default
58        align: Entry start offset alignment relative to the start of the
59            containing section, or None
60        align_size: Entry size alignment, or None
61        align_end: Entry end offset alignment relative to the start of the
62            containing section, or None
63        pad_before: Number of pad bytes before the contents when it is placed
64            in the containing section, 0 if none. The pad bytes become part of
65            the entry.
66        pad_after: Number of pad bytes after the contents when it is placed in
67            the containing section, 0 if none. The pad bytes become part of
68            the entry.
69        data: Contents of entry (string of bytes). This does not include
70            padding created by pad_before or pad_after. If the entry is
71            compressed, this contains the compressed data.
72        uncomp_data: Original uncompressed data, if this entry is compressed,
73            else None
        compress: Compression algorithm used (e.g. 'lz4'), 'none' if none
75        orig_offset: Original offset value read from node
76        orig_size: Original size value read from node
77        missing: True if this entry is missing its contents. Note that if it is
78            optional, this entry will not appear in the list generated by
79            entry.CheckMissing() since it is considered OK for it to be missing.
80        allow_missing: Allow children of this entry to be missing (used by
81            subclasses such as Entry_section)
82        allow_fake: Allow creating a dummy fake file if the blob file is not
83            available. This is mainly used for testing.
84        external: True if this entry contains an external binary blob
85        bintools: Bintools used by this entry (only populated for Image)
86        missing_bintools: List of missing bintools for this entry
87        update_hash: True if this entry's "hash" subnode should be
88            updated with a hash of the entry contents
        comp_bintool: Bintool used to compress and decompress data
90        fake_fname: Fake filename, if one was created, else None
91        faked (bool): True if the entry is absent and faked
        required_props (list of str): Properties which must be present. This
            can be added to by subclasses
94        elf_fname (str): Filename of the ELF file, if this entry holds an ELF
95            file, or is a binary file produced from an ELF file
96        auto_write_symbols (bool): True to write ELF symbols into this entry's
97            contents
98        absent (bool): True if this entry is absent. This can be controlled by
99            the entry itself, allowing it to vanish in certain circumstances.
100            An absent entry is removed during processing so that it does not
101            appear in the map
102        optional (bool): True if this entry contains an optional external blob
103        overlap (bool): True if this entry overlaps with others
104        preserve (bool): True if this entry should be preserved when updating
105            firmware. This means that it will not be changed by the update.
106            This is just a signal: enforcement of this is up to the updater.
107            This flag does not automatically propagate down to child entries.
108        build_done (bool): Indicates that the entry data has been built and does
109            not need to be done again. This is only used with 'binman replace',
110            to stop sections from being rebuilt if their entries have not been
111            replaced
112        symbols_base (int): Use this value as the assumed load address of the
113            target entry, when calculating the symbol value. If None, this is
114            0 for blobs and the image-start address for ELF files
115    """
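    # Class-level directory used to hold fake blob files, if any are created
    # (see create_fake_dir())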
116    fake_dir = None
117
118    def __init__(self, section, etype, node, name_prefix='',
119                 auto_write_symbols=False):
120        # Put this here to allow entry-docs and help to work without libfdt
121        global state
122        from binman import state
123
124        self.section = section
125        self.etype = etype
126        self._node = node
127        self.name = node and (name_prefix + node.name) or 'none'
128        self.offset = None
129        self.size = None
130        self.min_size = 0
131        self.pre_reset_size = None
132        self.uncomp_size = None
133        self.data = None
134        self.uncomp_data = None
135        self.contents_size = 0
136        self.align = None
137        self.align_size = None
138        self.align_end = None
139        self.pad_before = 0
140        self.pad_after = 0
141        self.offset_unset = False
142        self.image_pos = None
143        self.extend_size = False
144        self.compress = 'none'
145        self.missing = False
146        self.faked = False
147        self.external = False
148        self.allow_missing = False
149        self.allow_fake = False
150        self.bintools = {}
151        self.missing_bintools = []
152        self.update_hash = True
153        self.fake_fname = None
154        self.required_props = []
155        self.comp_bintool = None
156        self.elf_fname = None
157        self.auto_write_symbols = auto_write_symbols
158        self.absent = False
159        self.optional = False
160        self.overlap = False
161        self.elf_base_sym = None
162        self.offset_from_elf = None
163        self.preserve = False
164        self.build_done = False
165        self.no_write_symbols = False
166        self.symbols_base = None
167
168    @staticmethod
169    def FindEntryClass(etype, expanded):
170        """Look up the entry class for a node.
171
        Args:
            etype: Entry type to use
            expanded: Use the expanded version of etype
177
178        Returns:
179            The entry class object if found, else None if not found and expanded
180                is True, else a tuple:
181                    module name that could not be found
182                    exception received
183        """
184        # Convert something like 'u-boot@0' to 'u_boot' since we are only
185        # interested in the type.
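        # For example an etype of 'u-boot-spl@0' maps to module 'u_boot_spl'
        # (binman/etype/u_boot_spl.py); with expanded=True this would become
        # 'u_boot_spl_expanded'.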
186        module_name = etype.replace('-', '_')
187
188        if '@' in module_name:
189            module_name = module_name.split('@')[0]
190        if expanded:
191            module_name += '_expanded'
192        module = modules.get(module_name)
193
194        # Also allow entry-type modules to be brought in from the etype directory.
195
196        # Import the module if we have not already done so.
197        if not module:
198            try:
199                module = importlib.import_module('binman.etype.' + module_name)
200            except ImportError as e:
201                if expanded:
202                    return None
203                return module_name, e
204            modules[module_name] = module
205
206        # Look up the expected class name
207        return getattr(module, 'Entry_%s' % module_name)
208
209    @staticmethod
210    def Lookup(node_path, etype, expanded, missing_etype=False):
211        """Look up the entry class for a node.
212
213        Args:
            node_path (str): Path name of Node object containing information
215                about the entry to create (used for errors)
216            etype (str):   Entry type to use
217            expanded (bool): Use the expanded version of etype
218            missing_etype (bool): True to default to a blob etype if the
219                requested etype is not found
220
221        Returns:
222            The entry class object if found, else None if not found and expanded
223                is True
224
        Raises:
226            ValueError if expanded is False and the class is not found
227        """
228        # Convert something like 'u-boot@0' to 'u_boot' since we are only
229        # interested in the type.
230        cls = Entry.FindEntryClass(etype, expanded)
231        if cls is None:
232            return None
233        elif isinstance(cls, tuple):
234            if missing_etype:
235                cls = Entry.FindEntryClass('blob', False)
236            if isinstance(cls, tuple): # This should not fail
237                module_name, e = cls
238                raise ValueError(
                    "Unknown entry type '%s' in node '%s' (expected etype/%s.py, error '%s')" %
240                    (etype, node_path, module_name, e))
241        return cls
242
243    @staticmethod
244    def Create(section, node, etype=None, expanded=False, missing_etype=False):
245        """Create a new entry for a node.
246
247        Args:
248            section (entry_Section):  Section object containing this node
249            node (Node): Node object containing information about the entry to
250                create
251            etype (str): Entry type to use, or None to work it out (used for
252                tests)
253            expanded (bool): Use the expanded version of etype
254            missing_etype (bool): True to default to a blob etype if the
255                requested etype is not found
256
257        Returns:
258            A new Entry object of the correct type (a subclass of Entry)
259        """
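        # Illustrative sketch (hypothetical node): a node named 'u-boot' with
        # no 'type' property gets etype 'u-boot'; if expansion is enabled and
        # the etype supports it, the 'u-boot-expanded' variant is created
        # instead.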
260        if not etype:
261            etype = fdt_util.GetString(node, 'type', node.name)
262        obj = Entry.Lookup(node.path, etype, expanded, missing_etype)
263        if obj and expanded:
264            # Check whether to use the expanded entry
265            new_etype = etype + '-expanded'
266            can_expand = not fdt_util.GetBool(node, 'no-expanded')
267            if can_expand and obj.UseExpanded(node, etype, new_etype):
268                etype = new_etype
269            else:
270                obj = None
271        if not obj:
272            obj = Entry.Lookup(node.path, etype, False, missing_etype)
273
274        # Call its constructor to get the object we want.
275        return obj(section, etype, node)
276
277    def ReadNode(self):
278        """Read entry information from the node
279
280        This must be called as the first thing after the Entry is created.
281
282        This reads all the fields we recognise from the node, ready for use.
283        """
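        # Illustrative sketch (hypothetical node): an entry described by
        #
        #     u-boot {
        #         offset = <0x100>;
        #         align-size = <0x10>;
        #         pad-before = <4>;
        #     };
        #
        # ends up with .offset = 0x100, .align_size = 0x10 and .pad_before = 4
        # after this method runs.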
284        self.ensure_props()
285        if 'pos' in self._node.props:
286            self.Raise("Please use 'offset' instead of 'pos'")
287        if 'expand-size' in self._node.props:
288            self.Raise("Please use 'extend-size' instead of 'expand-size'")
289        self.offset = fdt_util.GetInt(self._node, 'offset')
290        self.size = fdt_util.GetInt(self._node, 'size')
291        self.min_size = fdt_util.GetInt(self._node, 'min-size', 0)
292        self.orig_offset = fdt_util.GetInt(self._node, 'orig-offset')
293        self.orig_size = fdt_util.GetInt(self._node, 'orig-size')
294        if self.GetImage().copy_to_orig:
295            self.orig_offset = self.offset
296            self.orig_size = self.size
297
298        # These should not be set in input files, but are set in an FDT map,
299        # which is also read by this code.
300        self.image_pos = fdt_util.GetInt(self._node, 'image-pos')
301        self.uncomp_size = fdt_util.GetInt(self._node, 'uncomp-size')
302
303        self.align = fdt_util.GetInt(self._node, 'align')
304        if tools.not_power_of_two(self.align):
305            raise ValueError("Node '%s': Alignment %s must be a power of two" %
306                             (self._node.path, self.align))
307        if self.section and self.align is None:
308            self.align = self.section.align_default
309        self.pad_before = fdt_util.GetInt(self._node, 'pad-before', 0)
310        self.pad_after = fdt_util.GetInt(self._node, 'pad-after', 0)
311        self.align_size = fdt_util.GetInt(self._node, 'align-size')
312        if tools.not_power_of_two(self.align_size):
313            self.Raise("Alignment size %s must be a power of two" %
314                       self.align_size)
315        self.align_end = fdt_util.GetInt(self._node, 'align-end')
316        self.offset_unset = fdt_util.GetBool(self._node, 'offset-unset')
317        self.extend_size = fdt_util.GetBool(self._node, 'extend-size')
318        self.missing_msg = fdt_util.GetString(self._node, 'missing-msg')
319        self.optional = fdt_util.GetBool(self._node, 'optional')
320        self.overlap = fdt_util.GetBool(self._node, 'overlap')
321        if self.overlap:
322            self.required_props += ['offset', 'size']
323        self.assume_size = fdt_util.GetInt(self._node, 'assume-size', 0)
324
325        # This is only supported by blobs and sections at present
326        self.compress = fdt_util.GetString(self._node, 'compress', 'none')
327        self.offset_from_elf = fdt_util.GetPhandleNameOffset(self._node,
328                                                             'offset-from-elf')
329
330        self.preserve = fdt_util.GetBool(self._node, 'preserve')
331        self.no_write_symbols = fdt_util.GetBool(self._node, 'no-write-symbols')
332        self.symbols_base = fdt_util.GetInt(self._node, 'symbols-base')
333
334    def GetDefaultFilename(self):
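        """Get the default filename for this entry, or None if it has none"""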
335        return None
336
337    def GetFdts(self):
338        """Get the device trees used by this entry
339
340        Returns:
341            Empty dict, if this entry is not a .dtb, otherwise:
342            Dict:
343                key: Filename from this entry (without the path)
344                value: Tuple:
345                    Entry object for this dtb
346                    Filename of file containing this dtb
347        """
348        return {}
349
350    def gen_entries(self):
351        """Allow entries to generate other entries
352
353        Some entries generate subnodes automatically, from which sub-entries
354        are then created. This method allows those to be added to the binman
355        definition for the current image. An entry which implements this method
356        should call state.AddSubnode() to add a subnode and can add properties
357        with state.AddString(), etc.
358
359        An example is 'files', which produces a section containing a list of
360        files.
361        """
362        pass
363
364    def AddMissingProperties(self, have_image_pos):
365        """Add new properties to the device tree as needed for this entry
366
367        Args:
368            have_image_pos: True if this entry has an image position. This can
369                be False if its parent section is compressed, since compression
370                groups all entries together into a compressed block of data,
371                obscuring the start of each individual child entry
372        """
373        for prop in ['offset', 'size']:
374            if not prop in self._node.props:
375                state.AddZeroProp(self._node, prop)
376        if have_image_pos and 'image-pos' not in self._node.props:
377            state.AddZeroProp(self._node, 'image-pos')
378        if self.GetImage().allow_repack:
379            if self.orig_offset is not None:
380                state.AddZeroProp(self._node, 'orig-offset', True)
381            if self.orig_size is not None:
382                state.AddZeroProp(self._node, 'orig-size', True)
383
384        if self.compress != 'none':
385            state.AddZeroProp(self._node, 'uncomp-size')
386
387        if self.update_hash:
388            err = state.CheckAddHashProp(self._node)
389            if err:
390                self.Raise(err)
391
392    def SetCalculatedProperties(self):
393        """Set the value of device-tree properties calculated by binman"""
394        state.SetInt(self._node, 'offset', self.offset)
395        state.SetInt(self._node, 'size', self.size)
396        if self.image_pos is not None:
397            state.SetInt(self._node, 'image-pos', self.image_pos)
398        if self.GetImage().allow_repack:
399            if self.orig_offset is not None:
400                state.SetInt(self._node, 'orig-offset', self.orig_offset, True)
401            if self.orig_size is not None:
402                state.SetInt(self._node, 'orig-size', self.orig_size, True)
403        if self.uncomp_size is not None:
404            state.SetInt(self._node, 'uncomp-size', self.uncomp_size)
405
406        if self.update_hash:
407            state.CheckSetHashValue(self._node, self.GetData)
408
409    def ProcessFdt(self, fdt):
410        """Allow entries to adjust the device tree
411
412        Some entries need to adjust the device tree for their purposes. This
413        may involve adding or deleting properties.
414
415        Returns:
416            True if processing is complete
417            False if processing could not be completed due to a dependency.
418                This will cause the entry to be retried after others have been
419                called
420        """
421        return True
422
423    def SetPrefix(self, prefix):
424        """Set the name prefix for a node
425
426        Args:
427            prefix: Prefix to set, or '' to not use a prefix
428        """
429        if prefix:
430            self.name = prefix + self.name
431
432    def SetContents(self, data):
433        """Set the contents of an entry
434
        This sets both the data and contents_size properties
436
437        Args:
438            data: Data to set to the contents (bytes)
439        """
440        self.data = data
441        self.contents_size = len(self.data)
442
443    def ProcessContentsUpdate(self, data):
444        """Update the contents of an entry, after the size is fixed
445
446        This checks that the new data is the same size as the old. If the size
447        has changed, this triggers a re-run of the packing algorithm.
448
        Args:
            data: Data to set to the contents (bytes)

        Returns:
            True if the new data fits within the existing size (padding it if
                necessary), False if the entry must be repacked with a new size

        Raises:
            ValueError if the data has grown but entry expansion is not allowed
        """
455        size_ok = True
456        new_size = len(data)
457        if state.AllowEntryExpansion() and new_size > self.contents_size:
458            # self.data will indicate the new size needed
459            size_ok = False
460        elif state.AllowEntryContraction() and new_size < self.contents_size:
461            size_ok = False
462
463        # If not allowed to change, try to deal with it or give up
464        if size_ok:
465            if new_size > self.contents_size:
466                self.Raise('Cannot update entry size from %d to %d' %
467                        (self.contents_size, new_size))
468
469            # Don't let the data shrink. Pad it if necessary
470            if size_ok and new_size < self.contents_size:
471                data += tools.get_bytes(0, self.contents_size - new_size)
472
473        if not size_ok:
474            tout.debug("Entry '%s' size change from %s to %s" % (
475                self._node.path, to_hex(self.contents_size),
476                to_hex(new_size)))
477        self.SetContents(data)
478        return size_ok
479
480    def ObtainContents(self, skip_entry=None, fake_size=0):
481        """Figure out the contents of an entry.
482
483        For missing blobs (where allow-missing is enabled), the contents are set
484        to b'' and self.missing is set to True.
485
486        Args:
487            skip_entry (Entry): Entry to skip when obtaining section contents
488            fake_size (int): Size of fake file to create if needed
489
490        Returns:
491            True if the contents were found, False if another call is needed
            after the other entries are processed, None if there are no contents
493        """
494        # No contents by default: subclasses can implement this
495        return True
496
497    def ResetForPack(self):
498        """Reset offset/size fields so that packing can be done again"""
499        self.Detail('ResetForPack: offset %s->%s, size %s->%s' %
500                    (to_hex(self.offset), to_hex(self.orig_offset),
501                     to_hex(self.size), to_hex(self.orig_size)))
502        self.pre_reset_size = self.size
503        self.offset = self.orig_offset
504        self.size = self.orig_size
505
506    def Pack(self, offset):
507        """Figure out how to pack the entry into the section
508
509        Most of the time the entries are not fully specified. There may be
510        an alignment but no size. In that case we take the size from the
511        contents of the entry.
512
513        If an entry has no hard-coded offset, it will be placed at @offset.
514
515        Once this function is complete, both the offset and size of the
        entry will be known.
517
518        Args:
            offset: Current section offset pointer
520
521        Returns:
522            New section offset pointer (after this entry)
523        """
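        # Worked example (hypothetical values): with offset=0x14 passed in and
        # align=0x10, pad_before=4, contents_size=0x39, pad_after=3 and no
        # explicit size or align-size/align-end:
        #   self.offset = align(0x14, 0x10)  = 0x20
        #   needed      = 4 + 0x39 + 3       = 0x40
        #   self.size   = 0x40
        #   return value (next offset)       = 0x20 + 0x40 = 0x60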
524        self.Detail('Packing: offset=%s, size=%s, content_size=%x' %
525                    (to_hex(self.offset), to_hex(self.size),
526                     self.contents_size))
527        if self.offset is None:
528            if self.offset_unset:
529                self.Raise('No offset set with offset-unset: should another '
530                           'entry provide this correct offset?')
531            elif self.offset_from_elf:
532                self.offset = self.lookup_offset()
533            else:
534                self.offset = tools.align(offset, self.align)
535        needed = self.pad_before + self.contents_size + self.pad_after
536        needed = max(needed, self.min_size)
537        needed = tools.align(needed, self.align_size)
538        size = self.size
539        if not size:
540            size = needed
541        new_offset = self.offset + size
542        aligned_offset = tools.align(new_offset, self.align_end)
543        if aligned_offset != new_offset:
544            size = aligned_offset - self.offset
545            new_offset = aligned_offset
546
547        if not self.size:
548            self.size = size
549
550        if self.size < needed:
551            self.Raise("Entry contents size is %#x (%d) but entry size is "
552                       "%#x (%d)" % (needed, needed, self.size, self.size))
553        # Check that the alignment is correct. It could be wrong if the
        # offset or size values were provided (i.e. not calculated), but
555        # conflict with the provided alignment values
556        if self.size != tools.align(self.size, self.align_size):
557            self.Raise("Size %#x (%d) does not match align-size %#x (%d)" %
558                  (self.size, self.size, self.align_size, self.align_size))
559        if self.offset != tools.align(self.offset, self.align):
560            self.Raise("Offset %#x (%d) does not match align %#x (%d)" %
561                  (self.offset, self.offset, self.align, self.align))
562        self.Detail('   - packed: offset=%#x, size=%#x, content_size=%#x, next_offset=%x' %
563                    (self.offset, self.size, self.contents_size, new_offset))
564
565        return new_offset
566
567    def Raise(self, msg):
568        """Convenience function to raise an error referencing a node"""
569        raise ValueError("Node '%s': %s" % (self._node.path, msg))
570
571    def Info(self, msg):
572        """Convenience function to log info referencing a node"""
573        tag = "Info '%s'" % self._node.path
574        tout.detail('%30s: %s' % (tag, msg))
575
576    def Detail(self, msg):
577        """Convenience function to log detail referencing a node"""
578        tag = "Node '%s'" % self._node.path
579        tout.detail('%30s: %s' % (tag, msg))
580
581    def GetEntryArgsOrProps(self, props, required=False):
582        """Return the values of a set of properties
583
584        Looks up the named entryargs and returns the value for each. If any
585        required ones are missing, the error is reported to the user.
586
587        Args:
588            props (list of EntryArg): List of entry arguments to look up
589            required (bool): True if these entry arguments are required
590
591        Returns:
592            list of values: one for each item in props, the type is determined
593                by the EntryArg's 'datatype' property (str or int)
594
595        Raises:
596            ValueError if a property is not found
597        """
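        # A minimal sketch (hypothetical argument): with
        #     props = [EntryArg('cros-ec-rw-path', str)]
        # this checks for a 'cros_ec_rw_path' attribute on the entry, then the
        # '-a cros-ec-rw-path=...' entry argument, then the node property,
        # returning [None] if none of them is set (and required is False).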
598        values = []
599        missing = []
600        for prop in props:
601            python_prop = prop.name.replace('-', '_')
602            if hasattr(self, python_prop):
603                value = getattr(self, python_prop)
604            else:
605                value = None
606            if value is None:
607                value = self.GetArg(prop.name, prop.datatype)
608            if value is None and required:
609                missing.append(prop.name)
610            values.append(value)
611        if missing:
612            self.GetImage().MissingArgs(self, missing)
613        return values
614
615    def GetPath(self):
616        """Get the path of a node
617
618        Returns:
619            Full path of the node for this entry
620        """
621        return self._node.path
622
623    def GetData(self, required=True):
624        """Get the contents of an entry
625
626        Args:
627            required: True if the data must be present, False if it is OK to
628                return None
629
630        Returns:
631            bytes content of the entry, excluding any padding. If the entry is
632                compressed, the compressed data is returned. If the entry data
633                is not yet available, False can be returned. If the entry data
634                is null, then None is returned.
635        """
636        self.Detail('GetData: size %s' % to_hex_size(self.data))
637        return self.data
638
639    def GetPaddedData(self, data=None):
640        """Get the data for an entry including any padding
641
642        Gets the entry data and uses its section's pad-byte value to add padding
643        before and after as defined by the pad-before and pad-after properties.
644
645        This does not consider alignment.
646
647        Returns:
648            Contents of the entry along with any pad bytes before and
649            after it (bytes)
650        """
651        if data is None:
652            data = self.GetData()
653        return self.section.GetPaddedDataForEntry(self, data)
654
655    def GetOffsets(self):
656        """Get the offsets for siblings
657
658        Some entry types can contain information about the position or size of
659        other entries. An example of this is the Intel Flash Descriptor, which
660        knows where the Intel Management Engine section should go.
661
662        If this entry knows about the position of other entries, it can specify
663        this by returning values here
664
665        Returns:
666            Dict:
667                key: Entry type
668                value: List containing position and size of the given entry
669                    type. Either can be None if not known
670        """
671        return {}
672
673    def SetOffsetSize(self, offset, size):
674        """Set the offset and/or size of an entry
675
676        Args:
677            offset: New offset, or None to leave alone
678            size: New size, or None to leave alone
679        """
680        if offset is not None:
681            self.offset = offset
682        if size is not None:
683            self.size = size
684
685    def SetImagePos(self, image_pos):
686        """Set the position in the image
687
688        Args:
689            image_pos: Position of this entry in the image
690        """
691        self.image_pos = image_pos + self.offset
692
693    def ProcessContents(self):
694        """Do any post-packing updates of entry contents
695
696        This function should call ProcessContentsUpdate() to update the entry
697        contents, if necessary, returning its return value here.
698
702        Returns:
703            True if the new data size is OK, False if expansion is needed
704
705        Raises:
706            ValueError if the new data size is not the same as the old and
707                state.AllowEntryExpansion() is False
708        """
709        return True
710
711    def WriteSymbols(self, section):
712        """Write symbol values into binary files for access at run time
713
714        As a special case, if symbols_base is not specified and this is an
715        end-at-4gb image, a symbols_base of 0 is used
716
717        Args:
718          section: Section containing the entry
719        """
720        if self.auto_write_symbols and not self.no_write_symbols:
721            # Check if we are writing symbols into an ELF file
722            is_elf = self.GetDefaultFilename() == self.elf_fname
723
724            symbols_base = self.symbols_base
725            if symbols_base is None and self.GetImage()._end_at_4gb:
726                symbols_base = 0
727
728            elf.LookupAndWriteSymbols(self.elf_fname, self, section.GetImage(),
729                                      is_elf, self.elf_base_sym, symbols_base)
730
731    def CheckEntries(self):
732        """Check that the entry offsets are correct
733
734        This is used for entries which have extra offset requirements (other
735        than having to be fully inside their section). Sub-classes can implement
736        this function and raise if there is a problem.
737        """
738        pass
739
740    @staticmethod
741    def GetStr(value):
742        if value is None:
743            return '<none>  '
744        return '%08x' % value
745
746    @staticmethod
747    def WriteMapLine(fd, indent, name, offset, size, image_pos):
748        print('%s  %s%s  %s  %s' % (Entry.GetStr(image_pos), ' ' * indent,
749                                    Entry.GetStr(offset), Entry.GetStr(size),
750                                    name), file=fd)
751
752    def WriteMap(self, fd, indent):
753        """Write a map of the entry to a .map file
754
755        Args:
756            fd: File to write the map to
            indent: Current indent level of map (0=none, 1=one level, etc.)
758        """
759        self.WriteMapLine(fd, indent, self.name, self.offset, self.size,
760                          self.image_pos)
761
762    # pylint: disable=assignment-from-none
    def GetEntries(self):
764        """Return a list of entries contained by this entry
765
766        Returns:
767            List of entries, or None if none. A normal entry has no entries
768                within it so will return None
769        """
770        return None
771
772    def FindEntryByNode(self, find_node):
773        """Find a node in an entry, searching all subentries
774
775        This does a recursive search.
776
777        Args:
778            find_node (fdt.Node): Node to find
779
780        Returns:
781            Entry: entry, if found, else None
782        """
783        entries = self.GetEntries()
784        if entries:
785            for entry in entries.values():
786                if entry._node == find_node:
787                    return entry
788                found = entry.FindEntryByNode(find_node)
789                if found:
790                    return found
791
792        return None
793
794    def GetArg(self, name, datatype=str):
795        """Get the value of an entry argument or device-tree-node property
796
797        Some node properties can be provided as arguments to binman. First check
798        the entry arguments, and fall back to the device tree if not found
799
800        Args:
801            name: Argument name
802            datatype: Data type (str or int)
803
804        Returns:
805            Value of argument as a string or int, or None if no value
806
807        Raises:
            ValueError if the argument cannot be converted to an int
809        """
810        value = state.GetEntryArg(name)
811        if value is not None:
812            if datatype == int:
813                try:
814                    value = int(value)
815                except ValueError:
816                    self.Raise("Cannot convert entry arg '%s' (value '%s') to integer" %
817                               (name, value))
818            elif datatype == str:
819                pass
820            else:
821                raise ValueError("GetArg() internal error: Unknown data type '%s'" %
822                                 datatype)
823        else:
824            value = fdt_util.GetDatatype(self._node, name, datatype)
825        return value
826
827    @staticmethod
828    def WriteDocs(modules, test_missing=None):
829        """Write out documentation about the various entry types to stdout
830
831        Args:
832            modules: List of modules to include
833            test_missing: Used for testing. This is a module to report
834                as missing
835        """
836        print('''Binman Entry Documentation
837==========================
838
839This file describes the entry types supported by binman. These entry types can
840be placed in an image one by one to build up a final firmware image. It is
841fairly easy to create new entry types. Just add a new file to the 'etype'
842directory. You can use the existing entries as examples.
843
844Note that some entries are subclasses of others, using and extending their
845features to produce new behaviours.
846
847
848''')
849        modules = sorted(modules)
850
851        # Don't show the test entry
852        if '_testing' in modules:
853            modules.remove('_testing')
854        missing = []
855        for name in modules:
856            module = Entry.Lookup('WriteDocs', name, False)
857            docs = getattr(module, '__doc__')
858            if test_missing == name:
859                docs = None
860            if docs:
861                lines = docs.splitlines()
862                first_line = lines[0]
863                rest = [line[4:] for line in lines[1:]]
864                hdr = 'Entry: %s: %s' % (name.replace('_', '-'), first_line)
865
866                # Create a reference for use by rST docs
867                ref_name = f'etype_{module.__name__[6:]}'.lower()
868                print('.. _%s:' % ref_name)
869                print()
870                print(hdr)
871                print('-' * len(hdr))
872                print('\n'.join(rest))
873                print()
874                print()
875            else:
876                missing.append(name)
877
878        if missing:
879            raise ValueError('Documentation is missing for modules: %s' %
880                             ', '.join(missing))
881
882    def GetUniqueName(self):
883        """Get a unique name for a node
884
885        Returns:
886            String containing a unique name for a node, consisting of the name
887            of all ancestors (starting from within the 'binman' node) separated
            by a dot ('.'). This can be useful for generating unique filenames
889            in the output directory.
890        """
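        # For example, an entry at /binman/image/fit/u-boot would produce
        # 'image.fit.u-boot' (the 'binman' ancestor itself is not included).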
891        name = self.name
892        node = self._node
893        while node.parent:
894            node = node.parent
895            if node.name in ('binman', '/'):
896                break
897            name = '%s.%s' % (node.name, name)
898        return name
899
900    def extend_to_limit(self, limit):
901        """Extend an entry so that it ends at the given offset limit"""
902        if self.offset + self.size < limit:
903            self.size = limit - self.offset
904            # Request the contents again, since changing the size requires that
905            # the data grows. This should not fail, but check it to be sure.
906            if not self.ObtainContents():
907                self.Raise('Cannot obtain contents when expanding entry')
908
909    def HasSibling(self, name):
910        """Check if there is a sibling of a given name
911
912        Returns:
            True if there is an entry with this name in the same section,
914                else False
915        """
916        return name in self.section.GetEntries()
917
918    def GetSiblingImagePos(self, name):
919        """Return the image position of the given sibling
920
921        Returns:
922            Image position of sibling, or None if the sibling has no position,
923                or False if there is no such sibling
924        """
925        if not self.HasSibling(name):
926            return False
927        return self.section.GetEntries()[name].image_pos
928
929    @staticmethod
930    def AddEntryInfo(entries, indent, name, etype, size, image_pos,
931                     uncomp_size, offset, entry):
932        """Add a new entry to the entries list
933
934        Args:
935            entries: List (of EntryInfo objects) to add to
936            indent: Current indent level to add to list
937            name: Entry name (string)
938            etype: Entry type (string)
939            size: Entry size in bytes (int)
940            image_pos: Position within image in bytes (int)
941            uncomp_size: Uncompressed size if the entry uses compression, else
942                None
943            offset: Entry offset within parent in bytes (int)
944            entry: Entry object
945        """
946        entries.append(EntryInfo(indent, name, etype, size, image_pos,
947                                 uncomp_size, offset, entry))
948
949    def ListEntries(self, entries, indent):
950        """Add files in this entry to the list of entries
951
952        This can be overridden by subclasses which need different behaviour.
953
954        Args:
955            entries: List (of EntryInfo objects) to add to
956            indent: Current indent level to add to list
957        """
958        self.AddEntryInfo(entries, indent, self.name, self.etype, self.size,
959                          self.image_pos, self.uncomp_size, self.offset, self)
960
961    def ReadData(self, decomp=True, alt_format=None):
962        """Read the data for an entry from the image
963
964        This is used when the image has been read in and we want to extract the
965        data for a particular entry from that image.
966
967        Args:
968            decomp: True to decompress any compressed data before returning it;
                False to return the raw, uncompressed data
            alt_format: Alternative format to read in, or None
970
971        Returns:
972            Entry data (bytes)
973        """
974        # Use True here so that we get an uncompressed section to work from,
975        # although compressed sections are currently not supported
976        tout.debug("ReadChildData section '%s', entry '%s'" %
977                   (self.section.GetPath(), self.GetPath()))
978        data = self.section.ReadChildData(self, decomp, alt_format)
979        return data
980
981    def ReadChildData(self, child, decomp=True, alt_format=None):
982        """Read the data for a particular child entry
983
984        This reads data from the parent and extracts the piece that relates to
985        the given child.
986
987        Args:
988            child (Entry): Child entry to read data for (must be valid)
989            decomp (bool): True to decompress any compressed data before
990                returning it; False to return the raw, uncompressed data
991            alt_format (str): Alternative format to read in, or None
992
993        Returns:
994            Data for the child (bytes)
995        """
996        pass
997
998    def LoadData(self, decomp=True):
999        data = self.ReadData(decomp)
1000        self.contents_size = len(data)
1001        self.ProcessContentsUpdate(data)
1002        self.Detail('Loaded data size %x' % len(data))
1003
1004    def GetAltFormat(self, data, alt_format):
        """Read the data for an entry in an alternative format

        Supported formats are listed in the documentation for each entry. An
        example is the fdtmap entry type.
1009
1010        Args:
1011            data (bytes): Data to convert (this should have been produced by the
1012                entry)
1013            alt_format (str): Format to use
1014
1015        """
1016        pass
1017
1018    def GetImage(self):
1019        """Get the image containing this entry
1020
1021        Returns:
1022            Image object containing this entry
1023        """
1024        return self.section.GetImage()
1025
1026    def WriteData(self, data, decomp=True):
1027        """Write the data to an entry in the image
1028
1029        This is used when the image has been read in and we want to replace the
1030        data for a particular entry in that image.
1031
1032        The image must be re-packed and written out afterwards.
1033
1034        Args:
1035            data: Data to replace it with
1036            decomp: True to compress the data if needed, False if data is
1037                already compressed so should be used as is
1038
1039        Returns:
1040            True if the data did not result in a resize of this entry, False if
1041                 the entry must be resized
1042        """
1043        if self.size is not None:
1044            self.contents_size = self.size
1045        else:
1046            self.contents_size = self.pre_reset_size
1047        ok = self.ProcessContentsUpdate(data)
1048        self.build_done = False
1049        self.Detail('WriteData: size=%x, ok=%s' % (len(data), ok))
1050        section_ok = self.section.WriteChildData(self)
1051        return ok and section_ok
1052
1053    def WriteChildData(self, child):
1054        """Handle writing the data in a child entry
1055
1056        This should be called on the child's parent section after the child's
1057        data has been updated. It should update any data structures needed to
1058        validate that the update is successful.
1059
1060        This base-class implementation does nothing, since the base Entry object
1061        does not have any children.
1062
1063        Args:
1064            child: Child Entry that was written
1065
1066        Returns:
1067            True if the section could be updated successfully, False if the
1068                data is such that the section could not update
1069        """
1070        self.build_done = False
1071        entry = self.section
1072
1073        # Now we must rebuild all sections above this one
1074        while entry and entry != entry.section:
            entry.build_done = False
1076            entry = entry.section
1077
1078        return True
1079
1080    def GetSiblingOrder(self):
        """Get the relative order of an entry among its siblings
1082
1083        Returns:
1084            'start' if this entry is first among siblings, 'end' if last,
                otherwise 'middle'
1086        """
1087        entries = list(self.section.GetEntries().values())
1088        if entries:
1089            if self == entries[0]:
1090                return 'start'
1091            elif self == entries[-1]:
1092                return 'end'
1093        return 'middle'
1094
1095    def SetAllowMissing(self, allow_missing):
1096        """Set whether a section allows missing external blobs
1097
1098        Args:
1099            allow_missing: True if allowed, False if not allowed
1100        """
1101        # This is meaningless for anything other than sections
1102        pass
1103
1104    def SetAllowFakeBlob(self, allow_fake):
        """Set whether a section allows a fake blob to be created
1106
1107        Args:
1108            allow_fake: True if allowed, False if not allowed
1109        """
1110        self.allow_fake = allow_fake
1111
1112    def CheckMissing(self, missing_list):
1113        """Check if the entry has missing external blobs
1114
1115        If there are missing (non-optional) blobs, the entries are added to the
1116        list
1117
1118        Args:
1119            missing_list: List of Entry objects to be added to
1120        """
1121        if self.missing and not self.optional:
1122            missing_list.append(self)
1123
1124    def check_fake_fname(self, fname: str, size: int = 0) -> str:
1125        """If the file is missing and the entry allows fake blobs, fake it
1126
1127        Sets self.faked to True if faked
1128
1129        Args:
1130            fname (str): Filename to check
1131            size (int): Size of fake file to create
1132
1133        Returns:
1134            fname (str): Filename of faked file
1135        """
1136        if self.allow_fake and not pathlib.Path(fname).is_file():
1137            if not self.fake_fname:
1138                outfname = os.path.join(self.fake_dir, os.path.basename(fname))
1139                with open(outfname, "wb") as out:
1140                    out.truncate(size)
1141                tout.info(f"Entry '{self._node.path}': Faked blob '{outfname}'")
1142                self.fake_fname = outfname
1143            self.faked = True
1144            return self.fake_fname
1145        return fname
1146
1147    def CheckFakedBlobs(self, faked_blobs_list):
1148        """Check if any entries in this section have faked external blobs
1149
1150        If there are faked blobs, the entries are added to the list
1151
1152        Args:
1153            faked_blobs_list: List of Entry objects to be added to
1154        """
1155        # This is meaningless for anything other than blobs
1156        pass
1157
1158    def CheckOptional(self, optional_list):
1159        """Check if the entry has missing but optional external blobs
1160
1161        If there are missing (optional) blobs, the entries are added to the list
1162
1163        Args:
1164            optional_list (list): List of Entry objects to be added to
1165        """
1166        if self.missing and self.optional:
1167            optional_list.append(self)
1168
1169    def GetAllowMissing(self):
1170        """Get whether a section allows missing external blobs
1171
1172        Returns:
1173            True if allowed, False if not allowed
1174        """
1175        return self.allow_missing
1176
1177    def record_missing_bintool(self, bintool):
1178        """Record a missing bintool that was needed to produce this entry
1179
1180        Args:
1181            bintool (Bintool): Bintool that was missing
1182        """
1183        if bintool not in self.missing_bintools:
1184            self.missing_bintools.append(bintool)
1185
1186    def check_missing_bintools(self, missing_list):
1187        """Check if any entries in this section have missing bintools
1188
1189        If there are missing bintools, these are added to the list
1190
1191        Args:
1192            missing_list: List of Bintool objects to be added to
1193        """
1194        for bintool in self.missing_bintools:
1195            if bintool not in missing_list:
1196                missing_list.append(bintool)
1197
1198
1199    def GetHelpTags(self):
        """Get the tags used for missing-blob help
1201
1202        Returns:
1203            list of possible tags, most desirable first
1204        """
1205        return list(filter(None, [self.missing_msg, self.name, self.etype]))
1206
1207    def CompressData(self, indata):
1208        """Compress data according to the entry's compression method
1209
1210        Args:
1211            indata: Data to compress
1212
1213        Returns:
1214            Compressed data
1215        """
1216        self.uncomp_data = indata
1217        if self.compress != 'none':
1218            self.uncomp_size = len(indata)
1219            if self.comp_bintool.is_present():
1220                data = self.comp_bintool.compress(indata)
1221                uniq = self.GetUniqueName()
1222                fname = tools.get_output_filename(f'comp.{uniq}')
1223                tools.write_file(fname, data)
1224            else:
1225                self.record_missing_bintool(self.comp_bintool)
1226                data = tools.get_bytes(0, 1024)
1227        else:
1228            data = indata
1229        return data
1230
1231    def DecompressData(self, indata):
1232        """Decompress data according to the entry's compression method
1233
1234        Args:
1235            indata: Data to decompress
1236
1237        Returns:
1238            Decompressed data
1239        """
1240        if self.compress != 'none':
1241            if self.comp_bintool.is_present():
1242                data = self.comp_bintool.decompress(indata)
1243                self.uncomp_size = len(data)
1244            else:
1245                self.record_missing_bintool(self.comp_bintool)
1246                data = tools.get_bytes(0, 1024)
1247        else:
1248            data = indata
1249        self.uncomp_data = data
1250        return data
1251
1252    @classmethod
1253    def UseExpanded(cls, node, etype, new_etype):
1254        """Check whether to use an expanded entry type
1255
1256        This is called by Entry.Create() when it finds an expanded version of
1257        an entry type (e.g. 'u-boot-expanded'). If this method returns True then
1258        it will be used (e.g. in place of 'u-boot'). If it returns False, it is
1259        ignored.
1260
1261        Args:
1262            node:     Node object containing information about the entry to
1263                      create
1264            etype:    Original entry type being used
1265            new_etype: New entry type proposed
1266
1267        Returns:
1268            True to use this entry type, False to use the original one
1269        """
1270        tout.info("Node '%s': etype '%s': %s selected" %
1271                  (node.path, etype, new_etype))
1272        return True
1273
1274    def CheckAltFormats(self, alt_formats):
1275        """Add any alternative formats supported by this entry type
1276
1277        Args:
1278            alt_formats (dict): Dict to add alt_formats to:
1279                key: Name of alt format
1280                value: Help text
1281        """
1282        pass
1283
1284    def AddBintools(self, btools):
1285        """Add the bintools used by this entry type
1286
1287        Args:
            btools (dict of Bintool): Dict of bintools, keyed by name, to add
                this entry's bintools to

        Raises:
1291            ValueError if compression algorithm is not supported
1292        """
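        # For example, compress = 'lzma' pulls in the 'lzma_alone' bintool and
        # 'lzo' pulls in 'lzop'; other supported algorithms use their own name.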
1293        algo = self.compress
1294        if algo != 'none':
1295            algos = ['bzip2', 'gzip', 'lz4', 'lzma', 'lzo', 'xz', 'zstd']
1296            if algo not in algos:
1297                raise ValueError("Unknown algorithm '%s'" % algo)
1298            names = {'lzma': 'lzma_alone', 'lzo': 'lzop'}
1299            name = names.get(self.compress, self.compress)
1300            self.comp_bintool = self.AddBintool(btools, name)
1301
1302    @classmethod
    def AddBintool(cls, tools, name):
        """Add a new bintool to the tools used by this etype

        Args:
            tools (dict of Bintool): Dict of bintools, keyed by name, to add
                the new tool to
            name: Name of the tool

        Returns:
            Bintool object created
        """
1309        btool = bintool.Bintool.create(name)
1310        tools[name] = btool
1311        return btool
1312
1313    def SetUpdateHash(self, update_hash):
1314        """Set whether this entry's "hash" subnode should be updated
1315
1316        Args:
1317            update_hash: True if hash should be updated, False if not
1318        """
1319        self.update_hash = update_hash
1320
1321    def collect_contents_to_file(self, entries, prefix, fake_size=0):
1322        """Put the contents of a list of entries into a file
1323
1324        Args:
1325            entries (list of Entry): Entries to collect
1326            prefix (str): Filename prefix of file to write to
1327            fake_size (int): Size of fake file to create if needed
1328
1329        If any entry does not have contents yet, this function returns False
1330        for the data.
1331
1332        Returns:
1333            Tuple:
1334                bytes: Concatenated data from all the entries (or None)
1335                str: Filename of file written (or None if no data)
1336                str: Unique portion of filename (or None if no data)
1337        """
1338        data = b''
1339        for entry in entries:
1340            data += entry.GetData()
1341        uniq = self.GetUniqueName()
1342        fname = tools.get_output_filename(f'{prefix}.{uniq}')
1343        tools.write_file(fname, data)
1344        return data, fname, uniq
1345
1346    @classmethod
1347    def create_fake_dir(cls):
1348        """Create the directory for fake files"""
1349        cls.fake_dir = tools.get_output_filename('binman-fake')
1350        if not os.path.exists(cls.fake_dir):
1351            os.mkdir(cls.fake_dir)
1352        tout.notice(f"Fake-blob dir is '{cls.fake_dir}'")
1353
1354    def drop_absent_optional(self) -> None:
        """Drop any sub-entries which are absent and optional (no-op for a base Entry)"""
1356        pass
1357
1358    def ensure_props(self):
        """Raise an exception if any required properties are missing

        The properties to check are listed in self.required_props

        Raises:
            ValueError: A required property is missing
        """
1367        not_present = []
1368        for prop in self.required_props:
1369            if not prop in self._node.props:
1370                not_present.append(prop)
1371        if not_present:
1372            self.Raise(f"'{self.etype}' entry is missing properties: {' '.join(not_present)}")
1373
1374    def mark_absent(self, msg):
1375        tout.info("Entry '%s' marked absent: %s" % (self._node.path, msg))
1376        self.absent = True
1377
1378    def read_elf_segments(self):
1379        """Read segments from an entry that can generate an ELF file
1380
1381        Returns:
1382            tuple:
1383                list of segments, each:
1384                    int: Segment number (0 = first)
1385                    int: Start address of segment in memory
1386                    bytes: Contents of segment
1387                int: entry address of ELF file
1388        """
1389        return None
1390
1391    def lookup_offset(self):
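        """Look up the offset provided by the 'offset-from-elf' property

        The property references another entry's ELF file; the named symbol is
        looked up in that file and the stored offset is added to its value.

        Returns:
            int: Offset to use for this entry
        """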
1392        node, sym_name, offset = self.offset_from_elf
1393        entry = self.section.FindEntryByNode(node)
1394        if not entry:
1395            self.Raise("Cannot find entry for node '%s'" % node.name)
1396        if not entry.elf_fname:
1397            entry.Raise("Need elf-fname property '%s'" % node.name)
1398        val = elf.GetSymbolOffset(entry.elf_fname, sym_name,
1399                                  entry.elf_base_sym)
1400        return val + offset
1401
1402    def mark_build_done(self):
1403        """Mark an entry as already built"""
1404        self.build_done = True
1405        entries = self.GetEntries()
1406        if entries:
1407            for entry in entries.values():
1408                entry.mark_build_done()
1409
1410    def UpdateSignatures(self, privatekey_fname, algo, input_fname):
1411        self.Raise('Updating signatures is not supported with this entry type')
1412
1413    def FdtContents(self, fdt_etype):
1414        """Get the contents of an FDT for a particular phase
1415
1416        Args:
1417            fdt_etype (str): Filename of the phase of the FDT to return, e.g.
1418                'u-boot-tpl-dtb'
1419
1420        Returns:
1421            tuple:
1422                fname (str): Filename of .dtb
1423                bytes: Contents of FDT (possibly run through fdtgrep)
1424        """
1425        return self.section.FdtContents(fdt_etype)
1426