# SPDX-License-Identifier: GPL-2.0+
# Copyright (c) 2016 Google, Inc
#
# Base class for all entries
#

from collections import namedtuple
import importlib
import os
import pathlib
import sys
import time

from binman import bintool
from binman import elf
from dtoc import fdt_util
from u_boot_pylib import tools
from u_boot_pylib.tools import to_hex, to_hex_size
from u_boot_pylib import tout

modules = {}

# This is imported if needed
state = None

# An argument which can be passed to entries on the command line, in lieu of
# device-tree properties.
EntryArg = namedtuple('EntryArg', ['name', 'datatype'])
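#
# For example, an etype that accepts a value either as an entry argument or as
# a device-tree property might declare (a hedged sketch; the name shown is
# illustrative):
#
#     EntryArg('atf-bl31-path', str)
#
# and read it back with Entry.GetEntryArgsOrProps() or Entry.GetArg().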

# Information about an entry for use when displaying summaries
EntryInfo = namedtuple('EntryInfo', ['indent', 'name', 'etype', 'size',
                                     'image_pos', 'uncomp_size', 'offset',
                                     'entry'])

class Entry(object):
    """An Entry in the section

    An entry corresponds to a single node in the device-tree description
    of the section. Each entry ends up being a part of the final section.
    Entries can be placed either right next to each other, or with padding
    between them. The type of the entry determines the data that is in it.

    This class is not used by itself. All entry objects are subclasses of
    Entry.

    Attributes:
        section: Section object containing this entry
        node: The node that created this entry
        offset: Offset of entry within the section, None if not known yet (in
            which case it will be calculated by Pack())
        size: Entry size in bytes, None if not known
        min_size: Minimum entry size in bytes
        pre_reset_size: Size as it was before ResetForPack(). This allows us to
            keep track of the size we started with and detect size changes
        uncomp_size: Size of uncompressed data in bytes, if the entry is
            compressed, else None
        contents_size: Size of contents in bytes, 0 by default
        align: Entry start offset alignment relative to the start of the
            containing section, or None
        align_size: Entry size alignment, or None
        align_end: Entry end offset alignment relative to the start of the
            containing section, or None
        pad_before: Number of pad bytes before the contents when it is placed
            in the containing section, 0 if none. The pad bytes become part of
            the entry.
        pad_after: Number of pad bytes after the contents when it is placed in
            the containing section, 0 if none. The pad bytes become part of
            the entry.
        data: Contents of entry (string of bytes). This does not include
            padding created by pad_before or pad_after. If the entry is
            compressed, this contains the compressed data.
        uncomp_data: Original uncompressed data, if this entry is compressed,
            else None
        compress: Compression algorithm used (e.g. 'lz4'), 'none' if none
        orig_offset: Original offset value read from node
        orig_size: Original size value read from node
        missing: True if this entry is missing its contents. Note that if it is
            optional, this entry will not appear in the list generated by
            entry.CheckMissing() since it is considered OK for it to be missing.
        allow_missing: Allow children of this entry to be missing (used by
            subclasses such as Entry_section)
        allow_fake: Allow creating a dummy fake file if the blob file is not
            available. This is mainly used for testing.
        external: True if this entry contains an external binary blob
        bintools: Bintools used by this entry (only populated for Image)
        missing_bintools: List of missing bintools for this entry
        update_hash: True if this entry's "hash" subnode should be
            updated with a hash of the entry contents
        comp_bintool: Bintool used to compress and decompress data
        fake_fname: Fake filename, if one was created, else None
        required_props (list of str): Properties which must be present. This can
            be added to by subclasses
        elf_fname (str): Filename of the ELF file, if this entry holds an ELF
            file, or is a binary file produced from an ELF file
        auto_write_symbols (bool): True to write ELF symbols into this entry's
            contents
        absent (bool): True if this entry is absent. This can be controlled by
            the entry itself, allowing it to vanish in certain circumstances.
            An absent entry is removed during processing so that it does not
            appear in the map
        optional (bool): True if this entry contains an optional external blob
        overlap (bool): True if this entry overlaps with others
        preserve (bool): True if this entry should be preserved when updating
            firmware. This means that it will not be changed by the update.
            This is just a signal: enforcement of this is up to the updater.
            This flag does not automatically propagate down to child entries.
        build_done (bool): Indicates that the entry data has been built and does
            not need to be done again. This is only used with 'binman replace',
            to stop sections from being rebuilt if their entries have not been
            replaced
    """
    fake_dir = None

    def __init__(self, section, etype, node, name_prefix='',
                 auto_write_symbols=False):
        # Put this here to allow entry-docs and help to work without libfdt
        global state
        from binman import state

        self.section = section
        self.etype = etype
        self._node = node
        self.name = node and (name_prefix + node.name) or 'none'
        self.offset = None
        self.size = None
        self.min_size = 0
        self.pre_reset_size = None
        self.uncomp_size = None
        self.data = None
        self.uncomp_data = None
        self.contents_size = 0
        self.align = None
        self.align_size = None
        self.align_end = None
        self.pad_before = 0
        self.pad_after = 0
        self.offset_unset = False
        self.image_pos = None
        self.extend_size = False
        self.compress = 'none'
        self.missing = False
        self.faked = False
        self.external = False
        self.allow_missing = False
        self.allow_fake = False
        self.bintools = {}
        self.missing_bintools = []
        self.update_hash = True
        self.fake_fname = None
        self.required_props = []
        self.comp_bintool = None
        self.elf_fname = None
        self.auto_write_symbols = auto_write_symbols
        self.absent = False
        self.optional = False
        self.overlap = False
        self.elf_base_sym = None
        self.offset_from_elf = None
        self.preserve = False
        self.build_done = False

    @staticmethod
    def FindEntryClass(etype, expanded):
        """Look up the entry class for a node.

        Args:
            etype:    Entry type to use
            expanded: Use the expanded version of etype

        Returns:
            The entry class object if found, else None if not found and expanded
                is True, else a tuple:
                    module name that could not be found
                    exception received
        """
        # Convert something like 'u-boot@0' to 'u_boot' since we are only
        # interested in the type.
        module_name = etype.replace('-', '_')

        if '@' in module_name:
            module_name = module_name.split('@')[0]
        if expanded:
            module_name += '_expanded'
        module = modules.get(module_name)

        # Also allow entry-type modules to be brought in from the etype directory.

        # Import the module if we have not already done so.
        if not module:
            try:
                module = importlib.import_module('binman.etype.' + module_name)
            except ImportError as e:
                if expanded:
                    return None
                return module_name, e
            modules[module_name] = module

        # Look up the expected class name
        return getattr(module, 'Entry_%s' % module_name)

    @staticmethod
    def Lookup(node_path, etype, expanded, missing_etype=False):
        """Look up the entry class for a node.

        Args:
            node_path (str): Path name of Node object containing information
                about the entry to create (used for errors)
            etype (str):   Entry type to use
            expanded (bool): Use the expanded version of etype
            missing_etype (bool): True to default to a blob etype if the
                requested etype is not found

        Returns:
            The entry class object if found, else None if not found and expanded
                is True

        Raises:
            ValueError if expanded is False and the class is not found
        """
        # Convert something like 'u-boot@0' to 'u_boot' since we are only
        # interested in the type.
        cls = Entry.FindEntryClass(etype, expanded)
        if cls is None:
            return None
        elif isinstance(cls, tuple):
            if missing_etype:
                cls = Entry.FindEntryClass('blob', False)
            if isinstance(cls, tuple): # This should not fail
                module_name, e = cls
                raise ValueError(
                    "Unknown entry type '%s' in node '%s' (expected etype/%s.py, error '%s')" %
                    (etype, node_path, module_name, e))
        return cls

    @staticmethod
    def Create(section, node, etype=None, expanded=False, missing_etype=False):
        """Create a new entry for a node.

        Args:
            section (entry_Section): Section object containing this node
            node (Node): Node object containing information about the entry to
                create
            etype (str): Entry type to use, or None to work it out (used for
                tests)
            expanded (bool): Use the expanded version of etype
            missing_etype (bool): True to default to a blob etype if the
                requested etype is not found

        Returns:
            A new Entry object of the correct type (a subclass of Entry)
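
        Example:
            A minimal sketch of typical use; the section and node objects are
            assumed to come from the image description being processed::

                entry = Entry.Create(section, node)
                entry.ReadNode()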
        """
        if not etype:
            etype = fdt_util.GetString(node, 'type', node.name)
        obj = Entry.Lookup(node.path, etype, expanded, missing_etype)
        if obj and expanded:
            # Check whether to use the expanded entry
            new_etype = etype + '-expanded'
            can_expand = not fdt_util.GetBool(node, 'no-expanded')
            if can_expand and obj.UseExpanded(node, etype, new_etype):
                etype = new_etype
            else:
                obj = None
        if not obj:
            obj = Entry.Lookup(node.path, etype, False, missing_etype)

        # Call its constructor to get the object we want.
        return obj(section, etype, node)

    def ReadNode(self):
        """Read entry information from the node

        This must be called as the first thing after the Entry is created.

        This reads all the fields we recognise from the node, ready for use.
        """
        self.ensure_props()
        if 'pos' in self._node.props:
            self.Raise("Please use 'offset' instead of 'pos'")
        if 'expand-size' in self._node.props:
            self.Raise("Please use 'extend-size' instead of 'expand-size'")
        self.offset = fdt_util.GetInt(self._node, 'offset')
        self.size = fdt_util.GetInt(self._node, 'size')
        self.min_size = fdt_util.GetInt(self._node, 'min-size', 0)
        self.orig_offset = fdt_util.GetInt(self._node, 'orig-offset')
        self.orig_size = fdt_util.GetInt(self._node, 'orig-size')
        if self.GetImage().copy_to_orig:
            self.orig_offset = self.offset
            self.orig_size = self.size

        # These should not be set in input files, but are set in an FDT map,
        # which is also read by this code.
        self.image_pos = fdt_util.GetInt(self._node, 'image-pos')
        self.uncomp_size = fdt_util.GetInt(self._node, 'uncomp-size')

        self.align = fdt_util.GetInt(self._node, 'align')
        if tools.not_power_of_two(self.align):
            raise ValueError("Node '%s': Alignment %s must be a power of two" %
                             (self._node.path, self.align))
        if self.section and self.align is None:
            self.align = self.section.align_default
        self.pad_before = fdt_util.GetInt(self._node, 'pad-before', 0)
        self.pad_after = fdt_util.GetInt(self._node, 'pad-after', 0)
        self.align_size = fdt_util.GetInt(self._node, 'align-size')
        if tools.not_power_of_two(self.align_size):
            self.Raise("Alignment size %s must be a power of two" %
                       self.align_size)
        self.align_end = fdt_util.GetInt(self._node, 'align-end')
        self.offset_unset = fdt_util.GetBool(self._node, 'offset-unset')
        self.extend_size = fdt_util.GetBool(self._node, 'extend-size')
        self.missing_msg = fdt_util.GetString(self._node, 'missing-msg')
        self.optional = fdt_util.GetBool(self._node, 'optional')
        self.overlap = fdt_util.GetBool(self._node, 'overlap')
        if self.overlap:
            self.required_props += ['offset', 'size']

        # This is only supported by blobs and sections at present
        self.compress = fdt_util.GetString(self._node, 'compress', 'none')
        self.offset_from_elf = fdt_util.GetPhandleNameOffset(self._node,
                                                             'offset-from-elf')

        self.preserve = fdt_util.GetBool(self._node, 'preserve')

    def GetDefaultFilename(self):
        return None

    def GetFdts(self):
        """Get the device trees used by this entry

        Returns:
            Empty dict, if this entry is not a .dtb, otherwise:
            Dict:
                key: Filename from this entry (without the path)
                value: Tuple:
                    Entry object for this dtb
                    Filename of file containing this dtb
        """
        return {}

    def gen_entries(self):
        """Allow entries to generate other entries

        Some entries generate subnodes automatically, from which sub-entries
        are then created. This method allows those to be added to the binman
        definition for the current image. An entry which implements this method
        should call state.AddSubnode() to add a subnode and can add properties
        with state.AddString(), etc.

        An example is 'files', which produces a section containing a list of
        files.
        """
        pass

    def AddMissingProperties(self, have_image_pos):
        """Add new properties to the device tree as needed for this entry

        Args:
            have_image_pos: True if this entry has an image position. This can
                be False if its parent section is compressed, since compression
                groups all entries together into a compressed block of data,
                obscuring the start of each individual child entry
        """
        for prop in ['offset', 'size']:
            if not prop in self._node.props:
                state.AddZeroProp(self._node, prop)
        if have_image_pos and 'image-pos' not in self._node.props:
            state.AddZeroProp(self._node, 'image-pos')
        if self.GetImage().allow_repack:
            if self.orig_offset is not None:
                state.AddZeroProp(self._node, 'orig-offset', True)
            if self.orig_size is not None:
                state.AddZeroProp(self._node, 'orig-size', True)

        if self.compress != 'none':
            state.AddZeroProp(self._node, 'uncomp-size')

        if self.update_hash:
            err = state.CheckAddHashProp(self._node)
            if err:
                self.Raise(err)

    def SetCalculatedProperties(self):
        """Set the value of device-tree properties calculated by binman"""
        state.SetInt(self._node, 'offset', self.offset)
        state.SetInt(self._node, 'size', self.size)
        base = self.section.GetRootSkipAtStart() if self.section else 0
        if self.image_pos is not None:
            state.SetInt(self._node, 'image-pos', self.image_pos - base)
        if self.GetImage().allow_repack:
            if self.orig_offset is not None:
                state.SetInt(self._node, 'orig-offset', self.orig_offset, True)
            if self.orig_size is not None:
                state.SetInt(self._node, 'orig-size', self.orig_size, True)
        if self.uncomp_size is not None:
            state.SetInt(self._node, 'uncomp-size', self.uncomp_size)

        if self.update_hash:
            state.CheckSetHashValue(self._node, self.GetData)

    def ProcessFdt(self, fdt):
        """Allow entries to adjust the device tree

        Some entries need to adjust the device tree for their purposes. This
        may involve adding or deleting properties.

        Returns:
            True if processing is complete
            False if processing could not be completed due to a dependency.
                This will cause the entry to be retried after others have been
                called
        """
        return True

    def SetPrefix(self, prefix):
        """Set the name prefix for a node

        Args:
            prefix: Prefix to set, or '' to not use a prefix
        """
        if prefix:
            self.name = prefix + self.name

    def SetContents(self, data):
        """Set the contents of an entry

        This sets both the data and contents_size properties

        Args:
            data: Data to set to the contents (bytes)
        """
        self.data = data
        self.contents_size = len(self.data)

    def ProcessContentsUpdate(self, data):
        """Update the contents of an entry, after the size is fixed

        This checks that the new data is the same size as the old. If the size
        has changed, this triggers a re-run of the packing algorithm.

        Args:
            data: Data to set to the contents (bytes)

        Raises:
            ValueError if the new data size is not the same as the old
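
        Example:
            A hedged sketch of how a subclass's ProcessContents() might use
            this once its final data is known (CalcContents() is a
            hypothetical helper)::

                def ProcessContents(self):
                    new_data = self.CalcContents()  # hypothetical helper
                    return self.ProcessContentsUpdate(new_data)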
        """
        size_ok = True
        new_size = len(data)
        if state.AllowEntryExpansion() and new_size > self.contents_size:
            # self.data will indicate the new size needed
            size_ok = False
        elif state.AllowEntryContraction() and new_size < self.contents_size:
            size_ok = False

        # If not allowed to change, try to deal with it or give up
        if size_ok:
            if new_size > self.contents_size:
                self.Raise('Cannot update entry size from %d to %d' %
                           (self.contents_size, new_size))

            # Don't let the data shrink. Pad it if necessary
            if size_ok and new_size < self.contents_size:
                data += tools.get_bytes(0, self.contents_size - new_size)

        if not size_ok:
            tout.debug("Entry '%s' size change from %s to %s" % (
                self._node.path, to_hex(self.contents_size),
                to_hex(new_size)))
        self.SetContents(data)
        return size_ok

    def ObtainContents(self, skip_entry=None, fake_size=0):
        """Figure out the contents of an entry.

        Args:
            skip_entry (Entry): Entry to skip when obtaining section contents
            fake_size (int): Size of fake file to create if needed

        Returns:
            True if the contents were found, False if another call is needed
            after the other entries are processed, None if there are no contents
        """
        # No contents by default: subclasses can implement this
        return True

    def ResetForPack(self):
        """Reset offset/size fields so that packing can be done again"""
        self.Detail('ResetForPack: offset %s->%s, size %s->%s' %
                    (to_hex(self.offset), to_hex(self.orig_offset),
                     to_hex(self.size), to_hex(self.orig_size)))
        self.pre_reset_size = self.size
        self.offset = self.orig_offset
        self.size = self.orig_size

    def Pack(self, offset):
        """Figure out how to pack the entry into the section

        Most of the time the entries are not fully specified. There may be
        an alignment but no size. In that case we take the size from the
        contents of the entry.

        If an entry has no hard-coded offset, it will be placed at @offset.

        Once this function is complete, both the offset and size of the
        entry will be known.

        Args:
            offset: Current section offset pointer

        Returns:
            New section offset pointer (after this entry)
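
        Example:
            A worked sketch of the arithmetic, assuming offset 0x103 is passed
            in and the entry has align = 0x10, pad-before = 4, contents of
            0x1c bytes and no explicit size, align-size or align-end::

                self.offset = align(0x103, 0x10) = 0x110
                needed      = 4 + 0x1c = 0x20 bytes
                new_offset  = 0x110 + 0x20 = 0x130 (returned for the next entry)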
        """
        self.Detail('Packing: offset=%s, size=%s, content_size=%x' %
                    (to_hex(self.offset), to_hex(self.size),
                     self.contents_size))
        if self.offset is None:
            if self.offset_unset:
                self.Raise('No offset set with offset-unset: should another '
                           'entry provide this correct offset?')
            elif self.offset_from_elf:
                self.offset = self.lookup_offset()
            else:
                self.offset = tools.align(offset, self.align)
        needed = self.pad_before + self.contents_size + self.pad_after
        needed = max(needed, self.min_size)
        needed = tools.align(needed, self.align_size)
        size = self.size
        if not size:
            size = needed
        new_offset = self.offset + size
        aligned_offset = tools.align(new_offset, self.align_end)
        if aligned_offset != new_offset:
            size = aligned_offset - self.offset
            new_offset = aligned_offset

        if not self.size:
            self.size = size

        if self.size < needed:
            self.Raise("Entry contents size is %#x (%d) but entry size is "
                       "%#x (%d)" % (needed, needed, self.size, self.size))
        # Check that the alignment is correct. It could be wrong if an offset
        # or size value was provided (i.e. not calculated), but conflicts with
        # the provided alignment values
        if self.size != tools.align(self.size, self.align_size):
            self.Raise("Size %#x (%d) does not match align-size %#x (%d)" %
                       (self.size, self.size, self.align_size, self.align_size))
        if self.offset != tools.align(self.offset, self.align):
            self.Raise("Offset %#x (%d) does not match align %#x (%d)" %
                       (self.offset, self.offset, self.align, self.align))
        self.Detail('   - packed: offset=%#x, size=%#x, content_size=%#x, next_offset=%x' %
                    (self.offset, self.size, self.contents_size, new_offset))

        return new_offset

    def Raise(self, msg):
        """Convenience function to raise an error referencing a node"""
        raise ValueError("Node '%s': %s" % (self._node.path, msg))

    def Info(self, msg):
        """Convenience function to log info referencing a node"""
        tag = "Info '%s'" % self._node.path
        tout.detail('%30s: %s' % (tag, msg))

    def Detail(self, msg):
        """Convenience function to log detail referencing a node"""
        tag = "Node '%s'" % self._node.path
        tout.detail('%30s: %s' % (tag, msg))

    def GetEntryArgsOrProps(self, props, required=False):
        """Return the values of a set of properties

        Args:
            props: List of EntryArg objects
            required: True to report any properties that are missing

        Raises:
            ValueError if a property is not found
        """
        values = []
        missing = []
        for prop in props:
            python_prop = prop.name.replace('-', '_')
            if hasattr(self, python_prop):
                value = getattr(self, python_prop)
            else:
                value = None
            if value is None:
                value = self.GetArg(prop.name, prop.datatype)
            if value is None and required:
                missing.append(prop.name)
            values.append(value)
        if missing:
            self.GetImage().MissingArgs(self, missing)
        return values

    def GetPath(self):
        """Get the path of a node

        Returns:
            Full path of the node for this entry
        """
        return self._node.path

    def GetData(self, required=True):
        """Get the contents of an entry

        Args:
            required: True if the data must be present, False if it is OK to
                return None

        Returns:
            bytes content of the entry, excluding any padding. If the entry is
                compressed, the compressed data is returned. If the entry data
                is not yet available, False can be returned. If the entry data
                is null, then None is returned.
        """
        self.Detail('GetData: size %s' % to_hex_size(self.data))
        return self.data

    def GetPaddedData(self, data=None):
        """Get the data for an entry including any padding

        Gets the entry data and uses its section's pad-byte value to add padding
        before and after as defined by the pad-before and pad-after properties.

        This does not consider alignment.

        Args:
            data: Data to use, or None to use the entry's own data (GetData())

        Returns:
            Contents of the entry along with any pad bytes before and
            after it (bytes)
        """
        if data is None:
            data = self.GetData()
        return self.section.GetPaddedDataForEntry(self, data)

    def GetOffsets(self):
        """Get the offsets for siblings

        Some entry types can contain information about the position or size of
        other entries. An example of this is the Intel Flash Descriptor, which
        knows where the Intel Management Engine section should go.

        If this entry knows about the position of other entries, it can specify
        this by returning values here.

        Returns:
            Dict:
                key: Entry type
                value: List containing position and size of the given entry
                    type. Either can be None if not known
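
        Example:
            A hedged sketch of a possible return value, saying that an
            'intel-me' entry should sit at offset 0x1000 with size 0x2000
            (the values are illustrative)::

                return {'intel-me': [0x1000, 0x2000]}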
        """
        return {}

    def SetOffsetSize(self, offset, size):
        """Set the offset and/or size of an entry

        Args:
            offset: New offset, or None to leave alone
            size: New size, or None to leave alone
        """
        if offset is not None:
            self.offset = offset
        if size is not None:
            self.size = size

    def SetImagePos(self, image_pos):
        """Set the position in the image

        Args:
            image_pos: Position of this entry in the image
        """
        self.image_pos = image_pos + self.offset

    def ProcessContents(self):
        """Do any post-packing updates of entry contents

        This function should call ProcessContentsUpdate() to update the entry
        contents, if necessary, returning its return value here.

        Returns:
            True if the new data size is OK, False if expansion is needed

        Raises:
            ValueError if the new data size is not the same as the old and
                state.AllowEntryExpansion() is False
        """
        return True

    def WriteSymbols(self, section):
        """Write symbol values into binary files for access at run time

        Args:
            section: Section containing the entry
        """
        if self.auto_write_symbols:
            # Check if we are writing symbols into an ELF file
            is_elf = self.GetDefaultFilename() == self.elf_fname
            elf.LookupAndWriteSymbols(self.elf_fname, self, section.GetImage(),
                                      is_elf, self.elf_base_sym)

    def CheckEntries(self):
        """Check that the entry offsets are correct

        This is used for entries which have extra offset requirements (other
        than having to be fully inside their section). Sub-classes can implement
        this function and raise if there is a problem.
        """
        pass

    @staticmethod
    def GetStr(value):
        if value is None:
            return '<none>  '
        return '%08x' % value

    @staticmethod
    def WriteMapLine(fd, indent, name, offset, size, image_pos):
        print('%s  %s%s  %s  %s' % (Entry.GetStr(image_pos), ' ' * indent,
                                    Entry.GetStr(offset), Entry.GetStr(size),
                                    name), file=fd)

    def WriteMap(self, fd, indent):
        """Write a map of the entry to a .map file

        Args:
            fd: File to write the map to
            indent: Current indent level of map (0=none, 1=one level, etc.)
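
        Example:
            With the GetStr() formatting above, a line for an entry at image
            position 0x100, offset 0 and size 0x20 might look roughly like
            this (spacing depends on the indent level; the name is
            illustrative)::

                00000100   00000000  00000020  u-boot-spl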
        """
        self.WriteMapLine(fd, indent, self.name, self.offset, self.size,
                          self.image_pos)

    # pylint: disable=assignment-from-none
    def GetEntries(self):
        """Return a list of entries contained by this entry

        Returns:
            List of entries, or None if none. A normal entry has no entries
                within it so will return None
        """
        return None

    def FindEntryByNode(self, find_node):
        """Find a node in an entry, searching all subentries

        This does a recursive search.

        Args:
            find_node (fdt.Node): Node to find

        Returns:
            Entry: entry, if found, else None
        """
        entries = self.GetEntries()
        if entries:
            for entry in entries.values():
                if entry._node == find_node:
                    return entry
                found = entry.FindEntryByNode(find_node)
                if found:
                    return found

        return None

    def GetArg(self, name, datatype=str):
        """Get the value of an entry argument or device-tree-node property

        Some node properties can be provided as arguments to binman. The entry
        arguments are checked first, falling back to the device tree if the
        value is not found there.

        Args:
            name: Argument name
            datatype: Data type (str or int)

        Returns:
            Value of argument as a string or int, or None if no value

        Raises:
            ValueError if the argument cannot be converted to an int
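
        Example:
            A minimal sketch (the property name is illustrative); an entry
            argument passed to binman takes precedence over the node
            property::

                version = self.GetArg('fw-version', int)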
        """
        value = state.GetEntryArg(name)
        if value is not None:
            if datatype == int:
                try:
                    value = int(value)
                except ValueError:
                    self.Raise("Cannot convert entry arg '%s' (value '%s') to integer" %
                               (name, value))
            elif datatype == str:
                pass
            else:
                raise ValueError("GetArg() internal error: Unknown data type '%s'" %
                                 datatype)
        else:
            value = fdt_util.GetDatatype(self._node, name, datatype)
        return value

    @staticmethod
    def WriteDocs(modules, test_missing=None):
        """Write out documentation about the various entry types to stdout

        Args:
            modules: List of modules to include
            test_missing: Used for testing. This is a module to report
                as missing
        """
        print('''Binman Entry Documentation
===========================

This file describes the entry types supported by binman. These entry types can
be placed in an image one by one to build up a final firmware image. It is
fairly easy to create new entry types. Just add a new file to the 'etype'
directory. You can use the existing entries as examples.

Note that some entries are subclasses of others, using and extending their
features to produce new behaviours.


''')
        modules = sorted(modules)

        # Don't show the test entry
        if '_testing' in modules:
            modules.remove('_testing')
        missing = []
        for name in modules:
            module = Entry.Lookup('WriteDocs', name, False)
            docs = getattr(module, '__doc__')
            if test_missing == name:
                docs = None
            if docs:
                lines = docs.splitlines()
                first_line = lines[0]
                rest = [line[4:] for line in lines[1:]]
                hdr = 'Entry: %s: %s' % (name.replace('_', '-'), first_line)

                # Create a reference for use by rST docs
                ref_name = f'etype_{module.__name__[6:]}'.lower()
                print('.. _%s:' % ref_name)
                print()
                print(hdr)
                print('-' * len(hdr))
                print('\n'.join(rest))
                print()
                print()
            else:
                missing.append(name)

        if missing:
            raise ValueError('Documentation is missing for modules: %s' %
                             ', '.join(missing))

    def GetUniqueName(self):
        """Get a unique name for a node

        Returns:
            String containing a unique name for a node, consisting of the names
            of all ancestors (starting from within the 'binman' node) separated
            by a dot ('.'). This can be useful for generating unique filenames
            in the output directory.
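
        Example:
            For a hypothetical node /binman/section@0/u-boot this returns
            'section@0.u-boot'.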
        """
        name = self.name
        node = self._node
        while node.parent:
            node = node.parent
            if node.name in ('binman', '/'):
                break
            name = '%s.%s' % (node.name, name)
        return name

    def extend_to_limit(self, limit):
        """Extend an entry so that it ends at the given offset limit"""
        if self.offset + self.size < limit:
            self.size = limit - self.offset
            # Request the contents again, since changing the size requires that
            # the data grows. This should not fail, but check it to be sure.
            if not self.ObtainContents():
                self.Raise('Cannot obtain contents when expanding entry')

    def HasSibling(self, name):
        """Check if there is a sibling of a given name

        Returns:
            True if there is an entry with this name in the same section,
                else False
        """
        return name in self.section.GetEntries()

    def GetSiblingImagePos(self, name):
        """Return the image position of the given sibling

        Returns:
            Image position of sibling, or None if the sibling has no position,
                or False if there is no such sibling
        """
        if not self.HasSibling(name):
            return False
        return self.section.GetEntries()[name].image_pos

    @staticmethod
    def AddEntryInfo(entries, indent, name, etype, size, image_pos,
                     uncomp_size, offset, entry):
        """Add a new entry to the entries list

        Args:
            entries: List (of EntryInfo objects) to add to
            indent: Current indent level to add to list
            name: Entry name (string)
            etype: Entry type (string)
            size: Entry size in bytes (int)
            image_pos: Position within image in bytes (int)
            uncomp_size: Uncompressed size if the entry uses compression, else
                None
            offset: Entry offset within parent in bytes (int)
            entry: Entry object
        """
        entries.append(EntryInfo(indent, name, etype, size, image_pos,
                                 uncomp_size, offset, entry))

    def ListEntries(self, entries, indent):
        """Add files in this entry to the list of entries

        This can be overridden by subclasses which need different behaviour.

        Args:
            entries: List (of EntryInfo objects) to add to
            indent: Current indent level to add to list
        """
        self.AddEntryInfo(entries, indent, self.name, self.etype, self.size,
                          self.image_pos, self.uncomp_size, self.offset, self)

    def ReadData(self, decomp=True, alt_format=None):
        """Read the data for an entry from the image

        This is used when the image has been read in and we want to extract the
        data for a particular entry from that image.

        Args:
            decomp: True to decompress any compressed data before returning it;
                False to return the raw, uncompressed data
            alt_format: Alternative format to read in, or None

        Returns:
            Entry data (bytes)
        """
        # Use True here so that we get an uncompressed section to work from,
        # although compressed sections are currently not supported
        tout.debug("ReadChildData section '%s', entry '%s'" %
                   (self.section.GetPath(), self.GetPath()))
        data = self.section.ReadChildData(self, decomp, alt_format)
        return data

    def ReadChildData(self, child, decomp=True, alt_format=None):
        """Read the data for a particular child entry

        This reads data from the parent and extracts the piece that relates to
        the given child.

        Args:
            child (Entry): Child entry to read data for (must be valid)
            decomp (bool): True to decompress any compressed data before
                returning it; False to return the raw, uncompressed data
            alt_format (str): Alternative format to read in, or None

        Returns:
            Data for the child (bytes)
        """
        pass

    def LoadData(self, decomp=True):
        data = self.ReadData(decomp)
        self.contents_size = len(data)
        self.ProcessContentsUpdate(data)
        self.Detail('Loaded data size %x' % len(data))

    def GetAltFormat(self, data, alt_format):
        """Read the data for an entry in an alternative format

        Supported formats are listed in the documentation for each entry type.
        An example is fdtmap.

        Args:
            data (bytes): Data to convert (this should have been produced by the
                entry)
            alt_format (str): Format to use

        """
        pass

    def GetImage(self):
        """Get the image containing this entry

        Returns:
            Image object containing this entry
        """
        return self.section.GetImage()

    def WriteData(self, data, decomp=True):
        """Write the data to an entry in the image

        This is used when the image has been read in and we want to replace the
        data for a particular entry in that image.

        The image must be re-packed and written out afterwards.

        Args:
            data: Data to replace it with
            decomp: True to compress the data if needed, False if data is
                already compressed so should be used as is

        Returns:
            True if the data did not result in a resize of this entry, False if
                 the entry must be resized
        """
        if self.size is not None:
            self.contents_size = self.size
        else:
            self.contents_size = self.pre_reset_size
        ok = self.ProcessContentsUpdate(data)
        self.build_done = False
        self.Detail('WriteData: size=%x, ok=%s' % (len(data), ok))
        section_ok = self.section.WriteChildData(self)
        return ok and section_ok

    def WriteChildData(self, child):
        """Handle writing the data in a child entry

        This should be called on the child's parent section after the child's
        data has been updated. It should update any data structures needed to
        validate that the update is successful.

        This base-class implementation does nothing, since the base Entry object
        does not have any children.

        Args:
            child: Child Entry that was written

        Returns:
            True if the section could be updated successfully, False if the
                data is such that the section could not update
        """
        self.build_done = False
        entry = self.section

        # Now we must rebuild all sections above this one
        while entry and entry != entry.section:
            entry.build_done = False
            entry = entry.section

        return True

    def GetSiblingOrder(self):
        """Get the relative order of an entry among its siblings

        Returns:
            'start' if this entry is first among siblings, 'end' if last,
                otherwise 'middle'
        """
        entries = list(self.section.GetEntries().values())
        if entries:
            if self == entries[0]:
                return 'start'
            elif self == entries[-1]:
                return 'end'
        return 'middle'

    def SetAllowMissing(self, allow_missing):
        """Set whether a section allows missing external blobs

        Args:
            allow_missing: True if allowed, False if not allowed
        """
        # This is meaningless for anything other than sections
        pass

    def SetAllowFakeBlob(self, allow_fake):
        """Set whether a section allows fake blobs to be created

        Args:
            allow_fake: True if allowed, False if not allowed
        """
        self.allow_fake = allow_fake

    def CheckMissing(self, missing_list):
        """Check if the entry has missing external blobs

        If there are missing (non-optional) blobs, the entries are added to the
        list

        Args:
            missing_list: List of Entry objects to be added to
        """
        if self.missing and not self.optional:
            missing_list.append(self)

    def check_fake_fname(self, fname, size=0):
        """If the file is missing and the entry allows fake blobs, fake it

        Sets self.faked to True if faked

        Args:
            fname (str): Filename to check
            size (int): Size of fake file to create

        Returns:
            tuple:
                fname (str): Filename of faked file
                bool: True if the blob was faked, False if not
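
        Example:
            A hedged sketch of how a blob etype might use this (the filename
            is illustrative)::

                fname, faked = self.check_fake_fname('missing-blob.bin', 1024)
                # If faking was allowed, fname now points at a zero-filled
                # file in the fake-blob directory and self.faked is True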
        """
        if self.allow_fake and not pathlib.Path(fname).is_file():
            if not self.fake_fname:
                outfname = os.path.join(self.fake_dir, os.path.basename(fname))
                with open(outfname, "wb") as out:
                    out.truncate(size)
                tout.info(f"Entry '{self._node.path}': Faked blob '{outfname}'")
                self.fake_fname = outfname
            self.faked = True
            return self.fake_fname, True
        return fname, False

    def CheckFakedBlobs(self, faked_blobs_list):
        """Check if any entries in this section have faked external blobs

        If there are faked blobs, the entries are added to the list

        Args:
            faked_blobs_list: List of Entry objects to be added to
        """
        # This is meaningless for anything other than blobs
        pass

    def CheckOptional(self, optional_list):
        """Check if the entry has missing but optional external blobs

        If there are missing (optional) blobs, the entries are added to the list

        Args:
            optional_list (list): List of Entry objects to be added to
        """
        if self.missing and self.optional:
            optional_list.append(self)

    def GetAllowMissing(self):
        """Get whether a section allows missing external blobs

        Returns:
            True if allowed, False if not allowed
        """
        return self.allow_missing

    def record_missing_bintool(self, bintool):
        """Record a missing bintool that was needed to produce this entry

        Args:
            bintool (Bintool): Bintool that was missing
        """
        if bintool not in self.missing_bintools:
            self.missing_bintools.append(bintool)

    def check_missing_bintools(self, missing_list):
        """Check if any entries in this section have missing bintools

        If there are missing bintools, these are added to the list

        Args:
            missing_list: List of Bintool objects to be added to
        """
        for bintool in self.missing_bintools:
            if bintool not in missing_list:
                missing_list.append(bintool)


    def GetHelpTags(self):
        """Get the tags used for missing-blob help

        Returns:
            list of possible tags, most desirable first
        """
        return list(filter(None, [self.missing_msg, self.name, self.etype]))

    def CompressData(self, indata):
        """Compress data according to the entry's compression method

        Args:
            indata: Data to compress

        Returns:
            Compressed data
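
        Example:
            A minimal sketch; for an entry with compress = 'lz4' and the lz4
            bintool present, this returns the compressed bytes and records the
            uncompressed size::

                comp = self.CompressData(indata)
                # self.uncomp_size == len(indata) afterwards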
        """
        self.uncomp_data = indata
        if self.compress != 'none':
            self.uncomp_size = len(indata)
            if self.comp_bintool.is_present():
                data = self.comp_bintool.compress(indata)
            else:
                self.record_missing_bintool(self.comp_bintool)
                data = tools.get_bytes(0, 1024)
        else:
            data = indata
        return data

    def DecompressData(self, indata):
        """Decompress data according to the entry's compression method

        Args:
            indata: Data to decompress

        Returns:
            Decompressed data
        """
        if self.compress != 'none':
            if self.comp_bintool.is_present():
                data = self.comp_bintool.decompress(indata)
                self.uncomp_size = len(data)
            else:
                self.record_missing_bintool(self.comp_bintool)
                data = tools.get_bytes(0, 1024)
        else:
            data = indata
        self.uncomp_data = data
        return data

    @classmethod
    def UseExpanded(cls, node, etype, new_etype):
        """Check whether to use an expanded entry type

        This is called by Entry.Create() when it finds an expanded version of
        an entry type (e.g. 'u-boot-expanded'). If this method returns True then
        it will be used (e.g. in place of 'u-boot'). If it returns False, it is
        ignored.

        Args:
            node:     Node object containing information about the entry to
                      create
            etype:    Original entry type being used
            new_etype: New entry type proposed

        Returns:
            True to use this entry type, False to use the original one
        """
        tout.info("Node '%s': etype '%s': %s selected" %
                  (node.path, etype, new_etype))
        return True

    def CheckAltFormats(self, alt_formats):
        """Add any alternative formats supported by this entry type

        Args:
            alt_formats (dict): Dict to add alt_formats to:
                key: Name of alt format
                value: Help text
        """
        pass

    def AddBintools(self, btools):
        """Add the bintools used by this entry type

        Args:
            btools (dict of Bintool): Dict of bintools to update, keyed by
                bintool name

        Raises:
            ValueError if compression algorithm is not supported
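
        Example:
            A hedged sketch for an entry using compress = 'lzma'; the bintool
            registered is 'lzma_alone'::

                btools = {}
                entry.AddBintools(btools)
                # btools now contains {'lzma_alone': <Bintool>} and
                # entry.comp_bintool refers to the same object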
        """
        algo = self.compress
        if algo != 'none':
            algos = ['bzip2', 'gzip', 'lz4', 'lzma', 'lzo', 'xz', 'zstd']
            if algo not in algos:
                raise ValueError("Unknown algorithm '%s'" % algo)
            names = {'lzma': 'lzma_alone', 'lzo': 'lzop'}
            name = names.get(self.compress, self.compress)
            self.comp_bintool = self.AddBintool(btools, name)

    @classmethod
    def AddBintool(self, tools, name):
        """Add a new bintool to the tools used by this etype

        Args:
            tools (dict of Bintool): Dict of bintools to add to
            name: Name of the tool
        """
        btool = bintool.Bintool.create(name)
        tools[name] = btool
        return btool

    def SetUpdateHash(self, update_hash):
        """Set whether this entry's "hash" subnode should be updated

        Args:
            update_hash: True if hash should be updated, False if not
        """
        self.update_hash = update_hash

    def collect_contents_to_file(self, entries, prefix, fake_size=0):
        """Put the contents of a list of entries into a file

        Args:
            entries (list of Entry): Entries to collect
            prefix (str): Filename prefix of file to write to
            fake_size (int): Size of fake file to create if needed

        If any entry does not have contents yet, this function returns None
        for the data.

        Returns:
            Tuple:
                bytes: Concatenated data from all the entries (or None)
                str: Filename of file written (or None if no data)
                str: Unique portion of filename (or None if no data)
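
        Example:
            A hedged sketch; the prefix is illustrative::

                data, fname, uniq = self.collect_contents_to_file(
                    entries, 'mkimage')
                if data is None:
                    return False  # try again once contents are available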
        """
        data = b''
        for entry in entries:
            # First get the input data and put it in a file. If not available,
            # try later.
            if not entry.ObtainContents(fake_size=fake_size):
                return None, None, None
            data += entry.GetData()
        uniq = self.GetUniqueName()
        fname = tools.get_output_filename(f'{prefix}.{uniq}')
        tools.write_file(fname, data)
        return data, fname, uniq

    @classmethod
    def create_fake_dir(cls):
        """Create the directory for fake files"""
        cls.fake_dir = tools.get_output_filename('binman-fake')
        if not os.path.exists(cls.fake_dir):
            os.mkdir(cls.fake_dir)
        tout.notice(f"Fake-blob dir is '{cls.fake_dir}'")

    def ensure_props(self):
        """Raise an exception if required properties are missing

        Checks that every property in self.required_props is present in the
        entry's node.

        Raises:
            ValueError: Any required property is missing
        """
        not_present = []
        for prop in self.required_props:
            if not prop in self._node.props:
                not_present.append(prop)
        if not_present:
            self.Raise(f"'{self.etype}' entry is missing properties: {' '.join(not_present)}")

    def mark_absent(self, msg):
        tout.info("Entry '%s' marked absent: %s" % (self._node.path, msg))
        self.absent = True

    def read_elf_segments(self):
        """Read segments from an entry that can generate an ELF file

        Returns:
            tuple:
                list of segments, each:
                    int: Segment number (0 = first)
                    int: Start address of segment in memory
                    bytes: Contents of segment
                int: entry address of ELF file
        """
        return None

    def lookup_offset(self):
        node, sym_name, offset = self.offset_from_elf
        entry = self.section.FindEntryByNode(node)
        if not entry:
            self.Raise("Cannot find entry for node '%s'" % node.name)
        if not entry.elf_fname:
            entry.Raise("Need elf-fname property '%s'" % node.name)
        val = elf.GetSymbolOffset(entry.elf_fname, sym_name,
                                  entry.elf_base_sym)
        return val + offset

    def mark_build_done(self):
        """Mark an entry as already built"""
        self.build_done = True
        entries = self.GetEntries()
        if entries:
            for entry in entries.values():
                entry.mark_build_done()

    def UpdateSignatures(self, privatekey_fname, algo, input_fname):
        self.Raise('Updating signatures is not supported with this entry type')