1# Copyright (c) 2019 Nordic Semiconductor ASA
2# Copyright (c) 2019 Linaro Limited
3# SPDX-License-Identifier: BSD-3-Clause
4
5# Tip: You can view just the documentation with 'pydoc3 devicetree.edtlib'
6
7"""
8Library for working with devicetrees at a higher level compared to dtlib. Like
9dtlib, this library presents a tree of devicetree nodes, but the nodes are
10augmented with information from bindings and include some interpretation of
11properties. Some of this interpretation is based on conventions established
12by the Linux kernel, so the Documentation/devicetree/bindings in the Linux
13source code is sometimes good reference material.
14
15Bindings are YAML files that describe devicetree nodes. Devicetree
16nodes are usually mapped to bindings via their 'compatible = "..."' property,
17but a binding can also come from a 'child-binding:' key in the binding for the
18parent devicetree node.
19
20Each devicetree node (dtlib.Node) gets a corresponding edtlib.Node instance,
21which has all the information related to the node.
22
23The top-level entry points for the library are the EDT and Binding classes.
24See their constructor docstrings for details. There is also a
25bindings_from_paths() helper function.
26"""
27
28# NOTE: tests/test_edtlib.py is the test suite for this library.
29
30# Implementation notes
31# --------------------
32#
33# A '_' prefix on an identifier in Python is a convention for marking it private.
34# Please do not access private things. Instead, think of what API you need, and
35# add it.
36#
37# This module is not meant to have any global state. It should be possible to
38# create several EDT objects with independent binding paths and flags. If you
39# need to add a configuration parameter or the like, store it in the EDT
40# instance, and initialize it e.g. with a constructor argument.
41#
42# This library is layered on top of dtlib, and is not meant to expose it to
43# clients. This keeps the header generation script simple.
44#
45# General biased advice:
46#
47# - Consider using @property for APIs that don't need parameters. It makes
48#   functions look like attributes, which is less awkward in clients, and makes
49#   it easy to switch back and forth between variables and functions.
50#
51# - Think about the data type of the thing you're exposing. Exposing something
52#   as e.g. a list or a dictionary is often nicer and more flexible than adding
53#   a function.
54#
55# - Avoid get_*() prefixes on functions. Name them after the thing they return
56#   instead. This often makes the code read more naturally in callers.
57#
58#   Also, consider using @property instead of get_*().
59#
60# - Don't expose dtlib stuff directly.
61#
62# - Add documentation for any new APIs you add.
63#
64#   The convention here is that docstrings (quoted strings) are used for public
65#   APIs, and "doc comments" for internal functions.
66#
67#   @properties are documented in the class docstring, as if they were
68#   variables. See the existing @properties for a template.
69
70import base64
71import hashlib
72import logging
73import os
74import re
75from collections import defaultdict
76from collections.abc import Callable, Iterable
77from copy import deepcopy
78from dataclasses import dataclass
79from typing import TYPE_CHECKING, Any, NoReturn, Optional, Union
80
81import yaml
82
83try:
84    # Use the C LibYAML parser if available, rather than the Python parser.
85    # This makes e.g. gen_defines.py more than twice as fast.
86    from yaml import CLoader as Loader
87except ImportError:
88    from yaml import Loader  # type: ignore
89
90from devicetree._private import _slice_helper
91from devicetree.dtlib import DT, DTError, Type, to_num, to_nums
92from devicetree.dtlib import Node as dtlib_Node
93from devicetree.dtlib import Property as dtlib_Property
94from devicetree.grutils import Graph
95
96
97def _compute_hash(path: str) -> str:
98    # Calculates the hash associated with the node's full path.
99    hasher = hashlib.sha256()
100    hasher.update(path.encode())
101    return base64.b64encode(hasher.digest(), altchars=b'__').decode().rstrip('=')
102
103#
104# Public classes
105#
106
107
class Binding:
    """
    Represents a parsed binding.

    These attributes are available on Binding objects:

    path:
      The absolute path to the file defining the binding.

    title:
      The free-form title of the binding (optional).

      When the content in the 'description:' is too long, the 'title:' can
      be used as a heading for the extended description. Typically, it serves
      as a description of the hardware model. For example:

      title: Nordic GPIO

      description: |
        Descriptions and example nodes related to GPIO.
        ...

    description:
      The free-form description of the binding.

    compatible:
      The compatible string the binding matches.

      This may be None. For example, it's None when the Binding is inferred
      from node properties. It can also be None for Binding objects created
      using 'child-binding:' with no compatible.

    prop2specs:
      A dict mapping property names to PropertySpec objects
      describing those properties' values.

    specifier2cells:
      A dict that maps specifier space names (like "gpio",
      "clock", "pwm", etc.) to lists of cell names.

      For example, if the binding YAML contains 'pin' and 'flags' cell names
      for the 'gpio' specifier space, like this:

          gpio-cells:
          - pin
          - flags

      Then the Binding object will have a 'specifier2cells' attribute mapping
      "gpio" to ["pin", "flags"]. A missing key should be interpreted as zero
      cells.

    raw:
      The binding as an object parsed from YAML.

    bus:
      If nodes with this binding's 'compatible' describe a bus, a string
      describing the bus type (like "i2c") or a list describing supported
      protocols (like ["i3c", "i2c"]). None otherwise.

      Note that this is the raw value from the binding where it can be
      a string or a list. Use "buses" instead unless you need the raw
      value, where "buses" is always a list.

    buses:
      Derived property from 'bus' where 'buses' is a list of bus(es),
      for example, ["i2c"] or ["i3c", "i2c"]. Or an empty list if there is
      no 'bus:' in this binding.

    on_bus:
      If nodes with this binding's 'compatible' appear on a bus, a string
      describing the bus type (like "i2c"). None otherwise.

    child_binding:
      If this binding describes the properties of child nodes, then
      this is a Binding object for those children; it is None otherwise.
      A Binding object's 'child_binding.child_binding' is not None if there
      are multiple levels of 'child-binding' descriptions in the binding.
    """

    def __init__(self, path: Optional[str], fname2path: dict[str, str],
                 raw: Any = None, require_compatible: bool = True,
                 require_description: bool = True, require_title: bool = False):
        """
        Binding constructor.

        path:
          Path to binding YAML file. May be None.

        fname2path:
          Map from include files to their absolute paths. Must
          not be None, but may be empty.

        raw:
          Optional raw content in the binding.
          This does not have to have any "include:" lines resolved.
          May be left out, in which case 'path' is opened and read.
          This can be used to resolve child bindings, for example.

        require_compatible:
          If True, it is an error if the binding does not contain a
          "compatible:" line. If False, a missing "compatible:" is
          not an error. Either way, "compatible:" must be a string
          if it is present in the binding.

        require_description:
          If True, it is an error if the binding does not contain a
          "description:" line. If False, a missing "description:" is
          not an error. Either way, "description:" must be a string
          if it is present in the binding.

        require_title:
          If True, it is an error if the binding does not contain a
          "title:" line. If False, a missing "title:" is not an error.
          Either way, "title:" must be a string if it is present in
          the binding.
        """
        self.path: Optional[str] = path
        self._fname2path: dict[str, str] = fname2path

        if raw is None:
            if path is None:
                _err("you must provide either a 'path' or a 'raw' argument")
            with open(path, encoding="utf-8") as f:
                raw = yaml.load(f, Loader=_BindingLoader)

        # Merge any included files into self.raw. This also pulls in
        # inherited child binding definitions, so it has to be done
        # before initializing those.
        self.raw: dict = self._merge_includes(raw, self.path)

        # Recursively initialize any child bindings. These don't
        # require a 'compatible', 'description' or 'title' to be well
        # defined, but they must be dicts.
        if "child-binding" in raw:
            if not isinstance(raw["child-binding"], dict):
                _err(f"malformed 'child-binding:' in {self.path}, "
                     "expected a binding (dictionary with keys/values)")
            self.child_binding: Optional[Binding] = Binding(
                path, fname2path,
                raw=raw["child-binding"],
                require_compatible=False,
                require_description=False)
        else:
            self.child_binding = None

        # Make sure this is a well defined object.
        self._check(require_compatible, require_description, require_title)

        # Initialize look up tables.
        self.prop2specs: dict[str, PropertySpec] = {}
        for prop_name in self.raw.get("properties", {}):
            self.prop2specs[prop_name] = PropertySpec(prop_name, self)
        self.specifier2cells: dict[str, list[str]] = {}
        for key, val in self.raw.items():
            if key.endswith("-cells"):
                self.specifier2cells[key[:-len("-cells")]] = val

    def __repr__(self) -> str:
        if self.compatible:
            compat = f" for compatible '{self.compatible}'"
        else:
            compat = ""
        basename = os.path.basename(self.path or "")
        return f"<Binding {basename}" + compat + ">"

    @property
    def title(self) -> Optional[str]:
        "See the class docstring"
        return self.raw.get('title')

    @property
    def description(self) -> Optional[str]:
        "See the class docstring"
        return self.raw.get('description')

    @property
    def compatible(self) -> Optional[str]:
        "See the class docstring"
        return self.raw.get('compatible')

    @property
    def bus(self) -> Union[None, str, list[str]]:
        "See the class docstring"
        return self.raw.get('bus')

    @property
    def buses(self) -> list[str]:
        "See the class docstring"
        if self.raw.get('bus') is not None:
            return self._buses
        else:
            return []

    @property
    def on_bus(self) -> Optional[str]:
        "See the class docstring"
        return self.raw.get('on-bus')

    def _merge_includes(self, raw: dict, binding_path: Optional[str]) -> dict:
        # Constructor helper. Merges included files in
        # 'raw["include"]' into 'raw' using 'self._include_paths' as a
        # source of include files, removing the "include" key while
        # doing so.
        #
        # This treats 'binding_path' as the binding file being built up
        # and uses it for error messages.

        if "include" not in raw:
            return raw

        include = raw.pop("include")

        # First, merge the included files together. If more than one included
        # file has a 'required:' for a particular property, OR the values
        # together, so that 'required: true' wins.

        merged: dict[str, Any] = {}

        if isinstance(include, str):
            # Simple scalar string case
            _merge_props(merged, self._load_raw(include), None, binding_path,
                         False)
        elif isinstance(include, list):
            # List of strings and maps. These types may be intermixed.
            for elem in include:
                if isinstance(elem, str):
                    _merge_props(merged, self._load_raw(elem), None,
                                 binding_path, False)
                elif isinstance(elem, dict):
                    name = elem.pop('name', None)
                    allowlist = elem.pop('property-allowlist', None)
                    blocklist = elem.pop('property-blocklist', None)
                    child_filter = elem.pop('child-binding', None)

                    if elem:
                        # We've popped out all the valid keys.
                        _err(f"'include:' in {binding_path} should not have "
                             f"these unexpected contents: {elem}")

                    _check_include_dict(name, allowlist, blocklist,
                                        child_filter, binding_path)

                    contents = self._load_raw(name)

                    _filter_properties(contents, allowlist, blocklist,
                                       child_filter, binding_path)
                    _merge_props(merged, contents, None, binding_path, False)
                else:
                    _err(f"all elements in 'include:' in {binding_path} "
                         "should be either strings or maps with a 'name' key "
                         "and optional 'property-allowlist' or "
                         f"'property-blocklist' keys, but got: {elem}")
        else:
            # Invalid item.
            _err(f"'include:' in {binding_path} "
                 f"should be a string or list, but has type {type(include)}")

        # Next, merge the merged included files into 'raw'. Error out if
        # 'raw' has 'required: false' while the merged included files have
        # 'required: true'.

        _merge_props(raw, merged, None, binding_path, check_required=True)

        return raw

    def _load_raw(self, fname: str) -> dict:
        # Returns the contents of the binding given by 'fname' after merging
        # any bindings it lists in 'include:' into it. 'fname' is just the
        # basename of the file, so we check that there aren't multiple
        # candidates.

        path = self._fname2path.get(fname)

        if not path:
            _err(f"'{fname}' not found")

        with open(path, encoding="utf-8") as f:
            contents = yaml.load(f, Loader=_BindingLoader)
            if not isinstance(contents, dict):
                _err(f'{path}: invalid contents, expected a mapping')

        return self._merge_includes(contents, path)

    def _check(self, require_compatible: bool, require_description: bool,
               require_title: bool):
        # Does sanity checking on the binding.

        raw = self.raw

        if "compatible" in raw:
            compatible = raw["compatible"]
            if not isinstance(compatible, str):
                _err(f"malformed 'compatible: {compatible}' "
                     f"field in {self.path} - "
                     f"should be a string, not {type(compatible).__name__}")
        elif require_compatible:
            _err(f"missing 'compatible' in {self.path}")

        if "title" in raw:
            title = raw["title"]
            if not isinstance(title, str) or not title:
                _err(f"malformed or empty 'title' in {self.path}")
        elif require_title:
            _err(f"missing 'title' in {self.path}")

        if "description" in raw:
            description = raw["description"]
            if not isinstance(description, str) or not description:
                _err(f"malformed or empty 'description' in {self.path}")
        elif require_description:
            _err(f"missing 'description' in {self.path}")

        # Allowed top-level keys. The 'include' key should have been
        # removed by _load_raw() already.
        ok_top = {"title", "description", "compatible", "bus",
                  "on-bus", "properties", "child-binding"}

        # Descriptive errors for legacy bindings.
        legacy_errors = {
            "#cells": "expected *-cells syntax",
            "child": "use 'bus: <bus>' instead",
            "child-bus": "use 'bus: <bus>' instead",
            "parent": "use 'on-bus: <bus>' instead",
            "parent-bus": "use 'on-bus: <bus>' instead",
            "sub-node": "use 'child-binding' instead",
        }

        for key in raw:
            if key in legacy_errors:
                _err(f"legacy '{key}:' in {self.path}, {legacy_errors[key]}")

            if key not in ok_top and not key.endswith("-cells"):
                # Fix: the continuation string was missing the f-prefix, so
                # the {', '.join(ok_top)} placeholder was printed literally.
                _err(f"unknown key '{key}' in {self.path}, "
                     f"expected one of {', '.join(ok_top)}, or *-cells")

        if "bus" in raw:
            bus = raw["bus"]
            # 'bus:' must be a string or a list of strings. Note the 'or'
            # between the list checks: a non-list value must be rejected
            # before all() tries to iterate over it, and a list containing
            # non-strings must also be rejected (the previous 'and' let
            # such lists through and crashed on non-iterable values).
            if (not isinstance(bus, str) and
                (not isinstance(bus, list) or
                 not all(isinstance(elem, str) for elem in bus))):
                _err(f"malformed 'bus:' value in {self.path}, "
                     "expected string or list of strings")

            if isinstance(bus, list):
                self._buses = bus
            else:
                # Convert bus into a list
                self._buses = [bus]

        if ("on-bus" in raw
            and not isinstance(raw["on-bus"], str)):
            _err(f"malformed 'on-bus:' value in {self.path}, "
                 "expected string")

        self._check_properties()

        for key, val in raw.items():
            # '*-cells' values must be lists of strings. The parentheses
            # matter: without them, the 'or not all(...)' clause ran for
            # every top-level key (operator precedence made the condition
            # '(A and B) or C'), which could raise TypeError when 'val'
            # is not iterable and mis-flag non-cells keys.
            if (key.endswith("-cells")
                and (not isinstance(val, list)
                     or not all(isinstance(elem, str) for elem in val))):
                _err(f"malformed '{key}:' in {self.path}, "
                     "expected a list of strings")

    def _check_properties(self) -> None:
        # _check() helper for checking the contents of 'properties:'.

        raw = self.raw

        if "properties" not in raw:
            return

        ok_prop_keys = {"description", "type", "required",
                        "enum", "const", "default", "deprecated",
                        "specifier-space"}

        for prop_name, options in raw["properties"].items():
            for key in options:
                if key not in ok_prop_keys:
                    _err(f"unknown setting '{key}' in "
                         f"'properties: {prop_name}: ...' in {self.path}, "
                         f"expected one of {', '.join(ok_prop_keys)}")

            _check_prop_by_type(prop_name, options, self.path)

            for true_false_opt in ["required", "deprecated"]:
                if true_false_opt in options:
                    option = options[true_false_opt]
                    if not isinstance(option, bool):
                        _err(f"malformed '{true_false_opt}:' setting '{option}' "
                             f"for '{prop_name}' in 'properties' in {self.path}, "
                             "expected true/false")

            if options.get("deprecated") and options.get("required"):
                _err(f"'{prop_name}' in 'properties' in {self.path} should not "
                      "have both 'deprecated' and 'required' set")

            if ("description" in options
                and not isinstance(options["description"], str)):
                _err("missing, malformed, or empty 'description' for "
                     f"'{prop_name}' in 'properties' in {self.path}")

            if "enum" in options and not isinstance(options["enum"], list):
                _err(f"enum in {self.path} for property '{prop_name}' "
                     "is not a list")
512
513
class PropertySpec:
    """
    Represents a "property specification", i.e. the description of a
    property provided by a binding file, like its type and description.

    These attributes are available on PropertySpec objects:

    binding:
      The Binding object which defined this property.

    name:
      The property's name.

    path:
      The file where this property was defined. In case a binding includes
      other bindings, this is the including binding file.
      Generally this means that this will be the binding file specifying
      the devicetree node of which this is a property.

    type:
      The type of the property as a string, as given in the binding.

    description:
      The free-form description of the property as a string, or None.

    enum:
      A list of values the property may take as given in the binding, or None.

    enum_tokenizable:
      True if enum is not None and all the values in it are tokenizable;
      False otherwise.

      A property must have string or string-array type and an "enum:" in its
      binding to be tokenizable. Additionally, the "enum:" values must be
      unique after converting all non-alphanumeric characters to underscores
      (so "foo bar" and "foo_bar" in the same "enum:" would not be
      tokenizable).

    enum_upper_tokenizable:
      Like 'enum_tokenizable', with the additional restriction that the
      "enum:" values must be unique after uppercasing and converting
      non-alphanumeric characters to underscores.

    const:
      The property's constant value as given in the binding, or None.

    default:
      The property's default value as given in the binding, or None.

    deprecated:
      True if the property is deprecated; False otherwise.

    required:
      True if the property is marked required; False otherwise.

    specifier_space:
      The specifier space for the property as given in the binding, or None.
    """

    def __init__(self, name: str, binding: Binding):
        self.binding: Binding = binding
        self.name: str = name
        # The raw 'properties: <name>: ...' mapping from the binding.
        self._raw: dict[str, Any] = binding.raw["properties"][name]

    def __repr__(self) -> str:
        return f"<PropertySpec {self.name} type '{self.type}'>"

    @property
    def path(self) -> Optional[str]:
        "See the class docstring"
        return self.binding.path

    @property
    def type(self) -> str:
        "See the class docstring"
        # 'type:' is mandatory for a property, so index rather than get().
        return self._raw["type"]

    @property
    def description(self) -> Optional[str]:
        "See the class docstring"
        return self._raw.get("description")

    @property
    def enum(self) -> Optional[list]:
        "See the class docstring"
        return self._raw.get("enum")

    @property
    def enum_tokenizable(self) -> bool:
        "See the class docstring"
        # Computed lazily and cached on first access.
        try:
            return self._enum_tokenizable
        except AttributeError:
            pass

        tokenizable = False
        if self.enum is not None and self.type in ('string', 'string-array'):
            # Saving _as_tokens here lets us reuse it in
            # enum_upper_tokenizable.
            self._as_tokens = [
                re.sub(_NOT_ALPHANUM_OR_UNDERSCORE, '_', value)
                for value in self.enum
            ]
            # Tokenizable iff the token forms are pairwise distinct.
            tokenizable = len(set(self._as_tokens)) == len(self._as_tokens)

        self._enum_tokenizable = tokenizable
        return tokenizable

    @property
    def enum_upper_tokenizable(self) -> bool:
        "See the class docstring"
        # Computed lazily and cached on first access.
        try:
            return self._enum_upper_tokenizable
        except AttributeError:
            pass

        if self.enum_tokenizable:
            uppercased = {token.upper() for token in self._as_tokens}
            result = len(uppercased) == len(self._as_tokens)
        else:
            result = False

        self._enum_upper_tokenizable = result
        return result

    @property
    def const(self) -> Union[None, int, list[int], str, list[str]]:
        "See the class docstring"
        return self._raw.get("const")

    @property
    def default(self) -> Union[None, int, list[int], str, list[str]]:
        "See the class docstring"
        return self._raw.get("default")

    @property
    def required(self) -> bool:
        "See the class docstring"
        return self._raw.get("required", False)

    @property
    def deprecated(self) -> bool:
        "See the class docstring"
        return self._raw.get("deprecated", False)

    @property
    def specifier_space(self) -> Optional[str]:
        "See the class docstring"
        return self._raw.get("specifier-space")
655
# Type of a Property object's 'val' attribute: the Python representation of a
# devicetree property value, which varies with the binding's 'type:' (int,
# string, lists of those, Node(s) for phandle/path types, ControllerAndData
# entries for phandle-arrays, bytes for uint8-array, or None).
PropertyValType = Union[int, str,
                        list[int], list[str],
                        'Node', list['Node'],
                        list[Optional['ControllerAndData']],
                        bytes, None]
661
662
@dataclass
class Property:
    """
    Represents a property on a Node, as set in its DT node and with
    additional info from the 'properties:' section of the binding.

    Only properties mentioned in 'properties:' get created. Properties of type
    'compound' currently do not get Property instances, as it's not clear
    what to generate for them.

    These attributes are available on Property objects. Several are
    just convenience accessors for attributes on the PropertySpec object
    accessible via the 'spec' attribute.

    These attributes are available on Property objects:

    spec:
      The PropertySpec object which specifies this property.

    val:
      The value of the property, with the format determined by spec.type,
      which comes from the 'type:' string in the binding.

        - For 'type: int/array/string/string-array', 'val' is what you'd expect
          (a Python integer or string, or a list of them)

        - For 'type: uint8-array', 'val' is a bytes object

        - For 'type: phandle' and 'type: path', 'val' is the pointed-to Node
          instance

        - For 'type: phandles', 'val' is a list of the pointed-to Node
          instances

        - For 'type: phandle-array', 'val' is a list of ControllerAndData
          instances. See the documentation for that class.

    node:
      The Node instance the property is on

    name:
      Convenience for spec.name.

    description:
      Convenience for spec.description with leading and trailing whitespace
      (including newlines) removed. May be None.

    type:
      Convenience for spec.type.

    val_as_tokens:
      The value of the property as a list of tokens, i.e. with non-alphanumeric
      characters replaced with underscores. This is only safe to access
      if 'spec.enum_tokenizable' returns True.

    enum_indices:
      A list of indices of 'val' in 'spec.enum' (which comes from the 'enum:'
      list in the binding), or None if spec.enum is None.
    """

    spec: PropertySpec
    val: PropertyValType
    node: 'Node'

    @property
    def name(self) -> str:
        "See the class docstring"
        return self.spec.name

    @property
    def description(self) -> Optional[str]:
        "See the class docstring"
        desc = self.spec.description
        return desc.strip() if desc else None

    @property
    def type(self) -> str:
        "See the class docstring"
        return self.spec.type

    @property
    def val_as_tokens(self) -> list[str]:
        "See the class docstring"
        # Treat a scalar value as a one-element list so both string and
        # string-array properties are handled uniformly.
        values = self.val if isinstance(self.val, list) else [self.val]
        tokens = []
        for value in values:
            assert isinstance(value, str)
            tokens.append(str_as_token(value))
        return tokens

    @property
    def enum_indices(self) -> Optional[list[int]]:
        "See the class docstring"
        enum = self.spec.enum
        if not enum:
            return None
        values = self.val if isinstance(self.val, list) else [self.val]
        return [enum.index(value) for value in values]
757
758
@dataclass
class Register:
    """
    Represents one entry of a node's 'reg' property.

    These attributes are available on Register objects:

    node:
      The Node instance this register is from

    name:
      The register's name, taken from the corresponding entry of the
      'reg-names' property, or None if the node has no 'reg-names' property

    addr:
      The register's start address, translated into the parent address space
      (any 'ranges' properties are taken into account), or None if
      #address-cells is zero

    size:
      The register's size in bytes
    """

    node: 'Node'
    name: Optional[str]
    addr: Optional[int]
    size: Optional[int]
785
786
@dataclass
class Range:
    """
    Represents one address translation entry from a node's 'ranges' property.

    These attributes are available on Range objects:

    node:
      The Node instance this range is from

    child_bus_cells:
      How many cells make up an address in the child bus address space.

    child_bus_addr:
      A physical address within the child bus address space, or None if the
      child's #address-cells equals 0.

    parent_bus_cells:
      How many cells make up an address in the parent bus address space.

    parent_bus_addr:
      A physical address within the parent bus address space, or None if the
      parent's #address-cells equals 0.

    length_cells:
      How many cells are used to express the size of the range in the
      child's address space.

    length:
      The size of the range in the child address space, or None if the
      child's #size-cells equals 0.
    """
    node: 'Node'
    child_bus_cells: int
    child_bus_addr: Optional[int]
    parent_bus_cells: int
    parent_bus_addr: Optional[int]
    length_cells: int
    length: Optional[int]
826
827
@dataclass
class ControllerAndData:
    """
    Represents an entry in an 'interrupts' or 'type: phandle-array' property
    value, e.g. <&ctrl-1 4 0> in

        cs-gpios = <&ctrl-1 4 0 &ctrl-2 3 4>;

    These attributes are available on ControllerAndData objects:

    node:
      The Node instance the property appears on

    controller:
      The Node instance for the controller (e.g. the controller the interrupt
      gets sent to for interrupts)

    data:
      A dictionary mapping the cell names from the controller binding's
      *-cells key to the cell values from the entry, e.g.
      {"pin": 4, "flags": 0} for the example above.

      'interrupts = <1 2>' might give {"irq": 1, "level": 2}.

    name:
      The entry's name, taken from the corresponding
      'interrupt-names'/'gpio-names'/'pwm-names'/etc. property, or None if
      there is no *-names property

    basename:
      Basename for the controller when supporting named cells
    """
    node: 'Node'
    controller: 'Node'
    data: dict
    name: Optional[str]
    basename: Optional[str]
865
866
@dataclass
class PinCtrl:
    """
    A single pin control configuration for a set of pins on a device,
    e.g. pinctrl-0 or pinctrl-1.
    """

    # The Node instance the pinctrl-* property is on.
    node: 'Node'
    # The configuration name from pinctrl-names, or None if there is no
    # pinctrl-names property.
    name: Optional[str]
    # Node instances for the pin configuration nodes, e.g. the nodes
    # pointed at by &state_1 and &state_2 in
    #     pinctrl-0 = <&state_1 &state_2>;
    conf_nodes: list['Node']

    @property
    def name_as_token(self):
        """'name' with non-alphanumeric characters converted to
        underscores, or None if there is no name."""
        if self.name is None:
            return None
        return str_as_token(self.name)
900
901
902class Node:
903    """
904    Represents a devicetree node, augmented with information from bindings, and
905    with some interpretation of devicetree properties. There's a one-to-one
906    correspondence between devicetree nodes and Nodes.
907
908    These attributes are available on Node objects:
909
910    edt:
911      The EDT instance this node is from
912
913    name:
914      The name of the node
915
916    unit_addr:
917      An integer with the ...@<unit-address> portion of the node name,
918      translated through any 'ranges' properties on parent nodes, or None if
919      the node name has no unit-address portion. PCI devices use a different
920      node name format ...@<dev>,<func> or ...@<dev> (e.g. "pcie@1,0"), in
921      this case None is returned.
922
923    title:
924      The title string from the binding for the node, or None if the node
925      has no binding.
926
927    description:
928      The description string from the binding for the node, or None if the node
929      has no binding. Leading and trailing whitespace (including newlines) is
930      removed.
931
932    path:
933      The devicetree path of the node
934
935    label:
936      The text from the 'label' property on the node, or None if the node has
937      no 'label'
938
939    labels:
940      A list of all of the devicetree labels for the node, in the same order
941      as the labels appear, but with duplicates removed.
942
943      This corresponds to the actual devicetree source labels, unlike the
944      "label" attribute, which is the value of a devicetree property named
945      "label".
946
947    parent:
948      The Node instance for the devicetree parent of the Node, or None if the
949      node is the root node
950
951    children:
952      A dictionary with the Node instances for the devicetree children of the
953      node, indexed by name
954
955    dep_ordinal:
956      A non-negative integer value such that the value for a Node is
957      less than the value for all Nodes that depend on it.
958
959      The ordinal is defined for all Nodes, and is unique among nodes in its
960      EDT 'nodes' list.
961
962    hash:
963      A hashed value of the devicetree path of the node. This is defined for
964      all Nodes, and is checked for uniqueness among nodes in its EDT 'nodes'
965      list.
966
967    required_by:
968      A list with the nodes that directly depend on the node
969
970    depends_on:
971      A list with the nodes that the node directly depends on
972
973    status:
974      The node's status property value, as a string, or "okay" if the node
975      has no status property set. If the node's status property is "ok",
976      it is converted to "okay" for consistency.
977
978    read_only:
979      True if the node has a 'read-only' property, and False otherwise
980
981    matching_compat:
982      The 'compatible' string for the binding that matched the node, or None if
983      the node has no binding
984
985    binding_path:
986      The path to the binding file for the node, or None if the node has no
987      binding
988
989    compats:
990      A list of 'compatible' strings for the node, in the same order that
991      they're listed in the .dts file
992
    ranges:
      A list of Range objects extracted from the node's ranges property.
      The list is empty if the node does not have a 'ranges' property.
996
997    regs:
998      A list of Register objects for the node's registers
999
1000    props:
1001      A dict that maps property names to Property objects.
1002      Property objects are created for the devicetree properties
1003      defined by the node's binding and that have a default value
1004      or for which a value is set in the DTS.
1005
1006    aliases:
1007      A list of aliases for the node. This is fetched from the /aliases node.
1008
1009    interrupts:
1010      A list of ControllerAndData objects for the interrupts generated by the
1011      node. The list is empty if the node does not generate interrupts.
1012
1013    pinctrls:
1014      A list of PinCtrl objects for the pinctrl-<index> properties on the
1015      node, sorted by index. The list is empty if the node does not have any
1016      pinctrl-<index> properties.
1017
1018    buses:
1019      If the node is a bus node (has a 'bus:' key in its binding), then this
1020      attribute holds the list of supported bus types, e.g. ["i2c"], ["spi"]
1021      or ["i3c", "i2c"] if multiple protocols are supported via the same bus.
1022      If the node is not a bus node, then this attribute is an empty list.
1023
1024    on_buses:
1025      The bus the node appears on, e.g. ["i2c"], ["spi"] or ["i3c", "i2c"] if
1026      multiple protocols are supported via the same bus. The bus is determined
1027      by searching upwards for a parent node whose binding has a 'bus:' key,
1028      returning the value of the first 'bus:' key found. If none of the node's
1029      parents has a 'bus:' key, this attribute is an empty list.
1030
    bus_node:
      Like on_buses, but contains the Node for the bus controller, or None
      if the node is not on a bus.
1034
1035    flash_controller:
1036      The flash controller for the node. Only meaningful for nodes representing
1037      flash partitions.
1038
1039    spi_cs_gpio:
1040      The device's SPI GPIO chip select as a ControllerAndData instance, if it
1041      exists, and None otherwise. See
1042      Documentation/devicetree/bindings/spi/spi-controller.yaml in the Linux kernel.
1043
1044    gpio_hogs:
1045      A list of ControllerAndData objects for the GPIOs hogged by the node. The
1046      list is empty if the node does not hog any GPIOs. Only relevant for GPIO hog
1047      nodes.
1048
1049    is_pci_device:
1050      True if the node is a PCI device.
1051    """
1052
    def __init__(
        self,
        dt_node: dtlib_Node,
        edt: "EDT",
        support_fixed_partitions_on_any_bus: bool = True,
    ):
        '''
        For internal use only; not meant to be used outside edtlib itself.
        '''

        # 'compatible' may legitimately be absent; fall back to an empty list.
        compats = (
            dt_node.props["compatible"].to_strings()
            if "compatible" in dt_node.props
            else []
        )

        # Private, don't touch outside the class:
        self._node: dtlib_Node = dt_node
        self._binding: Optional[Binding] = None

        # Public, some of which are initialized properly later:
        self.edt: EDT = edt
        self.dep_ordinal: int = -1
        self.compats: list[str] = compats
        self.ranges: list[Range] = []
        self.regs: list[Register] = []
        self.props: dict[str, Property] = {}
        self.interrupts: list[ControllerAndData] = []
        self.pinctrls: list[PinCtrl] = []
        # Must be set before _init_binding() runs below: binding matching
        # consults self.on_buses, which reads self.bus_node.
        self.bus_node = self._bus_node(support_fixed_partitions_on_any_bus)
        self.hash: str = _compute_hash(dt_node.path)

        self._init_binding()
        self._init_regs()
        self._init_ranges()
1088
1089    @property
1090    def name(self) -> str:
1091        "See the class docstring"
1092        return self._node.name
1093
1094    @property
1095    def filename(self) -> str:
1096        "See the class docstring"
1097        return self._node.filename
1098
1099    @property
1100    def lineno(self) -> int:
1101        "See the class docstring"
1102        return self._node.lineno
1103
1104    @property
1105    def unit_addr(self) -> Optional[int]:
1106        "See the class docstring"
1107
1108        # TODO: Return a plain string here later, like dtlib.Node.unit_addr?
1109
1110        # PCI devices use a different node name format (e.g. "pcie@1,0")
1111        if "@" not in self.name or self.is_pci_device:
1112            return None
1113
1114        try:
1115            addr = int(self.name.split("@", 1)[1], 16)
1116        except ValueError:
1117            _err(f"{self!r} has non-hex unit address")
1118
1119        return _translate(addr, self._node)
1120
1121    @property
1122    def title(self) -> Optional[str]:
1123        "See the class docstring."
1124        if self._binding:
1125            return self._binding.title
1126        return None
1127
1128    @property
1129    def description(self) -> Optional[str]:
1130        "See the class docstring."
1131        if self._binding:
1132            return self._binding.description
1133        return None
1134
1135    @property
1136    def path(self) ->  str:
1137        "See the class docstring"
1138        return self._node.path
1139
1140    @property
1141    def label(self) -> Optional[str]:
1142        "See the class docstring"
1143        if "label" in self._node.props:
1144            return self._node.props["label"].to_string()
1145        return None
1146
1147    @property
1148    def labels(self) -> list[str]:
1149        "See the class docstring"
1150        return self._node.labels
1151
1152    @property
1153    def parent(self) -> Optional['Node']:
1154        "See the class docstring"
1155        return self.edt._node2enode.get(self._node.parent) # type: ignore
1156
1157    @property
1158    def children(self) -> dict[str, 'Node']:
1159        "See the class docstring"
1160        # Could be initialized statically too to preserve identity, but not
1161        # sure if needed. Parent nodes being initialized before their children
1162        # would need to be kept in mind.
1163        return {name: self.edt._node2enode[node]
1164                for name, node in self._node.nodes.items()}
1165
1166    def child_index(self, node) -> int:
1167        """Get the index of *node* in self.children.
1168        Raises KeyError if the argument is not a child of this node.
1169        """
1170        if not hasattr(self, '_child2index'):
1171            # Defer initialization of this lookup table until this
1172            # method is callable to handle parents needing to be
1173            # initialized before their chidlren. By the time we
1174            # return from __init__, 'self.children' is callable.
1175            self._child2index: dict[str, int] = {}
1176            for index, child_path in enumerate(child.path for child in
1177                                               self.children.values()):
1178                self._child2index[child_path] = index
1179
1180        return self._child2index[node.path]
1181
1182    @property
1183    def required_by(self) -> list['Node']:
1184        "See the class docstring"
1185        return self.edt._graph.required_by(self)
1186
1187    @property
1188    def depends_on(self) -> list['Node']:
1189        "See the class docstring"
1190        return self.edt._graph.depends_on(self)
1191
1192    @property
1193    def status(self) -> str:
1194        "See the class docstring"
1195        status = self._node.props.get("status")
1196
1197        if status is None:
1198            as_string = "okay"
1199        else:
1200            as_string = status.to_string()
1201
1202        if as_string == "ok":
1203            as_string = "okay"
1204
1205        return as_string
1206
1207    @property
1208    def read_only(self) -> bool:
1209        "See the class docstring"
1210        return "read-only" in self._node.props
1211
1212    @property
1213    def matching_compat(self) -> Optional[str]:
1214        "See the class docstring"
1215        if self._binding:
1216            return self._binding.compatible
1217        return None
1218
1219    @property
1220    def binding_path(self) -> Optional[str]:
1221        "See the class docstring"
1222        if self._binding:
1223            return self._binding.path
1224        return None
1225
1226    @property
1227    def aliases(self) -> list[str]:
1228        "See the class docstring"
1229        return [alias for alias, node in self._node.dt.alias2node.items()
1230                if node is self._node]
1231
1232    @property
1233    def buses(self) -> list[str]:
1234        "See the class docstring"
1235        if self._binding:
1236            return self._binding.buses
1237        return []
1238
1239    @property
1240    def on_buses(self) -> list[str]:
1241        "See the class docstring"
1242        bus_node = self.bus_node
1243        return bus_node.buses if bus_node else []
1244
1245    @property
1246    def flash_controller(self) -> 'Node':
1247        "See the class docstring"
1248
1249        # The node path might be something like
1250        # /flash-controller@4001E000/flash@0/partitions/partition@fc000. We go
1251        # up two levels to get the flash and check its compat. The flash
1252        # controller might be the flash itself (for cases like NOR flashes).
1253        # For the case of 'soc-nv-flash', we assume the controller is the
1254        # parent of the flash node.
1255
1256        if not self.parent or not self.parent.parent:
1257            _err(f"flash partition {self!r} lacks parent or grandparent node")
1258
1259        controller = self.parent.parent
1260        if controller.matching_compat == "soc-nv-flash":
1261            if controller.parent is None:
1262                _err(f"flash controller '{controller.path}' cannot be the root node")
1263            return controller.parent
1264        return controller
1265
    @property
    def spi_cs_gpio(self) -> Optional[ControllerAndData]:
        "See the class docstring"

        # Only meaningful for a device on a SPI bus whose controller has a
        # 'cs-gpios' property.
        if not ("spi" in self.on_buses
                and self.bus_node
                and "cs-gpios" in self.bus_node.props):
            return None

        if not self.regs:
            _err(f"{self!r} needs a 'reg' property, to look up the "
                 "chip select index for SPI")

        # 'cs-gpios' is a phandle-array property, so its .val is a list of
        # ControllerAndData entries (asserted below).
        parent_cs_lst = self.bus_node.props["cs-gpios"].val
        if TYPE_CHECKING:
            assert isinstance(parent_cs_lst, list)

        # cs-gpios is indexed by the unit address
        cs_index = self.regs[0].addr
        if TYPE_CHECKING:
            assert isinstance(cs_index, int)

        if cs_index >= len(parent_cs_lst):
            _err(f"index from 'regs' in {self!r} ({cs_index}) "
                 "is >= number of cs-gpios in "
                 f"{self.bus_node!r} ({len(parent_cs_lst)})")

        ret = parent_cs_lst[cs_index]
        if TYPE_CHECKING:
            assert isinstance(ret, ControllerAndData)
        return ret
1297
    @property
    def gpio_hogs(self) -> list[ControllerAndData]:
        "See the class docstring"

        # Only GPIO hog nodes (marked with a 'gpio-hog' property) hog GPIOs.
        if "gpio-hog" not in self.props:
            return []

        # A hog node's GPIOs belong to its parent GPIO controller.
        if not self.parent or "gpio-controller" not in self.parent.props:
            _err(f"GPIO hog {self!r} lacks parent GPIO controller node")

        if "#gpio-cells" not in self.parent._node.props:
            _err(f"GPIO hog {self!r} parent node lacks #gpio-cells")

        n_cells = self.parent._node.props["#gpio-cells"].to_num()
        res = []

        # Each 'gpios' entry is <#gpio-cells> cells of 4 bytes each.
        for item in _slice(self._node, "gpios", 4*n_cells,
                           f"4*(<#gpio-cells> (= {n_cells})"):
            controller = self.parent
            res.append(ControllerAndData(
                node=self, controller=controller,
                data=self._named_cells(controller, item, "gpio"),
                name=None, basename="gpio"))

        return res
1323
1324    @property
1325    def has_child_binding(self) -> bool:
1326        """
1327        True if the node's binding contains a child-binding definition, False
1328        otherwise
1329        """
1330        return bool(self._binding and self._binding.child_binding)
1331
1332    @property
1333    def is_pci_device(self) -> bool:
1334        "See the class docstring"
1335        return 'pcie' in self.on_buses
1336
1337    def __repr__(self) -> str:
1338        if self.binding_path:
1339            binding = "binding " + self.binding_path
1340        else:
1341            binding = "no binding"
1342        return f"<Node {self.path} in '{self.edt.dts_path}', {binding}>"
1343
    def _init_binding(self) -> None:
        # Initializes Node._binding, the Binding object created from the
        # node's binding file, or None if the node has no binding.

        # This relies on the parent of the node having already been
        # initialized, which is guaranteed by going through the nodes in
        # node_iter() order.

        if self.path in self.edt._infer_binding_for_paths:
            # The binding is synthesized from the properties on the node
            # itself instead of being looked up via 'compatible'.
            self._binding_from_properties()
            return

        if self.compats:
            on_buses = self.on_buses

            for compat in self.compats:
                # When matching, respect the order of the 'compatible' entries,
                # and for each one first try to match against an explicitly
                # specified bus (if any) and then against any bus. This is so
                # that matching against bindings which do not specify a bus
                # works the same way in Zephyr as it does elsewhere.
                binding = None

                for bus in on_buses:
                    if (compat, bus) in self.edt._compat2binding:
                        binding = self.edt._compat2binding[compat, bus]
                        break

                if not binding:
                    if (compat, None) in self.edt._compat2binding:
                        binding = self.edt._compat2binding[compat, None]
                    else:
                        # No binding for this compatible; try the next one
                        continue

                self._binding = binding
                return
        else:
            # No 'compatible' property. See if the parent binding has
            # a compatible. This can come from one or more levels of
            # nesting with 'child-binding:'.

            binding_from_parent = self._binding_from_parent()
            if binding_from_parent:
                self._binding = binding_from_parent
                return

        # No binding found
        self._binding = None
1393
1394    def _binding_from_properties(self) -> None:
1395        # Sets up a Binding object synthesized from the properties in the node.
1396
1397        if self.compats:
1398            _err(f"compatible in node with inferred binding: {self.path}")
1399
1400        # Synthesize a 'raw' binding as if it had been parsed from YAML.
1401        raw: dict[str, Any] = {
1402            'description': 'Inferred binding from properties, via edtlib.',
1403            'properties': {},
1404        }
1405        for name, prop in self._node.props.items():
1406            pp: dict[str, str] = {}
1407            if prop.type == Type.EMPTY:
1408                pp["type"] = "boolean"
1409            elif prop.type == Type.BYTES:
1410                pp["type"] = "uint8-array"
1411            elif prop.type == Type.NUM:
1412                pp["type"] = "int"
1413            elif prop.type == Type.NUMS:
1414                pp["type"] = "array"
1415            elif prop.type == Type.STRING:
1416                pp["type"] = "string"
1417            elif prop.type == Type.STRINGS:
1418                pp["type"] = "string-array"
1419            elif prop.type == Type.PHANDLE:
1420                pp["type"] = "phandle"
1421            elif prop.type == Type.PHANDLES:
1422                pp["type"] = "phandles"
1423            elif prop.type == Type.PHANDLES_AND_NUMS:
1424                pp["type"] = "phandle-array"
1425            elif prop.type == Type.PATH:
1426                pp["type"] = "path"
1427            else:
1428                _err(f"cannot infer binding from property: {prop} "
1429                     f"with type {prop.type!r}")
1430            raw['properties'][name] = pp
1431
1432        # Set up Node state.
1433        self.compats = []
1434        self._binding = Binding(None, {}, raw=raw, require_compatible=False)
1435
1436    def _binding_from_parent(self) -> Optional[Binding]:
1437        # Returns the binding from 'child-binding:' in the parent node's
1438        # binding.
1439
1440        if not self.parent:
1441            return None
1442
1443        pbinding = self.parent._binding
1444        if not pbinding:
1445            return None
1446
1447        if pbinding.child_binding:
1448            return pbinding.child_binding
1449
1450        return None
1451
    def _bus_node(self, support_fixed_partitions_on_any_bus: bool = True
                  ) -> Optional['Node']:
        # Returns the value for self.bus_node. Relies on parent nodes being
        # initialized before their children.

        if not self.parent:
            # This is the root node
            return None

        # Treat 'fixed-partitions' as if they are not on any bus. The
        # reason is that flash nodes might be on a SPI bus, a controller
        # bus, or a SoC bus. Having the bus be None means we'll always
        # match the binding for fixed-partitions, and also that we won't
        # try to do anything bus-specific when processing the
        # fixed-partitions node.
        if support_fixed_partitions_on_any_bus and "fixed-partitions" in self.compats:
            return None

        if self.parent.buses:
            # The parent node is a bus node
            return self.parent

        # Same bus node as parent (possibly None)
        return self.parent.bus_node
1475
    def _init_crossrefs(
        self, default_prop_types: bool = False, err_on_deprecated: bool = False
    ) -> None:
        # Initializes all properties that require cross-references to other
        # nodes, like 'phandle' and 'phandles'. This is done after all nodes
        # have been initialized.
        #
        # default_prop_types:
        #   Passed through to _init_props()
        #
        # err_on_deprecated:
        #   If True, a deprecated property is an error instead of a warning
        self._init_props(
            default_prop_types=default_prop_types, err_on_deprecated=err_on_deprecated
        )
        self._init_interrupts()
        self._init_pinctrls()
1487
1488    def _init_props(self, default_prop_types: bool = False,
1489                    err_on_deprecated: bool = False) -> None:
1490        # Creates self.props. See the class docstring. Also checks that all
1491        # properties on the node are declared in its binding.
1492
1493        self.props = {}
1494
1495        if self._binding:
1496            prop2specs = self._binding.prop2specs
1497        else:
1498            prop2specs = None
1499
1500        # Initialize self.props
1501        if prop2specs:
1502            for prop_spec in prop2specs.values():
1503                self._init_prop(prop_spec, err_on_deprecated)
1504            self._check_undeclared_props()
1505        elif default_prop_types:
1506            for name in self._node.props:
1507                if name not in _DEFAULT_PROP_SPECS:
1508                    continue
1509                prop_spec = _DEFAULT_PROP_SPECS[name]
1510                val = self._prop_val(name, prop_spec, err_on_deprecated)
1511                self.props[name] = Property(prop_spec, val, self)
1512
    def _init_prop(self, prop_spec: PropertySpec,
                   err_on_deprecated: bool) -> None:
        # _init_props() helper for initializing a single property.
        # 'prop_spec' is a PropertySpec object from the node's binding.

        name = prop_spec.name
        prop_type = prop_spec.type
        if not prop_type:
            _err(f"'{name}' in {self.binding_path} lacks 'type'")

        val = self._prop_val(name, prop_spec, err_on_deprecated)

        if val is None:
            # 'required: false' property that wasn't there, or a property type
            # for which we store no data.
            return

        # Check each element (or the scalar itself) against any 'enum:'
        # list from the binding.
        enum = prop_spec.enum
        for subval in val if isinstance(val, list) else [val]:
            if enum and subval not in enum:
                _err(f"value of property '{name}' on {self.path} in "
                    f"{self.edt.dts_path} ({subval!r}) is not in 'enum' list in "
                    f"{self.binding_path} ({enum!r})")

        # Check the value against any 'const:' from the binding.
        const = prop_spec.const
        if const is not None and val != const:
            _err(f"value of property '{name}' on {self.path} in "
                 f"{self.edt.dts_path} ({val!r}) "
                 "is different from the 'const' value specified in "
                 f"{self.binding_path} ({const!r})")

        # Skip properties that start with '#', like '#size-cells', and mapping
        # properties like 'gpio-map'/'interrupt-map'
        if name[0] == "#" or name.endswith("-map"):
            return

        self.props[name] = Property(prop_spec, val, self)
1550
    def _prop_val(
        self,
        name: str,
        prop_spec: PropertySpec,
        err_on_deprecated: bool,
    ) -> PropertyValType:
        # _init_prop() helper for getting the property's value
        #
        # name:
        #   Property name from binding
        #
        # prop_spec:
        #   PropertySpec from binding
        #
        # err_on_deprecated:
        #   If True, a deprecated property is an error instead of warning.

        node = self._node
        prop = node.props.get(name)
        binding_path = prop_spec.binding.path
        prop_type = prop_spec.type
        deprecated = prop_spec.deprecated
        required = prop_spec.required
        default = prop_spec.default
        specifier_space = prop_spec.specifier_space

        # Complain (or error out) about uses of deprecated properties.
        if prop and deprecated:
            msg = (
                f"'{name}' is marked as deprecated in 'properties:' "
                f"in {binding_path} for node {node.path}."
            )
            if err_on_deprecated:
                _err(msg)
            else:
                _LOG.warning(msg)

        if not prop:
            # The property is missing from the DTS node. 'required:' is
            # only enforced for enabled ("okay") nodes.
            if required and self.status == "okay":
                _err(
                    f"'{name}' is marked as required in 'properties:' in "
                    f"{binding_path}, but does not appear in {node!r}"
                )

            if default is not None:
                # YAML doesn't have a native format for byte arrays. We need to
                # convert those from an array like [0x12, 0x34, ...]. The
                # format has already been checked in
                # _check_prop_by_type().
                if prop_type == "uint8-array":
                    return bytes(default) # type: ignore
                return default

            return False if prop_type == "boolean" else None

        # The property exists: convert it according to the binding's
        # 'type:'.
        if prop_type == "boolean":
            if prop.type != Type.EMPTY:
                _err(f"'{name}' in {node!r} is defined with 'type: boolean' "
                     f"in {binding_path}, but is assigned a value ('{prop}') "
                     f"instead of being empty ('{name};')")
            return True

        if prop_type == "int":
            return prop.to_num()

        if prop_type == "array":
            return prop.to_nums()

        if prop_type == "uint8-array":
            return prop.to_bytes()

        if prop_type == "string":
            return prop.to_string()

        if prop_type == "string-array":
            return prop.to_strings()

        if prop_type == "phandle":
            return self.edt._node2enode[prop.to_node()]

        if prop_type == "phandles":
            return [self.edt._node2enode[node] for node in prop.to_nodes()]

        if prop_type == "phandle-array":
            # This type is a bit high-level for dtlib as it involves
            # information from bindings and *-names properties, so there's no
            # to_phandle_array() in dtlib. Do the type check ourselves.
            if prop.type not in (Type.PHANDLE, Type.PHANDLES, Type.PHANDLES_AND_NUMS):
                _err(f"expected property '{name}' in {node.path} in "
                     f"{node.dt.filename} to be assigned "
                     f"with '{name} = < &foo ... &bar 1 ... &baz 2 3 >' "
                     f"(a mix of phandles and numbers), not '{prop}'")

            return self._standard_phandle_val_list(prop, specifier_space)

        if prop_type == "path":
            return self.edt._node2enode[prop.to_path()]

        # prop_type == "compound". Checking that the 'type:'
        # value is valid is done in _check_prop_by_type().
        #
        # 'compound' is a dummy type for properties that don't fit any of the
        # patterns above, so that we can require all entries in 'properties:'
        # to have a 'type: ...'. No Property object is created for it.
        return None
1655
1656    def _check_undeclared_props(self) -> None:
1657        # Checks that all properties are declared in the binding
1658        wl = {"compatible", "status", "ranges", "phandle",
1659              "interrupt-parent", "interrupts-extended", "device_type"}
1660
1661        for prop_name in self._node.props:
1662            # Allow a few special properties to not be declared in the binding
1663            if (prop_name.endswith("-controller")
1664                or prop_name.startswith("#")
1665                or prop_name in wl):
1666                continue
1667
1668            if TYPE_CHECKING:
1669                assert self._binding
1670
1671            if prop_name not in self._binding.prop2specs:
1672                _err(f"'{prop_name}' appears in {self._node.path} in "
1673                     f"{self.edt.dts_path}, but is not declared in "
1674                     f"'properties:' in {self.binding_path}")
1675
1676    def _init_ranges(self) -> None:
1677        # Initializes self.ranges
1678        node = self._node
1679
1680        self.ranges = []
1681
1682        if "ranges" not in node.props:
1683            return
1684
1685        raw_child_address_cells = node.props.get("#address-cells")
1686        parent_address_cells = _address_cells(node)
1687        if raw_child_address_cells is None:
1688            child_address_cells = 2 # Default value per DT spec.
1689        else:
1690            child_address_cells = raw_child_address_cells.to_num()
1691        raw_child_size_cells = node.props.get("#size-cells")
1692        if raw_child_size_cells is None:
1693            child_size_cells = 1 # Default value per DT spec.
1694        else:
1695            child_size_cells = raw_child_size_cells.to_num()
1696
1697        # Number of cells for one translation 3-tuple in 'ranges'
1698        entry_cells = child_address_cells + parent_address_cells + child_size_cells
1699
1700        if entry_cells == 0:
1701            if len(node.props["ranges"].value) == 0:
1702                return
1703            else:
1704                _err(f"'ranges' should be empty in {self._node.path} since "
1705                     f"<#address-cells> = {child_address_cells}, "
1706                     f"<#address-cells for parent> = {parent_address_cells} and "
1707                     f"<#size-cells> = {child_size_cells}")
1708
1709        for raw_range in _slice(node, "ranges", 4*entry_cells,
1710                                f"4*(<#address-cells> (= {child_address_cells}) + "
1711                                "<#address-cells for parent> "
1712                                f"(= {parent_address_cells}) + "
1713                                f"<#size-cells> (= {child_size_cells}))"):
1714
1715            child_bus_cells = child_address_cells
1716            if child_address_cells == 0:
1717                child_bus_addr = None
1718            else:
1719                child_bus_addr = to_num(raw_range[:4*child_address_cells])
1720            parent_bus_cells = parent_address_cells
1721            if parent_address_cells == 0:
1722                parent_bus_addr = None
1723            else:
1724                parent_bus_addr = to_num(
1725                    raw_range[(4*child_address_cells):
1726                              (4*child_address_cells + 4*parent_address_cells)])
1727            length_cells = child_size_cells
1728            if child_size_cells == 0:
1729                length = None
1730            else:
1731                length = to_num(
1732                    raw_range[(4*child_address_cells + 4*parent_address_cells):])
1733
1734            self.ranges.append(Range(self, child_bus_cells, child_bus_addr,
1735                                     parent_bus_cells, parent_bus_addr,
1736                                     length_cells, length))
1737
1738    def _init_regs(self) -> None:
1739        # Initializes self.regs
1740
1741        node = self._node
1742
1743        self.regs = []
1744
1745        if "reg" not in node.props:
1746            return
1747
1748        address_cells = _address_cells(node)
1749        size_cells = _size_cells(node)
1750
1751        for raw_reg in _slice(node, "reg", 4*(address_cells + size_cells),
1752                              f"4*(<#address-cells> (= {address_cells}) + "
1753                              f"<#size-cells> (= {size_cells}))"):
1754            if address_cells == 0:
1755                addr = None
1756            else:
1757                addr = _translate(to_num(raw_reg[:4*address_cells]), node)
1758            if size_cells == 0:
1759                size = None
1760            else:
1761                size = to_num(raw_reg[4*address_cells:])
1762            # Size zero is ok for PCI devices
1763            if size_cells != 0 and size == 0 and not self.is_pci_device:
1764                _err(f"zero-sized 'reg' in {self._node!r} seems meaningless "
1765                     "(maybe you want a size of one or #size-cells = 0 "
1766                     "instead)")
1767
1768            # We'll fix up the name when we're done.
1769            self.regs.append(Register(self, None, addr, size))
1770
1771        _add_names(node, "reg", self.regs)
1772
1773    def _init_pinctrls(self) -> None:
1774        # Initializes self.pinctrls from any pinctrl-<index> properties
1775
1776        node = self._node
1777
1778        # pinctrl-<index> properties
1779        pinctrl_props = [prop for name, prop in node.props.items()
1780                         if re.match("pinctrl-[0-9]+", name)]
1781        # Sort by index
1782        pinctrl_props.sort(key=lambda prop: prop.name)
1783
1784        # Check indices
1785        for i, prop in enumerate(pinctrl_props):
1786            if prop.name != "pinctrl-" + str(i):
1787                _err(f"missing 'pinctrl-{i}' property on {node!r} "
1788                     "- indices should be contiguous and start from zero")
1789
1790        self.pinctrls = []
1791        for prop in pinctrl_props:
1792            # We'll fix up the names below.
1793            self.pinctrls.append(PinCtrl(
1794                node=self,
1795                name=None,
1796                conf_nodes=[self.edt._node2enode[node]
1797                            for node in prop.to_nodes()]))
1798
1799        _add_names(node, "pinctrl", self.pinctrls)
1800
1801    def _init_interrupts(self) -> None:
1802        # Initializes self.interrupts
1803
1804        node = self._node
1805
1806        self.interrupts = []
1807
1808        for controller_node, data in _interrupts(node):
1809            # We'll fix up the names below.
1810            controller = self.edt._node2enode[controller_node]
1811            self.interrupts.append(ControllerAndData(
1812                node=self, controller=controller,
1813                data=self._named_cells(controller, data, "interrupt"),
1814                name=None, basename=None))
1815
1816        _add_names(node, "interrupt", self.interrupts)
1817
1818    def _standard_phandle_val_list(
1819            self,
1820            prop: dtlib_Property,
1821            specifier_space: Optional[str]
1822    ) -> list[Optional[ControllerAndData]]:
1823        # Parses a property like
1824        #
1825        #     <prop.name> = <phandle cell phandle cell ...>;
1826        #
1827        # where each phandle points to a controller node that has a
1828        #
1829        #     #<specifier_space>-cells = <size>;
1830        #
1831        # property that gives the number of cells in the value after the
1832        # controller's phandle in the property.
1833        #
1834        # E.g. with a property like
1835        #
1836        #     pwms = <&foo 1 2 &bar 3>;
1837        #
1838        # If 'specifier_space' is "pwm", then we should have this elsewhere
1839        # in the tree:
1840        #
1841        #     foo: ... {
1842        #             #pwm-cells = <2>;
1843        #     };
1844        #
1845        #     bar: ... {
1846        #             #pwm-cells = <1>;
1847        #     };
1848        #
1849        # These values can be given names using the <specifier_space>-names:
1850        # list in the binding for the phandle nodes.
1851        #
1852        # Also parses any
1853        #
1854        #     <specifier_space>-names = "...", "...", ...
1855        #
1856        # Returns a list of Optional[ControllerAndData] instances.
1857        #
1858        # An index is None if the underlying phandle-array element is
1859        # unspecified.
1860
1861        if not specifier_space:
1862            if prop.name.endswith("gpios"):
1863                # There's some slight special-casing for *-gpios properties in that
1864                # e.g. foo-gpios still maps to #gpio-cells rather than
1865                # #foo-gpio-cells
1866                specifier_space = "gpio"
1867            else:
1868                # Strip -s. We've already checked that property names end in -s
1869                # if there is no specifier space in _check_prop_by_type().
1870                specifier_space = prop.name[:-1]
1871
1872        res: list[Optional[ControllerAndData]] = []
1873
1874        for item in _phandle_val_list(prop, specifier_space):
1875            if item is None:
1876                res.append(None)
1877                continue
1878
1879            controller_node, data = item
1880            mapped_controller, mapped_data = (
1881                _map_phandle_array_entry(prop.node, controller_node,
1882                                         data, specifier_space))
1883
1884            controller = self.edt._node2enode[mapped_controller]
1885            # We'll fix up the names below.
1886            res.append(ControllerAndData(
1887                node=self, controller=controller,
1888                data=self._named_cells(controller, mapped_data,
1889                                       specifier_space),
1890                name=None, basename=specifier_space))
1891
1892        _add_names(self._node, specifier_space, res)
1893
1894        return res
1895
1896    def _named_cells(
1897            self,
1898            controller: 'Node',
1899            data: bytes,
1900            basename: str
1901    ) -> dict[str, int]:
1902        # Returns a dictionary that maps <basename>-cells names given in the
1903        # binding for 'controller' to cell values. 'data' is the raw data, as a
1904        # byte array.
1905
1906        if not controller._binding:
1907            _err(f"{basename} controller {controller._node!r} "
1908                 f"for {self._node!r} lacks binding")
1909
1910        if basename in controller._binding.specifier2cells:
1911            cell_names: list[str] = controller._binding.specifier2cells[basename]
1912        else:
1913            # Treat no *-cells in the binding the same as an empty *-cells, so
1914            # that bindings don't have to have e.g. an empty 'clock-cells:' for
1915            # '#clock-cells = <0>'.
1916            cell_names = []
1917
1918        data_list = to_nums(data)
1919        if len(data_list) != len(cell_names):
1920            _err(f"unexpected '{basename}-cells:' length in binding for "
1921                 f"{controller._node!r} - {len(cell_names)} "
1922                 f"instead of {len(data_list)}")
1923
1924        return dict(zip(cell_names, data_list, strict=False))
1925
1926
1927class EDT:
1928    """
1929    Represents a devicetree augmented with information from bindings.
1930
1931    These attributes are available on EDT objects:
1932
1933    nodes:
1934      A list of Node objects for the nodes that appear in the devicetree
1935
1936    compat2nodes:
1937      A collections.defaultdict that maps each 'compatible' string that appears
1938      on some Node to a list of Nodes with that compatible.
1939      The collection is sorted so that enabled nodes appear first in the
1940      collection.
1941
1942    compat2okay:
1943      Like compat2nodes, but just for nodes with status 'okay'.
1944
1945    compat2notokay:
1946      Like compat2nodes, but just for nodes with status not 'okay'.
1947
1948    compat2vendor:
1949      A collections.defaultdict that maps each 'compatible' string that appears
1950      on some Node to a vendor name parsed from vendor_prefixes.
1951
1952    compat2model:
1953      A collections.defaultdict that maps each 'compatible' string that appears
1954      on some Node to a model name parsed from that compatible.
1955
1956    label2node:
1957      A dict that maps a node label to the node with that label.
1958
1959    dep_ord2node:
1960      A dict that maps an ordinal to the node with that dependency ordinal.
1961
1962    chosen_nodes:
1963      A dict that maps the properties defined on the devicetree's /chosen
1964      node to their values. 'chosen' is indexed by property name (a string),
1965      and values are converted to Node objects. Note that properties of the
1966      /chosen node which can't be converted to a Node are not included in
1967      the value.
1968
1969    dts_path:
1970      The .dts path passed to __init__()
1971
1972    dts_source:
1973      The final DTS source code of the loaded devicetree after merging nodes
1974      and processing /delete-node/ and /delete-property/, as a string
1975
1976    bindings_dirs:
1977      The bindings directory paths passed to __init__()
1978
1979    scc_order:
1980      A list of lists of Nodes. All elements of each list
1981      depend on each other, and the Nodes in any list do not depend
1982      on any Node in a subsequent list. Each list defines a Strongly
1983      Connected Component (SCC) of the graph.
1984
1985      For an acyclic graph each list will be a singleton. Cycles
1986      will be represented by lists with multiple nodes. Cycles are
1987      not expected to be present in devicetree graphs.
1988
1989    The standard library's pickle module can be used to marshal and
1990    unmarshal EDT objects.
1991    """
1992
    def __init__(self,
                 dts: Optional[str],
                 bindings_dirs: list[str],
                 workspace_dir: Optional[str] = None,
                 warn_reg_unit_address_mismatch: bool = True,
                 default_prop_types: bool = True,
                 support_fixed_partitions_on_any_bus: bool = True,
                 infer_binding_for_paths: Optional[Iterable[str]] = None,
                 vendor_prefixes: Optional[dict[str, str]] = None,
                 werror: bool = False):
        """EDT constructor.

        dts:
          Path to devicetree .dts file. Passing None for this value
          is only for internal use; do not do that outside of edtlib.

        bindings_dirs:
          List of paths to directories containing bindings, in YAML format.
          These directories are recursively searched for .yaml files.

        workspace_dir:
          Path to the root of the Zephyr workspace. This is used as a base
          directory for relative paths in the generated devicetree comments.

        warn_reg_unit_address_mismatch (default: True):
          If True, a warning is logged if a node has a 'reg' property where
          the address of the first entry does not match the unit address of the
          node

        default_prop_types (default: True):
          If True, default property types will be used when a node has no
          bindings.

        support_fixed_partitions_on_any_bus (default True):
          If True, set the Node.bus for 'fixed-partitions' compatible nodes
          to None.  This allows 'fixed-partitions' binding to match regardless
          of the bus the 'fixed-partition' is under.

        infer_binding_for_paths (default: None):
          An iterable of devicetree paths identifying nodes for which bindings
          should be inferred from the node content.  (Child nodes are not
          processed.)  Pass none if no nodes should support inferred bindings.

        vendor_prefixes (default: None):
          A dict mapping vendor prefixes in compatible properties to their
          descriptions. If given, compatibles in the form "manufacturer,device"
          for which "manufacturer" is neither a key in the dict nor a specially
          exempt set of legacy cases will cause warnings.

        werror (default: False):
          If True, some edtlib specific warnings become errors. This currently
          errors out if 'dts' has any deprecated properties set, or an unknown
          vendor prefix is used.
        """
        # All instance attributes should be initialized here.
        # This makes it easy to keep track of them, which makes
        # implementing __deepcopy__() easier.
        # If you change this, make sure to update __deepcopy__() too,
        # and update the tests for that method.

        # Public attributes (the rest are properties)
        self.nodes: list[Node] = []
        self.compat2nodes: dict[str, list[Node]] = defaultdict(list)
        self.compat2okay: dict[str, list[Node]] = defaultdict(list)
        self.compat2notokay: dict[str, list[Node]] = defaultdict(list)
        self.compat2vendor: dict[str, str] = defaultdict(str)
        self.compat2model: dict[str, str]  = defaultdict(str)
        self.label2node: dict[str, Node] = {}
        self.dep_ord2node: dict[int, Node] = {}
        # 'dts' is Optional[str] only for internal use (__deepcopy__()
        # passes None and fills in dts_path itself); public callers always
        # pass a string, hence the 'str' annotation and the ignore.
        self.dts_path: str = dts # type: ignore
        self.bindings_dirs: list[str] = list(bindings_dirs)

        # Saved kwarg values for internal use
        self._warn_reg_unit_address_mismatch: bool = warn_reg_unit_address_mismatch
        self._default_prop_types: bool = default_prop_types
        self._fixed_partitions_no_bus: bool = support_fixed_partitions_on_any_bus
        self._infer_binding_for_paths: set[str] = set(infer_binding_for_paths or [])
        self._vendor_prefixes: dict[str, str] = vendor_prefixes or {}
        self._werror: bool = bool(werror)

        # Other internal state
        self._compat2binding: dict[tuple[str, Optional[str]], Binding] = {}
        self._graph: Graph = Graph()
        self._binding_paths: list[str] = _binding_paths(self.bindings_dirs)
        # Maps a binding file's basename to its full path. If several
        # binding directories contain files with the same basename, the
        # path that comes last in self._binding_paths wins here.
        self._binding_fname2path: dict[str, str] = {
            os.path.basename(path): path
            for path in self._binding_paths
        }
        self._node2enode: dict[dtlib_Node, Node] = {}

        # dts is None only when __deepcopy__() constructs an empty EDT;
        # it copies in the parsed devicetree and runs _finish_init()
        # itself.
        if dts is not None:
            try:
                self._dt = DT(dts, base_dir=workspace_dir)
            except DTError as e:
                raise EDTError(e) from e
            self._finish_init()
2089
2090    def _finish_init(self) -> None:
2091        # This helper exists to make the __deepcopy__() implementation
2092        # easier to keep in sync with __init__().
2093        _check_dt(self._dt)
2094
2095        self._init_compat2binding()
2096        self._init_nodes()
2097        self._init_graph()
2098        self._init_luts()
2099
2100        self._check()
2101
2102    def get_node(self, path: str) -> Node:
2103        """
2104        Returns the Node at the DT path or alias 'path'. Raises EDTError if the
2105        path or alias doesn't exist.
2106        """
2107        try:
2108            return self._node2enode[self._dt.get_node(path)]
2109        except DTError as e:
2110            _err(e)
2111
2112    @property
2113    def chosen_nodes(self) -> dict[str, Node]:
2114        ret: dict[str, Node] = {}
2115
2116        try:
2117            chosen = self._dt.get_node("/chosen")
2118        except DTError:
2119            return ret
2120
2121        for name, prop in chosen.props.items():
2122            try:
2123                node = prop.to_path()
2124            except DTError:
2125                # DTS value is not phandle or string, or path doesn't exist
2126                continue
2127
2128            ret[name] = self._node2enode[node]
2129
2130        return ret
2131
2132    def chosen_node(self, name: str) -> Optional[Node]:
2133        """
2134        Returns the Node pointed at by the property named 'name' in /chosen, or
2135        None if the property is missing
2136        """
2137        return self.chosen_nodes.get(name)
2138
2139    @property
2140    def dts_source(self) -> str:
2141        return f"{self._dt}"
2142
2143    def __repr__(self) -> str:
2144        return (f"<EDT for '{self.dts_path}', binding directories "
2145                f"'{self.bindings_dirs}'>")
2146
2147    def __deepcopy__(self, memo) -> 'EDT':
2148        """
2149        Implements support for the standard library copy.deepcopy()
2150        function on EDT instances.
2151        """
2152
2153        ret = EDT(
2154            None,
2155            self.bindings_dirs,
2156            warn_reg_unit_address_mismatch=self._warn_reg_unit_address_mismatch,
2157            default_prop_types=self._default_prop_types,
2158            support_fixed_partitions_on_any_bus=self._fixed_partitions_no_bus,
2159            infer_binding_for_paths=set(self._infer_binding_for_paths),
2160            vendor_prefixes=dict(self._vendor_prefixes),
2161            werror=self._werror
2162        )
2163        ret.dts_path = self.dts_path
2164        ret._dt = deepcopy(self._dt, memo)
2165        ret._finish_init()
2166        return ret
2167
2168    @property
2169    def scc_order(self) -> list[list[Node]]:
2170        try:
2171            return self._graph.scc_order()
2172        except Exception as e:
2173            raise EDTError(e) from None
2174
2175    def _process_properties_r(self, root_node: Node, props_node: Node) -> None:
2176        """
2177        Process props_node properties for dependencies, and add those as
2178        dependencies of root_node. Then walk through all the props_node
2179        children and do the same recursively, maintaining the same root_node.
2180
2181        This ensures that on a node with child nodes, the parent node includes
2182        the dependencies of all the child nodes as well as its own.
2183        """
2184        # A Node depends on any Nodes present in 'phandle',
2185        # 'phandles', or 'phandle-array' property values.
2186        for prop in props_node.props.values():
2187            if prop.type == 'phandle':
2188                self._graph.add_edge(root_node, prop.val)
2189            elif prop.type == 'phandles':
2190                if TYPE_CHECKING:
2191                    assert isinstance(prop.val, list)
2192                for phandle_node in prop.val:
2193                    self._graph.add_edge(root_node, phandle_node)
2194            elif prop.type == 'phandle-array':
2195                if TYPE_CHECKING:
2196                    assert isinstance(prop.val, list)
2197                for cd in prop.val:
2198                    if cd is None:
2199                        continue
2200                    if TYPE_CHECKING:
2201                        assert isinstance(cd, ControllerAndData)
2202                    self._graph.add_edge(root_node, cd.controller)
2203
2204        # A Node depends on whatever supports the interrupts it
2205        # generates.
2206        for intr in props_node.interrupts:
2207            self._graph.add_edge(root_node, intr.controller)
2208
2209        # If the binding defines child bindings, link the child properties to
2210        # the root_node as well.
2211        if props_node.has_child_binding:
2212            for child in props_node.children.values():
2213                if "compatible" in child.props:
2214                    # Not a child node, normal node on a different binding.
2215                    continue
2216                self._process_properties_r(root_node, child)
2217
2218    def _process_properties(self, node: Node) -> None:
2219        """
2220        Add node dependencies based on own as well as child node properties,
2221        start from the node itself.
2222        """
2223        self._process_properties_r(node, node)
2224
2225    def _init_graph(self) -> None:
2226        # Constructs a graph of dependencies between Node instances,
2227        # which is usable for computing a partial order over the dependencies.
2228        # The algorithm supports detecting dependency loops.
2229        #
2230        # Actually computing the SCC order is lazily deferred to the
2231        # first time the scc_order property is read.
2232
2233        for node in self.nodes:
2234            # Always insert root node
2235            if not node.parent:
2236                self._graph.add_node(node)
2237
2238            # A Node always depends on its parent.
2239            for child in node.children.values():
2240                self._graph.add_edge(child, node)
2241
2242            self._process_properties(node)
2243
2244    def _init_compat2binding(self) -> None:
2245        # Creates self._compat2binding, a dictionary that maps
2246        # (<compatible>, <bus>) tuples (both strings) to Binding objects.
2247        #
2248        # The Binding objects are created from YAML files discovered
2249        # in self.bindings_dirs as needed.
2250        #
2251        # For example, self._compat2binding["company,dev", "can"]
2252        # contains the Binding for the 'company,dev' device, when it
2253        # appears on the CAN bus.
2254        #
2255        # For bindings that don't specify a bus, <bus> is None, so that e.g.
2256        # self._compat2binding["company,notonbus", None] is the Binding.
2257        #
2258        # Only bindings for 'compatible' strings that appear in the devicetree
2259        # are loaded.
2260
2261        dt_compats = _dt_compats(self._dt)
2262        # Searches for any 'compatible' string mentioned in the devicetree
2263        # files, with a regex
2264        dt_compats_search = re.compile(
2265            "|".join(re.escape(compat) for compat in dt_compats)
2266        ).search
2267
2268        for binding_path in self._binding_paths:
2269            with open(binding_path, encoding="utf-8") as f:
2270                contents = f.read()
2271
2272            # As an optimization, skip parsing files that don't contain any of
2273            # the .dts 'compatible' strings, which should be reasonably safe
2274            if not dt_compats_search(contents):
2275                continue
2276
2277            # Load the binding and check that it actually matches one of the
2278            # compatibles. Might get false positives above due to comments and
2279            # stuff.
2280
2281            try:
2282                # Parsed PyYAML output (Python lists/dictionaries/strings/etc.,
2283                # representing the file)
2284                raw = yaml.load(contents, Loader=_BindingLoader)
2285            except yaml.YAMLError as e:
2286                _err(
2287                        f"'{binding_path}' appears in binding directories "
2288                        f"but isn't valid YAML: {e}")
2289
2290            # Convert the raw data to a Binding object, erroring out
2291            # if necessary.
2292            binding = self._binding(raw, binding_path, dt_compats)
2293
2294            # Register the binding in self._compat2binding, along with
2295            # any child bindings that have their own compatibles.
2296            while binding is not None:
2297                if binding.compatible:
2298                    self._register_binding(binding)
2299                binding = binding.child_binding
2300
2301    def _binding(self,
2302                 raw: Optional[dict],
2303                 binding_path: str,
2304                 dt_compats: set[str]) -> Optional[Binding]:
2305        # Convert a 'raw' binding from YAML to a Binding object and return it.
2306        #
2307        # Error out if the raw data looks like an invalid binding.
2308        #
2309        # Return None if the file doesn't contain a binding or the
2310        # binding's compatible isn't in dt_compats.
2311
2312        # Get the 'compatible:' string.
2313        if raw is None or "compatible" not in raw:
2314            # Empty file, binding fragment, spurious file, etc.
2315            return None
2316
2317        compatible = raw["compatible"]
2318
2319        if compatible not in dt_compats:
2320            # Not a compatible we care about.
2321            return None
2322
2323        # Initialize and return the Binding object.
2324        return Binding(binding_path, self._binding_fname2path, raw=raw)
2325
2326    def _register_binding(self, binding: Binding) -> None:
2327        # Do not allow two different bindings to have the same
2328        # 'compatible:'/'on-bus:' combo
2329        if TYPE_CHECKING:
2330            assert binding.compatible
2331        old_binding = self._compat2binding.get((binding.compatible,
2332                                                binding.on_bus))
2333        if old_binding:
2334            msg = (f"both {old_binding.path} and {binding.path} have "
2335                   f"'compatible: {binding.compatible}'")
2336            if binding.on_bus is not None:
2337                msg += f" and 'on-bus: {binding.on_bus}'"
2338            _err(msg)
2339
2340        # Register the binding.
2341        self._compat2binding[binding.compatible, binding.on_bus] = binding
2342
2343    def _init_nodes(self) -> None:
2344        # Creates a list of edtlib.Node objects from the dtlib.Node objects, in
2345        # self.nodes
2346
2347        hash2node: dict[str, Node] = {}
2348
2349        for dt_node in self._dt.node_iter():
2350            # Warning: We depend on parent Nodes being created before their
2351            # children. This is guaranteed by node_iter().
2352            node = Node(dt_node, self, self._fixed_partitions_no_bus)
2353
2354            if node.hash in hash2node:
2355                _err(f"hash collision between '{node.path}' and "
2356                     f"'{hash2node[node.hash].path}'")
2357            hash2node[node.hash] = node
2358
2359            self.nodes.append(node)
2360            self._node2enode[dt_node] = node
2361
2362        for node in self.nodes:
2363            # Initialize properties that may depend on other Node objects having
2364            # been created, because they (either always or sometimes) reference
2365            # other nodes. Must be called separately after all nodes have been
2366            # created.
2367            node._init_crossrefs(
2368                default_prop_types=self._default_prop_types,
2369                err_on_deprecated=self._werror,
2370            )
2371
2372        if self._warn_reg_unit_address_mismatch:
2373            # This warning matches the simple_bus_reg warning in dtc
2374            for node in self.nodes:
2375                # Address mismatch is ok for PCI devices
2376                if (node.regs and node.regs[0].addr != node.unit_addr and
2377                        not node.is_pci_device):
2378                    _LOG.warning("unit address and first address in 'reg' "
2379                                 f"(0x{node.regs[0].addr:x}) don't match for "
2380                                 f"{node.path}")
2381
2382    def _init_luts(self) -> None:
2383        # Initialize node lookup tables (LUTs).
2384
2385        for node in self.nodes:
2386            for label in node.labels:
2387                self.label2node[label] = node
2388
2389            for compat in node.compats:
2390                if node.status == "okay":
2391                    self.compat2okay[compat].append(node)
2392                else:
2393                    self.compat2notokay[compat].append(node)
2394
2395                if compat in self.compat2vendor:
2396                    continue
2397
2398                # The regular expression comes from dt-schema.
2399                compat_re = r'^[a-zA-Z][a-zA-Z0-9,+\-._]+$'
2400                if not re.match(compat_re, compat):
2401                    _err(f"node '{node.path}' compatible '{compat}' "
2402                         'must match this regular expression: '
2403                         f"'{compat_re}'")
2404
2405                if ',' in compat and self._vendor_prefixes:
2406                    vendor, model = compat.split(',', 1)
2407                    if vendor in self._vendor_prefixes:
2408                        self.compat2vendor[compat] = self._vendor_prefixes[vendor]
2409                        self.compat2model[compat] = model
2410
2411                    # As an exception, the root node can have whatever
2412                    # compatibles it wants. Other nodes get checked.
2413                    elif node.path != '/':
2414                        if self._werror:
2415                            handler_fn: Any = _err
2416                        else:
2417                            handler_fn = _LOG.warning
2418                        handler_fn(
2419                            f"node '{node.path}' compatible '{compat}' "
2420                            f"has unknown vendor prefix '{vendor}'")
2421
2422        for compat, nodes in self.compat2okay.items():
2423            self.compat2nodes[compat].extend(nodes)
2424
2425        for compat, nodes in self.compat2notokay.items():
2426            self.compat2nodes[compat].extend(nodes)
2427
2428        for nodeset in self.scc_order:
2429            node = nodeset[0]
2430            self.dep_ord2node[node.dep_ordinal] = node
2431
2432    def _check(self) -> None:
2433        # Tree-wide checks and warnings.
2434
2435        for binding in self._compat2binding.values():
2436            for spec in binding.prop2specs.values():
2437                if not spec.enum or spec.type != 'string':
2438                    continue
2439
2440                if not spec.enum_tokenizable:
2441                    _LOG.warning(
2442                        f"compatible '{binding.compatible}' "
2443                        f"in binding '{binding.path}' has non-tokenizable enum "
2444                        f"for property '{spec.name}': " +
2445                        ', '.join(repr(x) for x in spec.enum))
2446                elif not spec.enum_upper_tokenizable:
2447                    _LOG.warning(
2448                        f"compatible '{binding.compatible}' "
2449                        f"in binding '{binding.path}' has enum for property "
2450                        f"'{spec.name}' that is only tokenizable "
2451                        'in lowercase: ' +
2452                        ', '.join(repr(x) for x in spec.enum))
2453
2454        # Validate the contents of compatible properties.
2455        for node in self.nodes:
2456            if 'compatible' not in node.props:
2457                continue
2458
2459            compatibles = node.props['compatible'].val
2460
2461            # _check() runs after _init_compat2binding() has called
2462            # _dt_compats(), which already converted every compatible
2463            # property to a list of strings. So we know 'compatibles'
2464            # is a list, but add an assert for future-proofing.
2465            assert isinstance(compatibles, list)
2466
2467            for compat in compatibles:
2468                # This is also just for future-proofing.
2469                assert isinstance(compat, str)
2470
2471
def bindings_from_paths(yaml_paths: list[str],
                        ignore_errors: bool = False) -> list[Binding]:
    """
    Get a list of Binding objects from the yaml files 'yaml_paths'.

    If 'ignore_errors' is True, YAML files that cause an EDTError when
    loaded are ignored. (No other exception types are silenced.)
    """
    # Map of file basename -> full path, used for resolving 'include:'
    fname2path = {os.path.basename(p): p for p in yaml_paths}

    bindings = []
    for path in yaml_paths:
        try:
            bindings.append(Binding(path, fname2path))
        except EDTError:
            if not ignore_errors:
                raise

    return bindings
2492
2493
class EDTError(Exception):
    """Exception raised for devicetree- and binding-related errors"""
2496
2497#
2498# Public global functions
2499#
2500
2501
def load_vendor_prefixes_txt(vendor_prefixes: str) -> dict[str, str]:
    """Load a vendor-prefixes.txt file and return a dict
    representation mapping a vendor prefix to the vendor name.
    """
    result: dict[str, str] = {}
    with open(vendor_prefixes, encoding='utf-8') as f:
        for raw_line in f:
            stripped = raw_line.strip()

            # Skip comments and empty lines.
            if not stripped or stripped.startswith('#'):
                continue

            # Remaining lines must be of the form:
            #
            # <vnd><TAB><vendor>
            fields = stripped.split('\t', 1)
            assert len(fields) == 2, stripped
            prefix, vendor = fields
            result[prefix] = vendor
    return result
2522
2523#
2524# Private global functions
2525#
2526
2527
def _dt_compats(dt: DT) -> set[str]:
    # Returns a set() with all 'compatible' strings in the devicetree
    # represented by dt (a dtlib.DT instance)

    compats: set[str] = set()
    for node in dt.node_iter():
        if "compatible" in node.props:
            compats.update(node.props["compatible"].to_strings())
    return compats
2536
2537
2538def _binding_paths(bindings_dirs: list[str]) -> list[str]:
2539    # Returns a list with the paths to all bindings (.yaml files) in
2540    # 'bindings_dirs'
2541
2542    return [os.path.join(root, filename)
2543            for bindings_dir in bindings_dirs
2544            for root, _, filenames in os.walk(bindings_dir)
2545            for filename in filenames
2546            if filename.endswith((".yaml", ".yml"))]
2547
2548
def _binding_inc_error(msg):
    # Helper for reporting errors in the !include implementation.
    # Raises a PyYAML ConstructorError so the error surfaces through
    # the YAML loading machinery.

    raise yaml.constructor.ConstructorError(None, None, f"error: {msg}")
2553
2554
2555def _check_include_dict(name: Optional[str],
2556                        allowlist: Optional[list[str]],
2557                        blocklist: Optional[list[str]],
2558                        child_filter: Optional[dict],
2559                        binding_path: Optional[str]) -> None:
2560    # Check that an 'include:' named 'name' with property-allowlist
2561    # 'allowlist', property-blocklist 'blocklist', and
2562    # child-binding filter 'child_filter' has valid structure.
2563
2564    if name is None:
2565        _err(f"'include:' element in {binding_path} "
2566             "should have a 'name' key")
2567
2568    if allowlist is not None and blocklist is not None:
2569        _err(f"'include:' of file '{name}' in {binding_path} "
2570             "should not specify both 'property-allowlist:' "
2571             "and 'property-blocklist:'")
2572
2573    while child_filter is not None:
2574        child_copy = deepcopy(child_filter)
2575        child_allowlist: Optional[list[str]] = (
2576            child_copy.pop('property-allowlist', None))
2577        child_blocklist: Optional[list[str]] = (
2578            child_copy.pop('property-blocklist', None))
2579        next_child_filter: Optional[dict] = (
2580            child_copy.pop('child-binding', None))
2581
2582        if child_copy:
2583            # We've popped out all the valid keys.
2584            _err(f"'include:' of file '{name}' in {binding_path} "
2585                 "should not have these unexpected contents in a "
2586                 f"'child-binding': {child_copy}")
2587
2588        if child_allowlist is not None and child_blocklist is not None:
2589            _err(f"'include:' of file '{name}' in {binding_path} "
2590                 "should not specify both 'property-allowlist:' and "
2591                 "'property-blocklist:' in a 'child-binding:'")
2592
2593        child_filter = next_child_filter
2594
2595
def _filter_properties(raw: dict,
                       allowlist: Optional[list[str]],
                       blocklist: Optional[list[str]],
                       child_filter: Optional[dict],
                       binding_path: Optional[str]) -> None:
    # Destructively modifies 'raw["properties"]' and
    # 'raw["child-binding"]', if they exist, according to
    # 'allowlist', 'blocklist', and 'child_filter'.

    _filter_properties_helper(raw.get('properties'), allowlist, blocklist,
                              binding_path)

    # Descend through nested child-bindings in lockstep with the filter
    child_binding = raw.get('child-binding')
    filt = child_filter
    while filt is not None and child_binding is not None:
        _filter_properties_helper(child_binding.get('properties'),
                                  filt.get('property-allowlist'),
                                  filt.get('property-blocklist'),
                                  binding_path)
        filt = filt.get('child-binding')
        child_binding = child_binding.get('child-binding')
2616
2617
2618def _filter_properties_helper(props: Optional[dict],
2619                              allowlist: Optional[list[str]],
2620                              blocklist: Optional[list[str]],
2621                              binding_path: Optional[str]) -> None:
2622    if props is None or (allowlist is None and blocklist is None):
2623        return
2624
2625    _check_prop_filter('property-allowlist', allowlist, binding_path)
2626    _check_prop_filter('property-blocklist', blocklist, binding_path)
2627
2628    if allowlist is not None:
2629        allowset = set(allowlist)
2630        to_del = [prop for prop in props if prop not in allowset]
2631    else:
2632        if TYPE_CHECKING:
2633            assert blocklist
2634        blockset = set(blocklist)
2635        to_del = [prop for prop in props if prop in blockset]
2636
2637    for prop in to_del:
2638        del props[prop]
2639
2640
2641def _check_prop_filter(name: str, value: Optional[list[str]],
2642                       binding_path: Optional[str]) -> None:
2643    # Ensure an include: ... property-allowlist or property-blocklist
2644    # is a list.
2645
2646    if value is None:
2647        return
2648
2649    if not isinstance(value, list):
2650        _err(f"'{name}' value {value} in {binding_path} should be a list")
2651
2652
2653def _merge_props(to_dict: dict,
2654                 from_dict: dict,
2655                 parent: Optional[str],
2656                 binding_path: Optional[str],
2657                 check_required: bool = False):
2658    # Recursively merges 'from_dict' into 'to_dict', to implement 'include:'.
2659    #
2660    # If 'from_dict' and 'to_dict' contain a 'required:' key for the same
2661    # property, then the values are ORed together.
2662    #
2663    # If 'check_required' is True, then an error is raised if 'from_dict' has
2664    # 'required: true' while 'to_dict' has 'required: false'. This prevents
2665    # bindings from "downgrading" requirements from bindings they include,
2666    # which might help keep bindings well-organized.
2667    #
2668    # It's an error for most other keys to appear in both 'from_dict' and
2669    # 'to_dict'. When it's not an error, the value in 'to_dict' takes
2670    # precedence.
2671    #
2672    # 'parent' is the name of the parent key containing 'to_dict' and
2673    # 'from_dict', and 'binding_path' is the path to the top-level binding.
2674    # These are used to generate errors for sketchy property overwrites.
2675
2676    for prop in from_dict:
2677        if (isinstance(to_dict.get(prop), dict)
2678            and isinstance(from_dict[prop], dict)):
2679            _merge_props(to_dict[prop], from_dict[prop], prop, binding_path,
2680                         check_required)
2681        elif prop not in to_dict:
2682            to_dict[prop] = from_dict[prop]
2683        elif _bad_overwrite(to_dict, from_dict, prop, check_required):
2684            _err(f"{binding_path} (in '{parent}'): '{prop}' "
2685                 f"from included file overwritten ('{from_dict[prop]}' "
2686                 f"replaced with '{to_dict[prop]}')")
2687        elif prop == "required":
2688            # Need a separate check here, because this code runs before
2689            # Binding._check()
2690            if not (isinstance(from_dict["required"], bool) and
2691                    isinstance(to_dict["required"], bool)):
2692                _err(f"malformed 'required:' setting for '{parent}' in "
2693                     f"'properties' in {binding_path}, expected true/false")
2694
2695            # 'required: true' takes precedence
2696            to_dict["required"] = to_dict["required"] or from_dict["required"]
2697
2698
2699def _bad_overwrite(to_dict: dict, from_dict: dict, prop: str,
2700                   check_required: bool) -> bool:
2701    # _merge_props() helper. Returns True in cases where it's bad that
2702    # to_dict[prop] takes precedence over from_dict[prop].
2703
2704    if to_dict[prop] == from_dict[prop]:
2705        return False
2706
2707    # These are overridden deliberately
2708    if prop in {"title", "description", "compatible"}:
2709        return False
2710
2711    if prop == "required":
2712        if not check_required:
2713            return False
2714        return from_dict[prop] and not to_dict[prop]
2715
2716    return True
2717
2718
def _binding_include(loader, node):
    # Implements !include, for backwards compatibility. '!include [foo, bar]'
    # just becomes [foo, bar].

    if isinstance(node, yaml.ScalarNode):
        # '!include foo.yaml' -> single-element list
        return [loader.construct_scalar(node)]
    if isinstance(node, yaml.SequenceNode):
        # '!include [foo.yaml, bar.yaml]' -> plain list
        return loader.construct_sequence(node)
    _binding_inc_error("unrecognised node type in !include statement")
2732
2733
2734def _check_prop_by_type(prop_name: str,
2735                        options: dict,
2736                        binding_path: Optional[str]) -> None:
2737    # Binding._check_properties() helper. Checks 'type:', 'default:',
2738    # 'const:' and # 'specifier-space:' for the property named 'prop_name'
2739
2740    prop_type = options.get("type")
2741    default = options.get("default")
2742    const = options.get("const")
2743
2744    if prop_type is None:
2745        _err(f"missing 'type:' for '{prop_name}' in 'properties' in "
2746             f"{binding_path}")
2747
2748    ok_types = {"boolean", "int", "array", "uint8-array", "string",
2749                "string-array", "phandle", "phandles", "phandle-array",
2750                "path", "compound"}
2751
2752    if prop_type not in ok_types:
2753        _err(f"'{prop_name}' in 'properties:' in {binding_path} "
2754             f"has unknown type '{prop_type}', expected one of " +
2755             ", ".join(ok_types))
2756
2757    if "specifier-space" in options and prop_type != "phandle-array":
2758        _err(f"'specifier-space' in 'properties: {prop_name}' "
2759             f"has type '{prop_type}', expected 'phandle-array'")
2760
2761    if (prop_type == "phandle-array"
2762        and not prop_name.endswith("s")
2763        and "specifier-space" not in options):
2764        _err(f"'{prop_name}' in 'properties:' in {binding_path} "
2765             f"has type 'phandle-array' and its name does not end in 's', "
2766             f"but no 'specifier-space' was provided.")
2767
2768    # If you change const_types, be sure to update the type annotation
2769    # for PropertySpec.const.
2770    const_types = {"int", "array", "uint8-array", "string", "string-array"}
2771    if const and prop_type not in const_types:
2772        _err(f"const in {binding_path} for property '{prop_name}' "
2773             f"has type '{prop_type}', expected one of " +
2774             ", ".join(const_types))
2775
2776    # Check default
2777
2778    if default is None:
2779        return
2780
2781    if prop_type in {"boolean", "compound", "phandle", "phandles",
2782                     "phandle-array", "path"}:
2783        _err("'default:' can't be combined with "
2784             f"'type: {prop_type}' for '{prop_name}' in "
2785             f"'properties:' in {binding_path}")
2786
2787    def ok_default() -> bool:
2788        # Returns True if 'default' is an okay default for the property's type.
2789        # If you change this, be sure to update the type annotation for
2790        # PropertySpec.default.
2791
2792        if (prop_type == "int" and isinstance(default, int)
2793            or prop_type == "string" and isinstance(default, str)):
2794            return True
2795
2796        # array, uint8-array, or string-array
2797
2798        if not isinstance(default, list):
2799            return False
2800
2801        if (prop_type == "array"
2802            and all(isinstance(val, int) for val in default)):
2803            return True
2804
2805        if (prop_type == "uint8-array"
2806            and all(isinstance(val, int)
2807                    and 0 <= val <= 255 for val in default)):
2808            return True
2809
2810        # string-array
2811        return all(isinstance(val, str) for val in default)
2812
2813    if not ok_default():
2814        _err(f"'default: {default}' is invalid for '{prop_name}' "
2815             f"in 'properties:' in {binding_path}, "
2816             f"which has type {prop_type}")
2817
2818
def _translate(addr: int, node: dtlib_Node) -> int:
    # Recursively translates 'addr' on 'node' to the address space(s) of its
    # parent(s), by looking at 'ranges' properties. Returns the translated
    # address.
    #
    # addr:
    #   Untranslated address, within 'node's address space
    #
    # node:
    #   Node whose parent's 'ranges' (if any) gets applied

    if not node.parent or "ranges" not in node.parent.props:
        # No translation
        return addr

    if not node.parent.props["ranges"].value:
        # DT spec.: "If the property is defined with an <empty> value, it
        # specifies that the parent and child address space is identical, and
        # no address translation is required."
        #
        # Treat this the same as a 'range' that explicitly does a one-to-one
        # mapping, as opposed to there not being any translation.
        return _translate(addr, node.parent)

    # Gives the size of each component in a translation 3-tuple in 'ranges'
    child_address_cells = _address_cells(node)
    parent_address_cells = _address_cells(node.parent)
    child_size_cells = _size_cells(node)

    # Number of cells for one translation 3-tuple in 'ranges'
    entry_cells = child_address_cells + parent_address_cells + child_size_cells

    for raw_range in _slice(node.parent, "ranges", 4*entry_cells,
                            f"4*(<#address-cells> (= {child_address_cells}) + "
                            "<#address-cells for parent> "
                            f"(= {parent_address_cells}) + "
                            f"<#size-cells> (= {child_size_cells}))"):
        # Each entry is <child-addr> <parent-addr> <length>; peel the
        # components off the raw bytes one at a time (cells are 4 bytes)
        child_addr = to_num(raw_range[:4*child_address_cells])
        raw_range = raw_range[4*child_address_cells:]

        parent_addr = to_num(raw_range[:4*parent_address_cells])
        raw_range = raw_range[4*parent_address_cells:]

        child_len = to_num(raw_range)

        if child_addr <= addr < child_addr + child_len:
            # 'addr' is within range of a translation in 'ranges'. Recursively
            # translate it and return the result.
            return _translate(parent_addr + addr - child_addr, node.parent)

    # 'addr' is not within range of any translation in 'ranges'
    return addr
2865
2866
def _add_names(node: dtlib_Node, names_ident: str, objs: Any) -> None:
    # Helper for registering names from <foo>-names properties.
    #
    # node:
    #   Node which has a property that might need named elements.
    #
    # names_ident:
    #   The <foo> part of <foo>-names, e.g. "reg" for "reg-names"
    #
    # objs:
    #   list of objects whose .name field should be set

    prop_name = f"{names_ident}-names"

    if prop_name not in node.props:
        # No names property: clear the name on every (non-None) object
        for obj in objs:
            if obj is not None:
                obj.name = None
        return

    names = node.props[prop_name].to_strings()
    if len(names) != len(objs):
        _err(f"{prop_name} property in {node.path} "
             f"in {node.dt.filename} has {len(names)} strings, "
             f"expected {len(objs)} strings")

    for obj, name in zip(objs, names, strict=False):
        if obj is not None:
            obj.name = name
2896
2897
def _interrupt_parent(start_node: dtlib_Node) -> dtlib_Node:
    # Returns the node pointed at by the closest 'interrupt-parent', searching
    # the parents of 'node'. As of writing, this behavior isn't specified in
    # the DT spec., but seems to match what some .dts files expect.
    #
    # If no 'interrupt-parent' property is found while walking up, the
    # closest ancestor that is itself an interrupt controller (or has an
    # 'interrupt-map') is returned instead. Raises (via _err()) if the root
    # is passed without finding either.

    node: Optional[dtlib_Node] = start_node

    while node:
        if "interrupt-parent" in node.props:
            iparent = node.props["interrupt-parent"].to_node()
            # NOTE(review): 'assert' is stripped under 'python -O', so this
            # sanity check on the pointed-to node would be skipped there
            assert "interrupt-controller" in iparent.props or "interrupt-map" in iparent.props
            return iparent
        node = node.parent
        if node is None:
            # Walked past the root without finding anything
            _err(f"{start_node!r} no interrupt parent found")
        if ("interrupt-controller" in node.props) or ("interrupt-map" in node.props):
            return node

    # NOTE(review): unreachable - the loop can only exit via the returns or
    # the _err() above, since 'node' is None-checked inside the loop body
    _err(f"{start_node!r} has an 'interrupts' property, but neither the node "
         f"nor any of its parents has an 'interrupt-parent' property")
2918
2919
def _interrupts(node: dtlib_Node) -> list[tuple[dtlib_Node, bytes]]:
    # Returns a list of (<controller>, <data>) tuples, with one tuple per
    # interrupt generated by 'node'. <controller> is the destination of the
    # interrupt (possibly after mapping through an 'interrupt-map'), and <data>
    # the data associated with the interrupt (as a 'bytes' object).

    # 'interrupts-extended' takes precedence over 'interrupts' if both exist
    if "interrupts-extended" in node.props:
        prop = node.props["interrupts-extended"]

        result: list[tuple[dtlib_Node, bytes]] = []
        for entry in _phandle_val_list(prop, "interrupt"):
            if entry is None:
                _err(f"node '{node.path}' interrupts-extended property "
                     "has an empty element")
            iparent, spec = entry
            result.append(_map_interrupt(node, iparent, spec))
        return result

    if "interrupts" not in node.props:
        # Node generates no interrupts
        return []

    # Treat 'interrupts' as a special case of 'interrupts-extended', with
    # the same interrupt parent for all interrupts

    iparent = _interrupt_parent(node)
    interrupt_cells = _interrupt_cells(iparent)

    return [_map_interrupt(node, iparent, raw)
            for raw in _slice(node, "interrupts", 4*interrupt_cells,
                              "4*<#interrupt-cells>")]
2951
2952
def _map_interrupt(
        child: dtlib_Node,
        parent: dtlib_Node,
        child_spec: bytes
) -> tuple[dtlib_Node, bytes]:
    # Translates an interrupt headed from 'child' to 'parent' with data
    # 'child_spec' through any 'interrupt-map' properties. Returns a
    # (<controller>, <data>) tuple with the final destination after mapping.

    if "interrupt-controller" in parent.props:
        # 'parent' is already the controller; nothing to map
        return (parent, child_spec)

    def own_address_cells(node):
        # Used for parents pointed at by 'interrupt-map'. We can't use
        # _address_cells(), because it's the #address-cells property on 'node'
        # itself that matters.

        address_cells = _address_cells_self(node)
        if address_cells is None:
            _err(f"missing #address-cells on {node!r} "
                 "(while handling interrupt-map)")
        return address_cells

    def spec_len_fn(node):
        # Length in cells of a parent specifier in 'interrupt-map':
        # parent unit address followed by parent interrupt data.
        # Can't use _address_cells() here, because it's the #address-cells
        # property on 'node' itself that matters
        return own_address_cells(node) + _interrupt_cells(node)

    # Child specifiers in 'interrupt-map' are prefixed with the child's
    # unit address, so prepend it before mapping
    parent, raw_spec = _map(
        "interrupt", child, parent, _raw_unit_addr(child, parent) + child_spec,
        spec_len_fn, require_controller=True)

    # Strip the parent unit address part, if any
    return (parent, raw_spec[4*own_address_cells(parent):])
2987
2988
def _map_phandle_array_entry(
        child: dtlib_Node,
        parent: dtlib_Node,
        child_spec: bytes,
        basename: str
) -> tuple[dtlib_Node, bytes]:
    # Returns a (<controller>, <data>) tuple with the final destination after
    # mapping through any '<basename>-map' (e.g. gpio-map) properties. See
    # _map_interrupt().

    def parent_spec_cells(node):
        # Length of a parent specifier in cells, taken from the
        # #<basename>-cells property on the mapped-to node
        cells_prop = f"#{basename}-cells"
        if cells_prop not in node.props:
            _err(f"expected '{cells_prop}' property on {node!r} "
                 f"(referenced by {child!r})")
        return node.props[cells_prop].to_num()

    # Do not require <prefix>-controller for anything but interrupts for now
    return _map(basename, child, parent, child_spec, parent_spec_cells,
                require_controller=False)
3009
3010
def _map(
        prefix: str,
        child: dtlib_Node,
        parent: dtlib_Node,
        child_spec: bytes,
        spec_len_fn: Callable[[dtlib_Node], int],
        require_controller: bool
) -> tuple[dtlib_Node, bytes]:
    # Common code for mapping through <prefix>-map properties, e.g.
    # interrupt-map and gpio-map.
    #
    # prefix:
    #   The prefix, e.g. "interrupt" or "gpio"
    #
    # child:
    #   The "sender", e.g. the node with 'interrupts = <...>'
    #
    # parent:
    #   The "receiver", e.g. a node with 'interrupt-map = <...>' or
    #   'interrupt-controller' (no mapping)
    #
    # child_spec:
    #   The data associated with the interrupt/GPIO/etc., as a 'bytes' object,
    #   e.g. <1 2> for 'foo-gpios = <&gpio1 1 2>'.
    #
    # spec_len_fn:
    #   Function called on a parent specified in a *-map property to get the
    #   length of the parent specifier (data after phandle in *-map), in cells
    #
    # require_controller:
    #   If True, the final controller node after mapping is required to have
    #   to have a <prefix>-controller property.
    #
    # Returns a (<controller>, <data>) tuple with the final destination,
    # possibly after recursing through several levels of *-map.

    map_prop = parent.props.get(prefix + "-map")
    if not map_prop:
        if require_controller and prefix + "-controller" not in parent.props:
            _err(f"expected '{prefix}-controller' property on {parent!r} "
                 f"(referenced by {child!r})")

        # No mapping
        return (parent, child_spec)

    # Rows are matched against the child specifier ANDed with the optional
    # <prefix>-map-mask property
    masked_child_spec = _mask(prefix, child, parent, child_spec)

    # Consume one <child specifier> <phandle> <parent specifier> row of the
    # raw property value per iteration
    raw = map_prop.value
    while raw:
        if len(raw) < len(child_spec):
            _err(f"bad value for {map_prop!r}, missing/truncated child data")
        child_spec_entry = raw[:len(child_spec)]
        raw = raw[len(child_spec):]

        if len(raw) < 4:
            _err(f"bad value for {map_prop!r}, missing/truncated phandle")
        phandle = to_num(raw[:4])
        raw = raw[4:]

        # Parent specified in *-map
        map_parent = parent.dt.phandle2node.get(phandle)
        if not map_parent:
            _err(f"bad phandle ({phandle}) in {map_prop!r}")

        # The parent specifier length depends on the mapped-to node
        map_parent_spec_len = 4*spec_len_fn(map_parent)
        if len(raw) < map_parent_spec_len:
            _err(f"bad value for {map_prop!r}, missing/truncated parent data")
        parent_spec = raw[:map_parent_spec_len]
        raw = raw[map_parent_spec_len:]

        # Got one *-map row. Check if it matches the child data.
        if child_spec_entry == masked_child_spec:
            # Handle *-map-pass-thru
            parent_spec = _pass_thru(
                prefix, child, parent, child_spec, parent_spec)

            # Found match. Recursively map and return it.
            return _map(prefix, parent, map_parent, parent_spec, spec_len_fn,
                        require_controller)

    _err(f"child specifier for {child!r} ({child_spec!r}) "
         f"does not appear in {map_prop!r}")
3090
3091
def _mask(
        prefix: str,
        child: dtlib_Node,
        parent: dtlib_Node,
        child_spec: bytes
) -> bytes:
    # Common code for handling <prefix>-map-mask properties, e.g.
    # interrupt-map-mask. Returns 'child_spec' ANDed with the mask, or
    # unchanged if 'parent' has no mask property. See _map() for the
    # parameters.

    mask_prop = parent.props.get(prefix + "-map-mask")
    if not mask_prop:
        # No mask
        return child_spec

    mask = mask_prop.value
    if len(mask) != len(child_spec):
        # Bug fix: the diagnostic used to name the property '<prefix>-mask',
        # but the property actually looked up is '<prefix>-map-mask'
        _err(f"{child!r}: expected '{prefix}-map-mask' in {parent!r} "
             f"to be {len(child_spec)} bytes, is {len(mask)} bytes")

    return _and(child_spec, mask)
3112
3113
def _pass_thru(
        prefix: str,
        child: dtlib_Node,
        parent: dtlib_Node,
        child_spec: bytes,
        parent_spec: bytes
) -> bytes:
    # Common code for handling <prefix>-map-pass-thru properties, e.g.
    # interrupt-map-pass-thru.
    #
    # parent_spec:
    #   The parent data from the matched entry in the <prefix>-map property
    #
    # See _map() for the other parameters.

    pass_thru_prop = parent.props.get(prefix + "-map-pass-thru")
    if not pass_thru_prop:
        # No pass-thru: use the parent data unchanged
        return parent_spec

    pass_thru = pass_thru_prop.value
    if len(pass_thru) != len(child_spec):
        _err(f"{child!r}: expected '{prefix}-map-pass-thru' in {parent!r} "
             f"to be {len(child_spec)} bytes, is {len(pass_thru)} bytes")

    # Bits set in pass-thru come from the child specifier; the remaining
    # bits come from the parent specifier
    combined = _or(_and(child_spec, pass_thru),
                   _and(parent_spec, _not(pass_thru)))

    # Truncate to length of parent spec.
    return combined[-len(parent_spec):]
3144
3145
def _raw_unit_addr(node: dtlib_Node, parent: dtlib_Node) -> bytes:
    # _map_interrupt() helper. Returns the unit address (derived from 'reg' and
    # #address-cells) as a raw 'bytes'
    #
    # node:
    #   The interrupt-generating node, whose 'reg' supplies the unit address
    #
    # parent:
    #   The interrupt parent; its own #address-cells decides how many bytes
    #   of unit address the 'interrupt-map' child specifier carries

    # NOTE(review): naming is confusing here - 'iparent_addr_len' is the
    # interrupt parent's own #address-cells, while 'parent_addr_len' is
    # #address-cells as seen by 'node' (from node's parent); verify before
    # renaming
    iparent: Optional[dtlib_Node] = parent
    iparent_addr_len = _address_cells_self(iparent)
    parent_addr_len = _address_cells(node)

    if iparent_addr_len is None:
        iparent_addr_len =  2  # Default value per DT spec.

    if parent_addr_len is None:
        parent_addr_len =  2  # Default value per DT spec.

    if iparent_addr_len == 0:
        # Interrupt parent wants no unit-address bytes in the specifier
        return b''

    if 'reg' not in node.props:
        _err(f"{node!r} lacks 'reg' property "
             "(needed for 'interrupt-map' unit address lookup)")

    # Convert cell counts to byte counts (cells are 4 bytes)
    iparent_addr_len *= 4
    parent_addr_len *= 4

    prop_len = len(node.props['reg'].value)
    if prop_len < iparent_addr_len or prop_len %4 != 0:
        _err(f"{node!r} has too short or incorrectly defined 'reg' property "
             "(while doing 'interrupt-map' unit address lookup)")

    address = b''
    if parent_addr_len > iparent_addr_len:
        # NOTE(review): this slice uses a negative start index relative to
        # 'parent_addr_len'; confirm it selects the intended tail of the
        # unit address when the interrupt parent uses fewer address cells
        address = node.props['reg'].value[iparent_addr_len - parent_addr_len:parent_addr_len]
    else:
        address = node.props['reg'].value[:iparent_addr_len]

    return address
3182
3183def _and(b1: bytes, b2: bytes) -> bytes:
3184    # Returns the bitwise AND of the two 'bytes' objects b1 and b2. Pads
3185    # with ones on the left if the lengths are not equal.
3186
3187    # Pad on the left, to equal length
3188    maxlen = max(len(b1), len(b2))
3189    return bytes(x & y for x, y in zip(b1.rjust(maxlen, b'\xff'),
3190                                       b2.rjust(maxlen, b'\xff'), strict=False))
3191
3192
3193def _or(b1: bytes, b2: bytes) -> bytes:
3194    # Returns the bitwise OR of the two 'bytes' objects b1 and b2. Pads with
3195    # zeros on the left if the lengths are not equal.
3196
3197    # Pad on the left, to equal length
3198    maxlen = max(len(b1), len(b2))
3199    return bytes(x | y for x, y in zip(b1.rjust(maxlen, b'\x00'),
3200                                       b2.rjust(maxlen, b'\x00'), strict=False))
3201
3202
3203def _not(b: bytes) -> bytes:
3204    # Returns the bitwise not of the 'bytes' object 'b'
3205
3206    # ANDing with 0xFF avoids negative numbers
3207    return bytes(~x & 0xFF for x in b)
3208
3209
def _phandle_val_list(
        prop: dtlib_Property,
        n_cells_name: str
) -> list[Optional[tuple[dtlib_Node, bytes]]]:
    # Parses a '<phandle> <value> <phandle> <value> ...' property value.
    #
    # prop:
    #   dtlib.Property with value to parse
    #
    # n_cells_name:
    #   The <name> part of the #<name>-cells property looked up on the nodes
    #   the phandles point to, e.g. "gpio" for #gpio-cells. That property
    #   gives the number of cells in each <value>.
    #
    # Returns one entry per (<phandle>, <value>) pair: a (<node>, <raw
    # cells>) tuple, or None when <phandle> does not refer to a node (a 0
    # phandle followed by no cells is a valid, empty element).

    cells_prop_name = f"#{n_cells_name}-cells"

    entries: list[Optional[tuple[dtlib_Node, bytes]]] = []

    data = prop.value
    offset = 0
    while offset < len(data):
        if len(data) - offset < 4:
            # Truncated phandle at the end of the value
            _err("bad value for " + repr(prop))
        target = prop.node.dt.phandle2node.get(to_num(data[offset:offset + 4]))
        offset += 4

        if not target:
            # Unspecified phandle-array element. This is valid; a 0
            # phandle value followed by no cells is an empty element.
            entries.append(None)
            continue

        if cells_prop_name not in target.props:
            _err(f"{target!r} lacks {cells_prop_name}")

        value_len = 4 * target.props[cells_prop_name].to_num()
        if len(data) - offset < value_len:
            _err("missing data after phandle in " + repr(prop))

        entries.append((target, data[offset:offset + value_len]))
        offset += value_len

    return entries
3259
3260
def _address_cells_self(node: Optional[dtlib_Node]) -> Optional[int]:
    # Returns 'node's own #address-cells value, i.e. the number of <u32>
    # cells used to encode the address part of a 'reg' property. Returns
    # None when 'node' is None or has no #address-cells property.

    if node is None:
        return None
    prop = node.props.get("#address-cells")
    return None if prop is None else prop.to_num()
3268
def _address_cells(node: dtlib_Node) -> int:
    # Returns the #address-cells setting that applies to 'node's 'reg'
    # property, read from the parent node. Falls back to 2, the default
    # value from the DT spec, when the parent has no #address-cells.
    if TYPE_CHECKING:
        assert node.parent

    cells = _address_cells_self(node.parent)
    return 2 if cells is None else int(cells)
3279
3280
def _size_cells(node: dtlib_Node) -> int:
    # Returns the #size-cells setting that applies to 'node's 'reg'
    # property (the number of <u32> cells used to encode the size), read
    # from the parent node. Defaults to 1 per the DT spec.
    if TYPE_CHECKING:
        assert node.parent

    parent_props = node.parent.props
    if "#size-cells" not in parent_props:
        return 1  # Default value per DT spec.
    return parent_props["#size-cells"].to_num()
3290
3291
def _interrupt_cells(node: dtlib_Node) -> int:
    # Returns 'node's #interrupt-cells property value, erroring out if the
    # property is missing.

    prop = node.props.get("#interrupt-cells")
    if prop is None:
        _err(f"{node!r} lacks #interrupt-cells")
    return prop.to_num()
3299
3300
def _slice(node: dtlib_Node, prop_name: str, size: int,
           size_hint: str) -> list[bytes]:
    # Thin wrapper around _slice_helper() that passes EDTError as the
    # exception type to raise on failure, so callers in this module get
    # edtlib's own error type.
    return _slice_helper(node, prop_name, size, size_hint, EDTError)
3306
3307
def _check_dt(dt: DT) -> None:
    # edtlib-level devicetree sanity checks. dtlib itself is meant to be
    # general and anything-goes except for very special properties like
    # phandle, but in edtlib we can be pickier.

    # 'status' must be one of the values given in the devicetree spec
    # ("ok" is accepted too, for backwards compatibility).
    ok_status = {"ok", "okay", "disabled", "reserved", "fail", "fail-sss"}

    for node in dt.node_iter():
        status = node.props.get("status")
        if status is not None:
            try:
                status_val = status.to_string()
            except DTError as e:
                # The error message gives the path
                _err(str(e))

            if status_val not in ok_status:
                _err(f"unknown 'status' value \"{status_val}\" in {node.path} "
                     f"in {node.dt.filename}, expected one of " +
                     ", ".join(ok_status) +
                     " (see the devicetree specification)")

        # 'ranges' must be either empty or a list of numbers
        ranges_prop = node.props.get("ranges")
        if ranges_prop and ranges_prop.type not in (Type.EMPTY, Type.NUMS):
            _err(f"expected 'ranges = < ... >;' in {node.path} in "
                 f"{node.dt.filename}, not '{ranges_prop}' "
                 "(see the devicetree specification)")
3337
3338
def _err(msg) -> NoReturn:
    # Raises an EDTError with the given message. Centralized here so the
    # many error paths in this module read uniformly.
    raise EDTError(msg)
3341
# Module-level logging object
_LOG = logging.getLogger(__name__)

# Matches any character that is not alphanumeric or an underscore. The
# re.ASCII flag restricts \W to the ASCII alphabet, so non-ASCII characters
# also match (and get replaced by str_as_token() below).
_NOT_ALPHANUM_OR_UNDERSCORE = re.compile(r'\W', re.ASCII)


def str_as_token(val: str) -> str:
    """Return a canonical representation of a string as a C token.

    Every character that is not alphanumeric or an underscore is
    replaced with an underscore, and the result is returned."""

    return _NOT_ALPHANUM_OR_UNDERSCORE.sub('_', val)
3356
3357
# Custom PyYAML binding loader class to avoid modifying yaml.Loader directly,
# which could interfere with YAML loading in clients
class _BindingLoader(Loader):
    pass


# Add legacy '!include foo.yaml' handling to the binding loader; the tag is
# handled by _binding_include() (defined elsewhere in this file)
_BindingLoader.add_constructor("!include", _binding_include)
3366
3367#
3368# "Default" binding for properties which are defined by the spec.
3369#
3370# Zephyr: do not change the _DEFAULT_PROP_TYPES keys without
3371# updating the documentation for the DT_PROP() macro in
3372# include/devicetree.h.
3373#
3374
3375_DEFAULT_PROP_TYPES: dict[str, str] = {
3376    "compatible": "string-array",
3377    "status": "string",
3378    "ranges": "compound",  # NUMS or EMPTY
3379    "reg": "array",
3380    "reg-names": "string-array",
3381    "label": "string",
3382    "interrupts": "array",
3383    "interrupts-extended": "compound",
3384    "interrupt-names": "string-array",
3385    "interrupt-controller": "boolean",
3386}
3387
3388_STATUS_ENUM: list[str] = "ok okay disabled reserved fail fail-sss".split()
3389
3390def _raw_default_property_for(
3391        name: str
3392) -> dict[str, Union[str, bool, list[str]]]:
3393    ret: dict[str, Union[str, bool, list[str]]] = {
3394        'type': _DEFAULT_PROP_TYPES[name],
3395        'required': False,
3396    }
3397    if name == 'status':
3398        ret['enum'] = _STATUS_ENUM
3399    return ret
3400
# Binding backing the spec-defined default properties above. Built from the
# raw data produced by _raw_default_property_for(); no compatible or
# description is required for it.
_DEFAULT_PROP_BINDING: Binding = Binding(
    None,
    {},
    raw={
        'properties': {
            prop_name: _raw_default_property_for(prop_name)
            for prop_name in _DEFAULT_PROP_TYPES
        },
    },
    require_compatible=False,
    require_description=False,
)
3412
# PropertySpec instances for the spec-defined default properties, all
# backed by _DEFAULT_PROP_BINDING
_DEFAULT_PROP_SPECS: dict[str, PropertySpec] = {
    prop_name: PropertySpec(prop_name, _DEFAULT_PROP_BINDING)
    for prop_name in _DEFAULT_PROP_TYPES
}
3417