1# Copyright (c) 2020 Nordic Semiconductor ASA
2# SPDX-License-Identifier: Apache-2.0
3
4"""
5Like gen_kconfig_rest.py, but for generating an index of existing
6devicetree bindings.
7"""
8
9import argparse
10import glob
11import io
12import logging
13import os
14import pprint
15import re
16import sys
17import textwrap
18from collections import defaultdict
19from pathlib import Path
20
21import gen_helpers
22from devicetree import edtlib
23
# Root of the Zephyr tree: this file lives two directories below it.
ZEPHYR_BASE = Path(__file__).parents[2]

# Pseudo vendor names for the two special binding groups.
GENERIC_OR_VENDOR_INDEPENDENT = 'Generic or vendor-independent'
UNKNOWN_VENDOR = 'Unknown vendor'

# Base properties that have documentation in 'dt-important-props'.
DETAILS_IN_IMPORTANT_PROPS = set('compatible label reg status interrupts'.split())

logger = logging.getLogger('gen_devicetree_rest')
35
class VndLookup:
    """
    A convenience class for looking up information based on a
    devicetree compatible's vendor prefix 'vnd'.
    """

    def __init__(self, vendor_prefixes, bindings):
        # The lookup tables build on one another, so create them in order.
        self.vnd2vendor = self.load_vnd2vendor(vendor_prefixes)
        self.vnd2bindings = self.init_vnd2bindings(bindings)
        self.vnd2ref_target = self.init_vnd2ref_target()

    def vendor(self, vnd):
        """Full vendor name for prefix 'vnd', or UNKNOWN_VENDOR."""
        return self.vnd2vendor.get(vnd, UNKNOWN_VENDOR)

    def bindings(self, vnd, default=None):
        """List of bindings whose compatibles use prefix 'vnd', or 'default'."""
        return self.vnd2bindings.get(vnd, default)

    def target(self, vnd):
        """Sphinx ref target for 'vnd'; unrecognized prefixes fall back
        to the unknown-vendor section's target."""
        fallback = self.vnd2ref_target[(UNKNOWN_VENDOR,)]
        return self.vnd2ref_target.get(vnd, fallback)

    @staticmethod
    def load_vnd2vendor(vendor_prefixes):
        # Parse the vendor-prefixes.txt file at 'vendor_prefixes' into a
        # dict mapping short vendor prefixes, as they appear in
        # compatible properties, to full vendor names. For example the
        # line:
        #
        #    vnd	A stand-in for a real vendor
        #
        # produces the entry {'vnd': 'A stand-in for a real vendor'}.
        # The None key maps to GENERIC_OR_VENDOR_INDEPENDENT.

        vnd2vendor = {None: GENERIC_OR_VENDOR_INDEPENDENT}
        vnd2vendor.update(edtlib.load_vendor_prefixes_txt(vendor_prefixes))

        # '- 1' skips the artificial None entry added above.
        logger.info('found %d vendor prefixes in %s', len(vnd2vendor) - 1,
                    vendor_prefixes)
        if logger.isEnabledFor(logging.DEBUG):
            logger.debug('vnd2vendor=%s', pprint.pformat(vnd2vendor))

        return vnd2vendor

    def init_vnd2bindings(self, bindings):
        # Map 'vnd' vendor prefixes to lists of bindings, each list
        # sorted by compatible. Keys are inserted in vendor-name order,
        # with two special entries:
        #
        # - None (first key): bindings with no vendor prefix in their
        #   compatibles, like 'gpio-keys'.
        # - (UNKNOWN_VENDOR,) (last key): bindings whose compatible has
        #   a vendor prefix that is not in vnd2vendor, like
        #   'somethingrandom,device'. Using a 1-tuple is a convenience
        #   hack guaranteeing the key cannot collide with a real vendor
        #   prefix string; consumers of the dict handle it.

        # Bucket each binding by its vendor prefix.
        no_vnd = []
        unrecognized = []
        by_vnd = defaultdict(list)
        for binding in bindings:
            vnd = compatible_vnd(binding.compatible)
            if vnd is None:
                no_vnd.append(binding)
            elif vnd in self.vnd2vendor:
                by_vnd[vnd].append(binding)
            else:
                unrecognized.append(binding)

        def by_compatible(binding):
            return binding.compatible

        # CPython 3.6+ dicts preserve insertion order, so inserting in
        # vendor-name order keeps the keys sorted for consumers.
        vnd2bindings = {None: sorted(no_vnd, key=by_compatible)}
        for vnd in sorted(by_vnd, key=lambda v: self.vnd2vendor[v].casefold()):
            vnd2bindings[vnd] = sorted(by_vnd[vnd], key=by_compatible)
        vnd2bindings[(UNKNOWN_VENDOR,)] = sorted(unrecognized,
                                                 key=by_compatible)

        if logger.isEnabledFor(logging.DEBUG):
            logger.debug('vnd2bindings: %s', pprint.pformat(vnd2bindings))

        return vnd2bindings

    def init_vnd2ref_target(self):
        # Map each key of vnd2bindings to the Sphinx ref target of its
        # section in the generated output:
        #
        # - None -> the section for bindings with no vendor prefix
        # - a 'vnd' string -> that vendor's section
        # - (UNKNOWN_VENDOR,) -> the section for unrecognized prefixes

        def ref_target(vnd):
            if vnd is None:
                return 'dt_no_vendor'
            if isinstance(vnd, str):
                return f'dt_vendor_{vnd}'
            assert vnd == (UNKNOWN_VENDOR,), vnd
            return 'dt_unknown_vendor'

        return {vnd: ref_target(vnd) for vnd in self.vnd2bindings}
161
def main():
    """Script entry point: parse arguments, then generate the docs."""
    args = parse_args()
    setup_logging(args.verbose)

    bindings = load_bindings(args.dts_roots, args.dts_folders, args.dts_files)
    base_binding = load_base_binding()
    driver_sources = load_driver_sources()
    vnd_lookup = VndLookup(args.vendor_prefixes, bindings)

    dump_content(bindings, base_binding, vnd_lookup, driver_sources,
                 args.out_dir, args.turbo_mode)
171
def parse_args():
    """Return the command line arguments parsed from sys.argv."""
    parser = argparse.ArgumentParser(allow_abbrev=False)
    add = parser.add_argument

    add('-v', '--verbose', default=0, action='count',
        help='increase verbosity; may be given multiple times')
    add('--vendor-prefixes', required=True,
        help='vendor-prefixes.txt file path')
    add('--dts-root', dest='dts_roots', action='append',
        help='''additional DTS root directory as it would
        be set in DTS_ROOTS''')
    add('--dts-folder', dest='dts_folders', action='append', default=[],
        help='additional DTS folders containing binding files')
    add('--dts-file', dest='dts_files', action='append', default=[],
        help='additional individual DTS binding files')
    add('--turbo-mode', action='store_true',
        help='Enable turbo mode (dummy references)')
    add('out_dir', help='output files are generated here')

    return parser.parse_args()
192
def setup_logging(verbose):
    """Configure the root logger; larger 'verbose' means more output."""
    level = (logging.DEBUG if verbose >= 2 else
             logging.INFO if verbose == 1 else
             logging.ERROR)
    logging.basicConfig(format='%(filename)s:%(levelname)s: %(message)s',
                        level=level)
202
def load_bindings(dts_roots, dts_folders, dts_files):
    # Return a list of edtlib.Binding objects gathered from the
    # 'dts_roots' directories, the extra 'dts_folders', and the
    # individual 'dts_files'.

    if not dts_roots:
        sys.exit('no DTS roots; use --dts-root to specify at least one')

    binding_files = []
    for dts_root in dts_roots:
        for ext in ('yml', 'yaml'):
            binding_files.extend(
                glob.glob(f'{dts_root}/dts/bindings/**/*.{ext}',
                          recursive=True))
    for folder in dts_folders:
        for ext in ('yml', 'yaml'):
            binding_files.extend(glob.glob(f'{folder}/*.{ext}',
                                           recursive=False))
    binding_files.extend(dts_files)

    bindings = edtlib.bindings_from_paths(binding_files, ignore_errors=True)
    num_total = len(bindings)

    # Drop bindings from the 'vnd' vendor: it is not a real vendor,
    # just a stand-in used by examples and tests when a real vendor
    # would be inappropriate.
    bindings = [binding for binding in bindings
                if compatible_vnd(binding.compatible) != 'vnd']

    logger.info('found %d bindings (ignored %d) in this dts_roots list: %s',
                len(bindings), num_total - len(bindings), dts_roots)

    return bindings
234
def load_base_binding():
    # Create a Binding object for base.yaml, which holds the properties
    # common to all nodes. Keeping it separate lets the generated pages
    # distinguish common properties from node-specific ones.

    base_dir = ZEPHYR_BASE / 'dts' / 'bindings' / 'base'
    base_yaml = base_dir / 'base.yaml'
    base_includes = {"pm.yaml": os.fspath(base_dir / 'pm.yaml')}

    if not base_yaml.is_file():
        sys.exit(f'Expected to find base.yaml at {base_yaml}')

    return edtlib.Binding(os.fspath(base_yaml), base_includes,
                          require_compatible=False,
                          require_description=False)
248
def load_driver_sources():
    # Heuristically map DT_DRV_COMPAT compatibles to the source file
    # (or folder) that most likely implements their driver. Paths in
    # the returned dict are relative to ZEPHYR_BASE.
    #
    # A file counts as a likely driver source when either:
    # - it contains both a "#define DT_DRV_COMPAT <compatible>" and a
    #   DEVICE_DT_INST_DEFINE(...) call (dropped again if several such
    #   files claim the same compatible), or
    # - it is the one and only file anywhere that defines that
    #   DT_DRV_COMPAT.

    define_re = re.compile(r"#define DT_DRV_COMPAT\s+(.*)")
    inst_define_re = re.compile(r"DEVICE_DT_INST_DEFINE")

    sources = {}
    occurrences = defaultdict(list)

    for folder in ["boards", "drivers", "modules", "soc", "subsys"]:
        for dirpath, _, filenames in os.walk(ZEPHYR_BASE / folder):
            for filename in filenames:
                if not filename.endswith(('.c', '.h')):
                    continue
                filepath = Path(dirpath) / filename
                with open(filepath, encoding="utf-8") as f:
                    content = f.read()
                relative_path = filepath.relative_to(ZEPHYR_BASE)

                # Record every DT_DRV_COMPAT occurrence in the file.
                compatibles = define_re.findall(content)
                for compatible in compatibles:
                    occurrences[compatible].append(relative_path)

                if compatibles and inst_define_re.search(content):
                    for compatible in compatibles:
                        # None marks a compatible claimed by more than
                        # one file; such entries are dropped below.
                        sources[compatible] = (None if compatible in sources
                                               else relative_path)

    # Drop the ambiguous entries.
    sources = {c: p for c, p in sources.items() if p is not None}

    # A DT_DRV_COMPAT defined in exactly one place is also a good match.
    for compatible, paths in occurrences.items():
        if compatible not in sources and len(paths) == 1:
            path = paths[0]
            # For a header file, point at its enclosing folder instead.
            sources[compatible] = path.parent if path.suffix == ".h" else path

    return sources
301
def dump_content(bindings, base_binding, vnd_lookup, driver_sources, out_dir,
                 turbo_mode):
    # Generate the .rst output under 'out_dir' for the given bindings.
    # Files are only rewritten when their content changes, and stale
    # .rst files that would no longer be generated are removed.

    out_dir = Path(out_dir)
    setup_bindings_dir(bindings, out_dir)

    if turbo_mode:
        write_dummy_index(bindings, out_dir)
        return

    write_bindings_rst(vnd_lookup, out_dir)
    write_orphans(bindings, base_binding, vnd_lookup, driver_sources, out_dir)
316
def setup_bindings_dir(bindings, out_dir):
    # Create out_dir / bindings if needed and delete any file under it
    # that does not correspond to one of 'bindings'.

    bindings_dir = out_dir / 'bindings'
    logger.info('making output subdirectory %s', bindings_dir)
    bindings_dir.mkdir(parents=True, exist_ok=True)

    # The set of files we expect to generate.
    expected = {bindings_dir / binding_filename(binding)
                for binding in bindings}

    for dirpath, _, filenames in os.walk(bindings_dir):
        for filename in filenames:
            path = Path(dirpath) / filename
            if path not in expected:
                logger.info('removing unexpected file %s', path)
                path.unlink()
337
338
def write_dummy_index(bindings, out_dir):
    # Write out_dir / bindings.rst with dummy anchors only (turbo
    # mode), rather than a full index.

    # header
    content = '\n'.join((
        '.. _devicetree_binding_index:',
        '.. _dt_vendor_zephyr:',
        '',
        'Dummy bindings index',
        '####################',
        '',
    ))

    # One dummy anchor per (de-duplicated) compatible. Sort them so the
    # output is stable across runs: set iteration order depends on
    # string hashing, which is randomized between interpreter runs, and
    # an unstable order would make write_if_updated() rewrite the file
    # on every build.
    compatibles = sorted({binding.compatible for binding in bindings})
    content += '\n'.join(
        f'.. dtcompatible:: {compatible}' for compatible in compatibles
    )

    write_if_updated(out_dir / 'bindings.rst', content)
359
360
def write_bindings_rst(vnd_lookup, out_dir):
    # Write out_dir / bindings.rst, the top level index of bindings.

    out = io.StringIO()

    # Page header and the vendor index.
    print_block(f'''\
    .. _devicetree_binding_index:

    Bindings index
    ##############

    This page documents the available devicetree bindings.
    See {zref('dt-bindings')} for an introduction to the Zephyr bindings
    file format.

    Vendor index
    ************

    This section contains an index of hardware vendors.
    Click on a vendor's name to go to the list of bindings for
    that vendor.

    .. rst-class:: rst-columns
    ''', out)

    # One link per vendor that actually has bindings.
    for vnd, bindings in vnd_lookup.vnd2bindings.items():
        if bindings:
            print(f'- :ref:`{vnd_lookup.target(vnd)}`', file=out)

    print_block('''\

    Bindings by vendor
    ******************

    This section contains available bindings, grouped by vendor.
    Within each group, bindings are listed by the "compatible" property
    they apply to, like this:

    **Vendor name (vendor prefix)**

    .. rst-class:: rst-columns

    - <compatible-A>
    - <compatible-B> (on <bus-name> bus)
    - <compatible-C>
    - ...

    The text "(on <bus-name> bus)" appears when bindings may behave
    differently depending on the bus the node appears on.
    For example, this applies to some sensor device nodes, which may
    appear as children of either I2C or SPI bus nodes.
    ''', out)

    # One section per vendor, listing that vendor's bindings.
    for vnd, bindings in vnd_lookup.vnd2bindings.items():
        if not bindings:
            continue

        # The (UNKNOWN_VENDOR,) pseudo-key carries its section title
        # directly; real vendor prefixes get "Vendor name (vnd)".
        if isinstance(vnd, tuple):
            title = vnd[0]
        else:
            title = vnd_lookup.vendor(vnd).strip()
            if isinstance(vnd, str):
                title += f' ({vnd})'
        underline = '=' * len(title)

        print_block(f'''\
        .. _{vnd_lookup.target(vnd)}:

        {title}
        {underline}

        .. rst-class:: rst-columns
        ''', out)
        for binding in bindings:
            print(f'- :ref:`{binding_ref_target(binding)}`', file=out)
        print(file=out)

    write_if_updated(out_dir / 'bindings.rst', out.getvalue())
440
def write_orphans(bindings, base_binding, vnd_lookup, driver_sources, out_dir):
    # Write out_dir / bindings / foo / binding_page.rst for each binding
    # in 'bindings', along with any "disambiguation" pages needed when a
    # single compatible string can be handled by multiple bindings.
    #
    # These files are 'orphans' in the Sphinx sense: they are not in
    # any toctree.

    # Use the module logger, not the root logger via logging.info(), so
    # these messages carry this script's logger name and level like the
    # rest of the file.
    logger.info('updating :orphan: files for %d bindings', len(bindings))
    num_written = 0

    # First, figure out which compatibles map to multiple bindings. We
    # need this information to decide which of the generated files for
    # a compatible are "disambiguation" pages that point to per-bus
    # binding pages, and which ones aren't.

    compat2bindings = defaultdict(list)
    for binding in bindings:
        compat2bindings[binding.compatible].append(binding)
    dup_compat2bindings = {compatible: bindings for compatible, bindings
                           in compat2bindings.items() if len(bindings) > 1}

    # Next, write the per-binding pages. These contain the
    # per-compatible targets for compatibles not in 'dup_compats'.
    # We'll finish up by writing per-compatible "disambiguation" pages
    # for compatibles in 'dup_compats'.

    # Names of properties in base.yaml.
    base_names = set(base_binding.prop2specs.keys())
    for binding in bindings:
        string_io = io.StringIO()

        print_binding_page(binding, base_names, vnd_lookup,
                           driver_sources, dup_compat2bindings, string_io)

        written = write_if_updated(out_dir / 'bindings' /
                                   binding_filename(binding),
                                   string_io.getvalue())

        if written:
            num_written += 1

    # Generate disambiguation pages for dup_compats.
    compatibles_dir = out_dir / 'compatibles'
    setup_compatibles_dir(dup_compat2bindings.keys(), compatibles_dir)
    for compatible in dup_compat2bindings:
        string_io = io.StringIO()

        print_compatible_disambiguation_page(
            compatible, dup_compat2bindings[compatible], string_io)

        written = write_if_updated(compatibles_dir /
                                   compatible_filename(compatible),
                                   string_io.getvalue())

        if written:
            num_written += 1

    logger.info('done writing :orphan: files; %d files needed updates',
                num_written)
501
def print_binding_page(binding, base_names, vnd_lookup, driver_sources,dup_compats,
                       string_io):
    # Print the rst content for 'binding' to 'string_io'. The
    # 'dup_compats' argument should support membership testing for
    # compatibles which have multiple associated bindings; if
    # 'binding.compatible' is not in it, then the ref target for the
    # entire compatible is generated in this page as well.
    #
    # 'base_names' (property names from base.yaml) and 'driver_sources'
    # (compatible -> source path) are forwarded to the helpers below.

    # Page layout:
    #
    # :orphan:
    #
    # .. ref_target:
    #
    # Title [(on <bus> bus)]
    # ######################
    if binding.on_bus:
        on_bus_title = f' (on {binding.on_bus} bus)'
    else:
        on_bus_title = ''
    compatible = binding.compatible

    title = f'{compatible}{on_bus_title}'
    underline = '#' * len(title)
    if compatible not in dup_compats:
        # If this binding is the only one that handles this
        # compatible, point the ".. dtcompatible:" directive straight
        # to this page. There's no need for disambiguation.
        dtcompatible = f'.. dtcompatible:: {binding.compatible}'
    else:
        # This compatible is handled by multiple bindings;
        # its ".. dtcompatible::" should be in a disambiguation page
        # instead.
        dtcompatible = ''

    print_block(f'''\
    :orphan:

    .. raw:: html

        <!--
        FIXME: do not limit page width until content uses another representation
        format other than tables
        -->
        <style>.wy-nav-content {{ max-width: none; !important }}</style>

    {dtcompatible}
    .. _{binding_ref_target(binding)}:

    {title}
    {underline}
    ''', string_io)

    # Vendor: <link-to-vendor-section>
    vnd = compatible_vnd(compatible)
    print('Vendor: '
          f':ref:`{vnd_lookup.vendor(vnd)} <{vnd_lookup.target(vnd)}>`\n',
          file=string_io)

    # Link to driver implementation (if it exists).
    # NOTE: 'compatible' is rebound here to the lowercased,
    # underscore-separated form used in "#define DT_DRV_COMPAT" lines,
    # matching the keys produced by load_driver_sources().
    compatible = re.sub("[-,.@/+]", "_", compatible.lower())
    if compatible in driver_sources:
        print_block(
            f"""\
            .. note::

               An implementation of a driver matching this compatible is available in
               :zephyr_file:`{driver_sources[compatible]}`.
        """,
            string_io,
        )

    # Binding description.
    if binding.bus:
        bus_help = f'These nodes are "{binding.bus}" bus nodes.'
    else:
        bus_help = ''
    print_block(f'''\
    Description
    ***********

    {bus_help}
    ''', string_io)

    # The description is rendered verbatim in a literal block
    # (to_code_block() emits a '.. code-block:: none' directive).
    if binding.title:
        description = ("\n\n"
                       .join([binding.title, binding.description])
                       .strip())
    else:
        description = binding.description.strip()
    print(to_code_block(description), file=string_io)

    # Properties.
    print_block('''\
    Properties
    **********
    ''', string_io)
    print_top_level_properties(binding, base_names, string_io)
    print_child_binding_properties(binding, string_io)

    # Specifier cells.
    #
    # This presentation isn't particularly nice. Perhaps something
    # better can be done for future work.
    if binding.specifier2cells:
        print_block('''\
        Specifier cell names
        ********************
        ''', string_io)
        for specifier, cells in binding.specifier2cells.items():
            print(f'- {specifier} cells: {", ".join(cells)}',
                  file=string_io)
612
def print_top_level_properties(binding, base_names, string_io):
    # Print the RST for top level properties for 'binding' to 'string_io'.
    #
    # The 'base_names' set contains all the base.yaml properties.

    def prop_table(filter_fn, deprecated):
        # Get a properly formatted and indented table of properties.
        specs = [prop_spec for prop_spec in binding.prop2specs.values()
                 if filter_fn(prop_spec)]
        # 14 spaces: 8 are removed again by print_block()'s dedent of
        # the 8-space-indented template below, leaving 6 to nest the
        # table under its '.. group-tab::' directive.
        indent = ' ' * 14
        if specs:
            temp_io = io.StringIO()
            print_property_table(specs, temp_io, deprecated=deprecated)
            return textwrap.indent(temp_io.getvalue(), indent)

        return indent + '(None)'

    def node_props_filter(prop_spec):
        # Node-specific, non-deprecated properties.
        return prop_spec.name not in base_names and not prop_spec.deprecated

    def deprecated_node_props_filter(prop_spec):
        # Node-specific properties marked deprecated.
        return prop_spec.name not in base_names and prop_spec.deprecated

    def base_props_filter(prop_spec):
        # Properties inherited from base.yaml.
        return prop_spec.name in base_names

    if binding.child_binding:
        print_block('''\
        Top level properties
        ====================
        ''', string_io)
    if binding.prop2specs:
        if binding.child_binding:
            print_block(f'''
            These property descriptions apply to "{binding.compatible}"
            nodes themselves. This page also describes child node
            properties in the following sections.
            ''', string_io)


        # The '{prop_table(...)}' interpolations sit at column zero on
        # purpose: their content carries its own 14-space indent (see
        # prop_table() above).
        print_block(f'''\
        .. tabs::

           .. group-tab:: Node specific properties

              Properties not inherited from the base binding file.

{prop_table(node_props_filter, False)}

           .. group-tab:: Deprecated node specific properties

              Deprecated properties not inherited from the base binding file.

{prop_table(deprecated_node_props_filter, False)}

           .. group-tab:: Base properties

              Properties inherited from the base binding file, which defines
              common properties that may be set on many nodes. Not all of these
              may apply to the "{binding.compatible}" compatible.

{prop_table(base_props_filter, True)}

        ''', string_io)
    else:
        print('No top-level properties.\n', file=string_io)
679
def print_child_binding_properties(binding, string_io):
    # Walk the chain of child bindings under 'binding', printing a
    # property table for every nesting level that has properties.

    level_names = {1: 'Child', 2: 'Grandchild'}
    child = binding.child_binding
    level = 1
    while child is not None:
        if child.prop2specs:
            prefix = level_names.get(level, f'Level {level} child')
            title = f'{prefix} node properties'
            underline = '=' * len(title)
            print(f'{title}\n{underline}\n', file=string_io)
            print_property_table(child.prop2specs.values(), string_io,
                                 deprecated=True)
        level += 1
        child = child.child_binding
701
def print_property_table(prop_specs, string_io, deprecated=False):
    # Print an RST list-table describing each edtlib.PropertySpec in
    # 'prop_specs' to 'string_io'.
    #
    # When 'deprecated' is true, deprecated properties get an extra
    # line saying so. Pass false for tables whose properties are all
    # deprecated, where that would be redundant.

    # Table header.
    print_block('''\
    .. list-table::
       :widths: 1 1 4
       :header-rows: 1

       * - Name
         - Type
         - Details
    ''', string_io)

    def details_cell(prop_spec):
        # Build the "Details" column: the property description (which
        # may span multiple lines), followed by notes about required
        # status, default, constant/enum values, etc.
        chunks = []

        if prop_spec.description:
            chunks.append(to_code_block(prop_spec.description))

        if prop_spec.required:
            chunks.append('\n\nThis property is **required**.')

        if prop_spec.default:
            chunks.append(f'\n\nDefault value: ``{prop_spec.default}``')

        if prop_spec.const:
            chunks.append(f'\n\nConstant value: ``{prop_spec.const}``')
        elif prop_spec.enum:
            chunks.append('\n\nLegal values: ' +
                          ', '.join(f'``{repr(val)}``' for val in
                                    prop_spec.enum))

        if prop_spec.name in DETAILS_IN_IMPORTANT_PROPS:
            chunks.append(f'\n\nSee {zref("dt-important-props")} for more '
                          'information.')

        if deprecated and prop_spec.deprecated:
            chunks.append('\n\nThis property is **deprecated**.')

        return ''.join(chunks)

    # One multiline row per property.
    for prop_spec in prop_specs:
        details = details_cell(prop_spec)
        print(f"""\
   * - ``{prop_spec.name}``
     - ``{prop_spec.type}``
     - {textwrap.indent(details, ' ' * 7).lstrip()}
""", file=string_io)
765
def setup_compatibles_dir(compatibles, compatibles_dir):
    # Ensure 'compatibles_dir' exists and contains only the
    # disambiguation pages for 'compatibles'; any other file in it is
    # deleted.

    logger.info('making output subdirectory %s', compatibles_dir)
    compatibles_dir.mkdir(parents=True, exist_ok=True)

    # The set of files we expect to generate.
    expected = {compatibles_dir / compatible_filename(compatible)
                for compatible in compatibles}

    for path in compatibles_dir.iterdir():
        if path not in expected:
            logger.info('removing unexpected file %s', path)
            path.unlink()
782
783
def print_compatible_disambiguation_page(compatible, bindings, string_io):
    # Print an RST page for 'compatible' linking to each binding in
    # 'bindings' that can handle it.

    assert len(bindings) > 1, (compatible, bindings)

    links = '\n    '.join(f'- :ref:`{binding_ref_target(binding)}`'
                          for binding in bindings)
    underline = '#' * len(compatible)

    print_block(f'''\
    :orphan:

    .. dtcompatible:: {compatible}

    {compatible}
    {underline}

    The devicetree compatible ``{compatible}`` may be handled by any
    of the following bindings:

    {links}
    ''', string_io)
807
def print_block(block, string_io):
    """Dedent the triple-quoted RST snippet 'block' and print it to
    'string_io'. (Any block of text, not just 'code-block' directives.)
    """
    print(textwrap.dedent(block), file=string_io)
814
def to_code_block(s, indent=0):
    """Return 's' formatted as an rst '.. code-block:: none' directive.
    Each line of the block body is prefixed by 'indent' extra spaces."""
    prefix = ' ' * indent + '   '
    return '.. code-block:: none\n\n' + textwrap.indent(s, prefix) + '\n'
822
def compatible_vnd(compatible):
    """Return the vendor prefix of 'compatible' — e.g. 'foo' for
    'foo,device' — or None if 'compatible' contains no comma."""
    vnd, sep, _ = compatible.partition(',')
    return vnd if sep else None
834
def compatible_filename(compatible):
    """Return the file name of the per-compatible disambiguation page
    within the out_dir / compatibles directory."""
    return compatible + '.rst'
840
def zref(target, text=None):
    # Return an RST cross-reference to the Zephyr documentation ref
    # target 'target': ':ref:`text <target>`' if 'text' is given,
    # otherwise ':ref:`target`'.
    #
    # The bindings docs normally live in the main Zephyr documentation,
    # but this script also supports building them as a separate Sphinx
    # doc set. Since the generated content links back into the Zephyr
    # docs, the GEN_DEVICETREE_REST_ZEPHYR_DOCSET environment variable
    # is an escape hatch that prefixes 'target' with that doc set's
    # name, relying on intersphinx:
    # https://www.sphinx-doc.org/en/master/usage/extensions/intersphinx.html

    docset = os.environ.get('GEN_DEVICETREE_REST_ZEPHYR_DOCSET', '')
    if docset.strip():
        target = f'{docset}:{target}'

    return f':ref:`{text} <{target}>`' if text else f':ref:`{target}`'
865
def binding_filename(binding):
    # Return the output file name for 'binding', relative to the
    # directory holding documentation for all bindings. This is done by
    # stripping everything up to and including the 'dts/bindings/'
    # component that all binding files share under a DTS_ROOT
    # directory, then swapping the .yaml/.yml suffix for .rst.
    #
    # For example, .../zephyr/dts/bindings/base/base.yaml becomes
    # 'base/base.rst'.
    #
    # Hopefully that's unique across roots. If not, we'll need to
    # update this function.

    as_posix = Path(binding.path).as_posix()
    dts_bindings = 'dts/bindings/'
    idx = as_posix.rfind(dts_bindings)
    if idx == -1:
        raise ValueError(f'binding path has no {dts_bindings}: {binding.path}')

    relative = as_posix[idx + len(dts_bindings):]
    return os.path.splitext(relative)[0] + '.rst'
888
def binding_ref_target(binding):
    """Return the Sphinx ':ref:' target name for 'binding', derived
    from its file stem with '/', ',' and '-' mapped to '_'."""
    sanitized = re.sub('[/,-]', '_', Path(binding.path).stem)
    return f'dtbinding_{sanitized}'
894
def write_if_updated(path, s):
    # Thin wrapper around gen_helpers.write_if_updated() that also
    # creates missing parent directories and logs the outcome. Returns
    # whatever gen_helpers reports (whether the file was written).

    path.parent.mkdir(parents=True, exist_ok=True)
    written = gen_helpers.write_if_updated(path, s)
    logger.debug('%s %s', 'wrote' if written else 'did NOT write', path)
    return written
904
905
# Run as a script: generate the docs, then exit with success status.
if __name__ == '__main__':
    main()
    sys.exit(0)
909