1#!/usr/bin/env python3
2
3# Copyright (c) 2018,2020 Intel Corporation
4# Copyright (c) 2022 Nordic Semiconductor ASA
5# SPDX-License-Identifier: Apache-2.0
6
7import argparse
8import collections
9from itertools import takewhile
10import json
11import logging
12import os
13from pathlib import Path, PurePath
14import platform
15import re
16import subprocess
17import sys
18import tempfile
19import traceback
20import shlex
21import shutil
22import textwrap
23import unidiff
24import yaml
25
26from yamllint import config, linter
27
28from junitparser import TestCase, TestSuite, JUnitXml, Skipped, Error, Failure
29import magic
30
31from west.manifest import Manifest
32from west.manifest import ManifestProject
33
34try:
35    from yaml import CSafeLoader as SafeLoader
36except ImportError:
37    from yaml import SafeLoader
38
39sys.path.insert(0, str(Path(__file__).resolve().parents[1]))
40from get_maintainer import Maintainers, MaintainersError
41import list_boards
42import list_hardware
43
44sys.path.insert(0, str(Path(__file__).resolve().parents[2]
45                       / "scripts" / "dts" / "python-devicetree" / "src"))
46from devicetree import edtlib
47
48
49# Let the user run this script as ./scripts/ci/check_compliance.py without
50# making them set ZEPHYR_BASE.
51ZEPHYR_BASE = os.environ.get('ZEPHYR_BASE')
52if ZEPHYR_BASE:
53    ZEPHYR_BASE = Path(ZEPHYR_BASE)
54else:
55    ZEPHYR_BASE = Path(__file__).resolve().parents[2]
56    # Propagate this decision to child processes.
57    os.environ['ZEPHYR_BASE'] = str(ZEPHYR_BASE)
58
59# Initialize the property names allowlist
60BINDINGS_PROPERTIES_AL = None
61with open(Path(__file__).parents[1] / 'bindings_properties_allowlist.yaml') as f:
62    allowlist = yaml.safe_load(f.read())
63    if allowlist is not None:
64        BINDINGS_PROPERTIES_AL = set(allowlist)
65    else:
66        BINDINGS_PROPERTIES_AL = set()
67
68logger = None
69
70def git(*args, cwd=None, ignore_non_zero=False):
71    # Helper for running a Git command. Returns the rstrip()ed stdout output.
72    # Called like git("diff"). Exits with SystemError (raised by sys.exit()) on
73    # errors if 'ignore_non_zero' is set to False (default: False). 'cwd' is the
74    # working directory to use (default: current directory).
75
76    git_cmd = ("git",) + args
77    try:
78        cp = subprocess.run(git_cmd, capture_output=True, cwd=cwd)
79    except OSError as e:
80        err(f"failed to run '{cmd2str(git_cmd)}': {e}")
81
82    if not ignore_non_zero and (cp.returncode or cp.stderr):
83        err(f"'{cmd2str(git_cmd)}' exited with status {cp.returncode} and/or "
84            f"wrote to stderr.\n"
85            f"==stdout==\n"
86            f"{cp.stdout.decode('utf-8')}\n"
87            f"==stderr==\n"
88            f"{cp.stderr.decode('utf-8')}\n")
89
90    return cp.stdout.decode("utf-8").rstrip()
91
92def get_shas(refspec):
93    """
94    Returns the list of Git SHAs for 'refspec'.
95
96    :param refspec:
97    :return:
98    """
99    return git('rev-list',
100               f'--max-count={-1 if "." in refspec else 1}', refspec).split()
101
102def get_files(filter=None, paths=None):
103    filter_arg = (f'--diff-filter={filter}',) if filter else ()
104    paths_arg = ('--', *paths) if paths else ()
105    out = git('diff', '--name-only', *filter_arg, COMMIT_RANGE, *paths_arg)
106    files = out.splitlines()
107    for file in list(files):
108        if not (GIT_TOP / file).exists():
109            # Drop submodule directories from the list.
110            files.remove(file)
111    return files
112
113def get_module_setting_root(root, settings_file):
114    """
115    Parse the Zephyr module generated settings file given by 'settings_file'
116    and return all root settings defined by 'root'.
117    """
118    # Invoke the script directly using the Python executable since this is
119    # not a module nor a pip-installed Python utility
120    root_paths = []
121
122    if os.path.exists(settings_file):
123        with open(settings_file, 'r') as fp_setting_file:
124            content = fp_setting_file.read()
125
126        lines = content.strip().split('\n')
127        for line in lines:
128            root = root.upper()
129            if line.startswith(f'"{root}_ROOT":'):
130                _, root_path = line.split(":", 1)
131                root_paths.append(Path(root_path.strip('"')))
132    return root_paths
133
134def get_vendor_prefixes(path, errfn = print) -> set[str]:
135    vendor_prefixes = set()
136    with open(path) as fp:
137        for line in fp.readlines():
138            line = line.strip()
139            if not line or line.startswith("#"):
140                continue
141            try:
142                vendor, _ = line.split("\t", 2)
143                vendor_prefixes.add(vendor)
144            except ValueError:
145                errfn(f"Invalid line in {path}:\"{line}\".")
146                errfn("Did you forget the tab character?")
147    return vendor_prefixes
148
149class FmtdFailure(Failure):
150    def __init__(
151        self, severity, title, file, line=None, col=None, desc="", end_line=None, end_col=None
152    ):
153        self.severity = severity
154        self.title = title
155        self.file = file
156        self.line = line
157        self.col = col
158        self.end_line = end_line
159        self.end_col = end_col
160        self.desc = desc
161        description = f':{desc}' if desc else ''
162        msg_body = desc or title
163
164        txt = f'\n{title}{description}\nFile:{file}' + \
165              (f'\nLine:{line}' if line else '') + \
166              (f'\nColumn:{col}' if col else '') + \
167              (f'\nEndLine:{end_line}' if end_line else '') + \
168              (f'\nEndColumn:{end_col}' if end_col else '')
169        msg = f'{file}' + (f':{line}' if line else '') + f' {msg_body}'
170        typ = severity.lower()
171
172        super().__init__(msg, typ)
173
174        self.text = txt
175
176
177class ComplianceTest:
178    """
179    Base class for tests. Inheriting classes should have a run() method and set
180    these class variables:
181
182    name:
183      Test name
184
185    doc:
186      Link to documentation related to what's being tested
187
188    path_hint:
189      The path the test runs itself in. By default it uses the magic string
190      "<git-top>" which refers to the top-level repository directory.
191
192      This avoids running 'git' to find the top-level directory before main()
193      runs (class variable assignments run when the 'class ...' statement
194      runs). That avoids swallowing errors, because main() reports them to
195      GitHub.
196
197      Subclasses may override the default with a specific path or one of the
198      magic strings below:
199      - "<zephyr-base>" can be used to refer to the environment variable
200        ZEPHYR_BASE or, when missing, the calculated base of the zephyr tree.
201    """
202    path_hint = "<git-top>"
203
204    def __init__(self):
205        self.case = TestCase(type(self).name, "Guidelines")
206        # This is necessary because Failure can be subclassed, but since it is
207        # always restored form the element tree, the subclass is lost upon
208        # restoring
209        self.fmtd_failures = []
210
211    def _result(self, res, text):
212        res.text = text.rstrip()
213        self.case.result += [res]
214
215    def error(self, text, msg=None, type_="error"):
216        """
217        Signals a problem with running the test, with message 'msg'.
218
219        Raises an exception internally, so you do not need to put a 'return'
220        after error().
221        """
222        err = Error(msg or f'{type(self).name} error', type_)
223        self._result(err, text)
224
225        raise EndTest
226
227    def skip(self, text, msg=None, type_="skip"):
228        """
229        Signals that the test should be skipped, with message 'msg'.
230
231        Raises an exception internally, so you do not need to put a 'return'
232        after skip().
233        """
234        skpd = Skipped(msg or f'{type(self).name} skipped', type_)
235        self._result(skpd, text)
236
237        raise EndTest
238
239    def failure(self, text, msg=None, type_="failure"):
240        """
241        Signals that the test failed, with message 'msg'. Can be called many
242        times within the same test to report multiple failures.
243        """
244        fail = Failure(msg or f'{type(self).name} issues', type_)
245        self._result(fail, text)
246
247    def fmtd_failure(
248        self, severity, title, file, line=None, col=None, desc="", end_line=None, end_col=None
249    ):
250        """
251        Signals that the test failed, and store the information in a formatted
252        standardized manner. Can be called many times within the same test to
253        report multiple failures.
254        """
255        fail = FmtdFailure(severity, title, file, line, col, desc, end_line, end_col)
256        self._result(fail, fail.text)
257        self.fmtd_failures.append(fail)
258
259class EndTest(Exception):
260    """
261    Raised by ComplianceTest.error()/skip() to end the test.
262
263    Tests can raise EndTest themselves to immediately end the test, e.g. from
264    within a nested function call.
265    """
266
267
268class CheckPatch(ComplianceTest):
269    """
270    Runs checkpatch and reports found issues
271
272    """
273    name = "Checkpatch"
274    doc = "See https://docs.zephyrproject.org/latest/contribute/guidelines.html#coding-style for more details."
275
276    def run(self):
277        checkpatch = ZEPHYR_BASE / 'scripts' / 'checkpatch.pl'
278        if not checkpatch.exists():
279            self.skip(f'{checkpatch} not found')
280
281        # check for Perl installation on Windows
282        if os.name == 'nt':
283            if not shutil.which('perl'):
284                self.failure("Perl not installed - required for checkpatch.pl. Please install Perl or add to PATH.")
285                return
286            else:
287                cmd = ['perl', checkpatch]
288
289        # Linux and MacOS
290        else:
291            cmd = [checkpatch]
292
293        cmd.extend(['--mailback', '--no-tree', '-'])
294        with subprocess.Popen(('git', 'diff', '--no-ext-diff', COMMIT_RANGE),
295                                stdout=subprocess.PIPE,
296                                cwd=GIT_TOP) as diff:
297            try:
298                subprocess.run(cmd,
299                               check=True,
300                               stdin=diff.stdout,
301                               stdout=subprocess.PIPE,
302                               stderr=subprocess.STDOUT,
303                               shell=False, cwd=GIT_TOP)
304
305            except subprocess.CalledProcessError as ex:
306                output = ex.output.decode("utf-8")
307                regex = r'^\s*\S+:(\d+):\s*(ERROR|WARNING):(.+?):(.+)(?:\n|\r\n?)+' \
308                        r'^\s*#(\d+):\s*FILE:\s*(.+):(\d+):'
309
310                matches = re.findall(regex, output, re.MULTILINE)
311
312                # add a guard here for excessive number of errors, do not try and
313                # process each one of them and instead push this as one failure.
314                if len(matches) > 500:
315                    self.failure(output)
316                    return
317
318                for m in matches:
319                    self.fmtd_failure(m[1].lower(), m[2], m[5], m[6], col=None,
320                            desc=m[3])
321
322                # If the regex has not matched add the whole output as a failure
323                if len(matches) == 0:
324                    self.failure(output)
325
326
327class BoardYmlCheck(ComplianceTest):
328    """
329    Check the board.yml files
330    """
331    name = "BoardYml"
332    doc = "Check the board.yml file format"
333
334    def check_board_file(self, file, vendor_prefixes):
335        """Validate a single board file."""
336        with open(file) as fp:
337            for line_num, line in enumerate(fp.readlines(), start=1):
338                if "vendor:" in line:
339                    _, vnd = line.strip().split(":", 2)
340                    vnd = vnd.strip()
341                    if vnd not in vendor_prefixes:
342                        desc = f"invalid vendor: {vnd}"
343                        self.fmtd_failure("error", "BoardYml", file, line_num,
344                                          desc=desc)
345
346    def run(self):
347        path = resolve_path_hint(self.path_hint)
348
349        vendor_prefixes = {"others"}
350        # add vendor prefixes from the main zephyr repo
351        vendor_prefixes |= get_vendor_prefixes(ZEPHYR_BASE / "dts" / "bindings" / "vendor-prefixes.txt", self.error)
352
353        # add vendor prefixes from the current repo
354        dts_roots = get_module_setting_root('dts', path / "zephyr" / "module.yml")
355        for dts_root in dts_roots:
356            vendor_prefix_file = dts_root / "dts" / "bindings" / "vendor-prefixes.txt"
357            if vendor_prefix_file.exists():
358                vendor_prefixes |= get_vendor_prefixes(vendor_prefix_file, self.error)
359
360        for file in path.glob("**/board.yml"):
361            self.check_board_file(file, vendor_prefixes)
362
363
364class ClangFormatCheck(ComplianceTest):
365    """
366    Check if clang-format reports any issues
367    """
368    name = "ClangFormat"
369    doc = "See https://docs.zephyrproject.org/latest/contribute/guidelines.html#clang-format for more details."
370
371    def run(self):
372        exe = f"clang-format-diff.{'exe' if platform.system() == 'Windows' else 'py'}"
373
374        for file in get_files():
375            if Path(file).suffix not in ['.c', '.h']:
376                continue
377
378            diff = subprocess.Popen(('git', 'diff', '-U0', '--no-color', COMMIT_RANGE, '--', file),
379                                    stdout=subprocess.PIPE,
380                                    cwd=GIT_TOP)
381            try:
382                subprocess.run((exe, '-p1'),
383                               check=True,
384                               stdin=diff.stdout,
385                               stdout=subprocess.PIPE,
386                               stderr=subprocess.STDOUT,
387                               cwd=GIT_TOP)
388
389            except subprocess.CalledProcessError as ex:
390                patchset = unidiff.PatchSet.from_string(ex.output, encoding="utf-8")
391                for patch in patchset:
392                    for hunk in patch:
393                        # Strip the before and after context
394                        before = next(i for i,v in enumerate(hunk) if str(v).startswith(('-', '+')))
395                        after = next(i for i,v in enumerate(reversed(hunk)) if str(v).startswith(('-', '+')))
396                        msg = "".join([str(l) for l in hunk[before:-after or None]])
397
398                        # show the hunk at the last line
399                        self.fmtd_failure("notice",
400                                          "You may want to run clang-format on this change",
401                                          file, line=hunk.source_start + hunk.source_length - after,
402                                          desc=f'\r\n{msg}')
403
404
405class DevicetreeBindingsCheck(ComplianceTest):
406    """
407    Checks if we are introducing any unwanted properties in Devicetree Bindings.
408    """
409    name = "DevicetreeBindings"
410    doc = "See https://docs.zephyrproject.org/latest/build/dts/bindings.html for more details."
411
412    def run(self, full=True):
413        bindings_diff, bindings = self.get_yaml_bindings()
414
415        # If no bindings are changed, skip this check.
416        try:
417            subprocess.check_call(['git', 'diff', '--quiet', COMMIT_RANGE]
418                                  + bindings_diff)
419            nodiff = True
420        except subprocess.CalledProcessError:
421            nodiff = False
422        if nodiff:
423            self.skip('no changes to bindings were made')
424
425        for binding in bindings:
426            self.check(binding, self.check_yaml_property_name)
427            self.check(binding, self.required_false_check)
428
429    @staticmethod
430    def check(binding, callback):
431        while binding is not None:
432            callback(binding)
433            binding = binding.child_binding
434
435    def get_yaml_bindings(self):
436        """
437        Returns a list of 'dts/bindings/**/*.yaml'
438        """
439        from glob import glob
440        BINDINGS_PATH = 'dts/bindings/'
441        bindings_diff_dir, bindings = set(), []
442
443        for file_name in get_files(filter='d'):
444            if BINDINGS_PATH in file_name:
445                p = file_name.partition(BINDINGS_PATH)
446                bindings_diff_dir.add(os.path.join(p[0], p[1]))
447
448        for path in bindings_diff_dir:
449            yamls = glob(f'{os.fspath(path)}/**/*.yaml', recursive=True)
450            bindings.extend(yamls)
451
452        bindings = edtlib.bindings_from_paths(bindings, ignore_errors=True)
453        return list(bindings_diff_dir), bindings
454
455    def check_yaml_property_name(self, binding):
456        """
457        Checks if the property names in the binding file contain underscores.
458        """
459        for prop_name in binding.prop2specs:
460            if '_' in prop_name and prop_name not in BINDINGS_PROPERTIES_AL:
461                better_prop = prop_name.replace('_', '-')
462                print(f"Required: In '{binding.path}', "
463                      f"the property '{prop_name}' "
464                      f"should be renamed to '{better_prop}'.")
465                self.failure(
466                    f"{binding.path}: property '{prop_name}' contains underscores.\n"
467                    f"\tUse '{better_prop}' instead unless this property name is from Linux.\n"
468                    "Or another authoritative upstream source of bindings for "
469                    f"compatible '{binding.compatible}'.\n"
470                    "\tHint: update 'bindings_properties_allowlist.yaml' if you need to "
471                    "override this check for this property."
472                )
473
474    def required_false_check(self, binding):
475        raw_props = binding.raw.get('properties', {})
476        for prop_name, raw_prop in raw_props.items():
477            if raw_prop.get('required') is False:
478                self.failure(
479                    f'{binding.path}: property "{prop_name}": '
480                    "'required: false' is redundant, please remove"
481                )
482
483class KconfigCheck(ComplianceTest):
484    """
485    Checks is we are introducing any new warnings/errors with Kconfig,
486    for example using undefined Kconfig variables.
487    """
488    name = "Kconfig"
489    doc = "See https://docs.zephyrproject.org/latest/build/kconfig/tips.html for more details."
490
491    # Top-level Kconfig file. The path can be relative to srctree (ZEPHYR_BASE).
492    FILENAME = "Kconfig"
493
494    # Kconfig symbol prefix/namespace.
495    CONFIG_ = "CONFIG_"
496
497    def run(self):
498        kconf = self.parse_kconfig()
499
500        self.check_top_menu_not_too_long(kconf)
501        self.check_no_pointless_menuconfigs(kconf)
502        self.check_no_undef_within_kconfig(kconf)
503        self.check_no_redefined_in_defconfig(kconf)
504        self.check_no_enable_in_boolean_prompt(kconf)
505        self.check_soc_name_sync(kconf)
506        self.check_no_undef_outside_kconfig(kconf)
507        self.check_disallowed_defconfigs(kconf)
508
509    def get_modules(self, modules_file, sysbuild_modules_file, settings_file):
510        """
511        Get a list of modules and put them in a file that is parsed by
512        Kconfig
513
514        This is needed to complete Kconfig sanity tests.
515
516        """
517        # Invoke the script directly using the Python executable since this is
518        # not a module nor a pip-installed Python utility
519        zephyr_module_path = ZEPHYR_BASE / "scripts" / "zephyr_module.py"
520        cmd = [sys.executable, zephyr_module_path,
521               '--kconfig-out', modules_file,
522               '--sysbuild-kconfig-out', sysbuild_modules_file,
523               '--settings-out', settings_file]
524        try:
525            subprocess.run(cmd, check=True, stdout=subprocess.PIPE,
526                           stderr=subprocess.STDOUT)
527        except subprocess.CalledProcessError as ex:
528            self.error(ex.output.decode("utf-8"))
529
530        modules_dir = ZEPHYR_BASE / 'modules'
531        modules = [name for name in os.listdir(modules_dir) if
532                   modules_dir / name / 'Kconfig']
533
534        with open(modules_file, 'r') as fp_module_file:
535            content = fp_module_file.read()
536
537        with open(modules_file, 'w') as fp_module_file:
538            for module in modules:
539                fp_module_file.write("ZEPHYR_{}_KCONFIG = {}\n".format(
540                    re.sub('[^a-zA-Z0-9]', '_', module).upper(),
541                    modules_dir / module / 'Kconfig'
542                ))
543            fp_module_file.write(content)
544
545    def get_kconfig_dts(self, kconfig_dts_file, settings_file):
546        """
547        Generate the Kconfig.dts using dts/bindings as the source.
548
549        This is needed to complete Kconfig compliance tests.
550
551        """
552        # Invoke the script directly using the Python executable since this is
553        # not a module nor a pip-installed Python utility
554        zephyr_drv_kconfig_path = ZEPHYR_BASE / "scripts" / "dts" / "gen_driver_kconfig_dts.py"
555        binding_paths = []
556        binding_paths.append(ZEPHYR_BASE / "dts" / "bindings")
557
558        dts_root_paths = get_module_setting_root('dts', settings_file)
559        for p in dts_root_paths:
560            binding_paths.append(p / "dts" / "bindings")
561
562        cmd = [sys.executable, zephyr_drv_kconfig_path,
563               '--kconfig-out', kconfig_dts_file, '--bindings-dirs']
564        for binding_path in binding_paths:
565            cmd.append(binding_path)
566        try:
567            subprocess.run(cmd, check=True, stdout=subprocess.PIPE,
568                           stderr=subprocess.STDOUT)
569        except subprocess.CalledProcessError as ex:
570            self.error(ex.output.decode("utf-8"))
571
572    def get_v2_model(self, kconfig_dir, settings_file):
573        """
574        Get lists of v2 boards and SoCs and put them in a file that is parsed by
575        Kconfig
576
577        This is needed to complete Kconfig sanity tests.
578        """
579        os.environ['HWM_SCHEME'] = 'v2'
580        os.environ["KCONFIG_BOARD_DIR"] = os.path.join(kconfig_dir, 'boards')
581
582        os.makedirs(os.path.join(kconfig_dir, 'boards'), exist_ok=True)
583        os.makedirs(os.path.join(kconfig_dir, 'soc'), exist_ok=True)
584        os.makedirs(os.path.join(kconfig_dir, 'arch'), exist_ok=True)
585
586        kconfig_file = os.path.join(kconfig_dir, 'boards', 'Kconfig')
587        kconfig_boards_file = os.path.join(kconfig_dir, 'boards', 'Kconfig.boards')
588        kconfig_sysbuild_file = os.path.join(kconfig_dir, 'boards', 'Kconfig.sysbuild')
589        kconfig_defconfig_file = os.path.join(kconfig_dir, 'boards', 'Kconfig.defconfig')
590
591        board_roots = get_module_setting_root('board', settings_file)
592        board_roots.insert(0, ZEPHYR_BASE)
593        soc_roots = get_module_setting_root('soc', settings_file)
594        soc_roots.insert(0, ZEPHYR_BASE)
595        root_args = argparse.Namespace(**{'board_roots': board_roots,
596                                          'soc_roots': soc_roots, 'board': None,
597                                          'board_dir': []})
598        v2_boards = list_boards.find_v2_boards(root_args).values()
599
600        with open(kconfig_defconfig_file, 'w') as fp:
601            for board in v2_boards:
602                for board_dir in board.directories:
603                    fp.write('osource "' + (board_dir / 'Kconfig.defconfig').as_posix() + '"\n')
604
605        with open(kconfig_sysbuild_file, 'w') as fp:
606            for board in v2_boards:
607                for board_dir in board.directories:
608                    fp.write('osource "' + (board_dir / 'Kconfig.sysbuild').as_posix() + '"\n')
609
610        with open(kconfig_boards_file, 'w') as fp:
611            for board in v2_boards:
612                board_str = 'BOARD_' + re.sub(r"[^a-zA-Z0-9_]", "_", board.name).upper()
613                fp.write('config  ' + board_str + '\n')
614                fp.write('\t bool\n')
615                for qualifier in list_boards.board_v2_qualifiers(board):
616                    board_str = ('BOARD_' + board.name + '_' +
617                                 re.sub(r"[^a-zA-Z0-9_]", "_", qualifier)).upper()
618                    fp.write('config  ' + board_str + '\n')
619                    fp.write('\t bool\n')
620                for board_dir in board.directories:
621                    fp.write(
622                        'source "' + (board_dir / ('Kconfig.' + board.name)).as_posix() + '"\n'
623                    )
624
625        with open(kconfig_file, 'w') as fp:
626            for board in v2_boards:
627                for board_dir in board.directories:
628                    fp.write('osource "' + (board_dir / 'Kconfig').as_posix() + '"\n')
629
630        kconfig_defconfig_file = os.path.join(kconfig_dir, 'soc', 'Kconfig.defconfig')
631        kconfig_sysbuild_file = os.path.join(kconfig_dir, 'soc', 'Kconfig.sysbuild')
632        kconfig_soc_file = os.path.join(kconfig_dir, 'soc', 'Kconfig.soc')
633        kconfig_file = os.path.join(kconfig_dir, 'soc', 'Kconfig')
634
635        root_args = argparse.Namespace(**{'soc_roots': soc_roots})
636        v2_systems = list_hardware.find_v2_systems(root_args)
637
638        soc_folders = {folder for soc in v2_systems.get_socs() for folder in soc.folder}
639        with open(kconfig_defconfig_file, 'w') as fp:
640            for folder in soc_folders:
641                fp.write('osource "' + (Path(folder) / 'Kconfig.defconfig').as_posix() + '"\n')
642
643        with open(kconfig_sysbuild_file, 'w') as fp:
644            for folder in soc_folders:
645                fp.write('osource "' + (Path(folder) / 'Kconfig.sysbuild').as_posix() + '"\n')
646
647        with open(kconfig_soc_file, 'w') as fp:
648            for folder in soc_folders:
649                fp.write('source "' + (Path(folder) / 'Kconfig.soc').as_posix() + '"\n')
650
651        with open(kconfig_file, 'w') as fp:
652            for folder in soc_folders:
653                fp.write('source "' + (Path(folder) / 'Kconfig').as_posix() + '"\n')
654
655        kconfig_file = os.path.join(kconfig_dir, 'arch', 'Kconfig')
656
657        root_args = argparse.Namespace(**{'arch_roots': [ZEPHYR_BASE], 'arch': None})
658        v2_archs = list_hardware.find_v2_archs(root_args)
659
660        with open(kconfig_file, 'w') as fp:
661            for arch in v2_archs['archs']:
662                fp.write('source "' + (Path(arch['path']) / 'Kconfig').as_posix() + '"\n')
663
664    def parse_kconfig(self):
665        """
666        Returns a kconfiglib.Kconfig object for the Kconfig files. We reuse
667        this object for all tests to avoid having to reparse for each test.
668        """
669        # Put the Kconfiglib path first to make sure no local Kconfiglib version is
670        # used
671        kconfig_path = ZEPHYR_BASE / "scripts" / "kconfig"
672        if not kconfig_path.exists():
673            self.error(kconfig_path + " not found")
674
675        kconfiglib_dir = tempfile.mkdtemp(prefix="kconfiglib_")
676
677        sys.path.insert(0, str(kconfig_path))
678        # Import globally so that e.g. kconfiglib.Symbol can be referenced in
679        # tests
680        global kconfiglib
681        import kconfiglib
682
683        # Look up Kconfig files relative to ZEPHYR_BASE
684        os.environ["srctree"] = str(ZEPHYR_BASE)
685
686        # Parse the entire Kconfig tree, to make sure we see all symbols
687        os.environ["SOC_DIR"] = "soc/"
688        os.environ["ARCH_DIR"] = "arch/"
689        os.environ["BOARD"] = "boards"
690        os.environ["ARCH"] = "*"
691        os.environ["KCONFIG_BINARY_DIR"] = kconfiglib_dir
692        os.environ['DEVICETREE_CONF'] = "dummy"
693        os.environ['TOOLCHAIN_HAS_NEWLIB'] = "y"
694
695        # Older name for DEVICETREE_CONF, for compatibility with older Zephyr
696        # versions that don't have the renaming
697        os.environ["GENERATED_DTS_BOARD_CONF"] = "dummy"
698
699        # For multi repo support
700        self.get_modules(os.path.join(kconfiglib_dir, "Kconfig.modules"),
701                         os.path.join(kconfiglib_dir, "Kconfig.sysbuild.modules"),
702                         os.path.join(kconfiglib_dir, "settings_file.txt"))
703        # For Kconfig.dts support
704        self.get_kconfig_dts(os.path.join(kconfiglib_dir, "Kconfig.dts"),
705                             os.path.join(kconfiglib_dir, "settings_file.txt"))
706        # For hardware model support (board, soc, arch)
707        self.get_v2_model(kconfiglib_dir, os.path.join(kconfiglib_dir, "settings_file.txt"))
708
709        # Tells Kconfiglib to generate warnings for all references to undefined
710        # symbols within Kconfig files
711        os.environ["KCONFIG_WARN_UNDEF"] = "y"
712
713        try:
714            # Note this will both print warnings to stderr _and_ return
715            # them: so some warnings might get printed
716            # twice. "warn_to_stderr=False" could unfortunately cause
717            # some (other) warnings to never be printed.
718            return kconfiglib.Kconfig(filename=self.FILENAME)
719        except kconfiglib.KconfigError as e:
720            self.failure(str(e))
721            raise EndTest
722        finally:
723            # Clean up the temporary directory
724            shutil.rmtree(kconfiglib_dir)
725
726    def module_kconfigs(self, regex):
727        manifest = Manifest.from_file()
728        kconfigs = ""
729
730        # Use hard coded paths for Zephyr for tests, samples and ext. module root
731        tmp_output = git("grep", "-I", "-h", "--perl-regexp", regex, "--", ":tests", ":samples",
732                         ":modules", cwd=ZEPHYR_BASE, ignore_non_zero=True)
733
734        if len(tmp_output) > 0:
735            kconfigs += tmp_output + "\n"
736
737        for project in manifest.get_projects([]):
738            if not manifest.is_active(project):
739                continue
740
741            if not project.is_cloned():
742                continue
743
744            module_path = PurePath(project.abspath)
745            module_yml = module_path.joinpath('zephyr/module.yml')
746
747            if not Path(module_yml).is_file():
748                module_yml = module_path.joinpath('zephyr/module.yaml')
749
750            if Path(module_yml).is_file():
751                dirs = []
752
753                with Path(module_yml).open('r', encoding='utf-8') as f:
754                    meta = yaml.load(f.read(), Loader=SafeLoader)
755
756                for folder_type in ['samples', 'tests']:
757                    if folder_type in meta:
758                        for path_ext in meta[folder_type]:
759                            path_full = module_path.joinpath(path_ext)
760
761                            if Path(path_full).is_dir():
762                                dirs.append(":" + path_ext)
763
764                # Add ext. module root, if one is defined
765                if 'build' in meta and 'settings' in meta['build'] and \
766                     'module_ext_root' in meta['build']['settings']:
767                    path_full = module_path.joinpath(meta['build']['settings']['module_ext_root'])
768
769                    if Path(path_full).is_dir():
770                        dirs.append(":" + meta['build']['settings']['module_ext_root'])
771
772                if len(dirs) > 0:
773                    tmp_output = git("grep", "-I", "-h", "--perl-regexp", regex, "--",
774                                     *dirs, cwd=module_path, ignore_non_zero=True)
775
776                    if len(tmp_output) > 0:
777                        kconfigs += tmp_output + "\n"
778
779        return kconfigs
780
781    def get_logging_syms(self, kconf):
782        # Returns a set() with the names of the Kconfig symbols generated with
783        # logging template in samples/tests folders. The Kconfig symbols doesn't
784        # include `CONFIG_` and for each module declared there is one symbol
785        # per suffix created.
786
787        suffixes = [
788            "_LOG_LEVEL",
789            "_LOG_LEVEL_DBG",
790            "_LOG_LEVEL_ERR",
791            "_LOG_LEVEL_INF",
792            "_LOG_LEVEL_WRN",
793            "_LOG_LEVEL_OFF",
794            "_LOG_LEVEL_INHERIT",
795            "_LOG_LEVEL_DEFAULT",
796        ]
797
798        # Warning: Needs to work with both --perl-regexp and the 're' module.
799        regex = r"^\s*(?:module\s*=\s*)([A-Z0-9_]+)\s*(?:#|$)"
800
801        # Grep samples/ and tests/ for symbol definitions in all modules
802        grep_stdout = self.module_kconfigs(regex)
803
804        names = re.findall(regex, grep_stdout, re.MULTILINE)
805
806        kconf_syms = []
807        for name in names:
808            for suffix in suffixes:
809                kconf_syms.append(f"{name}{suffix}")
810
811        return set(kconf_syms)
812
813    def module_disallowed_check(self, module_path, type, folder, meta, regex):
814        # Returns a list with lines from git grep which includes Kconfigs from defconfig files
815        entry = type + '_root'
816        git_folder = ":" + folder
817
818        if entry in meta['build']['settings']:
819            tmp_path = module_path.joinpath(meta['build']['settings'][entry])
820
821            if Path(tmp_path.joinpath(folder)).is_dir():
822                tmp_output = git("grep", "--line-number", "-I", "--null",
823                                 "--perl-regexp", regex, "--", git_folder,
824                                 cwd=tmp_path, ignore_non_zero=True)
825
826                if len(tmp_output) > 0:
827                    return tmp_output.splitlines()
828        return []
829
830    def check_disallowed_defconfigs(self, kconf):
831        """
832        Checks that there are no disallowed Kconfigs used in board/SoC defconfig files
833        """
834        # Grep for symbol references.
835        #
836        # Example output line for a reference to CONFIG_FOO at line 17 of
837        # foo/bar.c:
838        #
839        #   foo/bar.c<null>17<null>#ifdef CONFIG_FOO
840        #
841        # 'git grep --only-matching' would get rid of the surrounding context
842        # ('#ifdef '), but it was added fairly recently (second half of 2018),
843        # so we extract the references from each line ourselves instead.
844        #
845        # The regex uses word boundaries (\b) to isolate the reference, and
846        # negative lookahead to automatically allowlist the following:
847        #
848        #  - ##, for token pasting (CONFIG_FOO_##X)
849        #
850        #  - $, e.g. for CMake variable expansion (CONFIG_FOO_${VAR})
851        #
852        #  - @, e.g. for CMakes's configure_file() (CONFIG_FOO_@VAR@)
853        #
854        #  - {, e.g. for Python scripts ("CONFIG_FOO_{}_BAR".format(...)")
855        #
856        #  - *, meant for comments like '#endif /* CONFIG_FOO_* */
857
858        disallowed_symbols = {
859            "PINCTRL": "Drivers requiring PINCTRL must SELECT it instead.",
860            "BOARD_EARLY_INIT_HOOK": "Boards requiring hooks must SELECT them instead.",
861            "BOARD_LATE_INIT_HOOK": "Boards requiring hooks must SELECT them instead.",
862        }
863
864        disallowed_regex = "(" + "|".join(disallowed_symbols.keys()) + ")$"
865
866        # Warning: Needs to work with both --perl-regexp and the 're' module
867        # Windows
868        if os.name == 'nt':
869            # Remove word boundaries on Windows implementation
870            regex_boards = r"CONFIG_[A-Z0-9_]+(?!\s*##|[$@{(.*])"
871            regex_socs = r"config[ \t]+[A-Z0-9_]+"
872        else:
873            regex_boards = r"\bCONFIG_[A-Z0-9_]+\b(?!\s*##|[$@{(.*])"
874            regex_socs = r"\bconfig\s+[A-Z0-9_]+$"
875
876        grep_stdout_boards = git("grep", "--line-number", "-I", "--null",
877                                 "--perl-regexp", regex_boards, "--", ":boards",
878                                 cwd=ZEPHYR_BASE).splitlines()
879        grep_stdout_socs = git("grep", "--line-number", "-I", "--null",
880                               "--perl-regexp", regex_socs, "--", ":soc",
881                               cwd=ZEPHYR_BASE).splitlines()
882
883        manifest = Manifest.from_file()
884        for project in manifest.get_projects([]):
885            if not manifest.is_active(project):
886                continue
887
888            if not project.is_cloned():
889                continue
890
891            module_path = PurePath(project.abspath)
892            module_yml = module_path.joinpath('zephyr/module.yml')
893
894            if not Path(module_yml).is_file():
895                module_yml = module_path.joinpath('zephyr/module.yaml')
896
897            if Path(module_yml).is_file():
898                with Path(module_yml).open('r', encoding='utf-8') as f:
899                    meta = yaml.load(f.read(), Loader=SafeLoader)
900
901                    if 'build' in meta and 'settings' in meta['build']:
902                        grep_stdout_boards.extend(self.module_disallowed_check(module_path,
903                                                                               'board',
904                                                                               'boards', meta,
905                                                                               regex_boards))
906                        grep_stdout_socs.extend(self.module_disallowed_check(module_path, 'soc',
907                                                                             'soc', meta,
908                                                                             regex_socs))
909
910        # Board processing
911        # splitlines() supports various line terminators
912        for grep_line in grep_stdout_boards:
913            path, lineno, line = grep_line.split("\0")
914
915            # Extract symbol references (might be more than one) within the line
916            for sym_name in re.findall(regex_boards, line):
917                sym_name = sym_name[len("CONFIG_"):]
918                # Only check in Kconfig fragment files, references might exist in documentation
919                if re.match(disallowed_regex, sym_name) and (path[-len("conf"):] == "conf" or
920                path[-len("defconfig"):] == "defconfig"):
921                    reason = disallowed_symbols.get(sym_name)
922                    self.fmtd_failure("error", "BoardDisallowedKconfigs", path, lineno, desc=f"""
923Found disallowed Kconfig symbol in board Kconfig files: CONFIG_{sym_name:35}
924{reason}
925""")
926
927        # SoCs processing
928        # splitlines() supports various line terminators
929        for grep_line in grep_stdout_socs:
930            path, lineno, line = grep_line.split("\0")
931
932            # Extract symbol references (might be more than one) within the line
933            for sym_name in re.findall(regex_socs, line):
934                sym_name = sym_name[len("config"):].strip()
935                # Only check in Kconfig defconfig files
936                if re.match(disallowed_regex, sym_name) and "defconfig" in path:
937                    reason = disallowed_symbols.get(sym_name, "Unknown reason")
938                    self.fmtd_failure("error", "SoCDisallowedKconfigs", path, lineno, desc=f"""
939Found disallowed Kconfig symbol in SoC Kconfig files: {sym_name:35}
940{reason}
941""")
942
943    def get_defined_syms(self, kconf):
944        # Returns a set() with the names of all defined Kconfig symbols (with no
945        # 'CONFIG_' prefix). This is complicated by samples and tests defining
946        # their own Kconfig trees. For those, just grep for 'config FOO' to find
947        # definitions. Doing it "properly" with Kconfiglib is still useful for
948        # the main tree, because some symbols are defined using preprocessor
949        # macros.
950
951        # Warning: Needs to work with both --perl-regexp and the 're' module.
952        # (?:...) is a non-capturing group.
953        regex = r"^\s*(?:menu)?config\s*([A-Z0-9_]+)\s*(?:#|$)"
954
955        # Grep samples/ and tests/ for symbol definitions in all modules
956        grep_stdout = self.module_kconfigs(regex)
957
958        # Generate combined list of configs and choices from the main Kconfig tree.
959        kconf_syms = kconf.unique_defined_syms + kconf.unique_choices
960
961        # Symbols from the main Kconfig tree + grepped definitions from samples
962        # and tests
963        return set(
964            [sym.name for sym in kconf_syms]
965            + re.findall(regex, grep_stdout, re.MULTILINE)
966        ).union(self.get_logging_syms(kconf))
967
968    def check_top_menu_not_too_long(self, kconf):
969        """
970        Checks that there aren't too many items in the top-level menu (which
971        might be a sign that stuff accidentally got added there)
972        """
973        max_top_items = 50
974
975        n_top_items = 0
976        node = kconf.top_node.list
977        while node:
978            # Only count items with prompts. Other items will never be
979            # shown in the menuconfig (outside show-all mode).
980            if node.prompt:
981                n_top_items += 1
982            node = node.next
983
984        if n_top_items > max_top_items:
985            self.failure(f"""
986Expected no more than {max_top_items} potentially visible items (items with
987prompts) in the top-level Kconfig menu, found {n_top_items} items. If you're
988deliberately adding new entries, then bump the 'max_top_items' variable in
989{__file__}.""")
990
991    def check_no_redefined_in_defconfig(self, kconf):
992        # Checks that no symbols are (re)defined in defconfigs.
993
994        for node in kconf.node_iter():
995            # 'kconfiglib' is global
996            # pylint: disable=undefined-variable
997            if "defconfig" in node.filename and (node.prompt or node.help):
998                name = (node.item.name if node.item not in
999                        (kconfiglib.MENU, kconfiglib.COMMENT) else str(node))
1000                self.failure(f"""
1001Kconfig node '{name}' found with prompt or help in {node.filename}.
1002Options must not be defined in defconfig files.
1003""")
1004                continue
1005
1006    def check_no_enable_in_boolean_prompt(self, kconf):
1007        # Checks that boolean's prompt does not start with "Enable...".
1008
1009        for node in kconf.node_iter():
1010            # skip Kconfig nodes not in-tree (will present an absolute path)
1011            if os.path.isabs(node.filename):
1012                continue
1013
1014            # 'kconfiglib' is global
1015            # pylint: disable=undefined-variable
1016
1017            # only process boolean symbols with a prompt
1018            if (not isinstance(node.item, kconfiglib.Symbol) or
1019                node.item.type != kconfiglib.BOOL or
1020                not node.prompt or
1021                not node.prompt[0]):
1022                continue
1023
1024            if re.match(r"^[Ee]nable.*", node.prompt[0]):
1025                self.failure(f"""
1026Boolean option '{node.item.name}' prompt must not start with 'Enable...'. Please
1027check Kconfig guidelines.
1028""")
1029                continue
1030
1031    def check_no_pointless_menuconfigs(self, kconf):
1032        # Checks that there are no pointless 'menuconfig' symbols without
1033        # children in the Kconfig files
1034
1035        bad_mconfs = []
1036        for node in kconf.node_iter():
1037            # 'kconfiglib' is global
1038            # pylint: disable=undefined-variable
1039
1040            # Avoid flagging empty regular menus and choices, in case people do
1041            # something with 'osource' (could happen for 'menuconfig' symbols
1042            # too, though it's less likely)
1043            if node.is_menuconfig and not node.list and \
1044               isinstance(node.item, kconfiglib.Symbol):
1045
1046                bad_mconfs.append(node)
1047
1048        if bad_mconfs:
1049            self.failure("""\
1050Found pointless 'menuconfig' symbols without children. Use regular 'config'
1051symbols instead. See
1052https://docs.zephyrproject.org/latest/build/kconfig/tips.html#menuconfig-symbols.
1053
1054""" + "\n".join(f"{node.item.name:35} {node.filename}:{node.linenr}"
1055                for node in bad_mconfs))
1056
1057    def check_no_undef_within_kconfig(self, kconf):
1058        """
1059        Checks that there are no references to undefined Kconfig symbols within
1060        the Kconfig files
1061        """
1062        undef_ref_warnings = "\n\n\n".join(warning for warning in kconf.warnings
1063                                           if "undefined symbol" in warning)
1064
1065        if undef_ref_warnings:
1066            self.failure(f"Undefined Kconfig symbols:\n\n {undef_ref_warnings}")
1067
1068    def check_soc_name_sync(self, kconf):
1069        root_args = argparse.Namespace(**{'soc_roots': [ZEPHYR_BASE]})
1070        v2_systems = list_hardware.find_v2_systems(root_args)
1071
1072        soc_names = {soc.name for soc in v2_systems.get_socs()}
1073
1074        soc_kconfig_names = set()
1075        for node in kconf.node_iter():
1076            # 'kconfiglib' is global
1077            # pylint: disable=undefined-variable
1078            if isinstance(node.item, kconfiglib.Symbol) and node.item.name == "SOC":
1079                n = node.item
1080                for d in n.defaults:
1081                    soc_kconfig_names.add(d[0].name)
1082
1083        soc_name_warnings = []
1084        for name in soc_names:
1085            if name not in soc_kconfig_names:
1086                soc_name_warnings.append(f"soc name: {name} not found in CONFIG_SOC defaults.")
1087
1088        if soc_name_warnings:
1089            soc_name_warning_str = '\n'.join(soc_name_warnings)
1090            self.failure(f'''
1091Missing SoC names or CONFIG_SOC vs soc.yml out of sync:
1092
1093{soc_name_warning_str}
1094''')
1095
1096    def check_no_undef_outside_kconfig(self, kconf):
1097        """
1098        Checks that there are no references to undefined Kconfig symbols
1099        outside Kconfig files (any CONFIG_FOO where no FOO symbol exists)
1100        """
1101        # Grep for symbol references.
1102        #
1103        # Example output line for a reference to CONFIG_FOO at line 17 of
1104        # foo/bar.c:
1105        #
1106        #   foo/bar.c<null>17<null>#ifdef CONFIG_FOO
1107        #
1108        # 'git grep --only-matching' would get rid of the surrounding context
1109        # ('#ifdef '), but it was added fairly recently (second half of 2018),
1110        # so we extract the references from each line ourselves instead.
1111        #
1112        # The regex uses word boundaries (\b) to isolate the reference, and
1113        # negative lookahead to automatically allowlist the following:
1114        #
1115        #  - ##, for token pasting (CONFIG_FOO_##X)
1116        #
1117        #  - $, e.g. for CMake variable expansion (CONFIG_FOO_${VAR})
1118        #
1119        #  - @, e.g. for CMakes's configure_file() (CONFIG_FOO_@VAR@)
1120        #
1121        #  - {, e.g. for Python scripts ("CONFIG_FOO_{}_BAR".format(...)")
1122        #
1123        #  - *, meant for comments like '#endif /* CONFIG_FOO_* */
1124
1125        defined_syms = self.get_defined_syms(kconf)
1126
1127        # Maps each undefined symbol to a list <filename>:<linenr> strings
1128        undef_to_locs = collections.defaultdict(list)
1129
1130        # Warning: Needs to work with both --perl-regexp and the 're' module
1131        regex = r"\b" + self.CONFIG_ + r"[A-Z0-9_]+\b(?!\s*##|[$@{(.*])"
1132
1133        # Skip doc/releases and doc/security/vulnerabilities.rst, which often
1134        # reference removed symbols
1135        grep_stdout = git("grep", "--line-number", "-I", "--null",
1136                          "--perl-regexp", regex, "--", ":!/doc/releases",
1137                          ":!/doc/security/vulnerabilities.rst",
1138                          cwd=GIT_TOP)
1139
1140        # splitlines() supports various line terminators
1141        for grep_line in grep_stdout.splitlines():
1142            path, lineno, line = grep_line.split("\0")
1143
1144            # Extract symbol references (might be more than one) within the
1145            # line
1146            for sym_name in re.findall(regex, line):
1147                sym_name = sym_name[len(self.CONFIG_):]  # Strip CONFIG_
1148                if sym_name not in defined_syms and \
1149                   sym_name not in self.UNDEF_KCONFIG_ALLOWLIST and \
1150                   not (sym_name.endswith("_MODULE") and sym_name[:-7] in defined_syms) \
1151                   and not sym_name.startswith("BOARD_REVISION_"):
1152
1153                    undef_to_locs[sym_name].append(f"{path}:{lineno}")
1154
1155        if not undef_to_locs:
1156            return
1157
1158        # String that describes all referenced but undefined Kconfig symbols,
1159        # in alphabetical order, along with the locations where they're
1160        # referenced. Example:
1161        #
1162        #   CONFIG_ALSO_MISSING    arch/xtensa/core/fatal.c:273
1163        #   CONFIG_MISSING         arch/xtensa/core/fatal.c:264, subsys/fb/cfb.c:20
1164        undef_desc = "\n".join(f"{self.CONFIG_}{sym_name:35} {', '.join(locs)}"
1165            for sym_name, locs in sorted(undef_to_locs.items()))
1166
1167        self.failure(f"""
1168Found references to undefined Kconfig symbols. If any of these are false
1169positives, then add them to UNDEF_KCONFIG_ALLOWLIST in {__file__}.
1170
1171If the reference is for a comment like /* CONFIG_FOO_* */ (or
1172/* CONFIG_FOO_*_... */), then please use exactly that form (with the '*'). The
1173CI check knows not to flag it.
1174
1175More generally, a reference followed by $, @, {{, (, ., *, or ## will never be
1176flagged.
1177
1178{undef_desc}""")
1179
1180    # Many of these are symbols used as examples. Note that the list is sorted
1181    # alphabetically, and skips the CONFIG_ prefix.
1182    UNDEF_KCONFIG_ALLOWLIST = {
1183        # zephyr-keep-sorted-start re(^\s+")
1184        "ALSO_MISSING",
1185        "APP_LINK_WITH_",
1186        "APP_LOG_LEVEL", # Application log level is not detected correctly as
1187                         # the option is defined using a template, so it can't
1188                         # be grepped
1189        "APP_LOG_LEVEL_DBG",
1190        "ARMCLANG_STD_LIBC",  # The ARMCLANG_STD_LIBC is defined in the
1191                              # toolchain Kconfig which is sourced based on
1192                              # Zephyr toolchain variant and therefore not
1193                              # visible to compliance.
1194        "BINDESC_", # Used in documentation as a prefix
1195        "BOARD_", # Used as regex in scripts/utils/board_v1_to_v2.py
1196        "BOARD_MPS2_AN521_CPUTEST", # Used for board and SoC extension feature tests
1197        "BOARD_NATIVE_SIM_NATIVE_64_TWO", # Used for board and SoC extension feature tests
1198        "BOARD_NATIVE_SIM_NATIVE_ONE", # Used for board and SoC extension feature tests
1199        "BOARD_UNIT_TESTING",  # Used for tests/unit
1200        "BOOT_DIRECT_XIP", # Used in sysbuild for MCUboot configuration
1201        "BOOT_DIRECT_XIP_REVERT", # Used in sysbuild for MCUboot configuration
1202        "BOOT_ENCRYPTION_KEY_FILE", # Used in sysbuild
1203        "BOOT_ENCRYPT_IMAGE", # Used in sysbuild
1204        "BOOT_FIRMWARE_LOADER", # Used in sysbuild for MCUboot configuration
1205        "BOOT_FIRMWARE_LOADER_BOOT_MODE", # Used in sysbuild for MCUboot configuration
1206        "BOOT_IMAGE_EXECUTABLE_RAM_SIZE", # MCUboot setting
1207        "BOOT_IMAGE_EXECUTABLE_RAM_START", # MCUboot setting
1208        "BOOT_MAX_IMG_SECTORS_AUTO", # Used in sysbuild
1209        "BOOT_RAM_LOAD", # Used in sysbuild for MCUboot configuration
1210        "BOOT_RAM_LOAD_REVERT", # Used in sysbuild for MCUboot configuration
1211        "BOOT_SERIAL_BOOT_MODE",     # Used in (sysbuild-based) test/
1212                                     # documentation
1213        "BOOT_SERIAL_CDC_ACM",       # Used in (sysbuild-based) test
1214        "BOOT_SERIAL_ENTRANCE_GPIO", # Used in (sysbuild-based) test
1215        "BOOT_SERIAL_IMG_GRP_HASH",  # Used in documentation
1216        "BOOT_SERIAL_UART",          # Used in (sysbuild-based) test
1217        "BOOT_SHARE_BACKEND_RETENTION", # Used in Kconfig text
1218        "BOOT_SHARE_DATA",           # Used in Kconfig text
1219        "BOOT_SHARE_DATA_BOOTINFO", # Used in (sysbuild-based) test
1220        "BOOT_SIGNATURE_KEY_FILE",   # MCUboot setting used by sysbuild
1221        "BOOT_SIGNATURE_TYPE_ECDSA_P256", # MCUboot setting used by sysbuild
1222        "BOOT_SIGNATURE_TYPE_ED25519",    # MCUboot setting used by sysbuild
1223        "BOOT_SIGNATURE_TYPE_NONE",       # MCUboot setting used by sysbuild
1224        "BOOT_SIGNATURE_TYPE_RSA",        # MCUboot setting used by sysbuild
1225        "BOOT_SWAP_USING_MOVE", # Used in sysbuild for MCUboot configuration
1226        "BOOT_SWAP_USING_OFFSET", # Used in sysbuild for MCUboot configuration
1227        "BOOT_SWAP_USING_SCRATCH", # Used in sysbuild for MCUboot configuration
1228        "BOOT_UPGRADE_ONLY", # Used in example adjusting MCUboot config, but
1229                             # symbol is defined in MCUboot itself.
1230        "BOOT_VALIDATE_SLOT0",       # Used in (sysbuild-based) test
1231        "BOOT_WATCHDOG_FEED",        # Used in (sysbuild-based) test
1232        "BT_6LOWPAN",  # Defined in Linux, mentioned in docs
1233        "CDC_ACM_PORT_NAME_",
1234        "CHRE",  # Optional module
1235        "CHRE_LOG_LEVEL_DBG",  # Optional module
1236        "CLOCK_STM32_SYSCLK_SRC_",
1237        "CMD_CACHE",  # Defined in U-Boot, mentioned in docs
1238        "CMU",
1239        "COMPILER_RT_RTLIB",
1240        "CRC",  # Used in TI CC13x2 / CC26x2 SDK comment
1241        "DEEP_SLEEP",  # #defined by RV32M1 in ext/
1242        "DESCRIPTION",
1243        "ERR",
1244        "ESP_DIF_LIBRARY",  # Referenced in CMake comment
1245        "EXPERIMENTAL",
1246        "EXTRA_FIRMWARE_DIR", # Linux, in boards/xtensa/intel_adsp_cavs25/doc
1247        "FFT",  # Used as an example in cmake/extensions.cmake
1248        "FLAG",  # Used as an example
1249        "FOO",
1250        "FOO_LOG_LEVEL",
1251        "FOO_SETTING_1",
1252        "FOO_SETTING_2",
1253        "HEAP_MEM_POOL_ADD_SIZE_", # Used as an option matching prefix
1254        "HUGETLBFS",          # Linux, in boards/xtensa/intel_adsp_cavs25/doc
1255        "IAR_BUFFERED_WRITE",
1256        "IAR_DATA_INIT",
1257        "IAR_LIBCPP",
1258        "IAR_SEMIHOSTING",
1259        "IAR_ZEPHYR_INIT",
1260        "IPC_SERVICE_ICMSG_BOND_NOTIFY_REPEAT_TO_MS", # Used in ICMsg tests for intercompatibility
1261                                                      # with older versions of the ICMsg.
1262        "LIBGCC_RTLIB",
1263        "LLVM_USE_LD",   # Both LLVM_USE_* are in cmake/toolchain/llvm/Kconfig
1264        "LLVM_USE_LLD",  # which are only included if LLVM is selected but
1265                         # not other toolchains. Compliance check would complain,
1266                         # for example, if you are using GCC.
1267        "LOG_BACKEND_MOCK_OUTPUT_DEFAULT", #Referenced in tests/subsys/logging/log_syst
1268        "LOG_BACKEND_MOCK_OUTPUT_SYST", #Referenced in testcase.yaml of log_syst test
1269        "LSM6DSO_INT_PIN",
1270        "MCUBOOT_ACTION_HOOKS",     # Used in (sysbuild-based) test
1271        "MCUBOOT_CLEANUP_ARM_CORE", # Used in (sysbuild-based) test
1272        "MCUBOOT_DOWNGRADE_PREVENTION", # but symbols are defined in MCUboot
1273                                        # itself.
1274        "MCUBOOT_LOG_LEVEL_DBG",
1275        "MCUBOOT_LOG_LEVEL_INF",
1276        "MCUBOOT_LOG_LEVEL_WRN",        # Used in example adjusting MCUboot
1277                                        # config,
1278        "MCUBOOT_SERIAL",           # Used in (sysbuild-based) test/
1279                                    # documentation
1280        "MCUMGR_GRP_EXAMPLE_OTHER_HOOK", # Used in documentation
1281        "MCUX_HW_CORE", # Used in modules/hal_nxp/mcux/mcux-sdk-ng/device/device.cmake.
1282                        # It is a variable used by MCUX SDK CMake.
1283        "MCUX_HW_DEVICE_CORE", # Used in modules/hal_nxp/mcux/mcux-sdk-ng/device/device.cmake.
1284                               # It is a variable used by MCUX SDK CMake.
1285        "MCUX_HW_FPU_TYPE", # Used in modules/hal_nxp/mcux/mcux-sdk-ng/device/device.cmake.
1286                            # It is a variable used by MCUX SDK CMake.
1287        "MISSING",
1288        "MODULES",
1289        "MODVERSIONS",        # Linux, in boards/xtensa/intel_adsp_cavs25/doc
1290        "MYFEATURE",
1291        "MY_DRIVER_0",
1292        "NORMAL_SLEEP",  # #defined by RV32M1 in ext/
1293        "NRF_WIFI_FW_BIN", # Directly passed from CMakeLists.txt
1294        "OPT",
1295        "OPT_0",
1296        "PEDO_THS_MIN",
1297        "PSA_H", # This is used in config-psa.h as guard for the header file
1298        "REG1",
1299        "REG2",
1300        "RIMAGE_SIGNING_SCHEMA",  # Optional module
1301        "SECURITY_LOADPIN",   # Linux, in boards/xtensa/intel_adsp_cavs25/doc
1302        "SEL",
1303        "SHIFT",
1304        "SINGLE_APPLICATION_SLOT", # Used in sysbuild for MCUboot configuration
1305        "SINGLE_APPLICATION_SLOT_RAM_LOAD", # Used in sysbuild for MCUboot configuration
1306        "SOC_SDKNG_UNSUPPORTED", # Used in modules/hal_nxp/mcux/CMakeLists.txt
1307        "SOC_SERIES_", # Used as regex in scripts/utils/board_v1_to_v2.py
1308        "SOC_WATCH",  # Issue 13749
1309        "SOME_BOOL",
1310        "SOME_INT",
1311        "SOME_OTHER_BOOL",
1312        "SOME_STRING",
1313        "SRAM2",  # Referenced in a comment in samples/application_development
1314        "STACK_SIZE",  # Used as an example in the Kconfig docs
1315        "STD_CPP",  # Referenced in CMake comment
1316        "TEST1",
1317        "TOOLCHAIN", # Defined in modules/hal_nxp/mcux/mcux-sdk-ng/basic.cmake.
1318                     # It is used by MCUX SDK cmake functions to add content
1319                     # based on current toolchain.
1320        "TOOLCHAIN_ARCMWDT_SUPPORTS_THREAD_LOCAL_STORAGE", # The symbol is defined in the toolchain
1321                                                    # Kconfig which is sourced based on Zephyr
1322                                                    # toolchain variant and therefore not visible
1323                                                    # to compliance.
1324        "TYPE_BOOLEAN",
1325        "USB_CONSOLE",
1326        "USE_STDC_",
1327        "WHATEVER",
1328        "ZEPHYR_TRY_MASS_ERASE", # MCUBoot setting described in sysbuild
1329                                 # documentation
1330        "ZTEST_FAIL_TEST_",  # regex in tests/ztest/fail/CMakeLists.txt
1331        # zephyr-keep-sorted-stop
1332    }
1333
1334
1335class KconfigBasicCheck(KconfigCheck):
1336    """
1337    Checks if we are introducing any new warnings/errors with Kconfig,
1338    for example using undefined Kconfig variables.
1339    This runs the basic Kconfig test, which is checking only for undefined
1340    references inside the Kconfig tree.
1341    """
1342    name = "KconfigBasic"
1343
1344    def check_no_undef_outside_kconfig(self, kconf):
1345        pass
1346
1347
1348class KconfigBasicNoModulesCheck(KconfigBasicCheck):
1349    """
1350    Checks if we are introducing any new warnings/errors with Kconfig when no
1351    modules are available. Catches symbols used in the main repository but
1352    defined only in a module.
1353    """
1354    name = "KconfigBasicNoModules"
1355    path_hint = "<zephyr-base>"
1356
1357    def get_modules(self, modules_file, sysbuild_modules_file, settings_file):
1358        with open(modules_file, 'w') as fp_module_file:
1359            fp_module_file.write("# Empty\n")
1360
1361        with open(sysbuild_modules_file, 'w') as fp_module_file:
1362            fp_module_file.write("# Empty\n")
1363
1364
1365class KconfigHWMv2Check(KconfigBasicCheck):
1366    """
1367    This runs the Kconfig test for board and SoC v2 scheme.
1368    This check ensures that all symbols inside the v2 scheme is also defined
1369    within the same tree.
1370    This ensures the board and SoC trees are fully self-contained and reusable.
1371    """
1372    name = "KconfigHWMv2"
1373
1374    # Use dedicated Kconfig board / soc v2 scheme file.
1375    # This file sources only v2 scheme tree.
1376    FILENAME = os.path.join(os.path.dirname(__file__), "Kconfig.board.v2")
1377
1378
1379class SysbuildKconfigCheck(KconfigCheck):
1380    """
1381    Checks if we are introducing any new warnings/errors with sysbuild Kconfig,
1382    for example using undefined Kconfig variables.
1383    """
1384    name = "SysbuildKconfig"
1385
1386    FILENAME = "share/sysbuild/Kconfig"
1387    CONFIG_ = "SB_CONFIG_"
1388
1389    # A different allowlist is used for symbols prefixed with SB_CONFIG_ (omitted here).
1390    UNDEF_KCONFIG_ALLOWLIST = {
1391        # zephyr-keep-sorted-start re(^\s+")
1392        "FOO",
1393        "MY_IMAGE", # Used in sysbuild documentation as example
1394        "OTHER_APP_IMAGE_NAME", # Used in sysbuild documentation as example
1395        "OTHER_APP_IMAGE_PATH", # Used in sysbuild documentation as example
1396        "SECOND_SAMPLE", # Used in sysbuild documentation
1397        "SUIT_ENVELOPE", # Used by nRF runners to program provisioning data
1398        "SUIT_MPI_APP_AREA_PATH", # Used by nRF runners to program provisioning data
1399        "SUIT_MPI_GENERATE", # Used by nRF runners to program provisioning data
1400        "SUIT_MPI_RAD_AREA_PATH", # Used by nRF runners to program provisioning data
1401        # zephyr-keep-sorted-stop
1402    }
1403
1404
1405class SysbuildKconfigBasicCheck(SysbuildKconfigCheck, KconfigBasicCheck):
1406    """
1407    Checks if we are introducing any new warnings/errors with sysbuild Kconfig,
1408    for example using undefined Kconfig variables.
1409    This runs the basic Kconfig test, which is checking only for undefined
1410    references inside the sysbuild Kconfig tree.
1411    """
1412    name = "SysbuildKconfigBasic"
1413
1414
1415class SysbuildKconfigBasicNoModulesCheck(SysbuildKconfigCheck, KconfigBasicNoModulesCheck):
1416    """
1417    Checks if we are introducing any new warnings/errors with sysbuild Kconfig
1418    when no modules are available. Catches symbols used in the main repository
1419    but defined only in a module.
1420    """
1421    name = "SysbuildKconfigBasicNoModules"
1422    path_hint = "<zephyr-base>"
1423
1424
1425class Nits(ComplianceTest):
1426    """
1427    Checks various nits in added/modified files. Doesn't check stuff that's
1428    already covered by e.g. checkpatch.pl and pylint.
1429    """
1430    name = "Nits"
1431    doc = "See https://docs.zephyrproject.org/latest/contribute/guidelines.html#coding-style for more details."
1432
1433    def run(self):
1434        # Loop through added/modified files
1435        for fname in get_files(filter="d"):
1436            if "Kconfig" in fname:
1437                self.check_kconfig_header(fname)
1438                self.check_redundant_zephyr_source(fname)
1439
1440            if fname.startswith("dts/bindings/"):
1441                self.check_redundant_document_separator(fname)
1442
1443            if fname.endswith((".c", ".conf", ".cpp", ".dts", ".overlay",
1444                               ".h", ".ld", ".py", ".rst", ".txt", ".yaml",
1445                               ".yml")) or \
1446               "Kconfig" in fname or \
1447               "defconfig" in fname or \
1448               fname == "README":
1449
1450                self.check_source_file(fname)
1451
1452    def check_kconfig_header(self, fname):
1453        # Checks for a spammy copy-pasted header format
1454
1455        with open(GIT_TOP / fname, encoding="utf-8") as f:
1456            contents = f.read()
1457
1458        # 'Kconfig - yada yada' has a copy-pasted redundant filename at the
1459        # top. This probably means all of the header was copy-pasted.
1460        if re.match(r"\s*#\s*(K|k)config[\w.-]*\s*-", contents):
1461            self.failure(f"""
1462Please use this format for the header in '{fname}' (see
1463https://docs.zephyrproject.org/latest/build/kconfig/tips.html#header-comments-and-other-nits):
1464
1465    # <Overview of symbols defined in the file, preferably in plain English>
1466    (Blank line)
1467    # Copyright (c) 2019 ...
1468    # SPDX-License-Identifier: <License>
1469    (Blank line)
1470    (Kconfig definitions)
1471
1472Skip the "Kconfig - " part of the first line, since it's clear that the comment
1473is about Kconfig from context. The "# Kconfig - " is what triggers this
1474failure.
1475""")
1476
1477    def check_redundant_zephyr_source(self, fname):
1478        # Checks for 'source "$(ZEPHYR_BASE)/Kconfig[.zephyr]"', which can be
1479        # be simplified to 'source "Kconfig[.zephyr]"'
1480
1481        with open(GIT_TOP / fname, encoding="utf-8") as f:
1482            # Look for e.g. rsource as well, for completeness
1483            match = re.search(
1484                r'^\s*(?:o|r|or)?source\s*"\$\(?ZEPHYR_BASE\)?/(Kconfig(?:\.zephyr)?)"',
1485                f.read(), re.MULTILINE)
1486
1487            if match:
1488                self.failure("""
1489Redundant 'source "$(ZEPHYR_BASE)/{0}" in '{1}'. Just do 'source "{0}"'
1490instead. The $srctree environment variable already points to the Zephyr root,
1491and all 'source's are relative to it.""".format(match.group(1), fname))
1492
1493    def check_redundant_document_separator(self, fname):
1494        # Looks for redundant '...' document separators in bindings
1495
1496        with open(GIT_TOP / fname, encoding="utf-8") as f:
1497            if re.search(r"^\.\.\.", f.read(), re.MULTILINE):
1498                self.failure(f"""\
1499Redundant '...' document separator in {fname}. Binding YAML files are never
1500concatenated together, so no document separators are needed.""")
1501
1502    def check_source_file(self, fname):
1503        # Generic nits related to various source files
1504
1505        with open(GIT_TOP / fname, encoding="utf-8") as f:
1506            contents = f.read()
1507
1508        if not contents.endswith("\n"):
1509            self.failure(f"Missing newline at end of '{fname}'. Check your text "
1510                         f"editor settings.")
1511
1512        if contents.startswith("\n"):
1513            self.failure(f"Please remove blank lines at start of '{fname}'")
1514
1515        if contents.endswith("\n\n"):
1516            self.failure(f"Please remove blank lines at end of '{fname}'")
1517
1518
1519class GitDiffCheck(ComplianceTest):
1520    """
1521    Checks for conflict markers or whitespace errors with git diff --check
1522    """
1523    name = "GitDiffCheck"
1524    doc = "Git conflict markers and whitespace errors are not allowed in added changes"
1525
1526    def run(self):
1527        offending_lines = []
1528        # Use regex to filter out unnecessay output
1529        # Reason: `--check` is mutually exclusive with `--name-only` and `-s`
1530        p = re.compile(r"\S+\: .*\.")
1531
1532        for shaidx in get_shas(COMMIT_RANGE):
1533            # Ignore non-zero return status code
1534            # Reason: `git diff --check` sets the return code to the number of offending lines
1535            diff = git("diff", f"{shaidx}^!", "--check", "--", ":!*.diff", ":!*.patch", ignore_non_zero=True)
1536
1537            lines = p.findall(diff)
1538            lines = map(lambda x: f"{shaidx}: {x}", lines)
1539            offending_lines.extend(lines)
1540
1541        if len(offending_lines) > 0:
1542            self.failure("\n".join(offending_lines))
1543
1544
1545class GitLint(ComplianceTest):
1546    """
1547    Runs gitlint on the commits and finds issues with style and syntax
1548
1549    """
1550    name = "Gitlint"
1551    doc = "See https://docs.zephyrproject.org/latest/contribute/guidelines.html#commit-guidelines for more details"
1552
1553    def run(self):
1554        # By default gitlint looks for .gitlint configuration only in
1555        # the current directory
1556        try:
1557            subprocess.run('gitlint --commits ' + COMMIT_RANGE,
1558                           check=True,
1559                           stdout=subprocess.PIPE,
1560                           stderr=subprocess.STDOUT,
1561                           shell=True, cwd=GIT_TOP)
1562
1563        except subprocess.CalledProcessError as ex:
1564            self.failure(ex.output.decode("utf-8"))
1565
1566
1567class PyLint(ComplianceTest):
1568    """
1569    Runs pylint on all .py files, with a limited set of checks enabled. The
1570    configuration is in the pylintrc file.
1571    """
1572    name = "Pylint"
1573    doc = "See https://www.pylint.org/ for more details"
1574
1575    def run(self):
1576        # Path to pylint configuration file
1577        pylintrc = os.path.abspath(os.path.join(os.path.dirname(__file__),
1578                                                "pylintrc"))
1579
1580        # Path to additional pylint check scripts
1581        check_script_dir = os.path.abspath(os.path.join(os.path.dirname(__file__),
1582                                                        "../pylint/checkers"))
1583
1584        # List of files added/modified by the commit(s).
1585        files = get_files(filter="d")
1586
1587        # Filter out everything but Python files. Keep filenames
1588        # relative (to GIT_TOP) to stay farther from any command line
1589        # limit.
1590        py_files = filter_py(GIT_TOP, files)
1591        if not py_files:
1592            return
1593
1594        python_environment = os.environ.copy()
1595        if "PYTHONPATH" in python_environment:
1596            python_environment["PYTHONPATH"] = check_script_dir + ":" + \
1597                                               python_environment["PYTHONPATH"]
1598        else:
1599            python_environment["PYTHONPATH"] = check_script_dir
1600
1601        pylintcmd = ["pylint", "--output-format=json2", "--rcfile=" + pylintrc,
1602                     "--load-plugins=argparse-checker"] + py_files
1603        logger.info(cmd2str(pylintcmd))
1604        try:
1605            subprocess.run(pylintcmd,
1606                           check=True,
1607                           stdout=subprocess.PIPE,
1608                           stderr=subprocess.STDOUT,
1609                           cwd=GIT_TOP,
1610                           env=python_environment)
1611        except subprocess.CalledProcessError as ex:
1612            output = ex.output.decode("utf-8")
1613            messages = json.loads(output)['messages']
1614            for m in messages:
1615                severity = 'unknown'
1616                if m['messageId'][0] in ('F', 'E'):
1617                    severity = 'error'
1618                elif m['messageId'][0] in ('W','C', 'R', 'I'):
1619                    severity = 'warning'
1620                self.fmtd_failure(severity, m['messageId'], m['path'],
1621                                  m['line'], col=str(m['column']), desc=m['message']
1622                                  + f" ({m['symbol']})")
1623
1624            if len(messages) == 0:
1625                # If there are no specific messages add the whole output as a failure
1626                self.failure(output)
1627
1628
1629def filter_py(root, fnames):
1630    # PyLint check helper. Returns all Python script filenames among the
1631    # filenames in 'fnames', relative to directory 'root'.
1632    #
1633    # Uses the python-magic library, so that we can detect Python
1634    # files that don't end in .py as well. python-magic is a frontend
1635    # to libmagic, which is also used by 'file'.
1636    return [fname for fname in fnames
1637            if (fname.endswith(".py") or
1638             magic.from_file(os.path.join(root, fname),
1639                             mime=True) == "text/x-python")]
1640
1641
1642class Identity(ComplianceTest):
1643    """
1644    Checks if Emails of author and signed-off messages are consistent.
1645    """
1646    name = "Identity"
1647    doc = "See https://docs.zephyrproject.org/latest/contribute/guidelines.html#commit-guidelines for more details"
1648
1649    def run(self):
1650        for shaidx in get_shas(COMMIT_RANGE):
1651            commit_info = git('show', '-s', '--format=%an%n%ae%n%b', shaidx).split('\n', 2)
1652
1653            failures = []
1654
1655            if len(commit_info) == 2:
1656                failures.append(f'{shaidx}: Empty commit message body')
1657                auth_name, auth_email = commit_info
1658                body = ''
1659            elif len(commit_info) == 3:
1660                auth_name, auth_email, body = commit_info
1661            else:
1662                self.failure(f'Unable to parse commit message for {shaidx}')
1663
1664            match_signoff = re.search(r"signed-off-by:\s(.*)", body,
1665                                      re.IGNORECASE)
1666            detailed_match = re.search(rf"signed-off-by:\s({re.escape(auth_name)}) <({re.escape(auth_email)})>",
1667                                       body,
1668                                       re.IGNORECASE)
1669
1670            if auth_email.endswith("@users.noreply.github.com"):
1671                failures.append(f"{shaidx}: author email ({auth_email}) must "
1672                                "be a real email and cannot end in "
1673                                "@users.noreply.github.com")
1674
1675            if not match_signoff:
1676                failures.append(f'{shaidx}: Missing signed-off-by line')
1677            elif not detailed_match:
1678                signoff = match_signoff.group(0)
1679                failures.append(f"{shaidx}: Signed-off-by line ({signoff}) "
1680                                "does not follow the syntax: First "
1681                                "Last <email>.")
1682            elif (auth_name, auth_email) != detailed_match.groups():
1683                failures.append(f"{shaidx}: author email ({auth_email}) needs "
1684                                "to match one of the signed-off-by entries.")
1685
1686            if failures:
1687                self.failure('\n'.join(failures))
1688
1689
1690class BinaryFiles(ComplianceTest):
1691    """
1692    Check that the diff contains no binary files.
1693    """
1694    name = "BinaryFiles"
1695    doc = "No binary files allowed."
1696
1697    def run(self):
1698        BINARY_ALLOW_PATHS = ("doc/", "boards/", "samples/")
1699        # svg files are always detected as binary, see .gitattributes
1700        BINARY_ALLOW_EXT = (".jpg", ".jpeg", ".png", ".svg", ".webp")
1701
1702        for stat in git("diff", "--numstat", "--diff-filter=A",
1703                        COMMIT_RANGE).splitlines():
1704            added, deleted, fname = stat.split("\t")
1705            if added == "-" and deleted == "-":
1706                if (fname.startswith(BINARY_ALLOW_PATHS) and
1707                    fname.endswith(BINARY_ALLOW_EXT)):
1708                    continue
1709                self.failure(f"Binary file not allowed: {fname}")
1710
1711
1712class ImageSize(ComplianceTest):
1713    """
1714    Check that any added image is limited in size.
1715    """
1716    name = "ImageSize"
1717    doc = "Check the size of image files."
1718
1719    def run(self):
1720        SIZE_LIMIT = 250 << 10
1721        BOARD_SIZE_LIMIT = 100 << 10
1722
1723        for file in get_files(filter="d"):
1724            full_path = GIT_TOP / file
1725            mime_type = magic.from_file(os.fspath(full_path), mime=True)
1726
1727            if not mime_type.startswith("image/"):
1728                continue
1729
1730            size = os.path.getsize(full_path)
1731
1732            limit = SIZE_LIMIT
1733            if file.startswith("boards/"):
1734                limit = BOARD_SIZE_LIMIT
1735
1736            if size > limit:
1737                self.failure(f"Image file too large: {file} reduce size to "
1738                             f"less than {limit >> 10}kB")
1739
1740
1741class MaintainersFormat(ComplianceTest):
1742    """
1743    Check that MAINTAINERS file parses correctly.
1744    """
1745    name = "MaintainersFormat"
1746    doc = "Check that MAINTAINERS file parses correctly."
1747
1748    def run(self):
1749        MAINTAINERS_FILES = ["MAINTAINERS.yml", "MAINTAINERS.yaml"]
1750
1751        for file in MAINTAINERS_FILES:
1752            if not os.path.exists(file):
1753                continue
1754
1755            try:
1756                Maintainers(file)
1757            except MaintainersError as ex:
1758                self.failure(f"Error parsing {file}: {ex}")
1759
1760class ModulesMaintainers(ComplianceTest):
1761    """
1762    Check that all modules have a MAINTAINERS entry.
1763    """
1764    name = "ModulesMaintainers"
1765    doc = "Check that all modules have a MAINTAINERS entry."
1766
1767    def run(self):
1768        MAINTAINERS_FILES = ["MAINTAINERS.yml", "MAINTAINERS.yaml"]
1769
1770        manifest = Manifest.from_file()
1771
1772        maintainers_file = None
1773        for file in MAINTAINERS_FILES:
1774            if os.path.exists(file):
1775                maintainers_file = file
1776                break
1777        if not maintainers_file:
1778            return
1779
1780        maintainers = Maintainers(maintainers_file)
1781
1782        for project in manifest.get_projects([]):
1783            if not manifest.is_active(project):
1784                continue
1785
1786            if isinstance(project, ManifestProject):
1787                continue
1788
1789            area = f"West project: {project.name}"
1790            if area not in maintainers.areas:
1791                self.failure(f"Missing {maintainers_file} entry for: \"{area}\"")
1792
1793
1794class ZephyrModuleFile(ComplianceTest):
1795    """
1796    Check that no zephyr/module.yml file has been added to the Zephyr repository
1797    """
1798    name = "ZephyrModuleFile"
1799    doc = "Check that no zephyr/module.yml file has been added to the Zephyr repository."
1800
1801    def run(self):
1802        module_files = [ZEPHYR_BASE / 'zephyr' / 'module.yml',
1803                        ZEPHYR_BASE / 'zephyr' / 'module.yaml']
1804
1805        for file in module_files:
1806            if os.path.exists(file):
1807                self.failure("A zephyr module file has been added to the Zephyr repository")
1808                break
1809
1810
1811class YAMLLint(ComplianceTest):
1812    """
1813    YAMLLint
1814    """
1815    name = "YAMLLint"
1816    doc = "Check YAML files with YAMLLint."
1817
1818    def run(self):
1819        config_file = ZEPHYR_BASE / ".yamllint"
1820
1821        for file in get_files(filter="d"):
1822            if Path(file).suffix not in ['.yaml', '.yml']:
1823                continue
1824
1825            yaml_config = config.YamlLintConfig(file=config_file)
1826
1827            if file.startswith(".github/"):
1828                # Tweak few rules for workflow files.
1829                yaml_config.rules["line-length"] = False
1830                yaml_config.rules["truthy"]["allowed-values"].extend(['on', 'off'])
1831            elif file == ".codecov.yml":
1832                yaml_config.rules["truthy"]["allowed-values"].extend(['yes', 'no'])
1833
1834            with open(file, 'r') as fp:
1835                for p in linter.run(fp, yaml_config):
1836                    self.fmtd_failure('warning', f'YAMLLint ({p.rule})', file,
1837                                      p.line, col=p.column, desc=p.desc)
1838
1839
1840class SphinxLint(ComplianceTest):
1841    """
1842    SphinxLint
1843    """
1844
1845    name = "SphinxLint"
1846    doc = "Check Sphinx/reStructuredText files with sphinx-lint."
1847
1848    # Checkers added/removed to sphinx-lint's default set
1849    DISABLE_CHECKERS = [
1850        "horizontal-tab",
1851        "missing-space-before-default-role",
1852        "trailing-whitespace",
1853    ]
1854    ENABLE_CHECKERS = ["default-role"]
1855
1856    def run(self):
1857        for file in get_files():
1858            if not file.endswith(".rst"):
1859                continue
1860
1861            try:
1862                # sphinx-lint does not expose a public API so interaction is done via CLI
1863                subprocess.run(
1864                    f"sphinx-lint -d {','.join(self.DISABLE_CHECKERS)} -e {','.join(self.ENABLE_CHECKERS)} {file}",
1865                    check=True,
1866                    stdout=subprocess.PIPE,
1867                    stderr=subprocess.STDOUT,
1868                    shell=True,
1869                    cwd=GIT_TOP,
1870                )
1871
1872            except subprocess.CalledProcessError as ex:
1873                for line in ex.output.decode("utf-8").splitlines():
1874                    match = re.match(r"^(.*):(\d+): (.*)$", line)
1875
1876                    if match:
1877                        self.fmtd_failure(
1878                            "error",
1879                            "SphinxLint",
1880                            match.group(1),
1881                            int(match.group(2)),
1882                            desc=match.group(3),
1883                        )
1884
1885
1886class KeepSorted(ComplianceTest):
1887    """
1888    Check for blocks of code or config that should be kept sorted.
1889    """
1890    name = "KeepSorted"
1891    doc = "Check for blocks of code or config that should be kept sorted."
1892
1893    MARKER = "zephyr-keep-sorted"
1894
1895    def block_check_sorted(self, block_data, *, regex, strip, fold):
1896        def _test_indent(txt: str):
1897            return txt.startswith((" ", "\t"))
1898
1899        if regex is None:
1900            block_data = textwrap.dedent(block_data)
1901
1902        lines = block_data.splitlines()
1903        last = ''
1904
1905        for idx, line in enumerate(lines):
1906            if not line.strip():
1907                # Ignore blank lines
1908                continue
1909
1910            if strip is not None:
1911                line = line.strip(strip)
1912
1913            if regex:
1914                # check for regex
1915                if not re.match(regex, line):
1916                    continue
1917            else:
1918                if _test_indent(line):
1919                    continue
1920
1921                if fold:
1922                    # Fold back indented lines after the current one
1923                    for cont in takewhile(_test_indent, lines[idx + 1:]):
1924                        line += cont.strip()
1925
1926            if line < last:
1927                return idx
1928
1929            last = line
1930
1931        return -1
1932
1933    def check_file(self, file, fp):
1934        mime_type = magic.from_file(os.fspath(file), mime=True)
1935
1936        if not mime_type.startswith("text/"):
1937            return
1938
1939        block_data = ""
1940        in_block = False
1941
1942        start_marker = f"{self.MARKER}-start"
1943        stop_marker = f"{self.MARKER}-stop"
1944        regex_marker = r"re\(([^)]+)\)"
1945        strip_marker = r"strip\(([^)]+)\)"
1946        nofold_marker = "nofold"
1947        start_line = 0
1948        regex = None
1949        strip = None
1950        fold = True
1951
1952        for line_num, line in enumerate(fp.readlines(), start=1):
1953            if start_marker in line:
1954                if in_block:
1955                    desc = f"nested {start_marker}"
1956                    self.fmtd_failure("error", "KeepSorted", file, line_num,
1957                                     desc=desc)
1958                in_block = True
1959                block_data = ""
1960                start_line = line_num + 1
1961
1962                # Test for a regex block
1963                match = re.search(regex_marker, line)
1964                regex = match.group(1) if match else None
1965
1966                match = re.search(strip_marker, line)
1967                strip = match.group(1) if match else None
1968
1969                fold = nofold_marker not in line
1970            elif stop_marker in line:
1971                if not in_block:
1972                    desc = f"{stop_marker} without {start_marker}"
1973                    self.fmtd_failure("error", "KeepSorted", file, line_num,
1974                                     desc=desc)
1975                in_block = False
1976
1977                idx = self.block_check_sorted(block_data, regex=regex, strip=strip, fold=fold)
1978                if idx >= 0:
1979                    desc = f"sorted block has out-of-order line at {start_line + idx}"
1980                    self.fmtd_failure("error", "KeepSorted", file, line_num,
1981                                      desc=desc)
1982            elif in_block:
1983                block_data += line
1984
1985        if in_block:
1986            self.failure(f"unterminated {start_marker} in {file}")
1987
1988    def run(self):
1989        for file in get_files(filter="d"):
1990            with open(file, "r") as fp:
1991                self.check_file(file, fp)
1992
1993
1994class Ruff(ComplianceTest):
1995    """
1996    Ruff
1997    """
1998    name = "Ruff"
1999    doc = "Check python files with ruff."
2000
2001    def run(self):
2002        for file in get_files(filter="d"):
2003            if not file.endswith((".py", ".pyi")):
2004                continue
2005
2006            try:
2007                subprocess.run(
2008                    f"ruff check --force-exclude --output-format=json {file}",
2009                    check=True,
2010                    stdout=subprocess.PIPE,
2011                    stderr=subprocess.DEVNULL,
2012                    shell=True,
2013                    cwd=GIT_TOP,
2014                )
2015            except subprocess.CalledProcessError as ex:
2016                output = ex.output.decode("utf-8")
2017                messages = json.loads(output)
2018                for m in messages:
2019                    self.fmtd_failure(
2020                        "error",
2021                        f'Python lint error ({m.get("code")}) see {m.get("url")}',
2022                        file,
2023                        line=m.get("location", {}).get("row"),
2024                        col=m.get("location", {}).get("column"),
2025                        end_line=m.get("end_location", {}).get("row"),
2026                        end_col=m.get("end_location", {}).get("column"),
2027                        desc=m.get("message"),
2028                    )
2029            try:
2030                subprocess.run(
2031                    f"ruff format --force-exclude --diff {file}",
2032                    check=True,
2033                    shell=True,
2034                    cwd=GIT_TOP,
2035                )
2036            except subprocess.CalledProcessError:
2037                desc = f"Run 'ruff format {file}'"
2038                self.fmtd_failure("error", "Python format error", file, desc=desc)
2039
2040class PythonCompatCheck(ComplianceTest):
2041    """
2042    Python Compatibility Check
2043    """
2044    name = "PythonCompat"
2045    doc = "Check that Python files are compatible with Zephyr minimum supported Python version."
2046
2047    MAX_VERSION = (3, 10)
2048    MAX_VERSION_STR = f"{MAX_VERSION[0]}.{MAX_VERSION[1]}"
2049
2050    def run(self):
2051        py_files = [f for f in get_files(filter="d") if f.endswith(".py")]
2052        if not py_files:
2053            return
2054        cmd = ["vermin", "-f", "parsable", "--violations",
2055               f"-t={self.MAX_VERSION_STR}", "--no-make-paths-absolute"] + py_files
2056        try:
2057            result = subprocess.run(cmd,
2058                                    check=False,
2059                                    capture_output=True,
2060                                    cwd=GIT_TOP)
2061        except Exception as ex:
2062            self.error(f"Failed to run vermin: {ex}")
2063        output = result.stdout.decode("utf-8")
2064        failed = False
2065        for line in output.splitlines():
2066            parts = line.split(":")
2067            if len(parts) < 6:
2068                continue
2069            filename, line_number, column, _, py3ver, feature = parts[:6]
2070            if not line_number:
2071                # Ignore all file-level messages
2072                continue
2073
2074            desc = None
2075            if py3ver.startswith('!'):
2076                desc = f"{feature} is known to be incompatible with Python 3."
2077            elif py3ver.startswith('~'):
2078                # "no known reason it won't work", just skip
2079                continue
2080            else:
2081                major, minor = map(int, py3ver.split(".")[:2])
2082                if (major, minor) > self.MAX_VERSION:
2083                    desc = f"{feature} requires Python {major}.{minor}, which is higher than " \
2084                           f"Zephyr's minimum supported Python version ({self.MAX_VERSION_STR})."
2085
2086            if desc is not None:
2087                self.fmtd_failure(
2088                    "error",
2089                    "PythonCompat",
2090                    filename,
2091                    line=int(line_number),
2092                    col=int(column) if column else None,
2093                    desc=desc,
2094                )
2095                failed = True
2096        if failed:
2097            self.failure("Some Python files use features that are not compatible with Python " \
2098                         f"{self.MAX_VERSION_STR}.")
2099
2100
2101class TextEncoding(ComplianceTest):
2102    """
2103    Check that any text file is encoded in ascii or utf-8.
2104    """
2105    name = "TextEncoding"
2106    doc = "Check the encoding of text files."
2107
2108    ALLOWED_CHARSETS = ["us-ascii", "utf-8"]
2109
2110    def run(self):
2111        m = magic.Magic(mime=True, mime_encoding=True)
2112
2113        for file in get_files(filter="d"):
2114            full_path = GIT_TOP / file
2115            mime_type = m.from_file(os.fspath(full_path))
2116
2117            if not mime_type.startswith("text/"):
2118                continue
2119
2120            # format is "text/<type>; charset=<charset>"
2121            if mime_type.rsplit('=')[-1] not in self.ALLOWED_CHARSETS:
2122                desc = f"Text file with unsupported encoding: {file} has mime type {mime_type}"
2123                self.fmtd_failure("error", "TextEncoding", file, desc=desc)
2124
2125
2126def init_logs(cli_arg):
2127    # Initializes logging
2128
2129    global logger
2130
2131    level = os.environ.get('LOG_LEVEL', "WARN")
2132
2133    console = logging.StreamHandler()
2134    console.setFormatter(logging.Formatter('%(levelname)-8s: %(message)s'))
2135
2136    logger = logging.getLogger('')
2137    logger.addHandler(console)
2138    logger.setLevel(cli_arg or level)
2139
2140    logger.info("Log init completed, level=%s",
2141                 logging.getLevelName(logger.getEffectiveLevel()))
2142
2143
2144def inheritors(klass):
2145    subclasses = set()
2146    work = [klass]
2147    while work:
2148        parent = work.pop()
2149        for child in parent.__subclasses__():
2150            if child not in subclasses:
2151                subclasses.add(child)
2152                work.append(child)
2153    return subclasses
2154
2155
2156def annotate(res):
2157    """
2158    https://docs.github.com/en/actions/using-workflows/workflow-commands-for-github-actions#about-workflow-commands
2159    """
2160    msg = res.message.replace('%', '%25').replace('\n', '%0A').replace('\r', '%0D')
2161    notice = f'::{res.severity} file={res.file}' + \
2162             (f',line={res.line}' if res.line else '') + \
2163             (f',col={res.col}' if res.col else '') + \
2164             (f',endLine={res.end_line}' if res.end_line else '') + \
2165             (f',endColumn={res.end_col}' if res.end_col else '') + \
2166             f',title={res.title}::{msg}'
2167    print(notice)
2168
2169
2170def resolve_path_hint(hint):
2171    if hint == "<zephyr-base>":
2172        return ZEPHYR_BASE
2173    elif hint == "<git-top>":
2174        return GIT_TOP
2175    else:
2176        return hint
2177
2178
2179def parse_args(argv):
2180
2181    default_range = 'HEAD~1..HEAD'
2182    parser = argparse.ArgumentParser(
2183        description="Check for coding style and documentation warnings.", allow_abbrev=False)
2184    parser.add_argument('-c', '--commits', default=default_range,
2185                        help=f'''Commit range in the form: a..[b], default is
2186                        {default_range}''')
2187    parser.add_argument('-o', '--output', default="compliance.xml",
2188                        help='''Name of outfile in JUnit format,
2189                        default is ./compliance.xml''')
2190    parser.add_argument('-n', '--no-case-output', action="store_true",
2191                        help="Do not store the individual test case output.")
2192    parser.add_argument('-l', '--list', action="store_true",
2193                        help="List all checks and exit")
2194    parser.add_argument("-v", "--loglevel", choices=['DEBUG', 'INFO', 'WARNING',
2195                                                     'ERROR', 'CRITICAL'],
2196                        help="python logging level")
2197    parser.add_argument('-m', '--module', action="append", default=[],
2198                        help="Checks to run. All checks by default. (case " \
2199                        "insensitive)")
2200    parser.add_argument('-e', '--exclude-module', action="append", default=[],
2201                        help="Do not run the specified checks (case " \
2202                        "insensitive)")
2203    parser.add_argument('-j', '--previous-run', default=None,
2204                        help='''Pre-load JUnit results in XML format
2205                        from a previous run and combine with new results.''')
2206    parser.add_argument('--annotate', action="store_true",
2207                        help="Print GitHub Actions-compatible annotations.")
2208
2209    return parser.parse_args(argv)
2210
2211def _main(args):
2212    # The "real" main(), which is wrapped to catch exceptions and report them
2213    # to GitHub. Returns the number of test failures.
2214
2215    # The absolute path of the top-level git directory. Initialize it here so
2216    # that issues running Git can be reported to GitHub.
2217    global GIT_TOP
2218    GIT_TOP = Path(git("rev-parse", "--show-toplevel"))
2219
2220    # The commit range passed in --commit, e.g. "HEAD~3"
2221    global COMMIT_RANGE
2222    COMMIT_RANGE = args.commits
2223
2224    init_logs(args.loglevel)
2225
2226    logger.info(f'Running tests on commit range {COMMIT_RANGE}')
2227
2228    if args.list:
2229        for testcase in sorted(inheritors(ComplianceTest), key=lambda x: x.name):
2230            print(testcase.name)
2231        return 0
2232
2233    # Load saved test results from an earlier run, if requested
2234    if args.previous_run:
2235        if not os.path.exists(args.previous_run):
2236            # This probably means that an earlier pass had an internal error
2237            # (the script is currently run multiple times by the ci-pipelines
2238            # repo). Since that earlier pass might've posted an error to
2239            # GitHub, avoid generating a GitHub comment here, by avoiding
2240            # sys.exit() (which gets caught in main()).
2241            print(f"error: '{args.previous_run}' not found",
2242                  file=sys.stderr)
2243            return 1
2244
2245        logging.info(f"Loading previous results from {args.previous_run}")
2246        for loaded_suite in JUnitXml.fromfile(args.previous_run):
2247            suite = loaded_suite
2248            break
2249    else:
2250        suite = TestSuite("Compliance")
2251
2252    included = list(map(lambda x: x.lower(), args.module))
2253    excluded = list(map(lambda x: x.lower(), args.exclude_module))
2254
2255    for testcase in inheritors(ComplianceTest):
2256        # "Modules" and "testcases" are the same thing. Better flags would have
2257        # been --tests and --exclude-tests or the like, but it's awkward to
2258        # change now.
2259
2260        if included and testcase.name.lower() not in included:
2261            continue
2262
2263        if testcase.name.lower() in excluded:
2264            print("Skipping " + testcase.name)
2265            continue
2266
2267        test = testcase()
2268        try:
2269            print(f"Running {test.name:16} tests in "
2270                  f"{resolve_path_hint(test.path_hint)} ...")
2271            test.run()
2272        except EndTest:
2273            pass
2274        except BaseException:
2275            test.failure(f"An exception occurred in {test.name}:\n{traceback.format_exc()}")
2276
2277        # Annotate if required
2278        if args.annotate:
2279            for res in test.fmtd_failures:
2280                annotate(res)
2281
2282        suite.add_testcase(test.case)
2283
2284    if args.output:
2285        xml = JUnitXml()
2286        xml.add_testsuite(suite)
2287        xml.update_statistics()
2288        xml.write(args.output, pretty=True)
2289
2290    failed_cases = []
2291    name2doc = {testcase.name: testcase.doc
2292                for testcase in inheritors(ComplianceTest)}
2293
2294    for case in suite:
2295        if case.result:
2296            if case.is_skipped:
2297                logging.warning(f"Skipped {case.name}")
2298            else:
2299                failed_cases.append(case)
2300        else:
2301            # Some checks can produce no .result
2302            logging.info(f"No JUnit result for {case.name}")
2303
2304    n_fails = len(failed_cases)
2305
2306    if n_fails:
2307        print(f"{n_fails} checks failed")
2308        for case in failed_cases:
2309            for res in case.result:
2310                errmsg = res.text.strip()
2311                logging.error(f"Test {case.name} failed: \n{errmsg}")
2312            if args.no_case_output:
2313                continue
2314            with open(f"{case.name}.txt", "w") as f:
2315                docs = name2doc.get(case.name)
2316                f.write(f"{docs}\n")
2317                for res in case.result:
2318                    errmsg = res.text.strip()
2319                    f.write(f'\n {errmsg}')
2320
2321    if args.output:
2322        print(f"\nComplete results in {args.output}")
2323    return n_fails
2324
2325
2326def main(argv=None):
2327    args = parse_args(argv)
2328
2329    try:
2330        n_fails = _main(args)
2331    except BaseException:
2332        # Catch BaseException instead of Exception to include stuff like
2333        # SystemExit (raised by sys.exit())
2334        print(f"Python exception in `{__file__}`:\n\n"
2335              f"```\n{traceback.format_exc()}\n```")
2336
2337        raise
2338
2339    sys.exit(n_fails)
2340
2341
2342def cmd2str(cmd):
2343    # Formats the command-line arguments in the iterable 'cmd' into a string,
2344    # for error messages and the like
2345
2346    return " ".join(shlex.quote(word) for word in cmd)
2347
2348
2349def err(msg):
2350    cmd = sys.argv[0]  # Empty if missing
2351    if cmd:
2352        cmd += ": "
2353    sys.exit(f"{cmd} error: {msg}")
2354
2355
2356if __name__ == "__main__":
2357    main(sys.argv[1:])
2358