from io import open
import os
import re
import glob
import subprocess
import sys
import unittest

# Top-level directory of the Buildroot source tree (this file is expected to
# live one level below it, e.g. in utils/).
brpath = os.path.normpath(os.path.join(os.path.dirname(__file__), ".."))

#
# Patch parsing functions
#

FIND_INFRA_IN_PATCH = re.compile(r"^\+\$\(eval \$\((host-)?([^-]*)-package\)\)$")
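# Illustrative examples of patch lines matched by FIND_INFRA_IN_PATCH (the
# leading "+" marks a line added by the patch):
#   +$(eval $(generic-package))         -> infra "generic"
#   +$(eval $(host-autotools-package))  -> infra "autotools"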


def analyze_patch(patch):
    """Parse one patch and return the set of files it modifies, adds or
    removes, together with the set of package infrastructures it adds (if
    any)."""
    files = set()
    infras = set()
    for line in patch:
        # If the patch is adding a package, find which infra it is
        m = FIND_INFRA_IN_PATCH.match(line)
        if m:
            infras.add(m.group(2))
        # Only the diff headers ("--- a/..." and "+++ b/...") carry file names
        if not line.startswith("+++ ") and not line.startswith("--- "):
            continue
        # Keep only the path, dropping the leading "--- a/" or "+++ b/" part
        fname = line[line.find("/") + 1:].strip()
        if fname == "dev/null":
            continue
        files.add(fname)
    return (files, infras)


FIND_INFRA_IN_MK = re.compile(r"^\$\(eval \$\((host-)?([^-]*)-package\)\)$")
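# Illustrative example of a package .mk line matched by FIND_INFRA_IN_MK:
#   $(eval $(cmake-package))  -> infra "cmake"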


def fname_get_package_infra(fname):
    """Check whether the file passed as argument is a Buildroot .mk file
    describing a package, and if so return the name of the infrastructure
    it uses, None otherwise."""
    if not fname.endswith(".mk"):
        return None

    if not os.path.exists(fname):
        return None

    with open(fname, "r") as f:
        for line in f:
            line = line.strip()
            m = FIND_INFRA_IN_MK.match(line)
            if m:
                return m.group(2)
    return None


def analyze_patches(patches):
    """Parse a list of patches and return the set of files modified,
    added or removed by the patches, as well as the set of package
    infrastructures used by those patches (if any)."""
    allfiles = set()
    allinfras = set()
    for patch in patches:
        (files, infras) = analyze_patch(patch)
        allfiles = allfiles | files
        allinfras = allinfras | infras
    return (allfiles, allinfras)


#
# Unit-test parsing functions
#

def get_all_test_cases(suite):
    """Generate all test cases from a given test suite.
    :return: (test module name, test case class name)"""
    if isinstance(suite, unittest.TestSuite):
        for test in suite:
            for res in get_all_test_cases(test):
                yield res
    else:
        yield (suite.__module__, suite.__class__.__name__)


def list_unittests():
    """Use the unittest module to retrieve all test cases found under the
    support/testing directory, grouped by test file path."""
    loader = unittest.TestLoader()
    suite = loader.discover(os.path.join(brpath, "support", "testing"))
    tests = {}
    for module, test in get_all_test_cases(suite):
        module_path = os.path.join("support", "testing", *module.split('.'))
        tests.setdefault(module_path, []).append('%s.%s' % (module, test))
    return tests


unittests = {}
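# unittests is a cache of the discovered runtime test cases, filled in by
# parse_developers() via list_unittests(): it maps a test file path (without
# extension) to the names of the test cases it contains. Illustrative entry
# (hypothetical test):
#   "support/testing/tests/package/test_foo": ["tests.package.test_foo.TestFoo"]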


#
# DEVELOPERS file parsing functions
#

class Developer:
    """A developer, as described by one entry of the DEVELOPERS file."""

    def __init__(self, name, files):
        self.name = name
        self.files = files
        self.packages = parse_developer_packages(files)
        self.architectures = parse_developer_architectures(files)
        self.infras = parse_developer_infras(files)
        self.runtime_tests = parse_developer_runtime_tests(files)
        self.defconfigs = parse_developer_defconfigs(files)

    def hasfile(self, f):
        """Return True if the file 'f' is handled by this developer."""
        for fs in self.files:
            if f.startswith(fs):
                return True
        return False

    def __repr__(self):
        name = '\'' + self.name.split(' <')[0][:20] + '\''
        things = []
        if len(self.files):
            things.append('{} files'.format(len(self.files)))
        if len(self.packages):
            things.append('{} pkgs'.format(len(self.packages)))
        if len(self.architectures):
            things.append('{} archs'.format(len(self.architectures)))
        if len(self.infras):
            things.append('{} infras'.format(len(self.infras)))
        if len(self.runtime_tests):
            things.append('{} tests'.format(len(self.runtime_tests)))
        if len(self.defconfigs):
            things.append('{} defconfigs'.format(len(self.defconfigs)))
        if things:
            return 'Developer <{} ({})>'.format(name, ', '.join(things))
        else:
            return 'Developer <' + name + '>'
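    # Illustrative repr() output (hypothetical developer entry):
    #   Developer <'Jane Doe' (4 files, 2 pkgs, 1 defconfigs)>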


def parse_developer_packages(fnames):
    """Given a list of file patterns, walk through the Buildroot source
    tree to find which packages are implemented by those file patterns,
    and return the set of those packages."""
    packages = set()
    for fname in fnames:
        for root, dirs, files in os.walk(os.path.join(brpath, fname)):
            for f in files:
                path = os.path.join(root, f)
                if fname_get_package_infra(path):
                    pkg = os.path.splitext(f)[0]
                    packages.add(pkg)
    return packages


def parse_arches_from_config_in(fname):
    """Given a path to an arch/Config.in.* file, parse it to get the set
    of BR2_ARCH values for this architecture."""
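    # The relevant fragment of an arch/Config.in.* file typically looks
    # like this (illustrative):
    #   config BR2_ARCH
    #       default "arm"    if BR2_arm
    #       default "armeb"  if BR2_armeb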
    arches = set()
    with open(fname, "r") as f:
        parsing_arches = False
        for line in f:
            line = line.strip()
            if line == "config BR2_ARCH":
                parsing_arches = True
                continue
            if parsing_arches:
                m = re.match(r"^\s*default \"([^\"]*)\".*", line)
                if m:
                    arches.add(m.group(1))
                else:
                    parsing_arches = False
    return arches


def parse_developer_architectures(fnames):
    """Given a list of file names, find the ones corresponding to
    arch/Config.in.* files, and use them to determine the architectures a
    developer is working on."""
    arches = set()
    for fname in fnames:
        if not re.match(r"^(.*/)?arch/Config\.in\..*$", fname):
            continue
        arches = arches | parse_arches_from_config_in(os.path.join(brpath, fname))
    return arches


def parse_developer_infras(fnames):
    """Given a list of file names, return the set of package
    infrastructures (package/pkg-*.mk files) among them."""
    infras = set()
    for fname in fnames:
        m = re.match(r"^package/pkg-([^.]*)\.mk$", fname)
        if m:
            infras.add(m.group(1))
    return infras


def parse_developer_defconfigs(fnames):
    """Given a list of file names, return the set of config names
    corresponding to defconfigs."""
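    # For example (illustrative), "configs/foo_defconfig" yields the config
    # name "foo": the slice strips the 10-character '_defconfig' suffix.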
    return {os.path.basename(fname[:-10])
            for fname in fnames
            if fname.endswith('_defconfig')}


def parse_developer_runtime_tests(fnames):
    """Given a list of file names, return the runtime test cases
    corresponding to those files."""
    all_files = []
    # List all files recursively, keeping their paths relative to the top of
    # the Buildroot tree so that they can be looked up in the unittests dict
    for fname in fnames:
        path = os.path.join(brpath, fname)
        if os.path.isdir(path):
            for root, _dirs, files in os.walk(path):
                all_files += [os.path.relpath(os.path.join(root, f), brpath)
                              for f in files]
        else:
            all_files.append(fname)

    # Get all runtime tests
    runtimes = set()
    for f in all_files:
        name = os.path.splitext(f)[0]
        if name in unittests:
            runtimes |= set(unittests[name])
    return runtimes


def parse_developers(filename=None):
    """Parse the DEVELOPERS file and return a list of Developer objects,
    or None in case of a parsing error."""
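    # A DEVELOPERS entry looks like this (hypothetical name and files):
    #   N: Jane Doe <jane.doe@example.com>
    #   F: package/foo/
    #   F: support/testing/tests/package/test_foo.py
    # Entries are separated by blank lines; lines starting with '#' are
    # comments.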
    developers = []
    linen = 0
    global unittests
    unittests = list_unittests()
    developers_fname = filename or os.path.join(brpath, 'DEVELOPERS')
    with open(developers_fname, mode='r', encoding='utf_8') as f:
        files = []
        name = None
        for line in f:
            linen += 1
            line = line.strip()
            if line.startswith("#"):
                continue
            elif line.startswith("N:"):
                if name is not None or len(files) != 0:
                    print("Syntax error in DEVELOPERS file, line %d" % linen,
                          file=sys.stderr)
                    return None
                name = line[2:].strip()
            elif line.startswith("F:"):
                fname = line[2:].strip()
                dev_files = glob.glob(os.path.join(brpath, fname))
                if len(dev_files) == 0:
                    print("WARNING: '%s' doesn't match any file, line %d" % (fname, linen),
                          file=sys.stderr)
                for dev_path in dev_files:
                    dev_file = os.path.relpath(dev_path, brpath)
                    dev_file = dev_file.replace(os.sep, '/')  # force unix sep
                    if dev_path[-1] == '/':  # relpath removes the trailing /
                        dev_file = dev_file + '/'
                    files.append(dev_file)
            elif line == "":
                if not name:
                    continue
                developers.append(Developer(name, files))
                files = []
                name = None
            else:
                print("Syntax error in DEVELOPERS file, line %d: '%s'" % (linen, line),
                      file=sys.stderr)
                return None
    # handle the last developer entry, not followed by a blank line
    if name is not None:
        developers.append(Developer(name, files))
    return developers


def check_developers(developers, basepath=None):
    """Look at the list of files versioned in Buildroot, and return the
    list of files that are not handled by any developer."""
    if basepath is None:
        basepath = os.getcwd()
    cmd = ["git", "--git-dir", os.path.join(basepath, ".git"), "ls-files"]
    files = subprocess.check_output(cmd).decode(sys.stdout.encoding).strip().split("\n")
    unhandled_files = []
    for f in files:
        handled = False
        for d in developers:
            if d.hasfile(f):
                handled = True
                break
        if not handled:
            unhandled_files.append(f)
    return unhandled_files
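

# Minimal usage sketch (an assumption for illustration: this module is
# normally driven by the get-developers script from within a Buildroot git
# checkout, not run directly):
if __name__ == "__main__":
    devs = parse_developers()
    if devs is None:
        sys.exit(1)
    print("Parsed %d developers" % len(devs))
    unhandled = check_developers(devs, basepath=brpath)
    print("%d files are not handled by any developer" % len(unhandled))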