Skip to content

Commit 872363d

Browse files
authored
Merge pull request #2854 from theotherjimmy/find-duplicates
[Tools] Find and report duplicates
2 parents ff2d8df + 5eed4f2 commit 872363d

File tree

5 files changed

+92
-1
lines changed

5 files changed

+92
-1
lines changed

tools/build_api.py

Lines changed: 2 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -452,6 +452,8 @@ def build_project(src_paths, build_path, target, toolchain_name,
452452
# Link Program
453453
res, _ = toolchain.link_program(resources, build_path, name)
454454

455+
resources.detect_duplicates(toolchain)
456+
455457
if report != None:
456458
end = time()
457459
cur_result["elapsed_time"] = end - start

tools/git_hooks/__init__.py

Whitespace-only changes.

tools/git_hooks/find_duplicates.py

Lines changed: 26 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,26 @@
from os.path import join, abspath, dirname
import sys

ROOT = abspath(join(dirname(__file__), "..", ".."))
sys.path.insert(0, ROOT)

from tools.toolchains.gcc import GCC_ARM
from tools.targets import TARGET_MAP
from argparse import ArgumentParser

if __name__ == "__main__":
    parser = ArgumentParser(
        "Find duplicate file names within a directory structure")
    parser.add_argument("dirs", nargs="*",
                        help="Directories to search for duplicate file names")
    parser.add_argument("--silent", action="store_true",
                        help="Suppress printing of filenames, just return "
                             "number of duplicates")
    args = parser.parse_args()

    if not args.dirs:
        # sum() below starts from None, so an empty directory list would
        # crash with AttributeError instead of a usage message.
        parser.error("at least one directory to scan is required")

    # Any toolchain can scan resources; GCC_ARM/K64F is an arbitrary choice.
    toolchain = GCC_ARM(TARGET_MAP["K64F"])

    # Merge the per-directory scans into a single Resources object.
    # NOTE(review): relies on Resources supporting '+' with a None left
    # operand for the first element -- confirm against tools.toolchains.
    resources = sum([toolchain.scan_resources(d) for d in args.dirs], None)

    # Exit status is the number of duplicates found (0 == success).
    # TODO(review): the --silent flag is parsed but not yet honored by
    # detect_duplicates.
    sys.exit(resources.detect_duplicates(toolchain))

tools/test/toolchains/api.py

Lines changed: 27 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -3,14 +3,16 @@
33
import os
44
from string import printable
55
from copy import deepcopy
6+
from mock import MagicMock
67
from hypothesis import given
78
from hypothesis.strategies import text, lists, fixed_dictionaries
89

910
ROOT = os.path.abspath(os.path.join(os.path.dirname(__file__), "..", "..",
1011
".."))
1112
sys.path.insert(0, ROOT)
1213

13-
from tools.toolchains import TOOLCHAIN_CLASSES, LEGACY_TOOLCHAIN_NAMES
14+
from tools.toolchains import TOOLCHAIN_CLASSES, LEGACY_TOOLCHAIN_NAMES,\
15+
Resources
1416
from tools.targets import TARGET_MAP
1517

1618
def test_instantiation():
@@ -96,3 +98,27 @@ def test_toolchain_profile_asm(profile, source_file):
9698
"Toolchain %s did not propigate arg %s" % (toolchain.name,
9799
parameter)
98100

101+
for name, Class in TOOLCHAIN_CLASSES.items():
102+
CLS = Class(TARGET_MAP["K64F"])
103+
assert name == CLS.name or name == LEGACY_TOOLCHAIN_NAMES[CLS.name]
104+
105+
106+
@given(lists(text(alphabet=ALPHABET, min_size=1), min_size=1))
107+
def test_detect_duplicates(filenames):
108+
c_sources = [os.path.join(name, "dupe.c") for name in filenames]
109+
s_sources = [os.path.join(name, "dupe.s") for name in filenames]
110+
cpp_sources = [os.path.join(name, "dupe.cpp") for name in filenames]
111+
with MagicMock() as notify:
112+
toolchain = TOOLCHAIN_CLASSES["ARM"](TARGET_MAP["K64F"], notify=notify)
113+
res = Resources()
114+
res.c_sources = c_sources
115+
res.s_sources = s_sources
116+
res.cpp_sources = cpp_sources
117+
assert res.detect_duplicates(toolchain) == 1,\
118+
"Not Enough duplicates found"
119+
120+
_, (notification, _), _ = notify.mock_calls[1]
121+
assert "dupe.o" in notification["message"]
122+
assert "dupe.s" in notification["message"]
123+
assert "dupe.c" in notification["message"]
124+
assert "dupe.cpp" in notification["message"]

tools/toolchains/__init__.py

Lines changed: 37 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -120,6 +120,43 @@ def add(self, resources):
120120

121121
return self
122122

123+
def _collect_duplicates(self, dupe_dict, dupe_headers):
124+
for filename in self.s_sources + self.c_sources + self.cpp_sources:
125+
objname, _ = splitext(basename(filename))
126+
dupe_dict.setdefault(objname, set())
127+
dupe_dict[objname] |= set([filename])
128+
for filename in self.headers:
129+
headername = basename(filename)
130+
dupe_headers.setdefault(headername, set())
131+
dupe_headers[headername] |= set([headername])
132+
for res in self.features.values():
133+
res._collect_duplicates(dupe_dict, dupe_headers)
134+
return dupe_dict, dupe_headers
135+
136+
def detect_duplicates(self, toolchain):
137+
"""Detect all potential ambiguities in filenames and report them with
138+
a toolchain notification
139+
140+
Positional Arguments:
141+
toolchain - used for notifications
142+
"""
143+
count = 0
144+
dupe_dict, dupe_headers = self._collect_duplicates(dict(), dict())
145+
for objname, filenames in dupe_dict.iteritems():
146+
if len(filenames) > 1:
147+
count+=1
148+
toolchain.tool_error(
149+
"Object file %s.o is not unique! It could be made from: %s"\
150+
% (objname, " ".join(filenames)))
151+
for headername, locations in dupe_headers.iteritems():
152+
if len(locations) > 1:
153+
count+=1
154+
toolchain.tool_error(
155+
"Header file %s is not unique! It could be: %s" %\
156+
(headername, " ".join(locations)))
157+
return count
158+
159+
123160
def relative_to(self, base, dot=False):
124161
for field in ['inc_dirs', 'headers', 's_sources', 'c_sources',
125162
'cpp_sources', 'lib_dirs', 'objects', 'libraries',

0 commit comments

Comments
 (0)