
Commit 9f7c82a

Deduplicate find-duplicate functionality
1 parent a85a384 commit 9f7c82a

File tree

2 files changed: +19 -31 lines changed

tools/git_hooks/find_duplicates.py

File mode changed from 100644 to 100755
Lines changed: 1 addition & 21 deletions
@@ -22,25 +22,5 @@
 
     scanned_files = {}
 
-    for r in [resources] + resources.features.values():
-        for file in r.c_sources + r.s_sources + r.cpp_sources + r.objects + r.libraries + r.hex_files + r.bin_files:
-            scanned_files.setdefault(basename(file), [])
-            scanned_files[basename(file)].append(file)
-            filenameparts = splitext(file)
-            if filenameparts[-1] in ["c", "cpp", "s", "S"]:
-                filenameparts[-1] = "o"
-                file = ".".join(filenamparts)
-                scanned_files.setdefault(basename(file), [])
-                scanned_files[basename(file)].append(file)
-
-    count_dupe = 0
-    for key, value in scanned_files.iteritems():
-        if len(value) > 1:
-            count_dupe += 1
-            if not args.silent:
-                print("Multiple files found with name {}".format(key))
-                for file in value:
-                    print(" {}".format(file))
-
-    exit(count_dupe)
+    exit(resources.detect_duplicates())
 
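With this change the hook simply exits with whatever count Resources.detect_duplicates() returns, so the process exit status keeps the meaning of the old exit(count_dupe): zero for a clean tree, non-zero when duplicate basenames exist. Below is a minimal sketch, not part of this commit, of reading that status from a wrapper; the invocation is an assumption for illustration, since the script's command-line arguments are not shown in this diff, and exit statuses are truncated to 0-255, so the value is only a rough signal.

import subprocess

# Hedged sketch (not from this commit): run the hook and read the duplicate
# count back from its exit status. Arguments to the script are assumed/omitted.
status = subprocess.call(["python", "tools/git_hooks/find_duplicates.py"])
if status:
    print("duplicate file names detected: %d" % status)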

tools/toolchains/__init__.py

Lines changed: 18 additions & 10 deletions
@@ -120,27 +120,35 @@ def add(self, resources):
 
         return self
 
-    def detect_duplicates(self):
-        dupe_dict = dict()
+    def _collect_duplicates(self, dupe_dict, dupe_headers):
         for filename in self.s_sources + self.c_sources + self.cpp_sources:
             objname, _ = splitext(basename(filename))
-            dupe_dict.setdefault(objname, [])
-            dupe_dict[objname].append(filename)
+            dupe_dict.setdefault(objname, set())
+            dupe_dict[objname] |= set([filename])
+        for filename in self.headers:
+            headername = basename(filename)
+            dupe_headers.setdefault(headername, set())
+            dupe_headers[headername] |= set([headername])
+        for res in self.features.values():
+            res._collect_duplicates(dupe_dict, dupe_headers)
+        return dupe_dict, dupe_headers
+
+    def detect_duplicates(self):
+        count = 0
+        dupe_dict, dupe_headers = self._collect_duplicates(dict(), dict())
         for objname, filenames in dupe_dict.iteritems():
             if len(filenames) > 1:
+                count += 1
                 print "[ERROR] Object file %s.o is not unique!"\
                     " It could be made from:" % objname
                 print columnate(filenames)
-        dupe_headers = dict()
-        for filename in self.headers:
-            headername = basename(filename)
-            dupe_headers.setdefault(headername, [])
-            dupe_headers[headername].append(headername)
         for headername, locations in dupe_headers.iteritems():
-            if len(filenames) > 1:
+            if len(locations) > 1:
+                count += 1
                 print "[ERROR] Header file %s is not unique! It could be:" %\
                     headername
                 print columnate(locations)
+        return count
 
 
     def relative_to(self, base, dot=False):
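The refactor splits collection from reporting: _collect_duplicates walks the receiver and every feature's nested Resources, merging results into shared dicts of sets keyed by basename, and detect_duplicates then does a single reporting pass and returns the number of duplicate groups. Below is a self-contained toy sketch of that pattern; ToyResources, its fields, and the feature name are invented for illustration and are not the mbed tools API.

from os.path import basename, splitext

# Toy illustration of the collect-then-report pattern (not the real Resources class).
class ToyResources(object):
    def __init__(self, c_sources=None, headers=None, features=None):
        self.c_sources = c_sources or []
        self.headers = headers or []
        self.features = features or {}

    def _collect_duplicates(self, dupe_dict, dupe_headers):
        # Merge this node's files into the shared accumulators...
        for filename in self.c_sources:
            objname, _ = splitext(basename(filename))
            dupe_dict.setdefault(objname, set()).add(filename)
        for filename in self.headers:
            dupe_headers.setdefault(basename(filename), set()).add(filename)
        # ...then recurse into each feature's nested resources.
        for res in self.features.values():
            res._collect_duplicates(dupe_dict, dupe_headers)
        return dupe_dict, dupe_headers

    def detect_duplicates(self):
        # One reporting pass over everything collected; return the group count.
        count = 0
        dupe_dict, dupe_headers = self._collect_duplicates(dict(), dict())
        for name, paths in dupe_dict.items():
            if len(paths) > 1:
                count += 1
                print("object %s.o could be built from %s" % (name, sorted(paths)))
        for name, paths in dupe_headers.items():
            if len(paths) > 1:
                count += 1
                print("header %s found at %s" % (name, sorted(paths)))
        return count

feature = ToyResources(c_sources=["feature/main.c"])
root = ToyResources(c_sources=["src/main.c"], features={"IPV6": feature})
print(root.detect_duplicates())  # reports the main.c clash, then prints 1

Accumulating into sets rather than lists, as the commit does, means a path picked up twice by overlapping scans collapses to a single entry and does not inflate the duplicate count.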
