
Commit d98e144

Codemods will now execute once for each finding independently
1 parent: 9a41a51

File tree

1 file changed: +44 -11 lines changed

src/codemodder/codemods/base_codemod.py

Lines changed: 44 additions & 11 deletions
@@ -226,21 +226,54 @@ def _apply(
             logger.debug("No files matched for %s", self.id)
             return None
 
-        process_file = functools.partial(
-            self._process_file, context=context, results=results, rules=rules
-        )
-
-        contexts = []
-        if context.max_workers == 1:
-            logger.debug("processing files serially")
-            contexts.extend([process_file(file) for file in files_to_analyze])
-        else:
+        # Do each result independently
+        if results:
+            # gather positional arguments for the map
+            resultset_arguments = []
+            path_arguments = []
+            for result in results.results_for_rules(rules):
+                # this need to be the same type of ResultSet as results
+                singleton = results.__class__()
+                singleton.add_result(result)
+                result_locations = self.get_files_to_analyze(context, singleton)
+                # We do an execution for each location in the result
+                # So we duplicate the resultset argument for each location
+                for loc in result_locations:
+                    resultset_arguments.append(singleton)
+                    path_arguments.append(loc)
+
+            contexts: list = []
             with ThreadPoolExecutor() as executor:
                 logger.debug("using executor with %s workers", context.max_workers)
-                contexts.extend(executor.map(process_file, files_to_analyze))
+                contexts.extend(
+                    executor.map(
+                        lambda path, resultset: self._process_file(
+                            path, context, resultset, rules
+                        ),
+                        path_arguments,
+                        resultset_arguments,
+                    )
+                )
             executor.shutdown(wait=True)
 
-        context.process_results(self.id, contexts)
+            context.process_results(self.id, contexts)
+        # for find and fix codemods
+        else:
+            process_file = functools.partial(
+                self._process_file, context=context, results=results, rules=rules
+            )
+
+            contexts = []
+            if context.max_workers == 1:
+                logger.debug("processing files serially")
+                contexts.extend([process_file(file) for file in files_to_analyze])
+            else:
+                with ThreadPoolExecutor() as executor:
+                    logger.debug("using executor with %s workers", context.max_workers)
+                    contexts.extend(executor.map(process_file, files_to_analyze))
+                executor.shutdown(wait=True)
+
+            context.process_results(self.id, contexts)
         return None
 
     def apply(self, context: CodemodExecutionContext) -> None | TokenUsage:
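
For reference, the new branch fans each finding out into its own `_process_file` call by building two parallel positional-argument lists and letting `executor.map` pair them element-wise. Below is a minimal, self-contained sketch of that pattern; `Finding` and `process_one` are hypothetical stand-ins for illustration only, not the codemodder API.

```python
from concurrent.futures import ThreadPoolExecutor
from dataclasses import dataclass


@dataclass
class Finding:
    """Hypothetical stand-in for a single detector result."""
    rule: str
    path: str


def process_one(path: str, finding: Finding) -> str:
    """Hypothetical stand-in for _process_file: handles one path with one finding."""
    return f"{finding.rule}: patched {path}"


findings = [
    Finding("secure-random", "a.py"),
    Finding("secure-random", "b.py"),
    Finding("sql-injection", "a.py"),
]

# Build parallel positional-argument lists, one entry per (finding, location)
# pair, mirroring path_arguments / resultset_arguments in the diff above.
path_args = [f.path for f in findings]
finding_args = list(findings)

with ThreadPoolExecutor() as executor:
    # executor.map pairs the two iterables element-wise, so every finding gets
    # its own independent call instead of one call per file.
    outcomes = list(
        executor.map(lambda path, finding: process_one(path, finding),
                     path_args, finding_args)
    )

print(outcomes)
```

Note that this per-finding path always runs through a ThreadPoolExecutor, while the find-and-fix branch still honors `max_workers == 1` for serial processing.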
