Skip to content

Commit cf02eb4

Browse files
authored
feat: Add script to process IntegratedTests failures to geos_ats. (#53)
* feat: Add script to process IntegratedTests failures. * add comment in case of no unfiltered diffs. * add indent. * yapf formatting. * review comments + add a few statistics. * add break line * fixed small issues and tested it locally. * format file with yapf.
1 parent 1e91901 commit cf02eb4

File tree

2 files changed

+175
-0
lines changed

2 files changed

+175
-0
lines changed

geos-ats/pyproject.toml

Lines changed: 1 addition & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -38,6 +38,7 @@ setup_ats_environment = "geos.ats.environment_setup:main"
3838
geos_ats_log_check = "geos.ats.helpers.log_check:main"
3939
geos_ats_restart_check = "geos.ats.helpers.restart_check:main"
4040
geos_ats_curve_check = "geos.ats.helpers.curve_check:main"
41+
geos_ats_process_tests_fails = "geos.ats.helpers.process_tests_failures:main"
4142

4243
[project.urls]
4344
Homepage = "https://github.com/GEOS-DEV/geosPythonPackages"
Lines changed: 174 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,174 @@
1+
#!/usr/bin/env python3
2+
import sys
3+
import os
4+
import stat
5+
import subprocess
6+
import argparse
7+
import platform
8+
import shutil
9+
import logging
10+
import glob
11+
12+
logging.basicConfig( level=logging.INFO, format="%(levelname)s: %(message)s" )
13+
14+
15+
def findFiles( folder, extension ):
16+
"""
17+
Recursively find all files in `folder` that match a given extension.
18+
"""
19+
# Build a pattern such as "*.py", "*.txt", etc.
20+
pattern = f"*{extension}"
21+
22+
# Use glob with ** (recursive) to match all files under folder
23+
return glob.glob( os.path.join( folder, "**", pattern ), recursive=True )
24+
25+
26+
def find_error_indices( lines, matchStrings ):
27+
"""
28+
Returns a list of indices where all `matchStrings` appear in the line.
29+
"""
30+
indices = []
31+
for idx, line in enumerate( lines ):
32+
if all( matchString in line for matchString in matchStrings ):
33+
indices.append( idx )
34+
return indices
35+
36+
37+
def process_error_blocks( lines, indices, numTrailingLines ):
38+
"""
39+
For each index in `indices`, collect the line itself plus a few trailing lines.
40+
Returns a list of match blocks (strings).
41+
"""
42+
match_blocks = []
43+
for idx in indices:
44+
# Prepare the current match block
45+
match_block = []
46+
47+
# Safely get the previous line if idx > 0
48+
if idx > 0:
49+
match_block.append( ' ' + lines[ idx - 1 ] )
50+
51+
# Current line
52+
match_block.append( ' ' + lines[ idx ] )
53+
54+
# Trailing lines
55+
for j in range( 1, numTrailingLines + 1 ):
56+
if idx + j >= len( lines ):
57+
match_block.append(
58+
' ***** No closing line. File truncated? Filters may not be properly applied! *****' )
59+
break
60+
match_block.append( ' ' + lines[ idx + j ] )
61+
62+
# If we see a "stop" condition, break out of the trailing loop
63+
if '******************************************************************************' in lines[ idx + j ]:
64+
break
65+
66+
# Convert match_block to a single string
67+
match_blocks.append( '\n'.join( match_block ) )
68+
69+
return match_blocks
70+
71+
72+
def parse_logs_and_filter_errors( directory, extension, exclusionStrings, numTrailingLines ):
73+
"""
74+
Returns a list of indices where all `matchStrings` appear in the line.
75+
"""
76+
# What strings to look for in order to flag a line/block for output
77+
errorStrings = [ 'Error:' ]
78+
79+
unfilteredErrors = {}
80+
total_files_processed = 0
81+
files_with_excluded_errors = []
82+
83+
for fileName in findFiles( directory, extension ):
84+
total_files_processed += 1
85+
errors = ''
86+
87+
# Count how many blocks we matched and how many blocks we ended up including
88+
matched_block_count = 0
89+
included_block_count = 0
90+
91+
with open( fileName ) as f:
92+
lines = f.readlines()
93+
94+
# 1. Find the indices where the errorStrings are found
95+
indices = find_error_indices( lines, errorStrings )
96+
97+
# 2. Extract the block of text associated with each error.
98+
matchBlock = process_error_blocks( lines, indices, numTrailingLines )
99+
100+
for block in matchBlock:
101+
# if none of the exclusions appear in this block
102+
matched_block_count += 1
103+
if not any( excludeString in block for excludeString in exclusionStrings ):
104+
# ... then add it to `errors`
105+
included_block_count += 1
106+
errors += block + "\n"
107+
108+
# If at least 1 block was matched, and not all of them ended up in 'included_block_count'
109+
# it means at least one block was excluded.
110+
if matched_block_count > 0 and included_block_count < matched_block_count:
111+
files_with_excluded_errors.append( fileName )
112+
113+
if errors:
114+
unfilteredErrors[ fileName ] = errors
115+
116+
# --- Logging / Output ---
117+
logging.info( f"Total number of log files processed: {total_files_processed}\n" )
118+
119+
# Unfiltered errors
120+
if unfilteredErrors:
121+
for fileName, errors in unfilteredErrors.items():
122+
logging.warning( f"Found unfiltered diff in: {fileName}" )
123+
logging.info( f"Details of diffs: {errors}" )
124+
else:
125+
logging.info( "No unfiltered differences were found.\n" )
126+
127+
# Files that had at least one excluded block
128+
if files_with_excluded_errors:
129+
files_with_excluded_errors_basename = [ os.path.basename( f ) for f in files_with_excluded_errors ]
130+
131+
excluded_files_text = "\n".join( files_with_excluded_errors_basename )
132+
logging.info( f"The following file(s) had at least one error block that was filtered:\n{excluded_files_text}" )
133+
134+
135+
def main():
136+
137+
DEFAULT_EXCLUSION_STRINGS = [
138+
'logLevel', 'NonlinearSolverParameters', 'has a child', 'different shapes', 'different types', 'differing types'
139+
]
140+
141+
parser = argparse.ArgumentParser( description='Process ats output to filter diffs.' )
142+
143+
parser.add_argument( '-d',
144+
'--directory',
145+
type=str,
146+
default='integratedTests',
147+
help='directory to search recursively for files with specified extension' )
148+
149+
parser.add_argument( '-ext', '--extension', type=str, default='.log', help='extension of files to filter' )
150+
151+
parser.add_argument( '-tl',
152+
'--numTrailingLines',
153+
type=int,
154+
default=5,
155+
help='number of lines to include in block after match is found.' )
156+
157+
parser.add_argument( '-e',
158+
'--exclusionStrings',
159+
type=str,
160+
nargs="*",
161+
default=[],
162+
help='What stings to look for in order to exclude a block' )
163+
164+
args, unknown_args = parser.parse_known_args()
165+
166+
if unknown_args:
167+
print( "unknown arguments %s" % unknown_args )
168+
169+
exclusionStrings = DEFAULT_EXCLUSION_STRINGS + args.exclusionStrings
170+
parse_logs_and_filter_errors( args.directory, args.extension, exclusionStrings, args.numTrailingLines )
171+
172+
173+
if __name__ == '__main__':
174+
main()

0 commit comments

Comments
 (0)