From 6235c8345467ce4333e7007627fc6c6135e38a52 Mon Sep 17 00:00:00 2001 From: Matteo Date: Wed, 8 Jan 2025 11:37:05 +0100 Subject: [PATCH 1/8] feat: Add script to process IntegratedTests failures. --- geos-ats/pyproject.toml | 1 + .../ats/helpers/process_tests_failures.py | 101 ++++++++++++++++++ 2 files changed, 102 insertions(+) create mode 100644 geos-ats/src/geos/ats/helpers/process_tests_failures.py diff --git a/geos-ats/pyproject.toml b/geos-ats/pyproject.toml index bb9a1c734..b7ba39f69 100644 --- a/geos-ats/pyproject.toml +++ b/geos-ats/pyproject.toml @@ -38,6 +38,7 @@ setup_ats_environment = "geos.ats.environment_setup:main" geos_ats_log_check = "geos.ats.helpers.log_check:main" geos_ats_restart_check = "geos.ats.helpers.restart_check:main" geos_ats_curve_check = "geos.ats.helpers.curve_check:main" +geos_ats_process_tests_fails="geos.ats.helpers.process_tests_failures:main" [project.urls] Homepage = "https://github.com/GEOS-DEV/geosPythonPackages" diff --git a/geos-ats/src/geos/ats/helpers/process_tests_failures.py b/geos-ats/src/geos/ats/helpers/process_tests_failures.py new file mode 100644 index 000000000..e9aa4b3fb --- /dev/null +++ b/geos-ats/src/geos/ats/helpers/process_tests_failures.py @@ -0,0 +1,101 @@ +#!/usr/bin/env python3 +# Python script to +import sys +import os +import stat +import subprocess +import argparse +import platform +import shutil +import logging + +logging.basicConfig(level=logging.INFO, format="%(levelname)s: %(message)s") + +# fines all files recursively from +def findFiles(folder, extension): + for root, folders, files in os.walk(folder): + for filename in folders + files: + if (extension in filename): + yield os.path.join(root, filename) + +def parse_logs_and_filter_errors( directory, extension, exclusionStrings, numTrailingLines ): + # What strings to look for in order to flag a line/block for output + matchStrings = ['Error:'] + + filteredErrors = {} + + # What stings to look for in order to exclude a block + for fileName in findFiles(directory, extension): + errors = '' + + with open(fileName) as f: + lines = f.readlines() + + for i in range(0, len(lines)): + line = lines[i] + if all(matchString in line for matchString in matchStrings): + matchBlock = [] + matchBlock.append(' ' + lines[i - 1]) + matchBlock.append(' ' + line) + + for j in range(1, numTrailingLines + 1): + if i + j >= len(lines): + matchBlock.append(' ***** No closing line. file truncated? Filters may not be properly applied! 
*****') + break + matchBlock.append(' ' + lines[i + j]) + + matchBlock = '\n'.join(matchBlock) + + if ('******************************************************************************' + in lines[i + j]): + break + + i += j + + if not any(excludeString in matchBlock for excludeString in exclusionStrings): + errors += matchBlock + + if errors: + filteredErrors[fileName] = errors + + for fileName, errors in filteredErrors.items(): + logging.warning(f"Found unfiltered diff in: {fileName}") + logging.info(f"Details of diffs: {errors}") + +def main(): + + DEFAULT_EXCLUSION_STRINGS = ['logLevel', 'NonlinearSolverParameters', 'has a child', 'different shapes', 'different types', 'differing types'] + + parser = argparse.ArgumentParser(description='Process ats output to filter diffs.') + + parser.add_argument('-d', + '--directory', + type=str, + default='integratedTests', + help='directory to search recursively for files with specified extension') + + parser.add_argument('-ext', '--extension', type=str, default='.log', help='extension of files to filter') + + parser.add_argument('-tl', + '--numTrailingLines', + type=int, + default=5, + help='number of lines to include in block after match is found.') + + parser.add_argument('-e', + '--exclusionStrings', + type=str, + nargs="*", + default=[], + help='What stings to look for in order to exclude a block') + + args, unknown_args = parser.parse_known_args() + + if unknown_args: + print("unknown arguments %s" % unknown_args) + + exclusionStrings = DEFAULT_EXCLUSION_LIST + args.exclusionStrings + parse_logs_and_filter_errors( args.directory, args.extension, exclusionStrings, args.numTrailingLines ) + +if __name__ == '__main__': + main() From 57cde4097121417438213cd5e848597225a2b7de Mon Sep 17 00:00:00 2001 From: Matteo Date: Wed, 8 Jan 2025 12:40:54 +0100 Subject: [PATCH 2/8] add comment in case of no unfiltered diffs. --- geos-ats/src/geos/ats/helpers/process_tests_failures.py | 5 ++++- 1 file changed, 4 insertions(+), 1 deletion(-) diff --git a/geos-ats/src/geos/ats/helpers/process_tests_failures.py b/geos-ats/src/geos/ats/helpers/process_tests_failures.py index e9aa4b3fb..b0969241e 100644 --- a/geos-ats/src/geos/ats/helpers/process_tests_failures.py +++ b/geos-ats/src/geos/ats/helpers/process_tests_failures.py @@ -58,9 +58,12 @@ def parse_logs_and_filter_errors( directory, extension, exclusionStrings, numTra if errors: filteredErrors[fileName] = errors - for fileName, errors in filteredErrors.items(): + if filteredErrors: + for fileName, errors in filteredErrors.items(): logging.warning(f"Found unfiltered diff in: {fileName}") logging.info(f"Details of diffs: {errors}") + else: + logging.info("No unfiltered differences were found.") def main(): From 2041f64d5a186725fca9e58450eddf3482c3d2c0 Mon Sep 17 00:00:00 2001 From: Matteo Date: Wed, 8 Jan 2025 12:41:55 +0100 Subject: [PATCH 3/8] add indent. 
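
Reviewer note (illustration only, not part of this patch): the core idea of parse_logs_and_filter_errors, as introduced in the first commit of this series, is to flag a block of lines around every 'Error:' line and then drop blocks that contain an exclusion string. A minimal standalone sketch of that idea, with an invented helper name and toy log content:

    def keep_error_blocks(lines, num_trailing=2, exclusions=('logLevel',)):
        kept = []
        for i, line in enumerate(lines):
            if 'Error:' in line:                     # same trigger as matchStrings in the script
                block = '\n'.join(lines[max(i - 1, 0):i + 1 + num_trailing])
                if not any(excl in block for excl in exclusions):
                    kept.append(block)               # block survives the exclusion filter
        return kept

    toy_log = ['Running test A',
               'Error: logLevel attribute mismatch',   # dropped by the 'logLevel' exclusion
               'details...',
               'Error: restart file differs',          # reported as an unfiltered diff
               'details...']
    print(keep_error_blocks(toy_log))

With the default exclusion strings, only the second block would be reported as an unfiltered diff.
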
--- geos-ats/src/geos/ats/helpers/process_tests_failures.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/geos-ats/src/geos/ats/helpers/process_tests_failures.py b/geos-ats/src/geos/ats/helpers/process_tests_failures.py index b0969241e..4ffb4e6fa 100644 --- a/geos-ats/src/geos/ats/helpers/process_tests_failures.py +++ b/geos-ats/src/geos/ats/helpers/process_tests_failures.py @@ -60,8 +60,8 @@ def parse_logs_and_filter_errors( directory, extension, exclusionStrings, numTra if filteredErrors: for fileName, errors in filteredErrors.items(): - logging.warning(f"Found unfiltered diff in: {fileName}") - logging.info(f"Details of diffs: {errors}") + logging.warning(f"Found unfiltered diff in: {fileName}") + logging.info(f"Details of diffs: {errors}") else: logging.info("No unfiltered differences were found.") From c4ec674c69f1fdf5af755d2adbb84647d3bc0490 Mon Sep 17 00:00:00 2001 From: Matteo Date: Wed, 8 Jan 2025 14:07:57 +0100 Subject: [PATCH 4/8] yapf formatting. --- .../ats/helpers/process_tests_failures.py | 122 +++++++++--------- 1 file changed, 64 insertions(+), 58 deletions(-) diff --git a/geos-ats/src/geos/ats/helpers/process_tests_failures.py b/geos-ats/src/geos/ats/helpers/process_tests_failures.py index 4ffb4e6fa..e904bcd8d 100644 --- a/geos-ats/src/geos/ats/helpers/process_tests_failures.py +++ b/geos-ats/src/geos/ats/helpers/process_tests_failures.py @@ -1,5 +1,4 @@ #!/usr/bin/env python3 -# Python script to import sys import os import stat @@ -9,96 +8,103 @@ import shutil import logging -logging.basicConfig(level=logging.INFO, format="%(levelname)s: %(message)s") +logging.basicConfig( level=logging.INFO, format="%(levelname)s: %(message)s" ) + # fines all files recursively from -def findFiles(folder, extension): - for root, folders, files in os.walk(folder): +def findFiles( folder, extension ): + for root, folders, files in os.walk( folder ): for filename in folders + files: - if (extension in filename): - yield os.path.join(root, filename) + if ( extension in filename ): + yield os.path.join( root, filename ) + def parse_logs_and_filter_errors( directory, extension, exclusionStrings, numTrailingLines ): # What strings to look for in order to flag a line/block for output - matchStrings = ['Error:'] + matchStrings = [ 'Error:' ] filteredErrors = {} # What stings to look for in order to exclude a block - for fileName in findFiles(directory, extension): + for fileName in findFiles( directory, extension ): errors = '' - with open(fileName) as f: + with open( fileName ) as f: lines = f.readlines() - for i in range(0, len(lines)): - line = lines[i] - if all(matchString in line for matchString in matchStrings): + for i in range( 0, len( lines ) ): + line = lines[ i ] + if all( matchString in line for matchString in matchStrings ): matchBlock = [] - matchBlock.append(' ' + lines[i - 1]) - matchBlock.append(' ' + line) + matchBlock.append( ' ' + lines[ i - 1 ] ) + matchBlock.append( ' ' + line ) - for j in range(1, numTrailingLines + 1): - if i + j >= len(lines): - matchBlock.append(' ***** No closing line. file truncated? Filters may not be properly applied! *****') + for j in range( 1, numTrailingLines + 1 ): + if i + j >= len( lines ): + matchBlock.append( + ' ***** No closing line. file truncated? Filters may not be properly applied! 
*****' ) break - matchBlock.append(' ' + lines[i + j]) - - matchBlock = '\n'.join(matchBlock) + matchBlock.append( ' ' + lines[ i + j ] ) - if ('******************************************************************************' - in lines[i + j]): + matchBlock = '\n'.join( matchBlock ) + + if ( '******************************************************************************' + in lines[ i + j ] ): break - + i += j - if not any(excludeString in matchBlock for excludeString in exclusionStrings): + if not any( excludeString in matchBlock for excludeString in exclusionStrings ): errors += matchBlock if errors: - filteredErrors[fileName] = errors + filteredErrors[ fileName ] = errors if filteredErrors: - for fileName, errors in filteredErrors.items(): - logging.warning(f"Found unfiltered diff in: {fileName}") - logging.info(f"Details of diffs: {errors}") + for fileName, errors in filteredErrors.items(): + logging.warning( f"Found unfiltered diff in: {fileName}" ) + logging.info( f"Details of diffs: {errors}" ) else: - logging.info("No unfiltered differences were found.") + logging.info( "No unfiltered differences were found." ) + def main(): - DEFAULT_EXCLUSION_STRINGS = ['logLevel', 'NonlinearSolverParameters', 'has a child', 'different shapes', 'different types', 'differing types'] - - parser = argparse.ArgumentParser(description='Process ats output to filter diffs.') - - parser.add_argument('-d', - '--directory', - type=str, - default='integratedTests', - help='directory to search recursively for files with specified extension') - - parser.add_argument('-ext', '--extension', type=str, default='.log', help='extension of files to filter') - - parser.add_argument('-tl', - '--numTrailingLines', - type=int, - default=5, - help='number of lines to include in block after match is found.') - - parser.add_argument('-e', - '--exclusionStrings', - type=str, - nargs="*", - default=[], - help='What stings to look for in order to exclude a block') + DEFAULT_EXCLUSION_STRINGS = [ + 'logLevel', 'NonlinearSolverParameters', 'has a child', 'different shapes', 'different types', 'differing types' + ] + + parser = argparse.ArgumentParser( description='Process ats output to filter diffs.' ) + + parser.add_argument( '-d', + '--directory', + type=str, + default='integratedTests', + help='directory to search recursively for files with specified extension' ) + + parser.add_argument( '-ext', '--extension', type=str, default='.log', help='extension of files to filter' ) + + parser.add_argument( '-tl', + '--numTrailingLines', + type=int, + default=5, + help='number of lines to include in block after match is found.' ) + + parser.add_argument( '-e', + '--exclusionStrings', + type=str, + nargs="*", + default=[], + help='What stings to look for in order to exclude a block' ) args, unknown_args = parser.parse_known_args() - + if unknown_args: - print("unknown arguments %s" % unknown_args) - - exclusionStrings = DEFAULT_EXCLUSION_LIST + args.exclusionStrings - parse_logs_and_filter_errors( args.directory, args.extension, exclusionStrings, args.numTrailingLines ) + print( "unknown arguments %s" % unknown_args ) + + exclusionStrings = DEFAULT_EXCLUSION_LIST + args.exclusionStrings + parse_logs_and_filter_errors( args.directory, args.extension, exclusionStrings, args.numTrailingLines ) + if __name__ == '__main__': main() From 013151054642394d8bf4ca783dd3f657134f2d0a Mon Sep 17 00:00:00 2001 From: Matteo Cusini Date: Fri, 10 Jan 2025 14:36:41 -0800 Subject: [PATCH 5/8] review comments + add a few statistics. 
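
Reviewer note (illustration only, not part of this patch): the findFiles rewrite below switches from os.walk to glob's recursive "**" pattern. A small standalone sketch of what that call matches, using the script's default directory name; it relies on 'import glob', which is only added in a later commit of this series:

    import glob
    import os

    # Matches every *.log file at any depth below the given folder.
    for path in glob.glob(os.path.join('integratedTests', '**', '*.log'), recursive=True):
        print(path)

Unlike the earlier os.walk loop, which treated the extension as a substring and also scanned directory names (it iterated folders + files), this matches on the filename pattern only.
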
--- .../ats/helpers/process_tests_failures.py | 125 +++++++++++++----- 1 file changed, 90 insertions(+), 35 deletions(-) diff --git a/geos-ats/src/geos/ats/helpers/process_tests_failures.py b/geos-ats/src/geos/ats/helpers/process_tests_failures.py index e904bcd8d..af255133a 100644 --- a/geos-ats/src/geos/ats/helpers/process_tests_failures.py +++ b/geos-ats/src/geos/ats/helpers/process_tests_failures.py @@ -10,63 +10,118 @@ logging.basicConfig( level=logging.INFO, format="%(levelname)s: %(message)s" ) - -# fines all files recursively from def findFiles( folder, extension ): - for root, folders, files in os.walk( folder ): - for filename in folders + files: - if ( extension in filename ): - yield os.path.join( root, filename ) - + """ + Recursively find all files in `folder` that match a given extension. + """ + # Build a pattern such as "*.py", "*.txt", etc. + pattern = f"*{extension}" + + # Use glob with ** (recursive) to match all files under folder + return glob.glob(os.path.join(folder, "**", pattern), recursive=True) + +def find_error_indices(lines, matchStrings): + """ + Returns a list of indices where all `matchStrings` appear in the line. + """ + indices = [] + for idx, line in enumerate(lines): + if all(matchString in line for matchString in matchStrings): + indices.append(idx) + return indices + +def process_error_blocks(lines, indices, numTrailingLines): + """ + For each index in `indices`, collect the line itself plus a few trailing lines. + Returns a list of match blocks (strings). + """ + match_blocks = [] + for idx in indices: + # Prepare the current match block + match_block = [] + + # Safely get the previous line if idx > 0 + if idx > 0: + match_block.append(' ' + lines[idx - 1]) + + # Current line + match_block.append(' ' + lines[idx]) + + # Trailing lines + for j in range(1, numTrailingLines + 1): + if idx + j >= len(lines): + match_block.append(' ***** No closing line. File truncated? Filters may not be properly applied! *****') + break + match_block.append(' ' + lines[idx + j]) + + # If we see a "stop" condition, break out of the trailing loop + if '******************************************************************************' in lines[idx + j]: + break + + # Convert match_block to a single string + match_blocks.append('\n'.join(match_block)) + + return match_blocks def parse_logs_and_filter_errors( directory, extension, exclusionStrings, numTrailingLines ): + """ + Returns a list of indices where all `matchStrings` appear in the line. + """ # What strings to look for in order to flag a line/block for output - matchStrings = [ 'Error:' ] + errorStrings = [ 'Error:' ] - filteredErrors = {} + unfilteredErrors = {} + total_files_processed = 0 + files_with_excluded_errors = [] - # What stings to look for in order to exclude a block for fileName in findFiles( directory, extension ): + total_files_processed += 1 errors = '' + + # Count how many blocks we matched and how many blocks we ended up including + matched_block_count = 0 + included_block_count = 0 with open( fileName ) as f: lines = f.readlines() - for i in range( 0, len( lines ) ): - line = lines[ i ] - if all( matchString in line for matchString in matchStrings ): - matchBlock = [] - matchBlock.append( ' ' + lines[ i - 1 ] ) - matchBlock.append( ' ' + line ) - - for j in range( 1, numTrailingLines + 1 ): - if i + j >= len( lines ): - matchBlock.append( - ' ***** No closing line. file truncated? Filters may not be properly applied! *****' ) - break - matchBlock.append( ' ' + lines[ i + j ] ) + # 1. 
Find the indices where the errorStrings are found + indices = find_error_indices(lines, errorStrings) - matchBlock = '\n'.join( matchBlock ) + # 2. Extract the block of text associated with each error. + matchBlock = process_error_blocks(lines, indices, numTrailingLines) - if ( '******************************************************************************' - in lines[ i + j ] ): - break + for block in matchBlock: + # if none of the exclusions appear in this block + matched_block_count += 1 + if not any(excludeString in block for excludeString in exclusionStrings): + # ... then add it to `errors` + included_block_count += 1 + errors += block + "\n" - i += j - - if not any( excludeString in matchBlock for excludeString in exclusionStrings ): - errors += matchBlock + # If at least 1 block was matched, and not all of them ended up in 'included_block_count' + # it means at least one block was excluded. + if matched_block_count > 0 and included_block_count < matched_block_count < 0: + files_with_excluded_errors.append( fileName ) if errors: - filteredErrors[ fileName ] = errors + unfilteredErrors[ fileName ] = errors + + # --- Logging / Output --- + logging.info(f"Total number of log files processed: {total_files_processed}") + # Unfiltered errors if filteredErrors: for fileName, errors in filteredErrors.items(): - logging.warning( f"Found unfiltered diff in: {fileName}" ) - logging.info( f"Details of diffs: {errors}" ) + logging.warning(f"Found unfiltered diff in: {fileName}") + logging.info(f"Details of diffs: {errors}") else: - logging.info( "No unfiltered differences were found." ) + logging.info("No unfiltered differences were found.\n") + # Files that had at least one excluded block + if files_with_excluded_errors: + excluded_files_text = "\n".join(files_with_excluded_errors) + logging.info( f"The following file(s) had at least one error block that was filtered:\n{excluded_files_text}") def main(): From 2a93f6e36a9a4353ccf354645616cfd36eb5b494 Mon Sep 17 00:00:00 2001 From: Matteo Cusini Date: Fri, 10 Jan 2025 14:38:15 -0800 Subject: [PATCH 6/8] add break line --- geos-ats/src/geos/ats/helpers/process_tests_failures.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/geos-ats/src/geos/ats/helpers/process_tests_failures.py b/geos-ats/src/geos/ats/helpers/process_tests_failures.py index af255133a..61169cb9f 100644 --- a/geos-ats/src/geos/ats/helpers/process_tests_failures.py +++ b/geos-ats/src/geos/ats/helpers/process_tests_failures.py @@ -108,7 +108,7 @@ def parse_logs_and_filter_errors( directory, extension, exclusionStrings, numTra unfilteredErrors[ fileName ] = errors # --- Logging / Output --- - logging.info(f"Total number of log files processed: {total_files_processed}") + logging.info(f"Total number of log files processed: {total_files_processed}\n") # Unfiltered errors if filteredErrors: From c0ea1912d5fca4b0f92a76496edd6145cbe47395 Mon Sep 17 00:00:00 2001 From: Matteo Cusini Date: Fri, 10 Jan 2025 15:17:34 -0800 Subject: [PATCH 7/8] fixed small issues and tested it locally. 
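
Reviewer note (illustration only, not part of this patch): one of the small issues fixed below is the chained comparison in the excluded-block check. Python evaluates a < b < 0 as (a < b) and (b < 0), so with non-negative block counters the old condition could never be true:

    included_block_count, matched_block_count = 1, 3

    print(included_block_count < matched_block_count < 0)   # False: 3 < 0 never holds for counters
    print(included_block_count < matched_block_count)       # True: at least one block was excluded
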
--- .../src/geos/ats/helpers/process_tests_failures.py | 13 ++++++++----- 1 file changed, 8 insertions(+), 5 deletions(-) diff --git a/geos-ats/src/geos/ats/helpers/process_tests_failures.py b/geos-ats/src/geos/ats/helpers/process_tests_failures.py index 61169cb9f..458733434 100644 --- a/geos-ats/src/geos/ats/helpers/process_tests_failures.py +++ b/geos-ats/src/geos/ats/helpers/process_tests_failures.py @@ -7,6 +7,7 @@ import platform import shutil import logging +import glob logging.basicConfig( level=logging.INFO, format="%(levelname)s: %(message)s" ) @@ -101,7 +102,7 @@ def parse_logs_and_filter_errors( directory, extension, exclusionStrings, numTra # If at least 1 block was matched, and not all of them ended up in 'included_block_count' # it means at least one block was excluded. - if matched_block_count > 0 and included_block_count < matched_block_count < 0: + if matched_block_count > 0 and included_block_count < matched_block_count: files_with_excluded_errors.append( fileName ) if errors: @@ -111,8 +112,8 @@ def parse_logs_and_filter_errors( directory, extension, exclusionStrings, numTra logging.info(f"Total number of log files processed: {total_files_processed}\n") # Unfiltered errors - if filteredErrors: - for fileName, errors in filteredErrors.items(): + if unfilteredErrors: + for fileName, errors in unfilteredErrors.items(): logging.warning(f"Found unfiltered diff in: {fileName}") logging.info(f"Details of diffs: {errors}") else: @@ -120,7 +121,9 @@ def parse_logs_and_filter_errors( directory, extension, exclusionStrings, numTra # Files that had at least one excluded block if files_with_excluded_errors: - excluded_files_text = "\n".join(files_with_excluded_errors) + files_with_excluded_errors_basename = [ os.path.basename(f) for f in files_with_excluded_errors ] + + excluded_files_text = "\n".join(files_with_excluded_errors_basename) logging.info( f"The following file(s) had at least one error block that was filtered:\n{excluded_files_text}") def main(): @@ -157,7 +160,7 @@ def main(): if unknown_args: print( "unknown arguments %s" % unknown_args ) - exclusionStrings = DEFAULT_EXCLUSION_LIST + args.exclusionStrings + exclusionStrings = DEFAULT_EXCLUSION_STRINGS + args.exclusionStrings parse_logs_and_filter_errors( args.directory, args.extension, exclusionStrings, args.numTrailingLines ) From 3dd8a69b548c0fab49b27714b4c78a40b3632366 Mon Sep 17 00:00:00 2001 From: Matteo Cusini Date: Sat, 11 Jan 2025 08:28:25 -0800 Subject: [PATCH 8/8] format file with yapf. --- .../ats/helpers/process_tests_failures.py | 60 ++++++++++--------- 1 file changed, 33 insertions(+), 27 deletions(-) diff --git a/geos-ats/src/geos/ats/helpers/process_tests_failures.py b/geos-ats/src/geos/ats/helpers/process_tests_failures.py index 458733434..518faa2d1 100644 --- a/geos-ats/src/geos/ats/helpers/process_tests_failures.py +++ b/geos-ats/src/geos/ats/helpers/process_tests_failures.py @@ -11,6 +11,7 @@ logging.basicConfig( level=logging.INFO, format="%(levelname)s: %(message)s" ) + def findFiles( folder, extension ): """ Recursively find all files in `folder` that match a given extension. 
@@ -19,19 +20,21 @@ def findFiles( folder, extension ): pattern = f"*{extension}" # Use glob with ** (recursive) to match all files under folder - return glob.glob(os.path.join(folder, "**", pattern), recursive=True) + return glob.glob( os.path.join( folder, "**", pattern ), recursive=True ) + -def find_error_indices(lines, matchStrings): +def find_error_indices( lines, matchStrings ): """ Returns a list of indices where all `matchStrings` appear in the line. """ indices = [] - for idx, line in enumerate(lines): - if all(matchString in line for matchString in matchStrings): - indices.append(idx) + for idx, line in enumerate( lines ): + if all( matchString in line for matchString in matchStrings ): + indices.append( idx ) return indices -def process_error_blocks(lines, indices, numTrailingLines): + +def process_error_blocks( lines, indices, numTrailingLines ): """ For each index in `indices`, collect the line itself plus a few trailing lines. Returns a list of match blocks (strings). @@ -43,26 +46,28 @@ def process_error_blocks(lines, indices, numTrailingLines): # Safely get the previous line if idx > 0 if idx > 0: - match_block.append(' ' + lines[idx - 1]) + match_block.append( ' ' + lines[ idx - 1 ] ) # Current line - match_block.append(' ' + lines[idx]) + match_block.append( ' ' + lines[ idx ] ) # Trailing lines - for j in range(1, numTrailingLines + 1): - if idx + j >= len(lines): - match_block.append(' ***** No closing line. File truncated? Filters may not be properly applied! *****') + for j in range( 1, numTrailingLines + 1 ): + if idx + j >= len( lines ): + match_block.append( + ' ***** No closing line. File truncated? Filters may not be properly applied! *****' ) break - match_block.append(' ' + lines[idx + j]) + match_block.append( ' ' + lines[ idx + j ] ) # If we see a "stop" condition, break out of the trailing loop - if '******************************************************************************' in lines[idx + j]: + if '******************************************************************************' in lines[ idx + j ]: break # Convert match_block to a single string - match_blocks.append('\n'.join(match_block)) + match_blocks.append( '\n'.join( match_block ) ) + + return match_blocks - return match_blocks def parse_logs_and_filter_errors( directory, extension, exclusionStrings, numTrailingLines ): """ @@ -78,7 +83,7 @@ def parse_logs_and_filter_errors( directory, extension, exclusionStrings, numTra for fileName in findFiles( directory, extension ): total_files_processed += 1 errors = '' - + # Count how many blocks we matched and how many blocks we ended up including matched_block_count = 0 included_block_count = 0 @@ -87,15 +92,15 @@ def parse_logs_and_filter_errors( directory, extension, exclusionStrings, numTra lines = f.readlines() # 1. Find the indices where the errorStrings are found - indices = find_error_indices(lines, errorStrings) + indices = find_error_indices( lines, errorStrings ) # 2. Extract the block of text associated with each error. - matchBlock = process_error_blocks(lines, indices, numTrailingLines) + matchBlock = process_error_blocks( lines, indices, numTrailingLines ) for block in matchBlock: # if none of the exclusions appear in this block matched_block_count += 1 - if not any(excludeString in block for excludeString in exclusionStrings): + if not any( excludeString in block for excludeString in exclusionStrings ): # ... 
then add it to `errors` included_block_count += 1 errors += block + "\n" @@ -103,28 +108,29 @@ def parse_logs_and_filter_errors( directory, extension, exclusionStrings, numTra # If at least 1 block was matched, and not all of them ended up in 'included_block_count' # it means at least one block was excluded. if matched_block_count > 0 and included_block_count < matched_block_count: - files_with_excluded_errors.append( fileName ) + files_with_excluded_errors.append( fileName ) if errors: unfilteredErrors[ fileName ] = errors # --- Logging / Output --- - logging.info(f"Total number of log files processed: {total_files_processed}\n") + logging.info( f"Total number of log files processed: {total_files_processed}\n" ) # Unfiltered errors if unfilteredErrors: for fileName, errors in unfilteredErrors.items(): - logging.warning(f"Found unfiltered diff in: {fileName}") - logging.info(f"Details of diffs: {errors}") + logging.warning( f"Found unfiltered diff in: {fileName}" ) + logging.info( f"Details of diffs: {errors}" ) else: - logging.info("No unfiltered differences were found.\n") + logging.info( "No unfiltered differences were found.\n" ) # Files that had at least one excluded block if files_with_excluded_errors: - files_with_excluded_errors_basename = [ os.path.basename(f) for f in files_with_excluded_errors ] + files_with_excluded_errors_basename = [ os.path.basename( f ) for f in files_with_excluded_errors ] + + excluded_files_text = "\n".join( files_with_excluded_errors_basename ) + logging.info( f"The following file(s) had at least one error block that was filtered:\n{excluded_files_text}" ) - excluded_files_text = "\n".join(files_with_excluded_errors_basename) - logging.info( f"The following file(s) had at least one error block that was filtered:\n{excluded_files_text}") def main():