Reduce debug noise for issue #336 #369
Changes from all commits: 5ec198a, 0210a88, 9b0b024, 62e5f32, e753cc4, fc4c295, 38ffe80, 81c542a, 59a17b8, fa4d5cb
@@ -15,20 +15,20 @@
 import schema_validator
 from schema_files import ALL_TEST_TYPES

 logger = logging.Logger("Checking Test Data vs. Schemas LOGGER")
 logger.setLevel(logging.WARNING)


 def main(args):
     logging.config.fileConfig("../logging.conf")

     if len(args) <= 1:
-        logging.error('Please specify the path to test data directory')
+        logger.error('Please specify the path to test data directory')
         return
     else:
         test_data_path = args[1]

     logging.debug('TEST DATA PATH = %s', test_data_path)

     logger = logging.Logger("Checking Test Data vs. Schemas LOGGER")
     logger.setLevel(logging.INFO)
     logger.info('+++ Test Generated test data vs. schemas files')

     # TODO: get ICU versions

@@ -39,8 +39,8 @@ def main(args):
     for dir_name in icu_dirs:
         icu_versions.append(os.path.basename(dir_name))

-    logging.debug('ICU directories = %s', icu_versions)
-    logging.debug('test types = %s', ALL_TEST_TYPES)
+    logger.debug('ICU directories = %s', icu_versions)
+    logger.debug('test types = %s', ALL_TEST_TYPES)

     validator = schema_validator.ConformanceSchemaValidator()

@@ -52,7 +52,7 @@ def main(args):
     validator.debug = 1

     all_results = validator.validate_test_data_with_schema()
-    logging.info(' %d results for generated test data', len(all_results))
+    logger.debug(' %d results for generated test data', len(all_results))

     schema_errors = []
     failed_validations = []

@@ -78,7 +78,7 @@ def main(args):
     try:
         summary_data = json.dumps(summary_json)
     except BaseException as error:
-        logging.error('json.dumps Summary data problem: %s at %s', error, error)
+        logger.error('json.dumps Summary data problem: %s at %s', error, error)
         sys.exit(1)

     output_filename = os.path.join(test_data_path, 'test_data_validation_summary.json')

@@ -88,19 +88,18 @@ def main(args):
         file_out.close()
     except BaseException as error:
         schema_errors.append(output_filename)
-        logging.fatal('Error: %s. Cannot save validation summary in file %s', error, output_filename)
+        logger.fatal('Error: %s. Cannot save validation summary in file %s', error, output_filename)
         sys.exit(1)

     if schema_errors:
-        logging.critical('Test data file files: %d fail out of %d:',
+        logger.critical('Test data file files: %d fail out of %d:',
                          len(schema_errors), schema_count)
         for failure in schema_errors:
-            logging.critical(' %s', failure)
+            logger.critical(' %s', failure)
         sys.exit(1)
     else:
         logging.info("All %d generated test data files match with schema", schema_count)
Comment: For consistency, you should use the `logger` here as well.
 if __name__ == "__main__":
     main(sys.argv)
@@ -76,7 +76,7 @@ def parallel_validate_schema(validator, file_names):

 def main(args):
     logger = logging.Logger("TEST SCHEMAS LOGGER")
-    logger.setLevel(logging.INFO)
+    logger.setLevel(logging.WARNING)
Comment on lines 78 to +79: You should use `logging.getLogger()` instead of `logging.Logger()` to create or retrieve a logger.
     logger.info('+++ Test JSON Schema files')

     validator = schema_validator.ConformanceSchemaValidator()
@@ -24,10 +24,12 @@ def main(args):
     else:
         test_output_path = args[1]

-    logging.debug('TEST OUTPUT PATH = %s', test_output_path)

     logger = logging.Logger("Checking Test Data vs. Schemas LOGGER")
-    logger.setLevel(logging.INFO)
+    logger.setLevel(logging.WARNING)
Comment on lines 28 to +29: You should use `logging.getLogger()` instead of `logging.Logger()` to create or retrieve a logger.
+    logger.debug('TEST OUTPUT PATH = %s', test_output_path)

     logger.info('+++ Test Generated test data vs. schemas files')

     # TODO: get ICU versions

@@ -54,14 +56,14 @@ def main(args):
             test_type = schema_files.TEST_FILE_TO_TEST_TYPE_MAP[test_file_prefix]
             test_type_set.add(test_type)
         except BaseException as err:
-            logging.debug('No file (%s) during schema check output: %s', file, err
+            logger.debug('No file (%s) during schema check output: %s', file, err
                           )
     for dir_nane in icu_dirs:
         icu_version_set.add(os.path.basename(dir_nane))

     icu_versions = sorted(list(icu_version_set))
-    logging.debug('ICU directories = %s', icu_versions)
-    logging.debug('test types = %s', ALL_TEST_TYPES)
+    logger.debug('ICU directories = %s', icu_versions)
+    logger.debug('test types = %s', ALL_TEST_TYPES)

     validator = schema_validator.ConformanceSchemaValidator()
     # Todo: use setters to initialize validator

@@ -74,7 +76,7 @@ def main(args):
     validator.debug = 1

     all_results, test_validation_plans = validator.validate_test_output_with_schema()
-    logging.info(' %d results for test output', len(all_results))
+    logger.info(' %d results for test output', len(all_results))

     # Check if any files in the expected list were not validated.
     test_paths = set()

@@ -83,7 +85,7 @@ def main(args):
     for json_file in json_files:
         if json_file not in test_paths:
-            logging.fatal('JSON file %s was not verified against a schema', json_file)
+            logger.fatal('JSON file %s was not verified against a schema', json_file)
             # Bail out right away!
             sys.exit(1)

@@ -109,7 +111,6 @@ def main(args):
         }
     except BaseException as error:
         logging.fatal('Cannot create summary_json %s', error)
-        sys.exit(1)
Comment on lines 113 to 114: This exception handler is missing a `sys.exit(1)` call.
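Presumably the suggestion simply restores the early exit that the other handlers in this file keep. A minimal sketch of that shape (the `build_summary` helper and its fields are illustrative, not the repository's actual code):

```python
import json
import logging
import sys

def build_summary(results):
    # Serialize the summary; on failure, log and stop immediately
    # rather than letting the script continue without a summary file.
    try:
        summary_json = {'test_count': len(results), 'results': results}
        return json.dumps(summary_json)
    except BaseException as error:
        logging.fatal('Cannot create summary_json %s', error)
        sys.exit(1)
```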
     # Create outputs from these results.
     try:

@@ -124,11 +125,11 @@ def main(args):
         file_out.write(summary_data)
         file_out.close()
     except BaseException as error:
-        logging.fatal('Error: %s. Cannot save validation summary in file %s', error, output_filename)
+        logger.fatal('Error: %s. Cannot save validation summary in file %s', error, output_filename)
         # Don't continue after this problem.
         sys.exit(1)

-    logging.info("All %d test output files match with schema", schema_count)
+    logger.info("All %d test output files match with schema", schema_count)
     return
@@ -19,7 +19,7 @@

 # ?? Move to the initialization
 ch = logging.StreamHandler()
-ch.setLevel(logging.INFO)
+ch.setLevel(logging.WARNING)
 # Given a directory, validate JSON files against expected schema

@@ -45,7 +45,6 @@ def __init__(self):
         self.test_types = schema_files.ALL_TEST_TYPES
         self.executors = []
         self.icu_versions = []
-        self.debug_leve = 0

         logging.config.fileConfig("../logging.conf")

@@ -145,7 +144,6 @@ def validate_test_data_with_schema(self):
                 schema_test_info.append(file_path_pair)
             else:
                 test_data_files_not_found.append([icu_version, test_type])
-                logging.debug('No data test file %s for %s, %s', file_path_pair, test_type, icu_version)
                 pass

         if test_data_files_not_found:

@@ -161,7 +159,8 @@ def validate_test_data_with_schema(self):
                 logging.warning('FAIL: Test data %s, %s. MSG=%s',
                                 result_data['test_type'], result_data['icu_version'], result_data['err_info'])
             else:
-                logging.debug('Test data validated: %s %s', result_data['test_type'], result_data['icu_version'])
+                pass

             all_results.append(result_data)
         return all_results

@@ -217,7 +216,7 @@ def check_test_data_against_schema(self, schema_info):

     def check_test_data_schema(self, icu_version, test_type):
         # Check the generated test data for structure against the schema
-        logging.debug('Validating %s with %s', test_type, icu_version)
+        logging.info('Validating %s with %s', test_type, icu_version)

         # Check test output vs. the test data schema
         schema_verify_file = os.path.join(self.schema_base, test_type, 'test_schema.json')

@@ -247,7 +246,7 @@ def check_test_data_schema(self, icu_version, test_type):
         results['result'] = result
         if result:
-            logging.debug('Test data %s validated successfully, with ICU %s', test_type, icu_version)
+            logging.info('Test data %s validated with ICU %s', test_type, icu_version)
         else:
             logging.error('Test data %s FAILED with ICU %s: %s', test_type, icu_version, err_info)

@@ -278,7 +277,7 @@ def get_test_output_schema_plan(self, icu_version, test_type, executor):

     def check_test_output_schema(self, icu_version, test_type, executor):
         # Check the output of the tests for structure against the schema
-        logging.debug('Validating test output: %s %s %s', executor, test_type, icu_version)
+        logging.info('Validating test output: %s %s %s', executor, test_type, icu_version)

         # Check test output vs. the schema
         schema_file_name = SCHEMA_FILE_MAP[test_type]['result_data']['schema_file']

@@ -333,7 +332,6 @@ def validate_schema_file(self, schema_file_path):
             logging.fatal('%s for %s. Cannot get test_type value', err, schema_file_path, test_type)
             return [False, err, schema_file_path, test_type]

-        logging.info('Checking schema %s', schema_file_path)
         try:
             # With just a schema, it validates the schema.
             # However Validator.check_schema doesn't fail as expected.

@@ -449,7 +447,7 @@ def main(args):
     base_folders, test_types, result_folders = process_args(args)

     logger = logging.Logger("TEST_GENERATE LOGGER")
-    logger.setLevel(logging.INFO)
+    logger.setLevel(logging.WARNING)
Collaborator: By hard-coding the logger level for this file to be WARNING, and given that most of the logging statements here are either DEBUG or INFO, you're effectively turning off all of the logging happening in this module. Is that your intention?

Author: Yes, I want to get rid of most of the non-urgent log results.
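The reviewer's point can be seen in a small standalone sketch (not from the repository; it uses `getLogger`, as the other review comments recommend): a logger whose level is WARNING filters out DEBUG and INFO records before they ever reach a handler, so every `logger.debug(...)` and `logger.info(...)` call in the module becomes a no-op.

```python
import logging

logging.basicConfig(level=logging.DEBUG)  # root handler would show everything...
logger = logging.getLogger("demo")
logger.setLevel(logging.WARNING)          # ...but the logger filters first

logger.debug("dropped")    # below WARNING: never reaches the handler
logger.info("dropped")     # below WARNING: never reaches the handler
logger.warning("printed")  # WARNING and above still get through
```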
Comment on lines 449 to +450: You should use `logging.getLogger()` instead of `logging.Logger()` to create or retrieve a logger.
     logger.info('+++ Running JSON Schema tests')

     schema_validator = ConformanceSchemaValidator()

@@ -461,20 +459,20 @@ def main(args):
                          'icu76']
     schema_validator.executors = ['node', 'rust', 'dart_web', 'dart_native', 'icu4j']

-    logging.info('Checking test outputs')
+    logger.info('Checking test outputs')
     all_test_out_results = schema_validator.validate_test_output_with_schema()

     # Check all schema files for correctness.
     schema_errors = schema_validator.check_schema_files()
     if schema_errors:
-        logging.error('INVALID SCHEMA: %s', schema_errors)
+        logger.error('INVALID SCHEMA: %s', schema_errors)
     else:
-        logging.info('All schemas are valid: %s', schema_errors)
+        logger.info('All schemas are valid: %s', schema_errors)

-    logging.info('Checking generated data')
+    logger.info('Checking generated data')
     all_test_data_results = schema_validator.validate_test_data_with_schema()

-    logging.info('Checking test outputs')
+    logger.info('Checking test outputs')
     all_test_out_results = schema_validator.validate_test_output_with_schema()

     return
Comment: You should use `logging.getLogger()` instead of `logging.Logger()` to create or retrieve a logger. `logging.getLogger()` ensures that the logger is part of the logging hierarchy, allowing it to inherit configurations from parent loggers. Using `logging.Logger()` directly creates a standalone logger that won't have any handlers by default, so your log messages will not be processed or displayed.
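A small, self-contained sketch of the difference being described (illustrative only, not code from this PR; the logger name is just reused from the diff):

```python
import logging

logging.basicConfig(level=logging.INFO)  # attaches a handler to the root logger

# Standalone logger: no handlers and no parent, so this INFO record is
# silently dropped (only WARNING and above would reach the stderr fallback).
standalone = logging.Logger("TEST SCHEMAS LOGGER")
standalone.info("not shown")

# Hierarchy-aware logger: propagates to the root handler configured above.
hierarchical = logging.getLogger("TEST SCHEMAS LOGGER")
hierarchical.info("shown via the root handler")
```

This matters here because the scripts load their handler setup via `logging.config.fileConfig("../logging.conf")`; only loggers obtained with `getLogger()` participate in that configured hierarchy.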