Skip to content

Commit 5dbbd92

Browse files
NRL-1215 Clean up prints
1 parent 8bf8776 commit 5dbbd92

File tree

1 file changed

+0
-9
lines changed

1 file changed

+0
-9
lines changed

layer/nrlf/core/json_duplicate_checker.py

Lines changed: 0 additions & 9 deletions
Original file line number | Diff line number | Diff line change
@@ -9,10 +9,8 @@ def check_for_duplicate_keys(pairs: list[tuple[str, Any]]) -> dict:
99

1010
for key, value in pairs:
1111
normalized_key = key.lower()
12-
print(f"Processing key: {key}, value: {value}")
1312
if normalized_key in normalized_keys:
1413
dupes.setdefault(key, []).append(value)
15-
print(f"Duplicate key found: {key}")
1614
else:
1715
keys[key] = value
1816
normalized_keys += [normalized_key]
@@ -35,7 +33,6 @@ def flatten_duplicates(data: dict | list) -> list[str]:
3533
dupes = flatten_duplicates(value)
3634
duplicates.extend([f"{path}.{dupe}" for dupe in dupes])
3735

38-
print(f"flatten_duplicates data={data} dupes={duplicates}")
3936
return duplicates
4037

4138

@@ -62,19 +59,13 @@ def check_duplicate_keys(json_content: str) -> tuple[list[str], list[str]]:
6259
the same object, regardless of nesting level or array position.
6360
"""
6461
try:
65-
print("JSON content to be processed:")
66-
print(json_content)
67-
print("================================")
6862
dupe_data = json.loads(json_content, object_pairs_hook=check_for_duplicate_keys)
6963
duplicate_paths = [
7064
f"root.{format_path(path)}" for path in flatten_duplicates(dupe_data)
7165
]
7266
duplicate_keys = list(
7367
dict.fromkeys([key.split(".")[-1] for key in duplicate_paths])
7468
)
75-
print("================================")
76-
print(f"Duplicate keys: {duplicate_keys}")
77-
print(f"Duplicate paths: {duplicate_paths}")
7869
return duplicate_keys, duplicate_paths
7970
except json.JSONDecodeError:
8071
raise ValueError("Error: Invalid JSON format")

0 commit comments

Comments (0)