Commit 633c565

NRL-1215 Fix linting, remove redundant code
1 parent 20ba00e commit 633c565

File tree: 2 files changed (+18, -25 lines)
Lines changed: 17 additions & 24 deletions
@@ -1,7 +1,8 @@
 import json
 from typing import Any
 
-def check_for_duplicate_keys(pairs: list[tuple[str, Any]]):
+
+def check_for_duplicate_keys(pairs: list[tuple[str, Any]]) -> dict:
     keys = {}
     dupes = {}
     for key, value in pairs:
@@ -11,7 +12,6 @@ def check_for_duplicate_keys(pairs: list[tuple[str, Any]]):
                 dupes[key] = []
             dupes[key].append(value)
             continue
-
         keys[key] = value
 
     if dupes:
@@ -22,32 +22,27 @@ def check_for_duplicate_keys(pairs: list[tuple[str, Any]]):
 
 def flatten_duplicates(data: dict | list) -> list[str]:
     duplicates = []
-
     for key, value in data.items() if isinstance(data, dict) else enumerate(data):
         if key == "__duplicates__":
             duplicates.extend([f"{dupe_key}" for dupe_key in value.keys()])
             continue
-
         if isinstance(value, (dict, list)):
             dupes = flatten_duplicates(value)
-
             path = f"{key}" if isinstance(data, dict) else f"[{key}]"
             duplicates.extend([f"{path}.{dupe}" for dupe in dupes])
-
     print(f"flatten_duplicates data={data} dupes={duplicates}")
-
     return duplicates
 
 
-def format_path(path):
-    parts = path.split('.')
+def format_path(path: str) -> str:
+    parts = path.split(".")
     formatted_parts = []
     for part in parts:
-        if part.startswith('['):
+        if part.startswith("["):
             formatted_parts[-1] += part
         else:
             formatted_parts.append(part)
-    return '.'.join(formatted_parts)
+    return ".".join(formatted_parts)
 
 
 def check_duplicate_keys(json_content: str) -> tuple[list[str], list[str]]:
@@ -60,16 +55,14 @@ def check_duplicate_keys(json_content: str) -> tuple[list[str], list[str]]:
     A key is considered duplicate if it appears multiple times within
     the same object, regardless of nesting level or array position.
     """
-
-    use_hooks_approach = True
-
-    if use_hooks_approach:
-        try:
-            dupe_data = json.loads(
-                json_content, object_pairs_hook=check_for_duplicate_keys
-            )
-            duplicate_paths = [f"root.{format_path(path)}" for path in flatten_duplicates(dupe_data)]
-            duplicate_keys = list(dict.fromkeys([key.split(".")[-1] for key in duplicate_paths]))
-            return duplicate_keys, duplicate_paths
-        except json.JSONDecodeError:
-            raise ValueError("Error: Invalid JSON format")
+    try:
+        dupe_data = json.loads(json_content, object_pairs_hook=check_for_duplicate_keys)
+        duplicate_paths = [
+            f"root.{format_path(path)}" for path in flatten_duplicates(dupe_data)
+        ]
+        duplicate_keys = list(
+            dict.fromkeys([key.split(".")[-1] for key in duplicate_paths])
+        )
+        return duplicate_keys, duplicate_paths
+    except json.JSONDecodeError:
+        raise ValueError("Error: Invalid JSON format")
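
The load-bearing technique in this file is the object_pairs_hook parameter of json.loads: the parser calls the hook once for every JSON object it decodes, passing all (key, value) pairs in source order, so repeated keys are visible to the hook even though a plain dict would silently keep only the last one. Below is a minimal, self-contained sketch of that mechanism, not the module itself; the lines of check_for_duplicate_keys hidden between the hunks above are not shown on this page, so exactly how the real function attaches its dupes dict is an assumption here, modeled on the "__duplicates__" marker key that flatten_duplicates evidently looks for.

import json
from typing import Any


def collect_duplicates(pairs: list[tuple[str, Any]]) -> dict:
    # json.loads invokes this hook once per decoded JSON object with all
    # (key, value) pairs in source order, including repeats that the
    # default dict construction would silently overwrite.
    keys: dict = {}
    dupes: dict = {}
    for key, value in pairs:
        if key in keys:
            dupes.setdefault(key, []).append(value)
            continue
        keys[key] = value
    if dupes:
        # Assumption: recorded under the same marker key that
        # flatten_duplicates searches for in the diff above.
        keys["__duplicates__"] = dupes
    return keys


content = '{"a": 1, "a": 2, "nested": {"b": true, "b": false}}'
parsed = json.loads(content, object_pairs_hook=collect_duplicates)
print(parsed)
# {'a': 1, 'nested': {'b': True, '__duplicates__': {'b': [False]}},
#  '__duplicates__': {'a': [2]}}

From there, flatten_duplicates walks the decoded structure collecting dotted paths, and format_path merges array-index segments back onto their parent key, so for example format_path("items.[0].name") returns "items[0].name" before the "root." prefix is applied.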

layer/nrlf/core/tests/test_json_duplicate_checker.py

Lines changed: 1 addition & 1 deletion
@@ -323,4 +323,4 @@ def test_array_edge_case_duplicate(self):
         """
         duplicates, paths = check_duplicate_keys(json_content)
         self.assertEqual(duplicates, ["array"])
-        self.assertEqual(paths, ["root.array"])
\ No newline at end of file
+        self.assertEqual(paths, ["root.array"])
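
A quick illustration of what that assertion exercises, hedged because the test's actual json_content is elided above (only the end of its docstring is visible) and the import path of check_duplicate_keys is not shown on this page. A top-level duplicate key like the one below reproduces the asserted result.

# Hypothetical input: the test's real json_content is not visible in
# this hunk, so this is an illustrative guess that satisfies both
# assertions. check_duplicate_keys is the function changed above,
# assumed to be imported from its module.
json_content = '{"array": [1, 2], "array": [3]}'

duplicates, paths = check_duplicate_keys(json_content)
print(duplicates)  # ['array']
print(paths)       # ['root.array']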
