Skip to content

Commit d079bce

Browse files
NRL-1215 Fix linting issues
1 parent 4e77c0a commit d079bce

File tree

5 files changed

+123
-96
lines changed

5 files changed

+123
-96
lines changed

layer/nrlf/core/json_duplicate_checker.py

Lines changed: 16 additions & 12 deletions
Original file line numberDiff line numberDiff line change
@@ -1,15 +1,16 @@
11
import json
2-
from typing import List, Tuple, Set, Dict
2+
from typing import Dict, List, Set, Tuple
33

44
JsonPrimitive = str | int | float | bool | None
55
type JsonValue = JsonPrimitive | JsonObject | JsonArray
66
JsonPair = tuple[str, JsonValue]
77
JsonObject = list[JsonPair]
88
JsonArray = list[JsonValue]
99

10+
1011
class DuplicateKeyChecker:
1112
"""JSON structure duplicate key detector.
12-
13+
1314
Tracks duplicate keys by maintaining path context during traversal.
1415
Paths are recorded in dot notation with array indices:
1516
- Objects: parent.child
@@ -25,7 +26,7 @@ def __init__(self):
2526
self.current_duplicate_index: Dict[str, int] = {}
2627

2728
def get_path_with_index(self, path: List[str], key: str) -> List[str]:
28-
current_level = '.'.join(path)
29+
current_level = ".".join(path)
2930
index_map = self.current_duplicate_index.setdefault(current_level, {})
3031
count = index_map.get(key, 0)
3132
index_map[key] = count + 1
@@ -39,20 +40,22 @@ def get_path_with_index(self, path: List[str], key: str) -> List[str]:
3940

4041
def check_key(self, key: str, path: List[str]) -> None:
    """Record whether *key* duplicates an earlier key at this nesting level.

    A duplicate occurs when the same key appears twice at the same
    nesting level, even if the values differ.
    """
    level = ".".join(path)
    seen_here = self.key_registry.setdefault(level, {})
    if key not in seen_here:
        # First occurrence at this level: just remember it.
        seen_here[key] = True
        return
    # Repeat occurrence: record both the bare key and its full dotted path.
    full_path = ".".join(path + [key])
    self.duplicate_keys.add(key)
    self.duplicate_paths.add(full_path)
    print(f"Found duplicate key: {key} at path: {full_path}")
5455

55-
def process_collection(self, value: JsonObject | JsonArray, path: list[str], key: str) -> None:
56+
def process_collection(
57+
self, value: JsonObject | JsonArray, path: list[str], key: str
58+
) -> None:
5659
"""Determine if the given 'value' is an object or an array and handle it."""
5760
new_path = self.get_path_with_index(path, key)
5861
if value and isinstance(value[0], tuple):
@@ -78,13 +81,14 @@ def traverse_array(self, items: JsonArray, path: list[str]) -> None:
7881
continue
7982
self.process_collection(item, base_path, f"{array_path}[{idx}]")
8083

84+
8185
def check_duplicate_keys(json_content: str) -> Tuple[List[str], List[str]]:
8286
"""Find all duplicate keys in a JSON string.
83-
87+
8488
Traverses the entire JSON structure and reports:
8589
- List of keys that appear multiple times at the same level
8690
- Full paths to each duplicate key occurrence
87-
91+
8892
A key is considered duplicate if it appears multiple times within
8993
the same object, regardless of nesting level or array position.
9094
"""
@@ -93,10 +97,10 @@ def check_duplicate_keys(json_content: str) -> Tuple[List[str], List[str]]:
9397
print("Parsed JSON:", parsed_data)
9498
except json.JSONDecodeError:
9599
raise ValueError("Error: Invalid JSON format")
96-
100+
97101
checker = DuplicateKeyChecker()
98-
checker.traverse_json(parsed_data, ['root'])
99-
102+
checker.traverse_json(parsed_data, ["root"])
103+
100104
duplicates = list(checker.duplicate_keys)
101105
paths = list(checker.duplicate_paths)
102106
print("Final duplicates:", duplicates)

layer/nrlf/core/request.py

Lines changed: 2 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -6,9 +6,9 @@
66
from nrlf.core.codes import SpineErrorConcept
77
from nrlf.core.constants import CLIENT_RP_DETAILS, CONNECTION_METADATA
88
from nrlf.core.errors import OperationOutcomeError, ParseError
9+
from nrlf.core.json_duplicate_checker import check_duplicate_keys
910
from nrlf.core.logger import LogReference, logger
1011
from nrlf.core.model import ClientRpDetails, ConnectionMetadata
11-
from nrlf.core.json_duplicate_checker import check_duplicate_keys
1212

1313

1414
def parse_headers(headers: Dict[str, str]) -> ConnectionMetadata:
@@ -100,6 +100,7 @@ def parse_body(
100100
msg="Request body could not be parsed",
101101
) from None
102102

103+
103104
def raise_when_duplicate_keys(json_content: str) -> None:
104105
"""
105106
Raises an error if duplicate keys are found in the JSON content.
@@ -139,4 +140,3 @@ def parse_path(
139140
details=SpineErrorConcept.from_code("INVALID_PARAMETER"),
140141
msg="Invalid path parameter",
141142
) from None
142-

0 commit comments

Comments (0)