Skip to content

Commit c17251d

Browse files
author
Anchal Agarwal
committed
EIR-2834: Migrate commits from gitlab to github.
2 parents ec458dc + 183f9cb commit c17251d

File tree

4 files changed

+360
-2
lines changed

4 files changed

+360
-2
lines changed

.pre-commit-hooks.yaml

Lines changed: 6 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -208,5 +208,10 @@
208208
entry: trailing-whitespace-fixer
209209
language: python
210210
types: [text]
211-
stages: [pre-commit, pre-push, manual]
211+
stages: [pre-commit, pre-push, manual, commit, push]
212212
minimum_pre_commit_version: 3.2.0
213+
- id: notify-duplicate-entry
214+
name: Notify duplicate entry
215+
description: Notifies duplicate entry in the same file
216+
entry: notify-duplicate-entry
217+
language: python
Lines changed: 93 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,93 @@
1+
import argparse
2+
import json
3+
from typing import Optional
4+
from typing import Sequence
5+
from pathlib import Path
6+
7+
8+
def _check_duplicate_entry(json_entries, pkeys):
9+
""" Check duplicate entry based on pkey criteria.
10+
11+
:param json_entries: List of json entries
12+
:param pkeys: List of Primary keys
13+
:return: list of duplicated entry pkey value tuples
14+
"""
15+
unique_entries = set()
16+
duplicate_entries = set()
17+
for entry in json_entries:
18+
pkey_value_tuple = tuple(entry[pkey] for pkey in pkeys)
19+
if pkey_value_tuple not in unique_entries:
20+
unique_entries.add(pkey_value_tuple)
21+
else:
22+
duplicate_entries.add(pkey_value_tuple)
23+
return duplicate_entries, len(duplicate_entries)
24+
25+
26+
def main(argv: Optional[Sequence[str]] = None) -> int:
    """Check JSON fixture files for duplicate entries.

    Each file's stem is matched against a table-to-primary-key mapping;
    files whose table is unknown are reported and skipped, and entries
    whose primary-key values repeat within a file are printed.

    :param argv: Command-line arguments; defaults to ``sys.argv[1:]``.
    :return: 1 if any file contains duplicate entries, else 0
        (suitable as a pre-commit hook exit code).
    """
    parser = argparse.ArgumentParser()
    parser.add_argument('filenames', nargs='*', type=str,
                        help='Names of the JSON files to check duplicate entries'
                        )
    # Primary-key column(s) per table; the file stem names the table.
    table_uuid_mapping = {
        'action': ['uuid'],
        'env_property_group': ['uuid'],
        'environment': ['uuid'],
        'environment_property': ['code'],
        'report_summary': ['uuid'],
        'runner': ['uuid'],
        'scenario': ['uuid'],
        'sla': ['uuid'],
        'sla_scenario_association': ['sla', 'scenario'],
        'tag': ['uuid'],
        'tag_action_association': ['tag_uuid', 'action_uuid'],
        'tag_case_association': ['test_case_uuid', 'tag_uuid'],
        'teams': ['uuid'],
        'test_case': ['uuid'],
        'test_suit': ['uuid'],
        'test_supported_version': ['test_case_uuid', 'version'],
        'testcase_workload_association': ['uuid'],
        'user': ['uuid'],
        'user_tokens': ['user_token'],
        'workflow_task': ['workflow_id'],
        'context': ['uuid'],
        'test_sla_association': ['test_case', 'sla'],
        'teams_association': ['user_uuid', 'team_uuid'],
        'teams_resource_permission': ['team_uuid', 'resource_name'],
        'label': ['uuid'],
        'authentication_config_rules': ['auth_type'],
        'authentication': ['uuid'],
        'user_authentication_association':
            ['user_uuid', 'authentication_uuid'],
    }

    args = vars(parser.parse_args(argv))
    found_duplicates = False

    # Iterate the filenames directly instead of indexing by range(len()).
    for json_file in args['filenames']:
        table_name = Path(json_file).stem
        if table_name not in table_uuid_mapping:
            print(
                f"Table {table_name} has no primary key specified to validate "
                f"duplicate entries. Please update the plugin code in "
                f"https://git.voereir.io/voereir/pre-commit-hooks"
            )
            continue

        primary_keys = table_uuid_mapping[table_name]
        with open(json_file, encoding='UTF-8') as f:
            json_entries = json.load(f)
        duplicate_entries, status = _check_duplicate_entry(
            json_entries, primary_keys)

        if status:
            print(f"Duplicate entries found - {duplicate_entries} in file "
                  f"{json_file}")
            found_duplicates = True

    # Return a real int to match the declared return type; the original
    # returned a bool, and True == 1, so this is backward-compatible.
    return 1 if found_duplicates else 0
if __name__ == "__main__":
    # Use SystemExit rather than the interactive-only exit() builtin,
    # which site.py may not install when run as a script.
    raise SystemExit(main())

0 commit comments

Comments
 (0)