
Commit f4c9889

Fix ruff check issues.
1 parent ee966d1 commit f4c9889

17 files changed: +49 -63 lines changed


apps/bfd-model-idr/claims_generator.py

Lines changed: 0 additions & 1 deletion
@@ -10,7 +10,6 @@
 
 import pandas as pd
 from faker import Faker
-
 from generator_util import GeneratorUtil
 
 generator = GeneratorUtil()
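Note: this one-line deletion, and the similar ones in patient_generator.py, load_synthetic.py, loader.py, and pipeline_nodes.py below, is ruff's import-sorting fix: the blank line that kept the local module in its own group is dropped, so the whole block becomes a single alphabetized group. A minimal sketch of the layout this produces, assuming the repo does not declare these modules as first-party in its ruff configuration (the `import os` line is added here only to show that the standard-library group stays separate):

import os  # standard-library imports form their own group, separated by one blank line

import pandas as pd              # third-party group, alphabetized
from faker import Faker
from generator_util import GeneratorUtil  # sorted into this group when not declared first-party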

apps/bfd-model-idr/dd_helper_script.py

Lines changed: 8 additions & 8 deletions
@@ -1,7 +1,9 @@
-'''
+"""
 This script can be used, with the requisite copybooks from the IDR, to do some comparisons for accuracy on given profiles/applicability in BFD v3.
 This will be used as part of future work to enhance the IDR copybooks and improve the quality of our data dictionary.
-'''
+"""
+from collections import Counter
+
 import pandas as pd
 import yaml
@@ -39,27 +41,25 @@
 
 #If we pull more than the current CLM_FISS / CLM_MCS fields (1 and 0, respectively) then we'll need to profile
 #the individual claim types within PAC data. Otherwise, this just updates the source
-for cur_source in otherSources:
+for cur_source in other_sources:
     df = pd.read_excel(other_sources[cur_source], sheet_name='Claim Header',header=3, usecols=['Target Table','Target Column'])
     for _, row in df.iterrows():
         element_concatenated = f"{str(row['Target Table']).strip()}.{str(row['Target Column']).strip()}"
         #there are newlines in some, we should ask IDR to consider changing the DD structure?
         if row['Target Table'] != '-' and row['Target Column'] != '-' and "\n" not in element_concatenated:
             if element_concatenated in applies_to and cur_source not in applies_to[element_concatenated]['sources']:
                 applies_to[element_concatenated]['sources'].append(cur_source)
-            pass
     df = pd.read_excel(other_sources[cur_source], sheet_name='Claim Line',header=3, usecols=['Target Table','Target Column'])
     for _, row in df.iterrows():
         element_concatenated = f"{str(row['Target Table']).strip()}.{str(row['Target Column']).strip()}"
         #there are newlines in some, we should ask IDR to consider changing the DD structure?
         if row['Target Table'] != '-' and row['Target Column'] != '-' and "\n" not in element_concatenated:
-            if element_concatenated in appliesTo and cur_source not in applies_to[element_concatenated]['sources']:
+            if element_concatenated in applies_to and cur_source not in applies_to[element_concatenated]['sources']:
                 applies_to[element_concatenated]['sources'].append(cur_source)
-            pass
 
 #this is more of a heuristic. for example, HCPCS_5_MDFR_CD appears to be missing from the copybook but it's there in reality.
 profile_divergence_counter = 0
-with open(eob_dict_yaml, "r") as f:
+with open(eob_dict_yaml) as f:
     data = yaml.safe_load(f)
     for i in data:
         if 'sourceView' in i:
@@ -72,7 +72,7 @@
 print("remaining diverging for profiles:",profile_divergence_counter)
 
 source_divergence_counter = 0
-with open(eob_dict_yaml, "r") as f:
+with open(eob_dict_yaml) as f:
     data = yaml.safe_load(f)
     for i in data:
         if 'sourceView' in i:
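Two of the dd_helper_script.py fixes are worth spelling out. The old check against appliesTo referenced a name that does not appear anywhere else in the visible diff (the dictionary is applies_to), so the rename likely fixes a latent NameError as well as satisfying the linter. And open(eob_dict_yaml, "r") loses its mode argument because "r" (text read) is already open()'s default. A small sketch of that idiom, with a hypothetical file name:

import yaml  # PyYAML, as already used by the script

# "r" (text read) is open()'s default mode, so ruff flags the explicit argument as redundant.
with open("eob_dict.yaml") as f:  # hypothetical file name, for illustration only
    data = yaml.safe_load(f)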

apps/bfd-model-idr/gen_dd.py

Lines changed: 1 addition & 1 deletion
@@ -82,7 +82,7 @@
         data = yaml.safe_load(file)
         current_resource_type = file_name[0 : len(file_name) - 5]
         for entry in data:
-            if 'suppressInDD' in entry and entry['suppressInDD']:
+            if entry.get('suppressInDD'):
                 continue
             if "fhirPath" in entry:
                 entry["appliesTo"].sort()
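The gen_dd.py change folds a key-presence test plus lookup into a single dict.get() call: .get() returns None when the key is missing, and None is falsy, so the behaviour inside the if is unchanged. A tiny sketch with a made-up entry:

entry = {"fhirPath": "ExplanationOfBenefit.status"}  # hypothetical entry, for illustration

old_style = "suppressInDD" in entry and entry["suppressInDD"]
new_style = entry.get("suppressInDD")

# Both are falsy when the key is absent or maps to a false value,
# and truthy only when the key is present and true.
assert bool(old_style) == bool(new_style)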

apps/bfd-model-idr/patient_generator.py

Lines changed: 0 additions & 1 deletion
@@ -7,7 +7,6 @@
 
 import pandas as pd
 from faker import Faker
-
 from generator_util import GeneratorUtil
 
 fake = Faker()

apps/bfd-model/bfd-model-rif/src/main/resources/db/scripts/BFD-1700-remove-synthetic/make_sql.py

Lines changed: 14 additions & 20 deletions
@@ -1,10 +1,8 @@
 #!/usr/bin/python3
-'''Create SQL scripts to delete synthetic data.
-'''
+"""Create SQL scripts to delete synthetic data.
+"""
 
 import sys
-from typing import List
-
 
 # List of predefined Beneficiary IDs to delete and pattern of bene_ids to match in a NOT LIKE
 # clause. Source: https://jira.cms.gov/browse/BFD-1686
@@ -58,10 +56,9 @@
 # Helpers
 
 
-def make_bene_id_pattern(bene_ids: List[str], pattern: str, cast_id: bool = False) -> str:
-    '''Make the WHERE clause to handle bene_ids that are in the list or fit the pattern.
-    '''
-
+def make_bene_id_pattern(bene_ids: list[str], pattern: str, cast_id: bool = False) -> str:
+    """Make the WHERE clause to handle bene_ids that are in the list or fit the pattern.
+    """
     output = ' bene_id IN (\'' + '\', \''.join(bene_ids) + '\')'
     if pattern is not None:
         bene_id = 'CAST(bene_id AS CHARACTER VARYING(15))' if cast_id else 'bene_id'
@@ -72,12 +69,11 @@ def make_bene_id_pattern(bene_ids: List[str], pattern: str, cast_id: bool = False) -> str:
 
 def make_claims_sql(bene_id_clause: str, claims_table: str, claim_lines_table: str,
                     is_count: bool) -> str:
-    '''Create SQL to count or delete from a claims table and its corresponding claim_lines table.
+    """Create SQL to count or delete from a claims table and its corresponding claim_lines table.
 
     We want to run the claims table after the claim_lines table, because we couldn't join / union
     if we don't.
-    '''
-
+    """
     # Generate SQL for Claims Lines
     output = f"-- {claim_lines_table}\n\n"
     if is_count:
@@ -101,8 +97,8 @@ def make_claims_sql(bene_id_clause: str, claims_table: str, claim_lines_table: str,
 
 
 def make_base_sql(bene_id_clause: str, base_table: str, is_count: bool) -> str:
-    '''Create SQL to count or delete from a table containing a bene_id clause.
-    '''
+    """Create SQL to count or delete from a table containing a bene_id clause.
+    """
     output = f"-- {base_table}\n\n"
 
     output += f"SELECT COUNT(*) AS {base_table}\n" if is_count else 'DELETE\n'
@@ -113,19 +109,17 @@ def make_base_sql(bene_id_clause: str, base_table: str, is_count: bool) -> str:
 
 
 def help_text() -> str:
-    '''Provide help text if the user does not provide valid arguments or asks for help.
-    '''
-
+    """Provide help text if the user does not provide valid arguments or asks for help.
+    """
     return 'Usage: make_sql.py [count | delete] [test|prod-sbx|prod]'
 
 
 # Main
 
 
-def main(args: List):
-    '''Main function, called from the command line.
-    '''
-
+def main(args: list):
+    """Main function, called from the command line.
+    """
     if not args or len(args) < 2:
         print(help_text(), file=sys.stderr)
         sys.exit()
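The make_sql.py changes are two mechanical ruff fixes: docstrings move from '''...''' to the conventional """...""" style, and the typing.List import is dropped in favour of the built-in list, which has accepted subscripts in annotations since Python 3.9. A brief sketch of the annotation style after the change, using an illustrative helper that is not taken from the script:

def make_id_clause(ids: list[str]) -> str:
    """Join IDs into a quoted SQL IN clause (illustrative helper, not from make_sql.py)."""
    # list[str] needs no `from typing import List` on Python 3.9+.
    return "bene_id IN ('" + "', '".join(ids) + "')"

print(make_id_clause(["A1", "B2"]))  # bene_id IN ('A1', 'B2')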

apps/bfd-pipeline/bfd-pipeline-idr/extractor.py

Lines changed: 2 additions & 3 deletions
@@ -6,13 +6,12 @@
 
 import psycopg
 import snowflake.connector
+from constants import DEFAULT_MIN_DATE
 from cryptography.hazmat.backends import default_backend
 from cryptography.hazmat.primitives import serialization
+from model import DbType, LoadProgress, T, get_min_transaction_date
 from psycopg.rows import class_row
 from snowflake.connector import DictCursor, SnowflakeConnection
-
-from constants import DEFAULT_MIN_DATE
-from model import DbType, LoadProgress, T, get_min_transaction_date
 from timer import Timer
 
 cursor_execute_timer = Timer("cursor_execute")
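The same reordering appears here with a twist: the previously separate local group (constants, model) is interleaved alphabetically among the installed packages, which is ruff's default when those modules are not recognized as first-party. Keeping them in their own group would be a configuration choice rather than something this commit changes; under that assumption (a sketch, not what the repo does) the block would instead look roughly like this:

import psycopg
import snowflake.connector
from cryptography.hazmat.backends import default_backend
from cryptography.hazmat.primitives import serialization
from psycopg.rows import class_row
from snowflake.connector import DictCursor, SnowflakeConnection

# local modules kept as a separate first-party group (hypothetical configuration)
from constants import DEFAULT_MIN_DATE
from model import DbType, LoadProgress, T, get_min_transaction_date
from timer import Timer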

apps/bfd-pipeline/bfd-pipeline-idr/load_synthetic.py

Lines changed: 0 additions & 1 deletion
@@ -4,7 +4,6 @@
 from pathlib import Path
 
 import psycopg
-
 from loader import get_connection_string
 
 tables = [

apps/bfd-pipeline/bfd-pipeline-idr/loader.py

Lines changed: 0 additions & 1 deletion
@@ -4,7 +4,6 @@
 from datetime import UTC, date, datetime
 
 import psycopg
-
 from constants import DEFAULT_MIN_DATE
 from model import DbType, LoadProgress, T
 from timer import Timer

apps/bfd-pipeline/bfd-pipeline-idr/model.py

Lines changed: 1 addition & 2 deletions
@@ -4,9 +4,8 @@
 from datetime import UTC, date, datetime, timedelta
 from typing import Annotated, TypeVar
 
-from pydantic import BaseModel, BeforeValidator
-
 from constants import CLAIM_TYPE_CODES, DEFAULT_MAX_DATE, DEFAULT_MIN_DATE, PART_D_CLAIM_TYPE_CODES
+from pydantic import BaseModel, BeforeValidator
 
 type DbType = str | float | int | bool | date | datetime
 

apps/bfd-pipeline/bfd-pipeline-idr/pipeline_nodes.py

Lines changed: 0 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -1,7 +1,6 @@
11
# ruff: noqa: ARG001
22
# type: ignore [reportUntypedFunctionDecorator]
33
from hamilton.function_modifiers import config, parameterize, value
4-
54
from model import (
65
IdrBeneficiary,
76
IdrBeneficiaryDualEligibility,
