
Commit 91b59f2

correct formatting and actually raise exception
Signed-off-by: John Seekins <[email protected]>
1 parent 6b7da7b commit 91b59f2

2 files changed: +6 −8 lines

ice_scrapers/agencies.py

Lines changed: 3 additions & 7 deletions
@@ -40,22 +40,18 @@ def scrape_agencies(keep_sheet: bool = True, force_download: bool = True) -> dic
         case x if "pending" in x:
             schema = copy.deepcopy(pending_agency)
         case _:
-            raise(f"Found an unsupported agency datasheet: {link}")
+            raise Exception(f"Found an unsupported agency datasheet: {link}")
     """
     Yes, polars supports loading from a URL. But this pattern
     lets us cache the download
     """
     # remove the date so we can easily overwrite the local (cached) file
-    filename = date_re.sub("", link.split('/')[-1])
+    filename = date_re.sub("", link.split("/")[-1])
     path = f"{SCRIPT_DIR}{os.sep}{filename}"
     if force_download or not os.path.exists(path):
         logger.info("Downloading agency info sheet from %s", link)
         download_file(link, path)
-    df = polars.read_excel(
-        drop_empty_rows=True,
-        raise_if_empty=True,
-        source=open(path, "rb")
-    )
+    df = polars.read_excel(drop_empty_rows=True, raise_if_empty=True, source=open(path, "rb"))
     for row in df.iter_rows(named=True):
         data = copy.deepcopy(schema)
         data["state"] = row["STATE"]

ice_scrapers/general.py

Lines changed: 3 additions & 1 deletion
@@ -11,7 +11,9 @@
 from schemas import facilities_schema
 
 
-def facilities_scrape_wrapper(keep_sheet: bool = True, force_download: bool = True, skip_vera: bool = False) -> tuple[dict, dict]:
+def facilities_scrape_wrapper(
+    keep_sheet: bool = True, force_download: bool = True, skip_vera: bool = False
+) -> tuple[dict, dict]:
     agencies = scrape_agencies(keep_sheet, force_download)
     facilities_data = copy.deepcopy(facilities_schema)
     facilities = load_sheet(keep_sheet, force_download)
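
The general.py change is only a line-length reflow of the signature; behavior is unchanged. For orientation, a hypothetical call site might look like the sketch below. The import path, the meaning of the returned tuple, and the flag comments are assumptions inferred from this hunk and the parameter names, not taken from the repository's docs:

```python
# Hypothetical usage sketch; keyword names come from the signature above,
# but the exact semantics of each flag are defined elsewhere in the module.
from ice_scrapers.general import facilities_scrape_wrapper  # assumed import path

facilities_data, extra = facilities_scrape_wrapper(  # tuple contents assumed
    keep_sheet=True,       # presumably keeps the downloaded spreadsheet on disk
    force_download=False,  # presumably reuses a cached copy when one exists
    skip_vera=True,        # name-based assumption: skips a Vera-related step
)
```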
