
Commit 52be5a1 (parent 05178c4)

chore(lint): improve code formatting and readability across multiple files
6 files changed, 36 insertions(+), 23 deletions(-)

pysus/ftp/__init__.py

Lines changed: 16 additions & 7 deletions
@@ -22,11 +22,10 @@
 import humanize
 from aioftp import Client
 from loguru import logger
+from pysus.data.local import Data
 from tqdm import tqdm
 from typing_extensions import Self

-from pysus.data.local import Data
-
 # Type aliases
 PathLike = Union[str, pathlib.Path]
 FileContent = Dict[str, Union["Directory", "File"]]
@@ -400,13 +399,17 @@ def load_directory_content(path: str) -> FileContent:
     def line_parser(line: str):
         if "<DIR>" in line:
             date, time, _, name = line.strip().split(maxsplit=3)
-            modify = datetime.strptime(f"{date} {time}", "%m-%d-%y %I:%M%p")
+            modify = datetime.strptime(
+                f"{date} {time}", "%m-%d-%y %I:%M%p"
+            )
             info = {"size": 0, "type": "dir", "modify": modify}
             xpath = f"{path}/{name}"
             content[name] = Directory(xpath)
         else:
             date, time, size, name = line.strip().split(maxsplit=3)
-            modify = datetime.strptime(f"{date} {time}", "%m-%d-%y %I:%M%p")
+            modify = datetime.strptime(
+                f"{date} {time}", "%m-%d-%y %I:%M%p"
+            )
             info: FileInfo = {
                 "size": size,
                 "type": "file",
@@ -478,7 +481,9 @@ def content(self) -> List[Union[Directory, File]]:
         inside content, `load()` the directory and call `content` again.
         """
         if not self.__content__:
-            logger.info("content is not loaded, use `load()` to load default paths")
+            logger.info(
+                "content is not loaded, use `load()` to load default paths"
+            )
             return []
         return sorted(list(self.__content__.values()), key=str)

@@ -543,7 +548,9 @@ def get_files(self, *args, **kwargs) -> list[File]:
         """
         ...

-    def download(self, files: List[File], local_dir: str = CACHEPATH) -> List[str]:
+    def download(
+        self, files: List[File], local_dir: str = CACHEPATH
+    ) -> List[str]:
         """
         Downloads a list of Files.
         """
@@ -558,7 +565,9 @@ def download(self, files: List[File], local_dir: str = CACHEPATH) -> List[str]:
             return dfiles[0]
         return dfiles

-    async def async_download(self, files: List[File], local_dir: str = CACHEPATH):
+    async def async_download(
+        self, files: List[File], local_dir: str = CACHEPATH
+    ):
         """
         Asynchronously downloads a list of files
         """

pysus/online_data/ESUS.py

Lines changed: 2 additions & 2 deletions
@@ -21,7 +21,7 @@ def download(uf, cache=True, checkmemory=True):
     today = date.today()
     dt = today.strftime("_%d_%m_%Y")
     base = f"desc-esus-notifica-estado-{uf}"  # desc-notificacoes-esusve-
-    url = f"https://{user}:{pwd}@elasticsearch-saps.saude.gov.br"
+    url = f"https://{user}:{pwd}@elasticsearch-saps.saude.gov.br"  # noqa: E231
     out = f"ESUS_{uf}_{dt}.parquet"

     cachefile = os.path.join(CACHEPATH, out)
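
The new `# noqa: E231` likely guards against pycodestyle's E231 check ("missing whitespace after ':'"): under Python 3.12's f-string tokenization (PEP 701), the literal colon between `{user}` and `{pwd}` can be flagged even though it is URL syntax, not code. A minimal sketch of the false positive, with placeholder credentials:

    user, pwd = "reader", "secret"  # placeholders, not real credentials
    # The ':' below is literal URL text, but some flake8/pycodestyle versions
    # report E231 for it once f-strings are tokenized as code (Python 3.12+):
    url = f"https://{user}:{pwd}@elasticsearch-saps.saude.gov.br"  # noqa: E231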
@@ -36,7 +36,7 @@ def download(uf, cache=True, checkmemory=True):
         fname = fetch(base, uf, url)
         size = os.stat(fname).st_size
         if size > 50e6 and checkmemory:
-            print(f"Downloaded data is to large:{size / 1e6} MB compressed.")
+            print(f"Downloaded data is to large: {size / 1e6} MB compressed.")
             print(
                 "Only loading the first 1000 rows. If your computer has enough"
                 + " memory, set 'checkmemory' to False"

pysus/online_data/IBGE.py

Lines changed: 9 additions & 8 deletions
@@ -1,6 +1,7 @@
 """
 Helper functions to download official statistics from IBGE SIDRA
 """
+
 import ssl  # Builtin
 from pathlib import Path
 from tempfile import TemporaryDirectory
@@ -142,7 +143,7 @@ def get_sidra_table(

     print(f"Requesting data from {url}")
     try:
-        with (get_legacy_session() as s, s.get(url) as response):
+        with get_legacy_session() as s, s.get(url) as response:
             df = pd.DataFrame(response.json())
     except HTTPError:
         response = requests.get(url)
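
Removing the parentheses is more than cosmetic: parenthesized multi-item `with` statements are only officially supported from Python 3.10, so the bare form keeps the module parseable on older interpreters. A minimal sketch using stdlib context managers:

    from contextlib import nullcontext

    # Portable spelling, valid across supported Python 3 versions:
    with nullcontext("a") as a, nullcontext("b") as b:
        print(a, b)

    # Parenthesized spelling, officially Python 3.10+ only:
    # with (nullcontext("a") as a, nullcontext("b") as b):
    #     print(a, b)

The same substitution is applied to every session/request pair below.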
@@ -163,7 +164,7 @@ def list_agregados(**kwargs):
     url += "&".join([f"{k}={v}" for k, v in kwargs.items()])
     print(f"Fetching Data groupings from {url}")
     try:
-        with (get_legacy_session() as s, s.get(url) as response):
+        with get_legacy_session() as s, s.get(url) as response:
             table = pd.DataFrame(response.json())
     except requests.exceptions.SSLError as e:
         print(f"Failed fetching aggregates: {e}")
@@ -183,7 +184,7 @@ def localidades_por_agregado(agregado: int, nivel: str):
     """
     url = APIBASE + f"agregados/{agregado}/localidades/{nivel}"
     try:
-        with (get_legacy_session() as s, s.get(url) as response):
+        with get_legacy_session() as s, s.get(url) as response:
             table = pd.DataFrame(response.json())
     except Exception as e:
         print(f"Could not download from {url}\n{e}")
@@ -199,7 +200,7 @@ def metadados(agregado: int):
     """
     url = APIBASE + f"agregados/{agregado}/metadados"
     try:
-        with (get_legacy_session() as s, s.get(url) as response):
+        with get_legacy_session() as s, s.get(url) as response:
             data = response.json()
     except Exception as e:
         print(f"Could not download from {url}\n{e}")
@@ -215,7 +216,7 @@ def lista_periodos(agregado: int):
     """
     url = APIBASE + f"agregados/{agregado}/periodos"
     try:
-        with (get_legacy_session() as s, s.get(url) as response):
+        with get_legacy_session() as s, s.get(url) as response:
             table = pd.DataFrame(response.json())
     except Exception:
         return None
@@ -309,10 +310,10 @@ def __init__(
     def _fetch_JSON(self):
         try:
             print(f"Fetching {self.url}")
-            with (get_legacy_session() as s, s.get(self.url) as response):
+            with get_legacy_session() as s, s.get(self.url) as response:
                 self.JSON = response.json()
         except Exception as e:
-            print(f"Couldn't download data:\n{e}")
+            print("Couldn't download data:", e, sep="\n")

     def to_dataframe(self):
         return pd.DataFrame(self.JSON)
@@ -389,7 +390,7 @@ def get_population(
         opts = ["ALF", "ESCA", "ESCB", "IDOSO", "RENDA"]
         if not censo_data or censo_data not in opts:
             raise ValueError(
-                f"Incorrect `censo_data` parameter. Options: {opts}"
+                f"Incorrect 'censo_data' parameter. Options: {opts}"
             )
         file = [f for f in files if censo_data in f.name][0].download()
     else:

pysus/online_data/Infogripe.py

Lines changed: 1 addition & 1 deletion
@@ -18,6 +18,6 @@ def list_datasets():


 def download(dataset_name):
-    url = BASEURL + DATASETS[dataset_name]
+    url = BASEURL + DATASETS[dataset_name] + "?inline=false"
     df = pd.read_csv(url, delimiter=";", decimal=",")
     return df
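
The appended `?inline=false` query string asks GitLab's raw-file endpoint to serve the CSV as an attachment rather than an inline-rendered page, so `pd.read_csv` receives the actual semicolon-delimited file. A hedged usage sketch (the dataset key is a placeholder, and `list_datasets()`'s exact return shape is not shown in this diff):

    from pysus.online_data import Infogripe

    Infogripe.list_datasets()                  # enumerate the DATASETS keys
    df = Infogripe.download("<dataset name>")  # substitute a key from the listing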

pysus/preprocessing/ESUS.py

Lines changed: 5 additions & 3 deletions
@@ -26,8 +26,8 @@ def cases_by_age_and_sex(UF, start="2020-03-01", end="2020-08-31"):
         inplace=True,
     )
     print(
-        f"Removed {old_size - len(df)} rows with missing dates of symptoms,"
-        " notification or testing"
+        f"Removed {old_size - len(df)} rows with missing dates of symptoms, "
+        "notification or testing"
     )

     # Desconsiderando os resultados negativos ou inconclusivos
@@ -52,7 +52,9 @@ def cases_by_age_and_sex(UF, start="2020-03-01", end="2020-08-31"):
     ini = np.arange(0, 81, 5)
     fin = np.arange(5, 86, 5)
     fin[-1] = 120
-    faixa_etaria = {f"[{i},{f})": (i, f) for i, f in zip(ini, fin)}
+    faixa_etaria = {
+        f"[{i},{f})": (i, f) for i, f in zip(ini, fin)  # noqa: E231
+    }

     labels = list(faixa_etaria.keys())
     df["faixa_etaria"] = [

pysus/tests/test_ftp.py

Lines changed: 3 additions & 2 deletions
@@ -2,7 +2,6 @@
 from pathlib import Path

 import pandas as pd
-
 from pysus.data.local import ParquetSet
 from pysus.ftp import DIRECTORY_CACHE, Database, Directory, File
 from pysus.ftp.databases import (
@@ -41,7 +40,9 @@ def _test_database(testcase: unittest.TestCase, database: Database):
     )
     testcase.assertTrue(isinstance(downloaded_file, ParquetSet))
     testcase.assertTrue(Path(downloaded_file.path).exists())
-    testcase.assertTrue(isinstance(downloaded_file.to_dataframe(), pd.DataFrame))
+    testcase.assertTrue(
+        isinstance(downloaded_file.to_dataframe(), pd.DataFrame)
+    )
     testcase.assertTrue(not downloaded_file.to_dataframe().empty)

