
Commit 13c2dec

minor formatting

Parent: 2825876

File tree (2 files changed: 8 additions, 12 deletions)

pysus/online_data/ESUS.py
pysus/utilities/readdbc.py


pysus/online_data/ESUS.py
Lines changed: 4 additions & 6 deletions

@@ -7,6 +7,7 @@
 import time
 from datetime import date
 
+
 def download(uf, cache=True, checkmemory=True):
     """
     Download ESUS data by UF
@@ -19,7 +20,7 @@ def download(uf, cache=True, checkmemory=True):
     pwd = 'Za4qNXdyQNSa9YaA'
     today = date.today()
     dt = today.strftime("_%d_%m_%Y")
-    base = f'desc-notificacoes-esusve-{uf}' #desc-notificacoes-esusve-
+    base = f'desc-notificacoes-esusve-{uf}' # desc-notificacoes-esusve-
     url = f'https://{user}:{pwd}@elasticsearch-saps.saude.gov.br'
     out = f'ESUS_{uf}_{dt}.parquet'
 
@@ -33,7 +34,7 @@ def download(uf, cache=True, checkmemory=True):
     fname = fetch(base, uf, url)
     size = os.stat(fname).st_size
     if size > 50e6 and checkmemory:
-        print(f"Downloaded data is to large:{size/1e6} MB compressed.")
+        print(f"Downloaded data is to large:{size / 1e6} MB compressed.")
         print("Only loading the first 1000 rows. If your computer has enough memory, set 'checkmemory' to False")
         print(f"The full data is in {fname}")
         df = pd.read_csv(fname, chunksize=1000)
@@ -47,7 +48,6 @@ def download(uf, cache=True, checkmemory=True):
     return df
 
 
-
 def fetch(base, uf, url):
     UF = uf.upper()
     print(f"Reading ESUS data for {UF}")
@@ -61,16 +61,14 @@ def fetch(base, uf, url):
     tempfile = os.path.join(CACHEPATH, f'ESUS_temp_{UF}.csv.gz')
     for ch in chunker:
         df = pd.DataFrame.from_dict(ch)
-        df.sintomas = df['sintomas'].str.replace(';', '',) ## remove os ;
+        df.sintomas = df['sintomas'].str.replace(';', '', ) ## remove os ;
         if h:
             df.to_csv(tempfile)
             h = 0
         else:
             df.to_csv(tempfile, mode='a', header=False)
     # df = pd.read_csv('temp.csv.gz')
 
-
-
     return tempfile
 
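For quick reference, a hedged usage sketch of the download() entry point touched above; only the import path and signature are taken from the diff, while the UF code and the handling of the return value are illustrative assumptions:

# Hedged usage sketch: the import path and signature come from the diff above;
# the UF code and the defensive handling of the result are assumptions.
import pandas as pd

from pysus.online_data.ESUS import download

data = download('SP', cache=True, checkmemory=True)  # 'SP' is just an example UF

# Per the size check in the diff, very large downloads may come back as a
# chunked reader (pd.read_csv(..., chunksize=1000)) rather than a plain DataFrame.
if isinstance(data, pd.DataFrame):
    print(data.shape)
else:
    print(next(iter(data)).shape)  # inspect the first 1000-row chunk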

pysus/utilities/readdbc.py
Lines changed: 4 additions & 6 deletions

@@ -7,7 +7,6 @@
 from tempfile import NamedTemporaryFile
 from io import BytesIO
 import pandas as pd
-import geopandas as gpd
 from dbfread import DBF
 import geopandas as gpd
 
@@ -16,7 +15,7 @@
 except (ImportError, ModuleNotFoundError):
     from _readdbc import ffi, lib
 
-
+
 def read_dbc(filename, encoding='utf-8', raw=False):
     """
     Opens a DATASUS .dbc file and return its contents as a pandas
@@ -55,7 +54,7 @@ def dbc2dbf(infile, outfile):
     # print(os.path.exists(outfile))
 
 
-def read_dbc_geopandas(filename,encoding='utf-8'):
+def read_dbc_geopandas(filename, encoding='utf-8'):
     """
     Opens a DATASUS .dbc file and return its contents as a pandas
     Dataframe, using geopandas
@@ -68,9 +67,8 @@ def read_dbc_geopandas(filename,encoding='utf-8'):
     with NamedTemporaryFile(delete=False) as tf:
         out = tf.name + '.dbf'
         dbc2dbf(filename, out)
-        dbf = gpd.read_file(out, encoding=encoding).drop("geometry",axis=1)
+        dbf = gpd.read_file(out, encoding=encoding).drop("geometry", axis=1)
         df = pd.DataFrame(dbf)
     os.unlink(tf.name)
 
-    return df
-
+    return df
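For quick reference, a hedged usage sketch of the two readers touched above; the import path, function names, and keyword defaults come from the diff, while the .dbc file path is purely illustrative:

# Hedged usage sketch: function names and signatures are taken from the diff
# above; the .dbc path below is a hypothetical local file, not a real dataset.
from pysus.utilities.readdbc import read_dbc, read_dbc_geopandas

path = 'data/sample.dbc'  # hypothetical DATASUS .dbc file on disk

df = read_dbc(path)             # dbfread-based reader, encoding='utf-8' by default
gdf = read_dbc_geopandas(path)  # geopandas-based reader; the geometry column is dropped

print(df.shape, gdf.shape)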
