Commit f0c871b

Apply Pre-commit fixes
1 parent a32b263 commit f0c871b

2 files changed: 85 additions and 62 deletions

hypernetx/hif.py

Lines changed: 76 additions & 53 deletions
@@ -29,22 +29,27 @@ def normalize_dataframe(df):
     pd.DataFrame
         allowed columns are limited to HIF keys
     """
-    default_cols = ['weight'] + list(set(df.columns).intersection(['direction'])) + ['misc_properties']
+    default_cols = (
+        ["weight"]
+        + list(set(df.columns).intersection(["direction"]))
+        + ["misc_properties"]
+    )
     cols = list(set(df.columns).difference(default_cols))
     dfdict = df[cols].T.to_dict()
     newdf = df[default_cols]
     for uid in newdf.index:
-        newdf.loc[uid]['misc_properties'].update(dfdict[uid])
-    return newdf.fillna('nil')
-
-def to_hif(hg,filename=None, network_type='undirected', metadata=None):
+        newdf.loc[uid]["misc_properties"].update(dfdict[uid])
+    return newdf.fillna("nil")
+
+
+def to_hif(hg, filename=None, network_type="undirected", metadata=None):
     """
     Returns a dictionary object valid for the HIF Json schema

     Parameters
     ----------
     hg : hnx.Hypergraph
-
+
     filename : str, optional
         filepath where json object is to be stored, by default None
     network_type : str, optional
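
Aside (not part of the commit): a minimal sketch of the column-folding that normalize_dataframe performs, using a made-up two-row DataFrame with a hypothetical extra "color" column. It mirrors the reformatted logic above rather than importing the helper itself.

import pandas as pd

df = pd.DataFrame(
    {
        "weight": [1, 2],
        "misc_properties": [{}, {}],
        "color": ["red", "blue"],  # hypothetical non-HIF column
    },
    index=["e1", "e2"],
)

# Fold every non-HIF column into each row's misc_properties dict,
# much as normalize_dataframe does before the column is renamed to "attrs".
extra = df[["color"]].T.to_dict()            # {'e1': {'color': 'red'}, ...}
newdf = df[["weight", "misc_properties"]]
for uid in newdf.index:
    newdf.loc[uid]["misc_properties"].update(extra[uid])
print(newdf)  # "color" now lives inside misc_properties
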
@@ -57,51 +62,58 @@ def to_hif(hg,filename=None, network_type='undirected', metadata=None):
     hif : dict
         format is defined by HIF schema
     """
-    hyp_objs = ['nodes','edges','incidences']
-    defaults = {part:dict(getattr(hg,part).property_store._defaults) for part in hyp_objs}
+    hyp_objs = ["nodes", "edges", "incidences"]
+    defaults = {
+        part: dict(getattr(hg, part).property_store._defaults) for part in hyp_objs
+    }
     for part in hyp_objs:
-        misc_properties = defaults[part].pop('misc_properties',{})
-        defaults[part]['attrs'] = dict(misc_properties)
-
+        misc_properties = defaults[part].pop("misc_properties", {})
+        defaults[part]["attrs"] = dict(misc_properties)
+
     incj = deepcopy(hg.incidences.to_dataframe)
-    incj.index.names = ['edge','node']
+    incj.index.names = ["edge", "node"]
     incj = normalize_dataframe(incj)
-    incj = incj.rename(columns={"misc_properties":"attrs"})
+    incj = incj.rename(columns={"misc_properties": "attrs"})
     incj = incj.reset_index().to_dict(orient="records")
-
+
     edgj = deepcopy(hg.edges.to_dataframe)
-    edgj.index.names = ['edge']
+    edgj.index.names = ["edge"]
     edgj = normalize_dataframe(edgj)
-    edgj = edgj.rename(columns={"misc_properties":"attrs"})
+    edgj = edgj.rename(columns={"misc_properties": "attrs"})
     edgj = edgj.reset_index().to_dict(orient="records")
-
+
     nodj = deepcopy(hg.nodes.to_dataframe)
-    nodj.index.names = ['node']
+    nodj.index.names = ["node"]
     nodj = normalize_dataframe(nodj)
-    nodj = nodj.rename(columns={"misc_properties":"attrs"})
+    nodj = nodj.rename(columns={"misc_properties": "attrs"})
     nodj = nodj.reset_index().to_dict(orient="records")

-    if isinstance(metadata,dict):
-        metadata = metadata.update({'default_attrs':defaults})
+    if isinstance(metadata, dict):
+        metadata = metadata.update({"default_attrs": defaults})
     else:
-        metadata = {'default_attrs':defaults}
+        metadata = {"default_attrs": defaults}
     if hg.name is not None:
-        metadata['name'] = hg.name
-
-    hif = {"edges": edgj, "nodes": nodj, "incidences": incj, "network-type": network_type,
-           "metadata": metadata}
+        metadata["name"] = hg.name
+
+    hif = {
+        "edges": edgj,
+        "nodes": nodj,
+        "incidences": incj,
+        "network-type": network_type,
+        "metadata": metadata,
+    }
     try:
         validator(hif)
-        if filename is not None:
-            json.dump(hif,open(filename,'w'))
+        if filename is not None:
+            json.dump(hif, open(filename, "w"))
         return hif
     except Exception as ex:
         HyperNetXError(ex)


 def from_hif(hif=None, filename=None):
     """
-    Reads HIF formatted string or dictionary and returns corresponding 
+    Reads HIF formatted string or dictionary and returns corresponding
     hnx.Hypergraph

     Parameters
@@ -116,7 +128,7 @@ def from_hif(hif=None, filename=None):
     Returns
     -------
     hnx.Hypergraph
-
+
     """
     if hif is not None:
         try:
@@ -125,48 +137,59 @@ def from_hif(hif=None, filename=None):
             HyperNetXError(ex)
             return None
     elif filename is not None:
-        hif = json.load(open(filename,'r'))
+        hif = json.load(open(filename, "r"))
         try:
             validator(hif)
         except Exception as ex:
             HyperNetXError(ex)
-            return None
+            return None
     else:
-        print('No data given')
-
-    mkdd = lambda : {'weight':1, 'attrs':{}}
+        print("No data given")
+
+    mkdd = lambda: {"weight": 1, "attrs": {}}
     hifex = deepcopy(hif)
-    parts = {part:deepcopy(pd.DataFrame(hifex.get(part,{}))) for part in ['nodes','edges','incidences']}
-    metadata = hifex.get('metadata',{})
-    defaults = metadata.get('default_attrs',{})
-    defaults = {part: defaults.get(part,mkdd()) for part in parts}
+    parts = {
+        part: deepcopy(pd.DataFrame(hifex.get(part, {})))
+        for part in ["nodes", "edges", "incidences"]
+    }
+    metadata = hifex.get("metadata", {})
+    defaults = metadata.get("default_attrs", {})
+    defaults = {part: defaults.get(part, mkdd()) for part in parts}
     # cols = dict()
-    default_weights = {part:defaults[part].get('weight',1) for part in parts}
+    default_weights = {part: defaults[part].get("weight", 1) for part in parts}
     for part in parts:
         if len(part) == 0:
             continue
         thispart = parts[part]
         d = deepcopy(defaults[part])
-        dkeys = [k for k in d.keys() if k not in ['weight','attrs']]
+        dkeys = [k for k in d.keys() if k not in ["weight", "attrs"]]
         # cols[part] = ['weight'] + dkeys + ['attrs']
         if len(dkeys) > 0:
             for attr in dkeys:
-                thispart[attr] = [row.attrs.pop(attr,d[attr]) for row in thispart.itertuples()]
+                thispart[attr] = [
+                    row.attrs.pop(attr, d[attr]) for row in thispart.itertuples()
+                ]
     hyp_objects = dict()
-    for part in ['nodes','edges']:
+    for part in ["nodes", "edges"]:
         if len(parts[part]) > 0:
             uid = part[:-1]
             cols = [uid] + list(set(parts[part].columns).difference([uid]))
             hyp_objects[part] = parts[part][cols]
         else:
             hyp_objects[part] = None
-    cols = ['edge','node'] + list(set(parts['incidences'].columns).difference(['edge','node']))
-    incidences = parts['incidences'][cols]
-    name = metadata.get('name',None)
-    return hnx.Hypergraph(incidences, default_cell_weight=default_weights['incidences'],
-                          misc_cell_properties_col='attrs',
-                          node_properties=hyp_objects['nodes'], default_edge_weight=default_weights['edges'],
-                          edge_properties=hyp_objects['edges'], default_node_weight=default_weights['nodes'],
-                          misc_properties_col='attrs',
-                          name=name
-                          )
+    cols = ["edge", "node"] + list(
+        set(parts["incidences"].columns).difference(["edge", "node"])
+    )
+    incidences = parts["incidences"][cols]
+    name = metadata.get("name", None)
+    return hnx.Hypergraph(
+        incidences,
+        default_cell_weight=default_weights["incidences"],
+        misc_cell_properties_col="attrs",
+        node_properties=hyp_objects["nodes"],
+        default_edge_weight=default_weights["edges"],
+        edge_properties=hyp_objects["edges"],
+        default_node_weight=default_weights["nodes"],
+        misc_properties_col="attrs",
+        name=name,
+    )
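
For context, a hedged round-trip sketch of the two entry points reformatted above. It assumes to_hif and from_hif are importable from hypernetx.hif as laid out in this file, and uses a made-up two-edge hypergraph; it is illustrative only, not part of the commit.

import hypernetx as hnx
from hypernetx.hif import to_hif, from_hif  # import path assumed from this module

# Made-up toy hypergraph: two edges sharing the node "b".
hg = hnx.Hypergraph({"e1": ["a", "b"], "e2": ["b", "c"]}, name="toy")

hif = to_hif(hg, filename="toy_hif.json")  # validates against the HIF schema, then writes JSON
hg2 = from_hif(filename="toy_hif.json")    # validates and rebuilds an hnx.Hypergraph
print(sorted(hg2.nodes), sorted(hg2.edges))
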

hypernetx/utils/toys/lesmis.py

Lines changed: 9 additions & 9 deletions
@@ -42,7 +42,7 @@ def __init__(self):
     @property
     def dnames(self):
         return self.df_names.set_index("Symbol")
-
+
     def hypergraph_example(self):

         names = self.df_names
@@ -62,11 +62,11 @@ def hypergraph_example(self):
             cell_weight_col="weight",
             node_properties=nprops,
             node_weight_prop_col="weight",
-            name="LesMis example from HNX"
+            name="LesMis example from HNX",
         )
-        lm.nodes['JV'].job = 'mayor'
-        lm.nodes['MY'].avocation = 'to be kind'
-        lm.nodes['BS'].vocation = 'explorer'
+        lm.nodes["JV"].job = "mayor"
+        lm.nodes["MY"].avocation = "to be kind"
+        lm.nodes["BS"].vocation = "explorer"
         return lm


@@ -80,6 +80,7 @@ def lesmis_hypergraph_from_df(df, by="Chapter", on="Characters"):
         }
     )

+
 def lesmis_hypergraph():
     lesmis = LesMis()
     names = lesmis.df_names
@@ -100,11 +101,10 @@ def lesmis_hypergraph():
         node_properties=nprops,
         node_weight_prop_col="weight",
     )
-    lm.nodes['JV'].job = 'mayor'
-    lm.nodes['MY'].avocation = 'to be kind'
-    lm.nodes['BS'].vocation = 'explorer'
+    lm.nodes["JV"].job = "mayor"
+    lm.nodes["MY"].avocation = "to be kind"
+    lm.nodes["BS"].vocation = "explorer"
     return lm
-


 def book_tour(df, xlabel="Book", ylabel="Volume", s=3.5):
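
A short usage sketch for the LesMis toy touched above, assuming LesMis is importable from hypernetx.utils.toys.lesmis as in this file; illustrative only, not part of the commit.

from hypernetx.utils.toys.lesmis import LesMis  # import path assumed from this module

lm = LesMis().hypergraph_example()
print(lm.name)               # "LesMis example from HNX"
print(lm.nodes["JV"].job)    # "mayor", set explicitly in hypergraph_example above
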
