
Commit af6c3f0

brendapraggastis authored and ryandanehy committed
Pull request #220: Hif
Merge in HYP/hypernetx from hif to master

* commit '7a651fba73b67622b40c612e9814d255a3d656ba':
  - bump: version 2.3.9 → 2.3.10
  - add requests via poetry
  - Apply Pre-commit fixes
  - updated hif.py with docs
  - updated toy and hif.py for HIF collaboration
  - updated hypernetx/__init__ to load hif file
  - adde hif.py
  - updated init to include hif code
  - fixed bug in property store lines 373 and 392 misassigned misc_properties_col
  - removed redundancy from misc_properties
  - Created modules branch in toctree.
  - cost work temporarily storing hif to from
2 parents d641ff0 + 7a651fb commit af6c3f0

File tree

11 files changed: +290 -41 lines


.cz.toml

Lines changed: 1 addition & 1 deletion
@@ -1,6 +1,6 @@
 [tool.commitizen]
 name = "cz_conventional_commits"
-version = "2.3.9"
+version = "2.3.10"
 version_provider = "poetry"
 version_files = [
     "pyproject.toml",

docs/source/modularity.rst renamed to docs/source/algorithms/hypergraph_modularity_and_clustering.rst

File renamed without changes.

docs/source/algorithms/matching_algorithms.rst

Lines changed: 17 additions & 18 deletions
@@ -35,24 +35,23 @@ These algorithms are crucial for applications that require scalable parallel pro
 
 Usage Example
 -------------
-Below is an example of how to use the matching algorithms module.
-
-```python
-from hypernetx.algorithms import matching_algorithms as ma
-
-# Example hypergraph data
-hypergraph = ...  # Assume this is a d-uniform hypergraph
-
-# Compute a matching using the O(d²)-approximation algorithm
-matching = ma.matching_approximation_d_squared(hypergraph)
-
-# Compute a matching using the d-approximation algorithm
-matching_d = ma.matching_approximation_d(hypergraph)
-
-# Compute a matching using the d(d−1 + 1/d)²-approximation algorithm
-matching_d_squared = ma.matching_approximation_dd(hypergraph)
-
-print(matching, matching_d, matching_d_squared)
+Below is an example of how to use the matching algorithms module.::
+
+    from hypernetx.algorithms import matching_algorithms as ma
+
+    # Example hypergraph data
+    hypergraph = ...  # Assume this is a d-uniform hypergraph
+
+    # Compute a matching using the O(d²)-approximation algorithm
+    matching = ma.matching_approximation_d_squared(hypergraph)
+
+    # Compute a matching using the d-approximation algorithm
+    matching_d = ma.matching_approximation_d(hypergraph)
+
+    # Compute a matching using the d(d−1 + 1/d)²-approximation algorithm
+    matching_d_squared = ma.matching_approximation_dd(hypergraph)
+
+    print(matching, matching_d, matching_d_squared)
 
 
 References
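The documented example is not self-contained (`hypergraph = ...` is a placeholder). For context, a runnable sketch of the same calls on a small 3-uniform hypergraph; the `ma.matching_approximation_*` names are taken from the documentation above and should be treated as assumptions about the module's API, not verified signatures.

```python
# Sketch only: the matching_approximation_* names come from the docs in this diff.
import hypernetx as hnx
from hypernetx.algorithms import matching_algorithms as ma

# A small 3-uniform hypergraph: every edge contains exactly 3 nodes
hypergraph = hnx.Hypergraph({"e1": [1, 2, 3], "e2": [3, 4, 5], "e3": [5, 6, 7]})

matching = ma.matching_approximation_d_squared(hypergraph)
matching_d = ma.matching_approximation_d(hypergraph)
print(matching, matching_d)
```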

docs/source/conf.py

Lines changed: 1 addition & 1 deletion
@@ -19,7 +19,7 @@
 import os
 
 
-__version__ = "2.3.9"
+__version__ = "2.3.10"
 
 
 # If extensions (or modules to document with autodoc) are in another directory,

docs/source/index.rst

Lines changed: 1 addition & 1 deletion
@@ -59,7 +59,7 @@ Contents
    A Gentle Introduction to Hypergraph Mathematics <hypergraph101>
    Hypergraph Constructors <hypconstructors>
    Visualization Widget <widget>
-   Algorithms: Modularity and Clustering <modularity>
+   Modules <modules>
    Publications <publications>
    Contributors Guide <contributions>
    license

docs/source/modules.rst

Lines changed: 18 additions & 0 deletions
@@ -0,0 +1,18 @@
+.. _Modules:
+
+********************
+Modules
+********************
+
+This page provides additional documentation for various contributions to HyperNetX through modules.
+We use the term module to describe an advanced method for studying hypergraphs.
+Each module includes a descriptive document here as well as a notebook showcasing its functionality.
+Tutorial notebooks can be found for each module under the `advanced tutorials <https://github.com/pnnl/HyperNetX/tree/master/tutorials/advanced>`_ folder in GitHub.
+
+**Modules with additional documentation:**
+
+.. toctree::
+   :maxdepth: 1
+
+   Hypergraph Modularity and Clustering <algorithms/hypergraph_modularity_and_clustering>
+   Hypergraph Matching <algorithms/matching_algorithms>

hypernetx/__init__.py

Lines changed: 2 additions & 2 deletions
@@ -3,12 +3,12 @@
     HyperNetXError,
     HyperNetXNotImplementedError,
 )
-from hypernetx.read_write import to_pickle, load_from_pickle
+from hypernetx.hif import to_hif, from_hif
 from hypernetx.classes import *
 from hypernetx.reports import *
 from hypernetx.drawing import *
 from hypernetx.algorithms import *
 from hypernetx.utils import *
 from hypernetx.utils.toys import *
 
-__version__ = "2.3.9"
+__version__ = "2.3.10"
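With this swap, the HIF helpers become importable from the top-level package, in place of the pickle helpers from the deleted `hypernetx/read_write.py` below. A minimal sketch of the expected surface:

```python
# Minimal sketch of the package-level API after this change.
import hypernetx as hnx

hg = hnx.Hypergraph({"e1": ["a", "b"], "e2": ["b", "c"]})
hif = hnx.to_hif(hg)       # dict conforming to the HIF schema
hg2 = hnx.from_hif(hif)    # round-trips back to a Hypergraph
```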

hypernetx/hif.py

Lines changed: 195 additions & 0 deletions
@@ -0,0 +1,195 @@
+# Copyright © 2024 Battelle Memorial Institute
+# All rights reserved.
+
+import hypernetx as hnx
+import pandas as pd
+import json
+import fastjsonschema
+import requests
+from copy import deepcopy
+from .exception import HyperNetXError
+
+schema_url = "https://raw.githubusercontent.com/pszufe/HIF_validators/main/schemas/hif_schema_v0.1.0.json"
+resp = requests.get(schema_url)
+schema = json.loads(resp.text)
+validator = fastjsonschema.compile(schema)
+
+
+def normalize_dataframe(df):
+    """
+    Moves common attributes into misc_properties for translating into HIF.
+
+    Parameters
+    ----------
+    df : pd.DataFrame
+        HypergraphView.dataframe
+
+    Returns
+    -------
+    pd.DataFrame
+        allowed columns are limited to HIF keys
+    """
+    default_cols = (
+        ["weight"]
+        + list(set(df.columns).intersection(["direction"]))
+        + ["misc_properties"]
+    )
+    cols = list(set(df.columns).difference(default_cols))
+    dfdict = df[cols].T.to_dict()
+    newdf = df[default_cols]
+    for uid in newdf.index:
+        newdf.loc[uid]["misc_properties"].update(dfdict[uid])
+    return newdf.fillna("nil")
+
+
+def to_hif(hg, filename=None, network_type="undirected", metadata=None):
+    """
+    Returns a dictionary object valid for the HIF JSON schema
+
+    Parameters
+    ----------
+    hg : hnx.Hypergraph
+
+    filename : str, optional
+        filepath where json object is to be stored, by default None
+    network_type : str, optional
+        One of 'undirected', 'directed', 'asc', by default 'undirected'
+    metadata : dict, optional
+        Additional information to store, by default None
+
+    Returns
+    -------
+    hif : dict
+        format is defined by HIF schema
+    """
+    hyp_objs = ["nodes", "edges", "incidences"]
+    defaults = {
+        part: dict(getattr(hg, part).property_store._defaults) for part in hyp_objs
+    }
+    for part in hyp_objs:
+        misc_properties = defaults[part].pop("misc_properties", {})
+        defaults[part]["attrs"] = dict(misc_properties)
+
+    incj = deepcopy(hg.incidences.to_dataframe)
+    incj.index.names = ["edge", "node"]
+    incj = normalize_dataframe(incj)
+    incj = incj.rename(columns={"misc_properties": "attrs"})
+    incj = incj.reset_index().to_dict(orient="records")
+
+    edgj = deepcopy(hg.edges.to_dataframe)
+    edgj.index.names = ["edge"]
+    edgj = normalize_dataframe(edgj)
+    edgj = edgj.rename(columns={"misc_properties": "attrs"})
+    edgj = edgj.reset_index().to_dict(orient="records")
+
+    nodj = deepcopy(hg.nodes.to_dataframe)
+    nodj.index.names = ["node"]
+    nodj = normalize_dataframe(nodj)
+    nodj = nodj.rename(columns={"misc_properties": "attrs"})
+    nodj = nodj.reset_index().to_dict(orient="records")
+
+    if isinstance(metadata, dict):
+        metadata.update({"default_attrs": defaults})
+    else:
+        metadata = {"default_attrs": defaults}
+    if hg.name is not None:
+        metadata["name"] = hg.name
+
+    hif = {
+        "edges": edgj,
+        "nodes": nodj,
+        "incidences": incj,
+        "network-type": network_type,
+        "metadata": metadata,
+    }
+    try:
+        validator(hif)
+        if filename is not None:
+            json.dump(hif, open(filename, "w"))
+        return hif
+    except Exception as ex:
+        raise HyperNetXError(ex)
+
+
+def from_hif(hif=None, filename=None):
+    """
+    Reads HIF formatted string or dictionary and returns corresponding
+    hnx.Hypergraph
+
+    Parameters
+    ----------
+    hif : dict, optional
+        Useful if file is read by json and inspected before turning into a hypergraph,
+        by default None
+    filename : str, optional
+        Full path to location of HIF formatted JSON in storage,
+        by default None
+
+    Returns
+    -------
+    hnx.Hypergraph
+
+    """
+    if hif is not None:
+        try:
+            validator(hif)
+        except Exception as ex:
+            raise HyperNetXError(ex)
+    elif filename is not None:
+        hif = json.load(open(filename, "r"))
+        try:
+            validator(hif)
+        except Exception as ex:
+            raise HyperNetXError(ex)
+    else:
+        print("No data given")
+        return None
+
+    mkdd = lambda: {"weight": 1, "attrs": {}}
+    hifex = deepcopy(hif)
+    parts = {
+        part: deepcopy(pd.DataFrame(hifex.get(part, {})))
+        for part in ["nodes", "edges", "incidences"]
+    }
+    metadata = hifex.get("metadata", {})
+    defaults = metadata.get("default_attrs", {})
+    defaults = {part: defaults.get(part, mkdd()) for part in parts}
+    default_weights = {part: defaults[part].get("weight", 1) for part in parts}
+    for part in parts:
+        if len(parts[part]) == 0:
+            continue
+        thispart = parts[part]
+        d = deepcopy(defaults[part])
+        dkeys = [k for k in d.keys() if k not in ["weight", "attrs"]]
+        if len(dkeys) > 0:
+            for attr in dkeys:
+                thispart[attr] = [
+                    row.attrs.pop(attr, d[attr]) for row in thispart.itertuples()
+                ]
+    hyp_objects = dict()
+    for part in ["nodes", "edges"]:
+        if len(parts[part]) > 0:
+            uid = part[:-1]
+            cols = [uid] + list(set(parts[part].columns).difference([uid]))
+            hyp_objects[part] = parts[part][cols]
+        else:
+            hyp_objects[part] = None
+    cols = ["edge", "node"] + list(
+        set(parts["incidences"].columns).difference(["edge", "node"])
+    )
+    incidences = parts["incidences"][cols]
+    name = metadata.get("name", None)
+    return hnx.Hypergraph(
+        incidences,
+        default_cell_weight=default_weights["incidences"],
+        misc_cell_properties_col="attrs",
+        node_properties=hyp_objects["nodes"],
+        default_edge_weight=default_weights["edges"],
+        edge_properties=hyp_objects["edges"],
+        default_node_weight=default_weights["nodes"],
+        misc_properties_col="attrs",
+        name=name,
+    )
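Together, `to_hif` and `from_hif` give a schema-validated JSON round trip: `to_hif` validates before writing, and `from_hif` validates again before rebuilding the hypergraph. A minimal round-trip sketch, assuming a plain hypergraph with default property stores; the file name is illustrative:

```python
# Round-trip sketch for the module above; "toy_hif.json" is a hypothetical path.
import hypernetx as hnx

hg = hnx.Hypergraph({"e1": ["a", "b"], "e2": ["b", "c", "d"]}, name="toy")

hif = hnx.to_hif(hg, filename="toy_hif.json")  # validates, writes JSON, returns dict
assert hif["network-type"] == "undirected"

hg2 = hnx.from_hif(filename="toy_hif.json")    # validates again before rebuilding
print(sorted(hg2.nodes), sorted(hg2.edges))
```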

hypernetx/read_write.py

Lines changed: 0 additions & 17 deletions
This file was deleted.

hypernetx/utils/toys/lesmis.py

Lines changed: 53 additions & 0 deletions
@@ -2,6 +2,7 @@
 # All rights reserved.
 
 import pandas as pd
+import numpy as np
 from itertools import islice, chain, repeat
 
 import matplotlib.pyplot as plt
@@ -42,6 +43,32 @@ def __init__(self):
     def dnames(self):
         return self.df_names.set_index("Symbol")
 
+    def hypergraph_example(self):
+
+        names = self.df_names
+        scenes = self.df_scenes
+        scenes["edge"] = [
+            ".".join([str(scenes.loc[idx][col]) for col in scenes.columns[:-2]])
+            for idx in scenes.index
+        ]
+        scenes["node"] = scenes["Characters"]
+        df = scenes[["edge", "node"]]
+        cell_weights = df.groupby(["edge"]).count().to_dict()["node"]
+        df["weight"] = df.edge.map(lambda e: np.round(1 / cell_weights.get(e, 1), 2))
+        nprops = names
+        nprops["weight"] = np.round(np.random.uniform(0, 1, len(names)), 2)
+        lm = hnx.Hypergraph(
+            df,
+            cell_weight_col="weight",
+            node_properties=nprops,
+            node_weight_prop_col="weight",
+            name="LesMis example from HNX",
+        )
+        lm.nodes["JV"].job = "mayor"
+        lm.nodes["MY"].avocation = "to be kind"
+        lm.nodes["BS"].vocation = "explorer"
+        return lm
+
 
 def lesmis_hypergraph_from_df(df, by="Chapter", on="Characters"):
     cols = df.columns.tolist()
@@ -54,6 +81,32 @@ def lesmis_hypergraph_from_df(df, by="Chapter", on="Characters"):
     )
 
 
+def lesmis_hypergraph():
+    lesmis = LesMis()
+    names = lesmis.df_names
+    scenes = lesmis.df_scenes
+    scenes["edge"] = [
+        ".".join([str(scenes.loc[idx][col]) for col in scenes.columns[:-2]])
+        for idx in scenes.index
+    ]
+    scenes["node"] = scenes["Characters"]
+    df = scenes[["edge", "node"]]
+    cell_weights = df.groupby(["edge"]).count().to_dict()["node"]
+    df["weight"] = df.edge.map(lambda e: np.round(1 / cell_weights.get(e, 1), 2))
+    nprops = names
+    nprops["weight"] = np.round(np.random.uniform(0, 1, len(names)), 2)
+    lm = hnx.Hypergraph(
+        df,
+        cell_weight_col="weight",
+        node_properties=nprops,
+        node_weight_prop_col="weight",
+    )
+    lm.nodes["JV"].job = "mayor"
+    lm.nodes["MY"].avocation = "to be kind"
+    lm.nodes["BS"].vocation = "explorer"
+    return lm
+
+
 def book_tour(df, xlabel="Book", ylabel="Volume", s=3.5):
     """
     Constructs a visualization of hypergraphs stored in an indexed
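Both new constructors build the same weighted hypergraph from the scenes data; the method variant also sets a name, and node weights are drawn at random on each call. A short usage sketch, assuming the bundled LesMis data loads as the existing class implies:

```python
# Usage sketch for the toy helpers added above.
from hypernetx.utils.toys.lesmis import LesMis, lesmis_hypergraph

lm = LesMis().hypergraph_example()   # named variant built from the class
print(lm.name)                       # "LesMis example from HNX"
print(lm.nodes["JV"].job)            # "mayor", attached as a node property above

lm2 = lesmis_hypergraph()            # module-level variant, no name set
```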

0 commit comments
