Skip to content
Open
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
14 changes: 10 additions & 4 deletions netneurotools/modularity/modules.py
Original file line number Diff line number Diff line change
Expand Up @@ -5,6 +5,7 @@
from sklearn.utils.validation import check_random_state
from scipy import optimize
from scipy.cluster import hierarchy
from joblib import Parallel, delayed

from .. import has_numba
if has_numba:
Expand Down Expand Up @@ -368,7 +369,8 @@ def find_consensus(assignments, null_func=np.mean, return_agreement=False,


def consensus_modularity(adjacency, gamma=1, B='modularity',
repeats=250, null_func=np.mean, seed=None):
repeats=250, null_func=np.mean, seed=None,
n_jobs=1):
"""
Find community assignments from `adjacency` through consensus.

Expand All @@ -394,6 +396,8 @@ def consensus_modularity(adjacency, gamma=1, B='modularity',
Default: `np.mean`
seed : {int, np.random.RandomState instance, None}, optional
Seed for random number generation. Default: None
n_jobs : int, optional
Number of parallel jobs to run. Default: 1

Returns
-------
Expand All @@ -411,9 +415,11 @@ def consensus_modularity(adjacency, gamma=1, B='modularity',
structure in networks. Chaos: An Interdisciplinary Journal of Nonlinear
Science, 23(1), 013142.
"""
# generate community partitions `repeat` times
comms, Q_all = zip(*[bct.community_louvain(adjacency, gamma=gamma, B=B)
for i in range(repeats)])
    # generate community partitions `repeats` times with parallelization
    # NOTE(review): every call below receives the same `seed` — if seed is
    # not None, all `repeats` partitions will be identical; confirm intended
comms, Q_all = zip(*Parallel(n_jobs=n_jobs)(
delayed(bct.community_louvain)(adjacency, gamma=gamma, B=B,
seed=seed)
for _ in range(repeats)))
comms = np.column_stack(comms)

    # find consensus cluster assignments across all partitioning solutions
Expand Down