Skip to content

Commit

Permalink
docs and typing
Browse files Browse the repository at this point in the history
  • Loading branch information
ryanharvey1 committed Oct 16, 2024
1 parent ee883c4 commit 2f1670a
Show file tree
Hide file tree
Showing 4 changed files with 678 additions and 306 deletions.
39 changes: 19 additions & 20 deletions neuro_py/ensemble/explained_variance.py
Original file line number Diff line number Diff line change
Expand Up @@ -86,32 +86,31 @@ class ExplainedVariance(object):
# Most simple case, returns single explained variance value
>>> expvar = explained_variance.ExplainedVariance(
st=st,
template=beh_epochs[1],
matching=beh_epochs[2],
control=beh_epochs[0],
window=None,
)
>>> expvar = explained_variance.ExplainedVariance(
...     st=st,
...     template=beh_epochs[1],
...     matching=beh_epochs[2],
...     control=beh_epochs[0],
...     window=None,
... )
# Get time resolved explained variance across entire session in 200sec bins
>>> expvar = explained_variance.ExplainedVariance(
st=st,
template=beh_epochs[1],
matching=nel.EpochArray([beh_epochs.start, beh_epochs.stop]),
control=beh_epochs[0],
window=200
)
>>> expvar = explained_variance.ExplainedVariance(
...     st=st,
...     template=beh_epochs[1],
...     matching=nel.EpochArray([beh_epochs.start, beh_epochs.stop]),
...     control=beh_epochs[0],
...     window=200,
... )
# Get time resolved explained variance across entire session in 200sec bins sliding by 100sec
>>> expvar = explained_variance.ExplainedVariance(
st=st,
template=beh_epochs[1],
matching=nel.EpochArray([beh_epochs.start, beh_epochs.stop]),
control=beh_epochs[0],
window=200,
slideby=100
)
>>> expvar = explained_variance.ExplainedVariance(
...     st=st,
...     template=beh_epochs[1],
...     matching=nel.EpochArray([beh_epochs.start, beh_epochs.stop]),
...     control=beh_epochs[0],
...     window=200,
...     slideby=100,
... )
"""

def __init__(
Expand Down
21 changes: 16 additions & 5 deletions neuro_py/ensemble/similarity_index.py
Original file line number Diff line number Diff line change
@@ -1,13 +1,16 @@
import itertools
import multiprocessing
from typing import Tuple

import numpy as np
from joblib import Parallel, delayed

from neuro_py.stats.stats import get_significant_events


def similarity_index(patterns, n_shuffles=1000, parallel=True):
def similarity_index(
patterns: np.ndarray, n_shuffles: int = 1000, parallel: bool = True
) -> Tuple[np.ndarray, np.ndarray, np.ndarray]:
"""
Calculate the similarity index of a set of patterns.
Expand All @@ -20,10 +23,18 @@ def similarity_index(patterns, n_shuffles=1000, parallel=True):
attribute large weights to the same neurons, SI will be large;
if assemblies are orthogonal, SI will be zero.
Input:
patterns: list of patterns (n patterns x n neurons)
n_shuffles: number of shuffles to calculate the similarity index
Output:
Parameters
----------
patterns : np.ndarray
List of patterns (n patterns x n neurons).
n_shuffles : int, optional
Number of shuffles to calculate the similarity index, by default 1000.
parallel : bool, optional
Whether to run in parallel, by default True.
Returns
-------
Tuple[np.ndarray, np.ndarray, np.ndarray]
si: similarity index: float (0-1)
combos: list of all possible combinations of patterns
pvalues: list of p-values for each pattern combination
Expand Down
53 changes: 31 additions & 22 deletions neuro_py/ensemble/similaritymat.py
Original file line number Diff line number Diff line change
@@ -1,28 +1,39 @@
from typing import Optional, Tuple, Union

import numpy as np
import scipy.optimize as optimize
from sklearn.metrics.pairwise import cosine_similarity as getsim


def similaritymat(patternsX, patternsY=None, method="cosine", findpairs=False):
def similaritymat(
patternsX: np.ndarray,
patternsY: Optional[np.ndarray] = None,
method: str = "cosine",
findpairs: bool = False,
) -> Union[np.ndarray, Tuple[np.ndarray, np.ndarray, np.ndarray]]:
"""
INPUTS
patternsX: co-activation patterns (assemblies)
- numpy array (assemblies, neurons)
patternsY: co-activation patterns (assemblies)
- numpy array (assemblies, neurons)
- if None, will compute similarity
of patternsX to itself
method: defines similarity measure method
'cosine' - cosine similarity
findpairs: maximizes main diagonal of the sim matrix to define pairs
from patterns X and Y
returns rowind,colind which can be used to reorder
patterns X and Y to maximize the diagonal
OUTPUTS
simmat: similarity matrix
- array (assemblies from X, assemblies from Y)
Calculate the similarity matrix of co-activation patterns (assemblies).
Parameters
----------
patternsX : np.ndarray
Co-activation patterns (assemblies) - numpy array (assemblies, neurons).
patternsY : Optional[np.ndarray], optional
Co-activation patterns (assemblies) - numpy array (assemblies, neurons).
If None, will compute similarity of patternsX to itself, by default None.
method : str, optional
Defines similarity measure method, by default 'cosine'.
'cosine' - cosine similarity.
findpairs : bool, optional
Maximizes main diagonal of the similarity matrix to define pairs
from patterns X and Y. Returns rowind, colind which can be used to reorder
patterns X and Y to maximize the diagonal, by default False.
Returns
-------
Union[np.ndarray, Tuple[np.ndarray, np.ndarray, np.ndarray]]
Similarity matrix (assemblies from X, assemblies from Y).
If findpairs is True, also returns rowind and colind.
"""

if method != "cosine":
Expand All @@ -39,8 +50,6 @@ def fillmissingidxs(ind, n):
ind = np.array(list(ind) + missing)
return ind

import scipy.optimize as optimize

rowind, colind = optimize.linear_sum_assignment(-simmat)

rowind = fillmissingidxs(rowind, np.size(simmat, 0))
Expand Down
Loading

0 comments on commit 2f1670a

Please sign in to comment.