
Commit

Black reformatting
ncaptier committed Nov 2, 2022
1 parent af9759f commit 596ab4e
Showing 4 changed files with 371 additions and 298 deletions.
53 changes: 27 additions & 26 deletions sica/_whitening.py
@@ -16,40 +16,40 @@


def whitening(
-     X: Union[np.ndarray, spmatrix],
-     n_components: int,
-     svd_solver: str,
-     chunked: bool,
-     chunk_size: Union[int, None],
-     zero_center: bool,
-     random_state: Optional[Union[int, np.random.RandomState]] = None
+     X: Union[np.ndarray, spmatrix],
+     n_components: int,
+     svd_solver: str,
+     chunked: bool,
+     chunk_size: Union[int, None],
+     zero_center: bool,
+     random_state: Optional[Union[int, np.random.RandomState]] = None,
) -> Tuple[np.ndarray, np.ndarray]:
""" Whiten data (i.e. transform variables into a set of new uncorrelated and unit-variance variables) and reduce
"""Whiten data (i.e. transform variables into a set of new uncorrelated and unit-variance variables) and reduce
dimension trhough a PCA-like approach. This function handles array-like formats as well as sparse matrices.
Parameters
----------
X : 2D ndarray or spmatrix, shape (n_samples, n_features)
n_components : int
    number of principal components to compute. If None, n_components = min(X.shape)
svd_solver : str {‘auto’, ‘full’, ‘arpack’, ‘randomized’, 'lobpcg'}
    solver for the different PCA methods. Please note that some solvers may not be compatible with
    some PCA methods. See PCA, TruncatedSVD and IncrementalPCA from sklearn.decomposition or
    scipy.sparse.linalg.svds.
chunked : boolean
    if True, perform an incremental PCA on segments of chunk_size. The incremental PCA automatically
    zero centers and ignores settings of random_seed and svd_solver.
chunk_size : int
    Number of observations to include in each chunk. Required if chunked=True was passed.
zero_center : boolean
    If True, compute standard PCA from covariance matrix. If False, omit zero-centering variables
    (uses TruncatedSVD), which allows sparse input to be handled efficiently.
random_state : int, RandomState, optional
    Change to use different initial states for the optimization. The default is None.
@@ -106,27 +106,28 @@ def whitening(


def _pca_with_sparse(
-     X: spmatrix,
-     npcs: int,
-     solver: Optional[str] = "arpack",
-     mu=None,
-     random_state: Optional[Union[int, np.random.RandomState]] = None) -> Tuple[np.ndarray, np.ndarray]:
-     """ Compute PCA decomposition with initial centering for sparse input.
+     X: spmatrix,
+     npcs: int,
+     solver: Optional[str] = "arpack",
+     mu=None,
+     random_state: Optional[Union[int, np.random.RandomState]] = None,
+ ) -> Tuple[np.ndarray, np.ndarray]:
+     """Compute PCA decomposition with initial centering for sparse input.
Parameters
----------
X : spmatrix, shape (n_samples, n_features)
npcs : int
    number of PCA components.
solver : str, optional
    Eigenvalue solver to use. Should be ‘arpack’ or ‘lobpcg’. See scipy.sparse.linalg.svds.
    The default is 'arpack'.
mu : TYPE, optional
    DESCRIPTION. The default is None.
random_state : int, RandomState, optional
    The default is None.
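The docstring names the core idea here: a truncated SVD of a sparse matrix with implicit ("initial") centering, so the data are never densified. The sketch below shows one standard way to do this with scipy, using a LinearOperator whose matvec/rmatvec subtract the column means on the fly. It illustrates the technique only and is not the sica implementation; the helper name centered_svd is made up, and the column-mean vector computed here is presumably the role of the undocumented mu parameter above, though that is an inference rather than something this diff states.

import numpy as np
import scipy.sparse as sp
from scipy.sparse.linalg import LinearOperator, svds

def centered_svd(X, npcs, solver="arpack"):
    # Truncated SVD of (X - 1 mu^T), i.e. X with its column means removed,
    # computed without ever forming the dense centered matrix.
    mu = np.asarray(X.mean(axis=0)).ravel()  # column means, shape (n_features,)
    ones = np.ones(X.shape[0])

    def matvec(v):   # (X - 1 mu^T) v
        return X @ v - ones * (mu @ v)

    def rmatvec(v):  # (X - 1 mu^T)^T v
        return X.T @ v - mu * (ones @ v)

    op = LinearOperator(X.shape, matvec=matvec, rmatvec=rmatvec)
    return svds(op, k=npcs, solver=solver)  # returns (U, s, Vt)

# Example on random sparse data: 200 samples, 50 features, 5 components.
X = sp.random(200, 50, density=0.1, format="csr", random_state=0)
U, s, Vt = centered_svd(X, npcs=5)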
