@@ -33,16 +33,19 @@ def log_multivariate_normal_density(X, means, covars, covariance_type='diag'):
     X : array_like, shape (n_samples, n_features)
         List of n_features-dimensional data points. Each row corresponds to a
         single data point.
+
     means : array_like, shape (n_components, n_features)
         List of n_features-dimensional mean vectors for n_components Gaussians.
         Each row corresponds to a single mean vector.
+
     covars : array_like
         List of n_components covariance parameters for each Gaussian. The shape
         depends on `covariance_type`:
             (n_components, n_features) if 'spherical',
             (n_features, n_features) if 'tied',
             (n_components, n_features) if 'diag',
             (n_components, n_features, n_features) if 'full'
+
     covariance_type : string
         Type of the covariance parameters. Must be one of
         'spherical', 'tied', 'diag', 'full'. Defaults to 'diag'.
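For reference, the 'diag' case of this function reduces to a closed-form expression over the documented shapes. A minimal NumPy sketch, illustrative only (`diag_log_density` is not a name in this module):

```python
import numpy as np

def diag_log_density(X, means, covars):
    """Sketch of the 'diag' case: log N(x | mean_k, diag(covar_k)).

    X: (n_samples, n_features); means, covars: (n_components, n_features).
    Returns an array of shape (n_samples, n_components).
    """
    n_samples, n_features = X.shape
    return -0.5 * (n_features * np.log(2 * np.pi)
                   + np.sum(np.log(covars), axis=1)
                   + np.sum(means ** 2 / covars, axis=1)
                   - 2 * np.dot(X, (means / covars).T)
                   + np.dot(X ** 2, (1.0 / covars).T))

X = np.array([[0.0, 0.0], [1.0, 1.0]])
means = np.array([[0.0, 0.0], [5.0, 5.0]])
covars = np.ones((2, 2))   # unit variances, shape (n_components, n_features)
print(diag_log_density(X, means, covars).shape)   # (2, 2): (n_samples, n_components)
```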
@@ -119,7 +122,6 @@ class GMM(BaseEstimator):
     Initializes parameters such that every mixture component has zero
     mean and identity covariance.
 
-
     Parameters
     ----------
     n_components : int, optional
@@ -182,8 +184,6 @@ class GMM(BaseEstimator):
     converged_ : bool
         True when convergence was reached in fit(), False otherwise.
 
-
-
     See Also
     --------
 
@@ -268,13 +268,15 @@ def __init__(self, n_components=1, covariance_type='diag',
 
     def _get_covars(self):
         """Covariance parameters for each mixture component.
-        The shape depends on `cvtype`::
 
-            (`n_states`, 'n_features') if 'spherical',
-            (`n_features`, `n_features`) if 'tied',
-            (`n_states`, `n_features`) if 'diag',
-            (`n_states`, `n_features`, `n_features`) if 'full'
-            """
+        The shape depends on ``cvtype``::
+
+            (n_states, n_features) if 'spherical',
+            (n_features, n_features) if 'tied',
+            (n_states, n_features) if 'diag',
+            (n_states, n_features, n_features) if 'full'
+
+        """
         if self.covariance_type == 'full':
             return self.covars_
         elif self.covariance_type == 'diag':
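A quick way to read the shape table above is that every storage layout can be expanded into one (n_features, n_features) matrix per component. The sketch below is only an illustration of those shapes, not the method body:

```python
import numpy as np

def expand_to_full(covars, covariance_type, n_components):
    """Illustrative: expand stored covariance parameters to full matrices."""
    covars = np.asarray(covars)
    if covariance_type == 'full':      # (n_components, n_features, n_features)
        return covars
    if covariance_type == 'tied':      # (n_features, n_features), shared
        return np.array([covars] * n_components)
    # 'diag' and 'spherical' both store per-feature variances per component
    return np.array([np.diag(c) for c in covars])

print(expand_to_full(np.ones((3, 2)), 'diag', 3).shape)   # (3, 2, 2)
```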
@@ -323,8 +325,8 @@ def score_samples(self, X):
             raise ValueError('The shape of X is not compatible with self')
 
         lpr = (log_multivariate_normal_density(X, self.means_, self.covars_,
-                                               self.covariance_type)
-               + np.log(self.weights_))
+                                               self.covariance_type) +
+               np.log(self.weights_))
         logprob = logsumexp(lpr, axis=1)
         responsibilities = np.exp(lpr - logprob[:, np.newaxis])
         return logprob, responsibilities
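The computation here is the usual log-domain E-step: `lpr[i, k]` is the log of the weighted component density, `logsumexp` over components gives the per-sample log-likelihood, and subtracting it yields responsibilities that sum to one per row. A toy sketch, using `scipy.special.logsumexp` as a stand-in for the module's own helper:

```python
import numpy as np
from scipy.special import logsumexp

# Toy weighted per-component densities p(x_i | k) * w_k: 2 samples, 2 components.
lpr = np.log(np.array([[0.20, 0.10],
                       [0.05, 0.40]]))
logprob = logsumexp(lpr, axis=1)              # log p(x_i) under the whole mixture
resp = np.exp(lpr - logprob[:, np.newaxis])   # posterior responsibilities
print(resp)                # rows ~ [0.667, 0.333] and [0.111, 0.889]
print(resp.sum(axis=1))    # [1. 1.]
```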
@@ -420,8 +422,8 @@ def sample(self, n_samples=1, random_state=None):
         return X
 
     def fit_predict(self, X, y=None):
-        """
-        Fit and then predict labels for data.
+        """Fit and then predict labels for data.
+
         Warning: due to the final maximization step in the EM algorithm,
         with low iterations the prediction may not be 100% accurate
 
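To make the warning concrete, one can compare `fit_predict` with a separate `fit` followed by `predict` when the iteration budget is tiny. A hedged sketch, assuming the (since-deprecated) `sklearn.mixture.GMM` class this diff touches and its `n_iter` and `random_state` parameters:

```python
import numpy as np
from sklearn.mixture import GMM   # the class edited in this diff (deprecated in later releases)

rng = np.random.RandomState(0)
X = np.vstack([rng.randn(50, 2), rng.randn(50, 2) + 5.0])

labels_a = GMM(n_components=2, n_iter=1, random_state=0).fit_predict(X)
gmm = GMM(n_components=2, n_iter=1, random_state=0).fit(X)
labels_b = gmm.predict(X)
# fit_predict's labels come from the E-step before the final M-step, so with a
# single iteration they are not guaranteed to match labels_b exactly.
```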
@@ -653,7 +655,7 @@ def aic(self, X):
 
 
 #########################################################################
-## some helper routines
+# some helper routines
 #########################################################################
 
 
@@ -684,8 +686,7 @@ def _log_multivariate_normal_density_tied(X, means, covars):
 
 
 def _log_multivariate_normal_density_full(X, means, covars, min_covar=1.e-7):
-    """Log probability for full covariance matrices.
-    """
+    """Log probability for full covariance matrices."""
     n_samples, n_dim = X.shape
     nmix = len(means)
     log_prob = np.empty((n_samples, nmix))
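The body of this function is not shown in the hunk; the standard way to evaluate the full-covariance log-density is through a Cholesky factor of each covariance matrix. A sketch under that assumption (not necessarily the exact implementation):

```python
import numpy as np
from scipy import linalg

def full_cov_log_density(X, means, covars, min_covar=1e-7):
    """Sketch: per-component Gaussian log-density with full covariances."""
    n_samples, n_dim = X.shape
    log_prob = np.empty((n_samples, len(means)))
    for c, (mu, cv) in enumerate(zip(means, covars)):
        try:
            cv_chol = linalg.cholesky(cv, lower=True)
        except linalg.LinAlgError:
            # Regularize a (numerically) singular covariance before factoring.
            cv_chol = linalg.cholesky(cv + min_covar * np.eye(n_dim), lower=True)
        cv_log_det = 2 * np.sum(np.log(np.diagonal(cv_chol)))
        # Solve L z = (x - mu)^T so that ||z||^2 equals the Mahalanobis term.
        cv_sol = linalg.solve_triangular(cv_chol, (X - mu).T, lower=True).T
        log_prob[:, c] = -0.5 * (np.sum(cv_sol ** 2, axis=1)
                                 + n_dim * np.log(2 * np.pi) + cv_log_det)
    return log_prob
```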
@@ -751,8 +752,7 @@ def _validate_covars(covars, covariance_type, n_components):
 
 def distribute_covar_matrix_to_match_covariance_type(
         tied_cv, covariance_type, n_components):
-    """Create all the covariance matrices from a given template
-    """
+    """Create all the covariance matrices from a given template"""
     if covariance_type == 'spherical':
         cv = np.tile(tied_cv.mean() * np.ones(tied_cv.shape[1]),
                      (n_components, 1))
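The 'spherical' branch visible above turns a single template matrix into one constant variance per feature, repeated for every component. A small worked example of just that expression (toy values, not part of the module):

```python
import numpy as np

tied_cv = np.diag([1.0, 4.0, 9.0])   # toy (n_features, n_features) template
n_components = 2

# Same expression as the 'spherical' branch above.
cv = np.tile(tied_cv.mean() * np.ones(tied_cv.shape[1]), (n_components, 1))
print(cv.shape)   # (2, 3): (n_components, n_features)
print(cv[0])      # every entry equals tied_cv.mean()
```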