omicverse
omicverse copied to clipboard
Error report: the cNMF `consensus` step raises a TypeError:
cnmf_obj.consensus(k=selected_K,
density_threshold=density_threshold,
show_clustering=True,
close_clustergram_fig=False)
Error output (JSON-formatted traceback):
{
"name": "TypeError",
"message": "H should have the same dtype as X. Got H.dtype = float32.",
"stack": "---------------------------------------------------------------------------
TypeError Traceback (most recent call last)
Cell In[26], line 1
----> 1 cnmf_obj.consensus(k=selected_K,
2 density_threshold=density_threshold,
3 show_clustering=True,
4 close_clustergram_fig=False)
File ~/mambaforge/envs/omicverse_gpu/lib/python3.10/site-packages/omicverse/single/_cnmf.py:838, in cNMF.consensus(self, k, density_threshold, local_neighborhood_size, show_clustering, skip_density_and_return_after_stats, close_clustergram_fig, refit_usage)
835 spectra_tpm_rf = spectra_tpm.loc[:,hvgs]
837 spectra_tpm_rf = spectra_tpm_rf.div(tpm_stats.loc[hvgs, '__std'], axis=1)
--> 838 rf_usages = self.refit_usage(norm_tpm.X, spectra_tpm_rf)
839 rf_usages = pd.DataFrame(rf_usages, index=norm_counts.obs.index, columns=spectra_tpm_rf.index)
841 save_df_to_npz(median_spectra, self.paths['consensus_spectra']%(k, density_threshold_repl))
File ~/mambaforge/envs/omicverse_gpu/lib/python3.10/site-packages/omicverse/single/_cnmf.py:667, in cNMF.refit_usage(self, X, spectra)
664 else:
665 refit_nmf_kwargs.update(dict(n_components = spectra.shape[0], H = spectra, update_H = False))
--> 667 _, rf_usages = self._nmf(X, nmf_kwargs=refit_nmf_kwargs)
668 if (type(X) is pd.DataFrame) and (type(spectra) is pd.DataFrame):
669 rf_usages = pd.DataFrame(rf_usages, index=X.index, columns=spectra.index)
File ~/mambaforge/envs/omicverse_gpu/lib/python3.10/site-packages/omicverse/single/_cnmf.py:546, in cNMF._nmf(self, X, nmf_kwargs)
535 def _nmf(self, X, nmf_kwargs):
536 \"\"\"
537 Parameters
538 ----------
(...)
544
545 \"\"\"
--> 546 (usages, spectra, niter) = non_negative_factorization(X, **nmf_kwargs)
548 return(spectra, usages)
File ~/mambaforge/envs/omicverse_gpu/lib/python3.10/site-packages/sklearn/utils/_param_validation.py:214, in validate_params.<locals>.decorator.<locals>.wrapper(*args, **kwargs)
208 try:
209 with config_context(
210 skip_parameter_validation=(
211 prefer_skip_nested_validation or global_skip_validation
212 )
213 ):
--> 214 return func(*args, **kwargs)
215 except InvalidParameterError as e:
216 # When the function is just a wrapper around an estimator, we allow
217 # the function to delegate validation to the estimator, but we replace
218 # the name of the estimator by the name of the function in the error
219 # message to avoid confusion.
220 msg = re.sub(
221 r\"parameter of \\w+ must be\",
222 f\"parameter of {func.__qualname__} must be\",
223 str(e),
224 )
File ~/mambaforge/envs/omicverse_gpu/lib/python3.10/site-packages/sklearn/decomposition/_nmf.py:1122, in non_negative_factorization(X, W, H, n_components, init, update_H, solver, beta_loss, tol, max_iter, alpha_W, alpha_H, l1_ratio, random_state, verbose, shuffle)
1119 X = check_array(X, accept_sparse=(\"csr\", \"csc\"), dtype=[np.float64, np.float32])
1121 with config_context(assume_finite=True):
-> 1122 W, H, n_iter = est._fit_transform(X, W=W, H=H, update_H=update_H)
1124 return W, H, n_iter
File ~/mambaforge/envs/omicverse_gpu/lib/python3.10/site-packages/sklearn/decomposition/_nmf.py:1670, in NMF._fit_transform(self, X, y, W, H, update_H)
1663 raise ValueError(
1664 \"When beta_loss <= 0 and X contains zeros, \"
1665 \"the solver may diverge. Please add small values \"
1666 \"to X, or use a positive beta_loss.\"
1667 )
1669 # initialize or check W and H
-> 1670 W, H = self._check_w_h(X, W, H, update_H)
1672 # scale the regularization terms
1673 l1_reg_W, l1_reg_H, l2_reg_W, l2_reg_H = self._compute_regularization(X)
File ~/mambaforge/envs/omicverse_gpu/lib/python3.10/site-packages/sklearn/decomposition/_nmf.py:1202, in _BaseNMF._check_w_h(self, X, W, H, update_H)
1200 _check_init(H, (self._n_components, n_features), \"NMF (input H)\")
1201 if H.dtype != X.dtype:
-> 1202 raise TypeError(
1203 \"H should have the same dtype as X. Got H.dtype = {}.\".format(
1204 H.dtype
1205 )
1206 )
1207 # 'mu' solver should not be initialized by zeros
1208 if self.solver == \"mu\":
TypeError: H should have the same dtype as X. Got H.dtype = float32."
}
Root cause: scikit-learn's `non_negative_factorization` requires the provided `H` matrix to have the same dtype as `X` (see the `_check_w_h` check in the traceback). Here `H` (the refit spectra, `spectra_tpm_rf`) is float32 while `X` (`norm_tpm.X`) is float64, so the dtypes must be made to match — e.g. cast one side with `.astype(...)` before calling `refit_usage` — presumably in `omicverse/single/_cnmf.py`; confirm against the linked upstream issue.

References: https://omicverse.readthedocs.io/en/latest/Tutorials-single/t_cnmf/ https://github.com/dylkot/cNMF/issues/10