qiskit-aqua
SklearnSVM - ArpackNoConvergence
Information
- Qiskit Aqua version: {'qiskit-terra': '0.16.0', 'qiskit-aer': '0.7.0', 'qiskit-ignis': '0.5.0', 'qiskit-ibmq-provider': '0.11.0', 'qiskit-aqua': '0.8.0', 'qiskit': '0.23.0'}
- Python version: 3.8.5
- Operating system: Windows
What is the current behavior?
ArpackNoConvergence Traceback (most recent call last)
~\Anaconda3\envs\qc\lib\site-packages\cvxpy\expressions\constants\constant.py in extremal_eig_near_ref(A, ref, low)
242 sigma = -ref if low else ref
--> 243 ev = SA_eigsh(sigma)
244 except ArpackError:
~\Anaconda3\envs\qc\lib\site-packages\cvxpy\expressions\constants\constant.py in SA_eigsh(sigma)
237 def SA_eigsh(sigma):
--> 238 return eigsh(A, k=1, sigma=sigma, return_eigenvectors=False)
239 # Run eigsh in shift-invert mode, since we're particularly interested in finding
~\Anaconda3\envs\qc\lib\site-packages\scipy\sparse\linalg\eigen\arpack\arpack.py in eigsh(A, k, M, sigma, which, v0, ncv, maxiter, tol, return_eigenvectors, Minv, OPinv, mode)
1686 while not params.converged:
-> 1687 params.iterate()
1688
~\Anaconda3\envs\qc\lib\site-packages\scipy\sparse\linalg\eigen\arpack\arpack.py in iterate(self)
570 elif self.info == 1:
--> 571 self._raise_no_convergence()
572 else:
~\Anaconda3\envs\qc\lib\site-packages\scipy\sparse\linalg\eigen\arpack\arpack.py in _raise_no_convergence(self)
376 k_ok = 0
--> 377 raise ArpackNoConvergence(msg % (num_iter, k_ok, self.k), ev, vec)
378
ArpackNoConvergence: ARPACK error -1: No convergence (501 iterations, 0/1 eigenvectors converged)
During handling of the above exception, another exception occurred:
ArpackNoConvergence Traceback (most recent call last)
<ipython-input-30-95ef4f118c4c> in <module>
10 )
11
---> 12 csvm = SklearnSVM(training_input,test_input).run()
13 print("testing success ratio: ", csvm['testing_accuracy'])
~\Anaconda3\envs\qc\lib\site-packages\qiskit\aqua\algorithms\classical_algorithm.py in run(self)
41 """
42
---> 43 return self._run()
44
45 @abstractmethod
~\Anaconda3\envs\qc\lib\site-packages\qiskit\aqua\algorithms\classifiers\sklearn_svm\sklearn_svm.py in _run(self)
129
130 def _run(self):
--> 131 return self.instance.run()
132
133 @property
~\Anaconda3\envs\qc\lib\site-packages\qiskit\aqua\algorithms\classifiers\sklearn_svm\_sklearn_svm_binary.py in run(self)
141 """
142
--> 143 self.train(self.training_dataset[0], self.training_dataset[1])
144 if self.test_dataset is not None:
145 self.test(self.test_dataset[0], self.test_dataset[1])
~\Anaconda3\envs\qc\lib\site-packages\qiskit\aqua\algorithms\classifiers\sklearn_svm\_sklearn_svm_binary.py in train(self, data, labels)
48 kernel_matrix = self.construct_kernel_matrix(data, data, self.gamma)
49 self._ret['kernel_matrix_training'] = kernel_matrix
---> 50 [alpha, b, support] = optimize_svm(kernel_matrix, labels)
51 alphas = np.array([])
52 svms = np.array([])
~\Anaconda3\envs\qc\lib\site-packages\qiskit\aqua\utils\qp_solver.py in optimize_svm(kernel_matrix, y, scaling, maxiter, show_progress, max_iters)
90 [G@x <= h,
91 A@x == b])
---> 92 prob.solve(verbose=show_progress, qcp=True)
93 result = np.asarray(x.value).reshape((n, 1))
94 alpha = result * scaling
~\Anaconda3\envs\qc\lib\site-packages\cvxpy\problems\problem.py in solve(self, *args, **kwargs)
394 else:
395 solve_func = Problem._solve
--> 396 return solve_func(self, *args, **kwargs)
397
398 @classmethod
~\Anaconda3\envs\qc\lib\site-packages\cvxpy\problems\problem.py in _solve(self, solver, warm_start, verbose, gp, qcp, requires_grad, enforce_dpp, **kwargs)
730 if gp and qcp:
731 raise ValueError("At most one of `gp` and `qcp` can be True.")
--> 732 if qcp and not self.is_dcp():
733 if not self.is_dqcp():
734 raise error.DQCPError("The problem is not DQCP.")
~\Anaconda3\envs\qc\lib\site-packages\cvxpy\utilities\performance_utils.py in _compute_once(self, *args, **kwargs)
68 return cache[key]
69 else:
---> 70 result = func(self, *args, **kwargs)
71 cache[key] = result
72 return result
~\Anaconda3\envs\qc\lib\site-packages\cvxpy\problems\problem.py in is_dcp(self, dpp)
163 True if the Expression is DCP, False otherwise.
164 """
--> 165 return all(
166 expr.is_dcp(dpp) for expr in self.constraints + [self.objective])
167
~\Anaconda3\envs\qc\lib\site-packages\cvxpy\problems\problem.py in <genexpr>(.0)
164 """
165 return all(
--> 166 expr.is_dcp(dpp) for expr in self.constraints + [self.objective])
167
168 @perf.compute_once
~\Anaconda3\envs\qc\lib\site-packages\cvxpy\problems\objective.py in is_dcp(self, dpp)
149 with scopes.dpp_scope():
150 return self.args[0].is_convex()
--> 151 return self.args[0].is_convex()
152
153 def is_dgp(self, dpp=False):
~\Anaconda3\envs\qc\lib\site-packages\cvxpy\utilities\performance_utils.py in _compute_once(self, *args, **kwargs)
68 return cache[key]
69 else:
---> 70 result = func(self, *args, **kwargs)
71 cache[key] = result
72 return result
~\Anaconda3\envs\qc\lib\site-packages\cvxpy\atoms\atom.py in is_convex(self)
171 elif self.is_atom_convex():
172 for idx, arg in enumerate(self.args):
--> 173 if not (arg.is_affine() or
174 (arg.is_convex() and self.is_incr(idx)) or
175 (arg.is_concave() and self.is_decr(idx))):
~\Anaconda3\envs\qc\lib\site-packages\cvxpy\utilities\performance_utils.py in _compute_once(self, *args, **kwargs)
68 return cache[key]
69 else:
---> 70 result = func(self, *args, **kwargs)
71 cache[key] = result
72 return result
~\Anaconda3\envs\qc\lib\site-packages\cvxpy\expressions\expression.py in is_affine(self)
175 """Is the expression affine?
176 """
--> 177 return self.is_constant() or (self.is_convex() and self.is_concave())
178
179 @abc.abstractmethod
~\Anaconda3\envs\qc\lib\site-packages\cvxpy\utilities\performance_utils.py in _compute_once(self, *args, **kwargs)
68 return cache[key]
69 else:
---> 70 result = func(self, *args, **kwargs)
71 cache[key] = result
72 return result
~\Anaconda3\envs\qc\lib\site-packages\cvxpy\atoms\atom.py in is_convex(self)
171 elif self.is_atom_convex():
172 for idx, arg in enumerate(self.args):
--> 173 if not (arg.is_affine() or
174 (arg.is_convex() and self.is_incr(idx)) or
175 (arg.is_concave() and self.is_decr(idx))):
~\Anaconda3\envs\qc\lib\site-packages\cvxpy\utilities\performance_utils.py in _compute_once(self, *args, **kwargs)
68 return cache[key]
69 else:
---> 70 result = func(self, *args, **kwargs)
71 cache[key] = result
72 return result
~\Anaconda3\envs\qc\lib\site-packages\cvxpy\expressions\expression.py in is_affine(self)
175 """Is the expression affine?
176 """
--> 177 return self.is_constant() or (self.is_convex() and self.is_concave())
178
179 @abc.abstractmethod
~\Anaconda3\envs\qc\lib\site-packages\cvxpy\utilities\performance_utils.py in _compute_once(self, *args, **kwargs)
68 return cache[key]
69 else:
---> 70 result = func(self, *args, **kwargs)
71 cache[key] = result
72 return result
~\Anaconda3\envs\qc\lib\site-packages\cvxpy\atoms\atom.py in is_convex(self)
169 if self.is_constant():
170 return True
--> 171 elif self.is_atom_convex():
172 for idx, arg in enumerate(self.args):
173 if not (arg.is_affine() or
~\Anaconda3\envs\qc\lib\site-packages\cvxpy\atoms\quad_form.py in is_atom_convex(self)
62 """
63 P = self.args[1]
---> 64 return P.is_constant() and P.is_psd()
65
66 def is_atom_concave(self):
~\Anaconda3\envs\qc\lib\site-packages\cvxpy\utilities\performance_utils.py in _compute_once(self, *args, **kwargs)
68 return cache[key]
69 else:
---> 70 result = func(self, *args, **kwargs)
71 cache[key] = result
72 return result
~\Anaconda3\envs\qc\lib\site-packages\cvxpy\expressions\constants\constant.py in is_psd(self)
204 # Compute bottom eigenvalue if absent.
205 if self._bottom_eig is None:
--> 206 ev = extremal_eig_near_ref(self.value, EIGVAL_TOL, low=True)
207 self._bottom_eig = ev
208
~\Anaconda3\envs\qc\lib\site-packages\cvxpy\expressions\constants\constant.py in extremal_eig_near_ref(A, ref, low)
245 temp = ref - np.finfo(A.dtype).eps
246 sigma = -temp if low else temp
--> 247 ev = SA_eigsh(sigma)
248 else:
249 if np.isnan(ev):
~\Anaconda3\envs\qc\lib\site-packages\cvxpy\expressions\constants\constant.py in SA_eigsh(sigma)
236
237 def SA_eigsh(sigma):
--> 238 return eigsh(A, k=1, sigma=sigma, return_eigenvectors=False)
239 # Run eigsh in shift-invert mode, since we're particularly interested in finding
240 # eigenvalues in a certain region.
~\Anaconda3\envs\qc\lib\site-packages\scipy\sparse\linalg\eigen\arpack\arpack.py in eigsh(A, k, M, sigma, which, v0, ncv, maxiter, tol, return_eigenvectors, Minv, OPinv, mode)
1685 with _ARPACK_LOCK:
1686 while not params.converged:
-> 1687 params.iterate()
1688
1689 return params.extract(return_eigenvectors)
~\Anaconda3\envs\qc\lib\site-packages\scipy\sparse\linalg\eigen\arpack\arpack.py in iterate(self)
569 pass
570 elif self.info == 1:
--> 571 self._raise_no_convergence()
572 else:
573 raise ArpackError(self.info, infodict=self.iterate_infodict)
~\Anaconda3\envs\qc\lib\site-packages\scipy\sparse\linalg\eigen\arpack\arpack.py in _raise_no_convergence(self)
375 vec = np.zeros((self.n, 0))
376 k_ok = 0
--> 377 raise ArpackNoConvergence(msg % (num_iter, k_ok, self.k), ev, vec)
378
379
ArpackNoConvergence: ARPACK error -1: No convergence (501 iterations, 0/1 eigenvectors converged)
Steps to reproduce the problem
from qiskit.ml.datasets import *
from qiskit.aqua.algorithms import SklearnSVM
feature_dim = 2
sample_Total, training_input, test_input, class_labels = breast_cancer(
    training_size=25,
    test_size=10,
    n=feature_dim,
    plot_data=True
)
csvm = SklearnSVM(training_input, test_input).run()
print("testing success ratio: ", csvm['testing_accuracy'])
What is the expected behavior?
No error
Suggested solutions
It seems that increasing 'training_size' above 20 triggers this error.
This seems related to similar issues that other users and I are having with QSVM (see, for instance, https://github.com/Qiskit/qiskit-aqua/issues/1402), and it appears to be caused by non-separability in the data. In this case, with SklearnSVM, warnings start appearing once the training size is set to 16, which is exactly the point at which a sample is added that makes the data no longer linearly separable (this is apparent by inspection of the data plot).
Maybe the solver used to fit the SVM model somehow expects the data to be separable? Is there a flag to activate a "soft margin" mode or something similar?
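One way to probe this hypothesis (a hedged diagnostic sketch, reusing training_input from the reproduction snippet above; the linear Gram matrix here is only a stand-in for whatever construct_kernel_matrix actually builds): if the smallest eigenvalue of the training kernel matrix sits essentially at zero, cvxpy's PSD check has to resolve an eigenvalue right at its tolerance, which is exactly the shift-invert eigsh call that fails to converge in the traceback above.

import numpy as np

# training_input is the dict returned by breast_cancer() in the snippet above
X = np.concatenate(list(training_input.values()))
gram = X @ X.T  # linear Gram matrix, used only as an illustration of the PSD check
print("smallest eigenvalue:", np.linalg.eigvalsh(gram).min())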
I ran the same training samples through the scikit-learn version of the SVM, though, and was able to train and test the model successfully.
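For reference, a hedged sketch of that cross-check (to_xy is an illustrative helper I am assuming here; training_input/test_input are the dicts mapping class label to samples returned by the dataset loader above). scikit-learn's SVC uses a finite C by default, i.e. a soft margin, so non-separable data is not a problem:

import numpy as np
from sklearn.svm import SVC

def to_xy(input_dict):
    # Flatten {label: samples} into a feature matrix X and a label vector y
    X = np.concatenate(list(input_dict.values()))
    y = np.concatenate([[label] * len(samples) for label, samples in input_dict.items()])
    return X, y

X_train, y_train = to_xy(training_input)
X_test, y_test = to_xy(test_input)

clf = SVC(C=1.0)  # finite C = soft margin, the scikit-learn default
clf.fit(X_train, y_train)
print("testing success ratio:", clf.score(X_test, y_test))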
Hmm, I had thought qiskit's SklearnSVM was based on it. Maybe there are slight differences?
Try using the latest version of qp_solver.py from the qiskit-aqua master branch and see if it converges. As stated in #1378, an L2 regularization term was added to the SVM optimizer. Maybe the sklearn SVM is soft-margin by default (i.e. has L2 regularization) while the qiskit SklearnSVM, until the aforementioned recent change, was not. Let us know if this change fixes your issue; it fixed mine (#1402).
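For anyone curious what that change amounts to, here is a rough sketch of the idea (illustrative only, not the actual qp_solver.py code; optimize_svm_l2 and lmbd are names I made up): folding an L2 term lmbd * ||alpha||^2 into the dual objective is equivalent to adding 2*lmbd to the diagonal of the quadratic form, which keeps it strictly positive definite and should let the cvxpy PSD check that blows up in the traceback pass even for non-separable data.

import numpy as np
import cvxpy as cp

def optimize_svm_l2(kernel_matrix, y, lmbd=1e-3):
    # kernel_matrix is assumed symmetric; y is assumed to hold +/-1 class labels
    n = kernel_matrix.shape[0]
    # L2 regularization of the dual variables = a 2*lmbd*I shift of the quadratic form
    P = np.outer(y, y) * kernel_matrix + 2 * lmbd * np.eye(n)
    alpha = cp.Variable(n)
    objective = cp.Minimize(0.5 * cp.quad_form(alpha, P) - cp.sum(alpha))
    constraints = [alpha >= 0, y @ alpha == 0]
    cp.Problem(objective, constraints).solve()
    return alpha.value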