glmhmm icon indicating copy to clipboard operation
glmhmm copied to clipboard

ValueError: The user-provided objective function must return a scalar value.

Open QiGuangyao opened this issue 2 years ago • 0 comments

  1. Initialize some weights and fit the GLM to the data: `true_GLM.fit(true_x, w_init, true_y)`

```python
w_init = true_GLM.init_weights()
w, phi = true_GLM.fit(true_x, w_init, true_y)
```

ValueError Traceback (most recent call last) File ~/anaconda3/lib/python3.11/site-packages/scipy/optimize/_differentiable_functions.py:141, in ScalarFunction.__init__.<locals>.fun_wrapped(x) 140 try: --> 141 fx = np.asarray(fx).item() 142 except (TypeError, ValueError) as e:

ValueError: setting an array element with a sequence. The requested array has an inhomogeneous shape after 1 dimensions. The detected shape was (2,) + inhomogeneous part.

The above exception was the direct cause of the following exception:

ValueError Traceback (most recent call last) Cell In[10], line 2 1 w_init = true_GLM.init_weights() ----> 2 w, phi = true_GLM.fit(true_x,w_init,true_y)

File ~/github/glmhmm/glmhmm/glm.py:243, in GLM.fit(self, x, w, y, compHess, gammas, gaussianPrior) 241 opt_log = lambda w: self.neglogli(x,w,y,reshape_weights=True,gammas=gammas,gaussianPrior=gaussianPrior) # calculate log likelihood 242 simplefilter(action='ignore', category=FutureWarning) # ignore FutureWarning generated by scipy --> 243 OptimizeResult = optimize.minimize(value_and_grad(opt_log),w_flat, jac = "True", method = "L-BFGS-B") 245 self.w = np.hstack((np.zeros((self.d,1)),np.reshape(OptimizeResult.x,(self.d,self.c-1)))) # reshape and update weights 246 # Get updated observation probabilities

File ~/anaconda3/lib/python3.11/site-packages/scipy/optimize/_minimize.py:710, in minimize(fun, x0, args, method, jac, hess, hessp, bounds, constraints, tol, callback, options) 707 res = _minimize_newtoncg(fun, x0, args, jac, hess, hessp, callback, 708 **options) 709 elif meth == 'l-bfgs-b': --> 710 res = _minimize_lbfgsb(fun, x0, args, jac, bounds, 711 callback=callback, **options) 712 elif meth == 'tnc': 713 res = _minimize_tnc(fun, x0, args, jac, bounds, callback=callback, 714 **options)

File ~/anaconda3/lib/python3.11/site-packages/scipy/optimize/_lbfgsb_py.py:307, in _minimize_lbfgsb(fun, x0, args, jac, bounds, disp, maxcor, ftol, gtol, eps, maxfun, maxiter, iprint, callback, maxls, finite_diff_rel_step, **unknown_options) 304 else: 305 iprint = disp --> 307 sf = _prepare_scalar_function(fun, x0, jac=jac, args=args, epsilon=eps, 308 bounds=new_bounds, 309 finite_diff_rel_step=finite_diff_rel_step) 311 func_and_grad = sf.fun_and_grad 313 fortran_int = _lbfgsb.types.intvar.dtype

File ~/anaconda3/lib/python3.11/site-packages/scipy/optimize/_optimize.py:383, in _prepare_scalar_function(fun, x0, jac, args, bounds, epsilon, finite_diff_rel_step, hess) 379 bounds = (-np.inf, np.inf) 381 # ScalarFunction caches. Reuse of fun(x) during grad 382 # calculation reduces overall function evaluations. --> 383 sf = ScalarFunction(fun, x0, args, grad, hess, 384 finite_diff_rel_step, bounds, epsilon=epsilon) 386 return sf

File ~/anaconda3/lib/python3.11/site-packages/scipy/optimize/_differentiable_functions.py:158, in ScalarFunction.__init__(self, fun, x0, args, grad, hess, finite_diff_rel_step, finite_diff_bounds, epsilon) 155 self.f = fun_wrapped(self.x) 157 self._update_fun_impl = update_fun --> 158 self._update_fun() 160 # Gradient evaluation 161 if callable(grad):

File ~/anaconda3/lib/python3.11/site-packages/scipy/optimize/_differentiable_functions.py:251, in ScalarFunction._update_fun(self) 249 def _update_fun(self): 250 if not self.f_updated: --> 251 self._update_fun_impl() 252 self.f_updated = True

File ~/anaconda3/lib/python3.11/site-packages/scipy/optimize/_differentiable_functions.py:155, in ScalarFunction.__init__.<locals>.update_fun() 154 def update_fun(): --> 155 self.f = fun_wrapped(self.x)

File ~/anaconda3/lib/python3.11/site-packages/scipy/optimize/_differentiable_functions.py:143, in ScalarFunction.__init__.<locals>.fun_wrapped(x) 141 fx = np.asarray(fx).item() 142 except (TypeError, ValueError) as e: --> 143 raise ValueError( 144 "The user-provided objective function " 145 "must return a scalar value." 146 ) from e 148 if fx < self._lowest_f: 149 self._lowest_x = x

ValueError: The user-provided objective function must return a scalar value.

QiGuangyao avatar Nov 01 '23 18:11 QiGuangyao