handson-ml
Chapter 2: Error when executing full_pipeline.fit_transform(housing): ValueError: operands could not be broadcast together with shapes (16512,) (3,8)
Error Description:
ValueError Traceback (most recent call last)
<ipython-input-58-c19e1bb11abe> in <module>
8 ])
9 #run full_pipeline
---> 10 housing_prepared=full_pipeline.fit_transform(housing)
~/venv/local/lib/python3.5/site-packages/sklearn/pipeline.py in fit_transform(self, X, y, **fit_params)
910 sum of n_components (output dimension) over transformers.
911 """
--> 912 results = self._parallel_func(X, y, fit_params, _fit_transform_one)
913 if not results:
914 # All transformers are None
~/venv/local/lib/python3.5/site-packages/sklearn/pipeline.py in _parallel_func(self, X, y, fit_params, func)
940 message=self._log_message(name, idx, len(transformers)),
941 **fit_params) for idx, (name, transformer,
--> 942 weight) in enumerate(transformers, 1))
943
944 def transform(self, X):
~/venv/local/lib/python3.5/site-packages/joblib/parallel.py in __call__(self, iterable)
919 # remaining jobs.
920 self._iterating = False
--> 921 if self.dispatch_one_batch(iterator):
922 self._iterating = self._original_iterator is not None
923
~/venv/local/lib/python3.5/site-packages/joblib/parallel.py in dispatch_one_batch(self, iterator)
757 return False
758 else:
--> 759 self._dispatch(tasks)
760 return True
761
~/venv/local/lib/python3.5/site-packages/joblib/parallel.py in _dispatch(self, batch)
714 with self._lock:
715 job_idx = len(self._jobs)
--> 716 job = self._backend.apply_async(batch, callback=cb)
717 # A job can complete so quickly than its callback is
718 # called before we get here, causing self._jobs to
~/venv/local/lib/python3.5/site-packages/joblib/_parallel_backends.py in apply_async(self, func, callback)
180 def apply_async(self, func, callback=None):
181 """Schedule a func to be run"""
--> 182 result = ImmediateResult(func)
183 if callback:
184 callback(result)
~/venv/local/lib/python3.5/site-packages/joblib/_parallel_backends.py in __init__(self, batch)
547 # Don't delay the application, to avoid keeping the input
548 # arguments in memory
--> 549 self.results = batch()
550
551 def get(self):
~/venv/local/lib/python3.5/site-packages/joblib/parallel.py in __call__(self)
223 with parallel_backend(self._backend, n_jobs=self._n_jobs):
224 return [func(*args, **kwargs)
--> 225 for func, args, kwargs in self.items]
226
227 def __len__(self):
~/venv/local/lib/python3.5/site-packages/joblib/parallel.py in <listcomp>(.0)
223 with parallel_backend(self._backend, n_jobs=self._n_jobs):
224 return [func(*args, **kwargs)
--> 225 for func, args, kwargs in self.items]
226
227 def __len__(self):
~/venv/local/lib/python3.5/site-packages/sklearn/pipeline.py in _fit_transform_one(transformer, X, y, weight, message_clsname, message, **fit_params)
714 with _print_elapsed_time(message_clsname, message):
715 if hasattr(transformer, 'fit_transform'):
--> 716 res = transformer.fit_transform(X, y, **fit_params)
717 else:
718 res = transformer.fit(X, y, **fit_params).transform(X)
~/venv/local/lib/python3.5/site-packages/sklearn/pipeline.py in fit_transform(self, X, y, **fit_params)
385 """
386 last_step = self._final_estimator
--> 387 Xt, fit_params = self._fit(X, y, **fit_params)
388 with _print_elapsed_time('Pipeline',
389 self._log_message(len(self.steps) - 1)):
~/venv/local/lib/python3.5/site-packages/sklearn/pipeline.py in _fit(self, X, y, **fit_params)
315 message_clsname='Pipeline',
316 message=self._log_message(step_idx),
--> 317 **fit_params_steps[name])
318 # Replace the transformer of the step with the fitted
319 # transformer. This is necessary when loading the transformer
~/venv/local/lib/python3.5/site-packages/joblib/memory.py in __call__(self, *args, **kwargs)
353
354 def __call__(self, *args, **kwargs):
--> 355 return self.func(*args, **kwargs)
356
357 def call_and_shelve(self, *args, **kwargs):
~/venv/local/lib/python3.5/site-packages/sklearn/pipeline.py in _fit_transform_one(transformer, X, y, weight, message_clsname, message, **fit_params)
714 with _print_elapsed_time(message_clsname, message):
715 if hasattr(transformer, 'fit_transform'):
--> 716 res = transformer.fit_transform(X, y, **fit_params)
717 else:
718 res = transformer.fit(X, y, **fit_params).transform(X)
~/venv/local/lib/python3.5/site-packages/sklearn/base.py in fit_transform(self, X, y, **fit_params)
551 if y is None:
552 # fit method of arity 1 (unsupervised transformation)
--> 553 return self.fit(X, **fit_params).transform(X)
554 else:
555 # fit method of arity 2 (supervised transformation)
<ipython-input-38-b08c34d1bb78> in transform(self, X, y)
20
21 if self.add_bedrooms_per_room:# if adding an additional attribute ,ie add_bedrooms_per_room=True
---> 22 bedrooms_per_room=X[:,bedrooms_ix]/X[:rooms_ix]
23 return np.c_[X,rooms_per_household, population_per_household,bedrooms_per_room]
24 else: #return the existing attributes
ValueError: operands could not be broadcast together with shapes (16512,) (3,8)
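Looking at the last frame of the traceback, the broadcast failure seems to come from `X[:rooms_ix]`, which slices the first `rooms_ix` rows (shape (3, 8)) rather than selecting the rooms column with `X[:, rooms_ix]` (shape (16512,)). A minimal sketch of a corrected transformer, assuming the column indices used in the book's Chapter 2 notebook (your column order may differ), might look like this:

```python
import numpy as np
from sklearn.base import BaseEstimator, TransformerMixin

# Column indices assumed from the Chapter 2 notebook:
# total_rooms, total_bedrooms, population, households
rooms_ix, bedrooms_ix, population_ix, households_ix = 3, 4, 5, 6

class CombinedAttributesAdder(BaseEstimator, TransformerMixin):
    def __init__(self, add_bedrooms_per_room=True):
        self.add_bedrooms_per_room = add_bedrooms_per_room

    def fit(self, X, y=None):
        return self  # nothing to learn

    def transform(self, X, y=None):
        rooms_per_household = X[:, rooms_ix] / X[:, households_ix]
        population_per_household = X[:, population_ix] / X[:, households_ix]
        if self.add_bedrooms_per_room:
            # Note the comma: X[:, rooms_ix] selects a column, whereas
            # X[:rooms_ix] slices the first rows and causes the broadcast error.
            bedrooms_per_room = X[:, bedrooms_ix] / X[:, rooms_ix]
            return np.c_[X, rooms_per_household, population_per_household,
                         bedrooms_per_room]
        else:
            return np.c_[X, rooms_per_household, population_per_household]
```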
Hi @navctns, this might be a duplicate of #347; could you please check out my answers in that issue? Hope this helps!
I resolved this error by updating the libraries to the latest versions; I had a version mismatch between pandas and scikit-learn.
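For reference, a quick way to check the installed versions and spot such a mismatch (assuming a pip-managed environment like the virtualenv shown in the traceback):

```python
# Print the installed versions so a pandas / scikit-learn mismatch is easy to spot.
import sklearn
import pandas
import numpy

print("scikit-learn:", sklearn.__version__)
print("pandas:", pandas.__version__)
print("numpy:", numpy.__version__)
```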