darts
darts copied to clipboard
ValueError: The time series array must not be empty.
When I run this piece of code:
params = {
"input_chunk_length": [16, 64, 128, 256, 365, 512],
"n_epochs": [1, 2, 5, 10, 30],
"output_chunk_length": [days_val],
"random_state": [15]
}
model = NBEATSModel(input_chunk_length=30 , output_chunk_length=days_val, n_epochs = 15 , random_state = 15)
res = model.gridsearch(params, series = series1, val_series = series2)
I obtain this error:
[2022-05-16 11:54:58,150] ERROR | darts.timeseries | ValueError: The time series array must not be empty.
---------------------------------------------------------------------------
ValueError Traceback (most recent call last)
/var/folders/s0/h8zyhzvj5n7_kzy5fh6w8qq40000gn/T/ipykernel_2009/372140350.py in <module>
----> 1 res = model.gridsearch(params, series = series, val_series = series)
~/env2/lib/python3.7/site-packages/darts/models/forecasting/forecasting_model.py in gridsearch(model_class, parameters, series, past_covariates, future_covariates, forecast_horizon, stride, start, last_points_only, val_series, use_fitted_values, metric, reduction, verbose, n_jobs, n_random_samples)
751 return error
752
--> 753 errors = _parallel_apply(iterator, _evaluate_combination, n_jobs, {}, {})
754
755 min_error = min(errors)
~/env2/lib/python3.7/site-packages/darts/utils/utils.py in _parallel_apply(iterator, fn, n_jobs, fn_args, fn_kwargs)
291
292 returned_data = Parallel(n_jobs=n_jobs)(
--> 293 delayed(fn)(*sample, *fn_args, **fn_kwargs) for sample in iterator
294 )
295 return returned_data
~/env2/lib/python3.7/site-packages/joblib/parallel.py in __call__(self, iterable)
1041 # remaining jobs.
1042 self._iterating = False
-> 1043 if self.dispatch_one_batch(iterator):
1044 self._iterating = self._original_iterator is not None
1045
~/env2/lib/python3.7/site-packages/joblib/parallel.py in dispatch_one_batch(self, iterator)
859 return False
860 else:
--> 861 self._dispatch(tasks)
862 return True
863
~/env2/lib/python3.7/site-packages/joblib/parallel.py in _dispatch(self, batch)
777 with self._lock:
778 job_idx = len(self._jobs)
--> 779 job = self._backend.apply_async(batch, callback=cb)
780 # A job can complete so quickly than its callback is
781 # called before we get here, causing self._jobs to
~/env2/lib/python3.7/site-packages/joblib/_parallel_backends.py in apply_async(self, func, callback)
206 def apply_async(self, func, callback=None):
207 """Schedule a func to be run"""
--> 208 result = ImmediateResult(func)
209 if callback:
210 callback(result)
~/env2/lib/python3.7/site-packages/joblib/_parallel_backends.py in __init__(self, batch)
570 # Don't delay the application, to avoid keeping the input
571 # arguments in memory
--> 572 self.results = batch()
573
574 def get(self):
~/env2/lib/python3.7/site-packages/joblib/parallel.py in __call__(self)
261 with parallel_backend(self._backend, n_jobs=self._n_jobs):
262 return [func(*args, **kwargs)
--> 263 for func, args, kwargs in self.items]
264
265 def __reduce__(self):
~/env2/lib/python3.7/site-packages/joblib/parallel.py in <listcomp>(.0)
261 with parallel_backend(self._backend, n_jobs=self._n_jobs):
262 return [func(*args, **kwargs)
--> 263 for func, args, kwargs in self.items]
264
265 def __reduce__(self):
~/env2/lib/python3.7/site-packages/darts/models/forecasting/forecasting_model.py in _evaluate_combination(param_combination)
747 num_samples=1,
748 )
--> 749 error = metric(pred, val_series)
750
751 return error
~/env2/lib/python3.7/site-packages/darts/metrics/metrics.py in wrapper_multi_ts_support(*args, **kwargs)
90 n_jobs=n_jobs,
91 fn_args=args[num_series_in_args:],
---> 92 fn_kwargs=kwargs,
93 )
94
~/env2/lib/python3.7/site-packages/darts/utils/utils.py in _parallel_apply(iterator, fn, n_jobs, fn_args, fn_kwargs)
291
292 returned_data = Parallel(n_jobs=n_jobs)(
--> 293 delayed(fn)(*sample, *fn_args, **fn_kwargs) for sample in iterator
294 )
295 return returned_data
~/env2/lib/python3.7/site-packages/joblib/parallel.py in __call__(self, iterable)
1041 # remaining jobs.
1042 self._iterating = False
-> 1043 if self.dispatch_one_batch(iterator):
1044 self._iterating = self._original_iterator is not None
1045
~/env2/lib/python3.7/site-packages/joblib/parallel.py in dispatch_one_batch(self, iterator)
859 return False
860 else:
--> 861 self._dispatch(tasks)
862 return True
863
~/env2/lib/python3.7/site-packages/joblib/parallel.py in _dispatch(self, batch)
777 with self._lock:
778 job_idx = len(self._jobs)
--> 779 job = self._backend.apply_async(batch, callback=cb)
780 # A job can complete so quickly than its callback is
781 # called before we get here, causing self._jobs to
~/env2/lib/python3.7/site-packages/joblib/_parallel_backends.py in apply_async(self, func, callback)
206 def apply_async(self, func, callback=None):
207 """Schedule a func to be run"""
--> 208 result = ImmediateResult(func)
209 if callback:
210 callback(result)
~/env2/lib/python3.7/site-packages/joblib/_parallel_backends.py in __init__(self, batch)
570 # Don't delay the application, to avoid keeping the input
571 # arguments in memory
--> 572 self.results = batch()
573
574 def get(self):
~/env2/lib/python3.7/site-packages/joblib/parallel.py in __call__(self)
261 with parallel_backend(self._backend, n_jobs=self._n_jobs):
262 return [func(*args, **kwargs)
--> 263 for func, args, kwargs in self.items]
264
265 def __reduce__(self):
~/env2/lib/python3.7/site-packages/joblib/parallel.py in <listcomp>(.0)
261 with parallel_backend(self._backend, n_jobs=self._n_jobs):
262 return [func(*args, **kwargs)
--> 263 for func, args, kwargs in self.items]
264
265 def __reduce__(self):
~/env2/lib/python3.7/site-packages/darts/metrics/metrics.py in wrapper_multivariate_support(*args, **kwargs)
135 pred_series.univariate_component(i),
136 *args[2:],
--> 137 **kwargs
138 )
139 ) # [2:] since we already know the first two arguments are the series
~/env2/lib/python3.7/site-packages/darts/metrics/metrics.py in mape(actual_series, pred_series, intersect, reduction, inter_reduction, n_jobs, verbose)
583
584 y_true, y_hat = _get_values_or_raise(
--> 585 actual_series, pred_series, intersect, remove_nan_union=True
586 )
587 raise_if_not(
~/env2/lib/python3.7/site-packages/darts/metrics/metrics.py in _get_values_or_raise(series_a, series_b, intersect, stochastic_quantile, remove_nan_union)
202 raise_if_not(isinstance(intersect, bool), "The intersect parameter must be a bool")
203
--> 204 series_a_common = series_a.slice_intersect(series_b) if intersect else series_a
205 series_b_common = series_b.slice_intersect(series_a) if intersect else series_b
206
~/env2/lib/python3.7/site-packages/darts/timeseries.py in slice_intersect(self, other)
1671 """
1672 time_index = self.time_index.intersection(other.time_index)
-> 1673 return self[time_index]
1674
1675 def strip(self) -> "TimeSeries":
~/env2/lib/python3.7/site-packages/darts/timeseries.py in __getitem__(self, key)
3238 _set_freq_in_xa(xa_)
3239
-> 3240 return self.__class__(xa_)
3241 elif isinstance(key, pd.RangeIndex):
3242 _check_range()
~/env2/lib/python3.7/site-packages/darts/timeseries.py in __init__(self, xa)
75 logger,
76 )
---> 77 raise_if_not(xa.size > 0, "The time series array must not be empty.", logger)
78 raise_if_not(
79 len(xa.shape) == 3,
~/env2/lib/python3.7/site-packages/darts/logging.py in raise_if_not(condition, message, logger)
82 if not condition:
83 logger.error("ValueError: " + message)
---> 84 raise ValueError(message)
85
86
ValueError: The time series array must not be empty.
- Python version: [e.g. 3.7.6]
- darts version: [e.g. 0.17.1]
Could you post the code used to create series1 and series2?
series1 = fill_missing_values(scaler.fit_transform(TimeSeries.from_dataframe(df_train,"Date",col, fill_missing_dates=True, freq="D").add_holidays(country_code="ITA")), fill='auto')
series2 = fill_missing_values(scaler.transform(TimeSeries.from_dataframe(df_train,"Date",col, fill_missing_dates=True, freq="D").add_holidays(country_code="ITA")), fill='auto')
I have tried with TimeSeries generated in other ways, but I got the same error.
I would try breaking each step into separate lines. One of those function calls is returning None.
The problem seems to come from the validation series (series2) having no (or insufficient) overlap in time with series1, so `slice_intersect` produces an empty series. Note also that in the code you posted, both series1 and series2 are built from `df_train` — if that is not a copy-paste typo, series2 is not actually a separate validation set. Also, the traceback shows the call `model.gridsearch(params, series=series, val_series=series)`, which differs from the snippet above; double-check which series objects are really being passed.