neuralnilm icon indicating copy to clipboard operation
neuralnilm copied to clipboard

IndexError in "notebooks/Neural NILM test"

Open jhyun0919 opened this issue 8 years ago • 3 comments

I am trying to run Neural NILM in my PC environment, but an IndexError occurs as shown below. How can I fix it?

stride_source = StrideSource(
    target_appliance=TARGET_APPLIANCE,
    seq_length=SEQ_LENGTH,
    filename=NILMTK_FILENAME,
    windows=WINDOWS,
    sample_period=SAMPLE_PERIOD,
    stride=STRIDE
)

---------------------------------------------------------------------------
IndexError                                Traceback (most recent call last)
<ipython-input-4-971361e05051> in <module>()
      5     windows=WINDOWS,
      6     sample_period=SAMPLE_PERIOD,
----> 7     stride=STRIDE
      8 )

/Users/JH/Documents/GitHub/neuralnilm/neuralnilm/data/stridesource.pyc in __init__(self, target_appliance, seq_length, filename, windows, sample_period, stride, rng_seed)
     45         self.num_batches_for_validation = None
     46 
---> 47         self._load_data_into_memory()
     48         self._compute_num_sequences_per_building()
     49 

/Users/JH/Documents/GitHub/neuralnilm/neuralnilm/data/stridesource.pyc in _load_data_into_memory(self)
     71 
     72                 mains_meter = elec.mains()
---> 73                 mains_good_sections = mains_meter.good_sections()
     74 
     75                 appliance_meter = elec[self.target_appliance]

/private/var/root/nilmtk/nilmtk/elecmeter.pyc in good_sections(self, **loader_kwargs)
    630         results_obj = GoodSections.results_class(self.device['max_sample_period'])
    631         return self._get_stat_from_cache_or_compute(
--> 632             nodes, results_obj, loader_kwargs)        
    633 
    634     def _get_stat_from_cache_or_compute(self, nodes, results_obj, loader_kwargs):

/private/var/root/nilmtk/nilmtk/elecmeter.pyc in _get_stat_from_cache_or_compute(self, nodes, results_obj, loader_kwargs)
    684         if loader_kwargs.get('preprocessing') is None:
    685             cached_stat = self.get_cached_stat(key_for_cached_stat)
--> 686             results_obj.import_from_cache(cached_stat, sections)
    687 
    688             def find_sections_to_compute():

/private/var/root/nilmtk/nilmtk/stats/goodsectionsresults.pyc in import_from_cache(self, cached_stat, sections)
    116                     timeframes = []
    117                     for _, row in sections_df.iterrows():
--> 118                         section_start = tz_localize_naive(row['section_start'], tz)
    119                         section_end = tz_localize_naive(row['section_end'], tz)
    120                         timeframes.append(TimeFrame(section_start, section_end))

/Users/JH/anaconda2/lib/python2.7/site-packages/pandas/core/series.pyc in __getitem__(self, key)
    555     def __getitem__(self, key):
    556         try:
--> 557             result = self.index.get_value(self, key)
    558 
    559             if not np.isscalar(result):

/Users/JH/anaconda2/lib/python2.7/site-packages/pandas/core/index.pyc in get_value(self, series, key)
   1778         s = getattr(series,'_values',None)
   1779         if isinstance(s, Index) and lib.isscalar(key):
-> 1780             return s[key]
   1781 
   1782         s = _values_from_object(series)

/Users/JH/anaconda2/lib/python2.7/site-packages/pandas/tseries/base.pyc in __getitem__(self, key)
     98         getitem = self._data.__getitem__
     99         if np.isscalar(key):
--> 100             val = getitem(key)
    101             return self._box_func(val)
    102         else:

IndexError: only integers, slices (`:`), ellipsis (`...`), numpy.newaxis (`None`) and integer or boolean arrays are valid indices

jhyun0919 avatar Mar 21 '16 12:03 jhyun0919

I'm sorry, I'm afraid I'm crazy busy with other work at the moment and so this repository is currently unsupported. Please see the README for more details.

JackKelly avatar Mar 22 '16 08:03 JackKelly

@jhyun0919 were you able to run the notebooks and the core source code?

jpcofr avatar Jul 11 '16 09:07 jpcofr

Downgrading pandas to version 0.16.2 fixed this issue for me.

mlemainque avatar Dec 06 '16 17:12 mlemainque