pyspark_dl_pipeline

ValueError: Could not interpret optimizer identifier: False

Open rbhatia46 opened this issue 3 years ago • 0 comments

Hi, on running the final cell, i.e. calling the method dl_pipeline_fit_score_results(), I am getting the following error:

---------------------------------------------------------------------------
ValueError                                Traceback (most recent call last)
<ipython-input-54-cb5b6595cf8f> in <module>
      2                               train_data=train_data,
      3                               test_data=test_data,
----> 4                               label='label_index');

<ipython-input-53-1c2301ef586e> in dl_pipeline_fit_score_results(dl_pipeline, train_data, test_data, label)
      4                                   label='label_index'):
      5 
----> 6     fit_dl_pipeline = dl_pipeline.fit(train_data)
      7     pred_train = fit_dl_pipeline.transform(train_data)
      8     pred_test = fit_dl_pipeline.transform(test_data)

/opt/conda/lib/python3.6/site-packages/pyspark/ml/base.py in fit(self, dataset, params)
    130                 return self.copy(params)._fit(dataset)
    131             else:
--> 132                 return self._fit(dataset)
    133         else:
    134             raise ValueError("Params must be either a param map or a list/tuple of param maps, "

/opt/conda/lib/python3.6/site-packages/pyspark/ml/pipeline.py in _fit(self, dataset)
    107                     dataset = stage.transform(dataset)
    108                 else:  # must be an Estimator
--> 109                     model = stage.fit(dataset)
    110                     transformers.append(model)
    111                     if i < indexOfLastEstimator:

/opt/conda/lib/python3.6/site-packages/pyspark/ml/base.py in fit(self, dataset, params)
    130                 return self.copy(params)._fit(dataset)
    131             else:
--> 132                 return self._fit(dataset)
    133         else:
    134             raise ValueError("Params must be either a param map or a list/tuple of param maps, "

/opt/conda/lib/python3.6/site-packages/elephas/ml_model.py in _fit(self, df)
     90                         batch_size=self.get_batch_size(),
     91                         verbose=self.get_verbosity(),
---> 92                         validation_split=self.get_validation_split())
     93 
     94         model_weights = spark_model.master_network.get_weights()

/opt/conda/lib/python3.6/site-packages/elephas/spark_model.py in fit(self, rdd, epochs, batch_size, verbose, validation_split)
    149 
    150         if self.mode in ['asynchronous', 'synchronous', 'hogwild']:
--> 151             self._fit(rdd, epochs, batch_size, verbose, validation_split)
    152         else:
    153             raise ValueError(

/opt/conda/lib/python3.6/site-packages/elephas/spark_model.py in _fit(self, rdd, epochs, batch_size, verbose, validation_split)
    159         self._master_network.compile(optimizer=self.master_optimizer,
    160                                      loss=self.master_loss,
--> 161                                      metrics=self.master_metrics)
    162         if self.mode in ['asynchronous', 'hogwild']:
    163             self.start_server()

/opt/conda/lib/python3.6/site-packages/tensorflow/python/keras/engine/training.py in compile(self, optimizer, loss, metrics, loss_weights, weighted_metrics, run_eagerly, **kwargs)
    539       self._run_eagerly = run_eagerly
    540 
--> 541       self.optimizer = self._get_optimizer(optimizer)
    542       self.compiled_loss = compile_utils.LossesContainer(
    543           loss, loss_weights, output_names=self.output_names)

/opt/conda/lib/python3.6/site-packages/tensorflow/python/keras/engine/training.py in _get_optimizer(self, optimizer)
    565       return opt
    566 
--> 567     return nest.map_structure(_get_single_optimizer, optimizer)
    568 
    569   @trackable.no_automatic_dependency_tracking

/opt/conda/lib/python3.6/site-packages/tensorflow/python/util/nest.py in map_structure(func, *structure, **kwargs)
    633 
    634   return pack_sequence_as(
--> 635       structure[0], [func(*x) for x in entries],
    636       expand_composites=expand_composites)
    637 

/opt/conda/lib/python3.6/site-packages/tensorflow/python/util/nest.py in <listcomp>(.0)
    633 
    634   return pack_sequence_as(
--> 635       structure[0], [func(*x) for x in entries],
    636       expand_composites=expand_composites)
    637 

/opt/conda/lib/python3.6/site-packages/tensorflow/python/keras/engine/training.py in _get_single_optimizer(opt)
    559 
    560     def _get_single_optimizer(opt):
--> 561       opt = optimizers.get(opt)
    562       if (self._dtype_policy.loss_scale is not None and
    563           not isinstance(opt, lso.LossScaleOptimizer)):

/opt/conda/lib/python3.6/site-packages/tensorflow/python/keras/optimizers.py in get(identifier)
    901   else:
    902     raise ValueError(
--> 903         'Could not interpret optimizer identifier: {}'.format(identifier))

ValueError: Could not interpret optimizer identifier: False

rbhatia46 avatar Sep 14 '20 10:09 rbhatia46