
decoding CE model with LM

Open · synetkim opened this issue on Jun 10 '16 · 1 comment

Hello,

I tried to decode a CE model with an LM, but I got the error below. (When I decode the CE model without the LM, it works fine.)

```
+ MODEL=wsj_paper7
+ PART=test
+ BEAM_SIZE=20
+ LM=lm
+ LM_PATH=data/local/nist_lm//wsj_trigram_with_bos
+ ls wsj_paper7/reports
test_nolm_20  test_nolm_200
+ LM_CONF='monitoring.search.beam_size 20'
+ '[' yeslm == nolm ']'
+ LM_CONF+=' monitoring.search.char_discount 1.0'
+ LM_CONF+=' net.lm.weight 0.5 net.lm.no_transition_cost 20'
+ LM_CONF+=' net.lm.path '''data/local/nist_lm//wsj_trigram_with_bos/LG_pushed_withsyms.fst''''
+ /speech05-ssd/sykim/attention-lvcsr/bin/run.py search --part=test --report wsj_paper7/reports/test_lm_20 wsj_paper7/annealing1_best_ll.zip /speech05-ssd/sykim/attention-lvcsr/exp/wsj/configs/wsj_paper7.yaml vocabulary data/local/nist_lm//wsj_trigram_with_bos/words.txt net.prior.before 10 monitoring.search.beam_size 20 monitoring.search.char_discount 1.0 net.lm.weight 0.5 net.lm.no_transition_cost 20 net.lm.path ''''data/local/nist_lm//wsj_trigram_with_bos/LG_pushed_withsyms.fst''''
Using gpu device 0: Tesla K20m (CNMeM is enabled with initial size: 90.0% of memory, CuDNN 3007)
2016-06-10 13:12:51,111: pykwalify.core: INFO: validation.valid
2016-06-10 13:12:51,116: pykwalify.core: INFO: validation.valid
2016-06-10 13:12:51,122: pykwalify.core: INFO: validation.valid
2016-06-10 13:12:51,126: pykwalify.core: INFO: validation.valid
2016-06-10 13:12:51,132: __main__: INFO: Config: {'cmd_args': {'config_changes': <picklable_itertools.extras.equizip object at 0x7fbdfc20f090>, 'config_path': '/speech05-ssd/sykim/attention-lvcsr/exp/wsj/configs/wsj_paper7.yaml', 'decode_only': None, 'decoded_save': None, 'func': 'search', 'load_path': 'wsj_paper7/annealing1_best_ll.zip', 'nll_only': False, 'params': None, 'part': 'test', 'report': 'wsj_paper7/reports/test_lm_20', 'seed': 1, 'validate_config': True}, 'data': {'add_bos': 1, 'add_eos': True, 'batch_size': 20, 'dataset_class': <class 'lvsr.datasets.h5py.H5PYAudioDataset'>, 'dataset_filename': 'wsj.h5', 'default_sources': ['recordings', 'labels'], 'name_mapping': {'test': 'test_eval92', 'train': 'train_si284', 'valid': 'test_dev93'}, 'normalization': '', 'sources_map': {'labels': 'characters', 'recordings': 'fbank_dd', 'uttids': 'uttids'}}, 'initialization': {'/recognizer': {'biases_init': Constant(_constant=0.0), 'rec_weights_init': IsotropicGaussian(_mean=0,_std=0.1), 'weights_init': IsotropicGaussian(_mean=0,_std=0.1)}}, 'monitoring': {'search': {'beam_size': 20, 'char_discount': 1.0, 'round_to_inf': 1000000000.0, 'stop_on': 'optimistic_future_cost'}, 'search_every_batches': 0, 'search_every_epochs': 1, 'validate_every_batches': 0, 'validate_every_epochs': 1}, 'net': {'attention_type': 'content_and_conv', 'bottom': {'activation': <blocks.bricks.simple.Rectifier object at 0x7fbdcd685b50: name=rectifier>, 'bottom_class': <class 'lvsr.bricks.recognizer.SpeechBottom'>, 'dims': []}, 'conv_n': 100, 'criterion': {'name': 'log_likelihood'}, 'dec_transition': <class 'blocks.bricks.recurrent.GatedRecurrent'>, 'dim_dec': 250, 'dims_bidir': [250, 250, 250, 250], 'enc_transition': <class 'blocks.bricks.recurrent.GatedRecurrent'>, 'lm': {'no_transition_cost': 20, 'path': 'data/local/nist_lm//wsj_trigram_with_bos/LG_pushed_withsyms.fst', 'weight': 0.5}, 'max_decoded_length_scale': 3.0, 'post_merge_activation': <blocks.bricks.simple.Rectifier object at 0x7fbdcd6a0650: name=rectifier>, 'post_merge_dims': [250], 'prior': {'after': 100, 'before': 10, 'initial_begin': 0, 'initial_end': 80, 'max_speed': 4.4, 'min_speed': 2.4, 'type': 'window_around_median'}, 'subsample': [1, 1, 2, 2], 'use_states_for_readout': True}, 'parent': '$LVSR/exp/wsj/configs/wsj_paper_base.yaml', 'regularization': {'dropout': False, 'max_norm': 1.0}, 'stages': {'annealing1': {'training': {'epsilon': 1e-10, 'num_epochs': 1, 'restart_from': '_best'}}, 'main': {'training': {'num_epochs': 5, 'restart_from': '_best'}}, 'pretraining': {'net': {'prior': {'initial_begin': 0, 'initial_end': 40, 'max_speed': 2.2, 'min_speed': 1.2, 'type': 'expanding'}}, 'training': {'num_epochs': 1}}}, 'training': {'decay_rate': 0.95, 'epsilon': 1e-08, 'gradient_threshold': 100.0, 'momentum': 0.0, 'rules': ['momentum', 'adadelta'], 'scale': 0.1}, 'vocabulary': 'data/local/nist_lm//wsj_trigram_with_bos/words.txt'}
2016-06-10 11:34:43,337: lvsr.main: INFO: Recognizer initialization started
Traceback (most recent call last):
  File "/speech05-ssd/sykim/attention-lvcsr/bin/run.py", line 154, in <module>
    getattr(lvsr.main, args.pop('func'))(config, **args)
  File "/speech05-ssd/sykim/attention-lvcsr/lvsr/main.py", line 716, in search
    recognizer = create_model(config, data, load_path)
  File "/speech05-ssd/sykim/attention-lvcsr/lvsr/main.py", line 223, in create_model
    recognizer.load_params(load_path)
  File "/speech05-ssd/sykim/attention-lvcsr/lvsr/bricks/recognizer.py", line 409, in load_params
    generated = self.get_generate_graph()
  File "/speech05-ssd/sykim/attention-lvcsr/lvsr/bricks/recognizer.py", line 421, in get_generate_graph
    **bottom_inputs)
  File "/speech05-ssd/sykim/attention-lvcsr/libs/blocks/blocks/bricks/base.py", line 377, in __call__
    return self.application.apply(self, *args, **kwargs)
  File "/speech05-ssd/sykim/attention-lvcsr/libs/blocks/blocks/bricks/base.py", line 312, in apply
    outputs = self.application_function(brick, *args, **kwargs)
  File "/speech05-ssd/sykim/attention-lvcsr/lvsr/bricks/recognizer.py", line 406, in generate
    as_dict=True)
  File "/speech05-ssd/sykim/attention-lvcsr/libs/blocks/blocks/bricks/base.py", line 377, in __call__
    return self.application.apply(self, *args, **kwargs)
  File "/speech05-ssd/sykim/attention-lvcsr/libs/blocks/blocks/bricks/base.py", line 312, in apply
    outputs = self.application_function(brick, *args, **kwargs)
  File "/speech05-ssd/sykim/attention-lvcsr/libs/blocks/blocks/bricks/recurrent.py", line 179, in recurrent_apply
    *args, **kwargs)
  File "/speech05-ssd/sykim/attention-lvcsr/libs/blocks/blocks/bricks/base.py", line 377, in __call__
    return self.application.apply(self, *args, **kwargs)
  File "/speech05-ssd/sykim/attention-lvcsr/libs/blocks/blocks/bricks/base.py", line 312, in apply
    outputs = self.application_function(brick, *args, **kwargs)
  File "/speech05-ssd/sykim/attention-lvcsr/libs/blocks/blocks/bricks/sequence_generators.py", line 416, in initial_states
    batch_size, as_dict=True, *args, **kwargs)
  File "/speech05-ssd/sykim/attention-lvcsr/libs/blocks/blocks/bricks/base.py", line 377, in __call__
    return self.application.apply(self, *args, **kwargs)
  File "/speech05-ssd/sykim/attention-lvcsr/libs/blocks/blocks/bricks/base.py", line 312, in apply
    outputs = self.application_function(brick, *args, **kwargs)
  File "/speech05-ssd/sykim/attention-lvcsr/libs/blocks/blocks/bricks/sequence_generators.py", line 412, in initial_states
    batch_size, as_dict=True, *args, **kwargs),
  File "/speech05-ssd/sykim/attention-lvcsr/libs/blocks/blocks/bricks/base.py", line 377, in __call__
    return self.application.apply(self, *args, **kwargs)
  File "/speech05-ssd/sykim/attention-lvcsr/libs/blocks/blocks/bricks/base.py", line 312, in apply
    outputs = self.application_function(brick, *args, **kwargs)
  File "/speech05-ssd/sykim/attention-lvcsr/libs/blocks/blocks/bricks/sequence_generators.py", line 900, in initial_states
    *args, **kwargs)
  File "/speech05-ssd/sykim/attention-lvcsr/libs/blocks/blocks/bricks/base.py", line 377, in __call__
    return self.application.apply(self, *args, **kwargs)
  File "/speech05-ssd/sykim/attention-lvcsr/libs/blocks/blocks/bricks/base.py", line 312, in apply
    outputs = self.application_function(brick, *args, **kwargs)
  File "/speech05-ssd/sykim/attention-lvcsr/lvsr/bricks/language_models.py", line 54, in initial_states
    states_dict = self.fst.expand({self.fst.fst.start: 0.0})
  File "/speech05-ssd/sykim/attention-lvcsr/lvsr/ops.py", line 79, in expand
    for arc in self.get_arcs(state, EPSILON):
  File "/speech05-ssd/sykim/attention-lvcsr/lvsr/ops.py", line 58, in get_arcs
    for arc in self[state] if arc.ilabel == character]
  File "/speech05-ssd/sykim/attention-lvcsr/lvsr/ops.py", line 49, in __getitem__
    return self.fst[state]
  File "_fst.pyx", line 393, in fst._fst.StdVectorFst.__getitem__ (fst/_fst.cpp:7960)
KeyError: 'state index out of range'
```
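
The traceback ends in `lvsr/ops.py` on the very first lookup into the LM FST: expanding the start state raises the `KeyError`. One way to check the file itself, independently of the recognizer, is OpenFst's `fstinfo` (a minimal sketch; the path is the LM FST from the command above):

```sh
# fstinfo reads the FST header and prints basic properties.
# A healthy LM FST reports a non-negative initial state and a
# non-zero number of states; a corrupted file makes fstinfo fail.
fstinfo data/local/nist_lm//wsj_trigram_with_bos/LG_pushed_withsyms.fst | head
```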

synetkim · Jun 10 '16

Looks like your FST is corrupted. Try recompiling it.

What does `fstprint <fst> | head` say?
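
On a healthy FST, `fstprint` emits one tab-separated arc per line (source state, destination state, input label, output label, optional weight), plus final-state lines; on a truncated or corrupted file it usually fails outright. A quick check, using the path from the log above:

```sh
# Dump the first few arcs in OpenFst text format:
#   src  dst  ilabel  olabel  [weight]
fstprint data/local/nist_lm//wsj_trigram_with_bos/LG_pushed_withsyms.fst | head
```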

dmitriy-serdyuk · Jun 11 '16