
TypeError: 'NoneType' object is not subscriptable

Open · 999lgn opened this issue 2 months ago · 0 comments

```
Traceback (most recent call last):
  File "/mnt/cephfs/home/shixun2024/miniconda3/envs/GengN/lib/python3.10/runpy.py", line 196, in _run_module_as_main
    return _run_code(code, main_globals, None,
  File "/mnt/cephfs/home/shixun2024/miniconda3/envs/GengN/lib/python3.10/runpy.py", line 86, in _run_code
    exec(code, run_globals)
  File "/mnt/cephfs/home/shixun2024/users/GengNan/hands-on-llms/modules/training_pipeline/tools/train_run.py", line 83, in <module>
    fire.Fire(train)
  File "/mnt/cephfs/home/shixun2024/.cache/pypoetry/virtualenvs/training-pipeline-0MYnI07r-py3.10/lib/python3.10/site-packages/fire/core.py", line 141, in Fire
    component_trace = _Fire(component, args, parsed_flag_args, context, name)
  File "/mnt/cephfs/home/shixun2024/.cache/pypoetry/virtualenvs/training-pipeline-0MYnI07r-py3.10/lib/python3.10/site-packages/fire/core.py", line 475, in _Fire
    component, remaining_args = _CallAndUpdateTrace(
  File "/mnt/cephfs/home/shixun2024/.cache/pypoetry/virtualenvs/training-pipeline-0MYnI07r-py3.10/lib/python3.10/site-packages/fire/core.py", line 691, in _CallAndUpdateTrace
    component = fn(*varargs, **kwargs)
  File "/mnt/cephfs/home/shixun2024/.cache/pypoetry/virtualenvs/training-pipeline-0MYnI07r-py3.10/lib/python3.10/site-packages/beam/app.py", line 1346, in wrapper
    return func(*args, **kwargs)
  File "/mnt/cephfs/home/shixun2024/users/GengNan/hands-on-llms/modules/training_pipeline/tools/train_run.py", line 79, in train
    training_api.train()
  File "/mnt/cephfs/home/shixun2024/users/GengNan/hands-on-llms/modules/training_pipeline/training_pipeline/api/training.py", line 228, in train
    trainer.train()
  File "/mnt/cephfs/home/shixun2024/.cache/pypoetry/virtualenvs/training-pipeline-0MYnI07r-py3.10/lib/python3.10/site-packages/transformers/trainer.py", line 1539, in train
    return inner_training_loop(
  File "/mnt/cephfs/home/shixun2024/.cache/pypoetry/virtualenvs/training-pipeline-0MYnI07r-py3.10/lib/python3.10/site-packages/transformers/trainer.py", line 1809, in _inner_training_loop
    tr_loss_step = self.training_step(model, inputs)
  File "/mnt/cephfs/home/shixun2024/.cache/pypoetry/virtualenvs/training-pipeline-0MYnI07r-py3.10/lib/python3.10/site-packages/transformers/trainer.py", line 2654, in training_step
    loss = self.compute_loss(model, inputs)
  File "/mnt/cephfs/home/shixun2024/.cache/pypoetry/virtualenvs/training-pipeline-0MYnI07r-py3.10/lib/python3.10/site-packages/transformers/trainer.py", line 2679, in compute_loss
    outputs = model(**inputs)
  File "/mnt/cephfs/home/shixun2024/.cache/pypoetry/virtualenvs/training-pipeline-0MYnI07r-py3.10/lib/python3.10/site-packages/torch/nn/modules/module.py", line 1501, in _call_impl
    return forward_call(*args, **kwargs)
  File "/mnt/cephfs/home/shixun2024/.cache/pypoetry/virtualenvs/training-pipeline-0MYnI07r-py3.10/lib/python3.10/site-packages/torch/nn/parallel/data_parallel.py", line 171, in forward
    outputs = self.parallel_apply(replicas, inputs, kwargs)
  File "/mnt/cephfs/home/shixun2024/.cache/pypoetry/virtualenvs/training-pipeline-0MYnI07r-py3.10/lib/python3.10/site-packages/torch/nn/parallel/data_parallel.py", line 181, in parallel_apply
    return parallel_apply(replicas, inputs, kwargs, self.device_ids[:len(replicas)])
  File "/mnt/cephfs/home/shixun2024/.cache/pypoetry/virtualenvs/training-pipeline-0MYnI07r-py3.10/lib/python3.10/site-packages/torch/nn/parallel/parallel_apply.py", line 89, in parallel_apply
    output.reraise()
  File "/mnt/cephfs/home/shixun2024/.cache/pypoetry/virtualenvs/training-pipeline-0MYnI07r-py3.10/lib/python3.10/site-packages/torch/_utils.py", line 644, in reraise
    raise exception
TypeError: Caught TypeError in replica 0 on device 0.
Original Traceback (most recent call last):
  File "/mnt/cephfs/home/shixun2024/.cache/pypoetry/virtualenvs/training-pipeline-0MYnI07r-py3.10/lib/python3.10/site-packages/torch/nn/parallel/parallel_apply.py", line 64, in _worker
    output = module(*input, **kwargs)
  File "/mnt/cephfs/home/shixun2024/.cache/pypoetry/virtualenvs/training-pipeline-0MYnI07r-py3.10/lib/python3.10/site-packages/torch/nn/modules/module.py", line 1501, in _call_impl
    return forward_call(*args, **kwargs)
  File "/mnt/cephfs/home/shixun2024/.cache/pypoetry/virtualenvs/training-pipeline-0MYnI07r-py3.10/lib/python3.10/site-packages/peft/peft_model.py", line 922, in forward
    return self.base_model(
  File "/mnt/cephfs/home/shixun2024/.cache/pypoetry/virtualenvs/training-pipeline-0MYnI07r-py3.10/lib/python3.10/site-packages/torch/nn/modules/module.py", line 1501, in _call_impl
    return forward_call(*args, **kwargs)
  File "/mnt/cephfs/home/shixun2024/.cache/pypoetry/virtualenvs/training-pipeline-0MYnI07r-py3.10/lib/python3.10/site-packages/accelerate/hooks.py", line 165, in new_forward
    output = old_forward(*args, **kwargs)
  File "/mnt/cephfs/home/shixun2024/.cache/pypoetry/virtualenvs/training-pipeline-0MYnI07r-py3.10/lib/python3.10/site-packages/transformers/models/falcon/modeling_falcon.py", line 900, in forward
    transformer_outputs = self.transformer(
  File "/mnt/cephfs/home/shixun2024/.cache/pypoetry/virtualenvs/training-pipeline-0MYnI07r-py3.10/lib/python3.10/site-packages/torch/nn/modules/module.py", line 1501, in _call_impl
    return forward_call(*args, **kwargs)
  File "/mnt/cephfs/home/shixun2024/.cache/pypoetry/virtualenvs/training-pipeline-0MYnI07r-py3.10/lib/python3.10/site-packages/accelerate/hooks.py", line 165, in new_forward
    output = old_forward(*args, **kwargs)
  File "/mnt/cephfs/home/shixun2024/.cache/pypoetry/virtualenvs/training-pipeline-0MYnI07r-py3.10/lib/python3.10/site-packages/transformers/models/falcon/modeling_falcon.py", line 789, in forward
    outputs = torch.utils.checkpoint.checkpoint(
  File "/mnt/cephfs/home/shixun2024/.cache/pypoetry/virtualenvs/training-pipeline-0MYnI07r-py3.10/lib/python3.10/site-packages/torch/utils/checkpoint.py", line 249, in checkpoint
    return CheckpointFunction.apply(function, preserve, *args)
  File "/mnt/cephfs/home/shixun2024/.cache/pypoetry/virtualenvs/training-pipeline-0MYnI07r-py3.10/lib/python3.10/site-packages/torch/autograd/function.py", line 506, in apply
    return super().apply(*args, **kwargs)  # type: ignore[misc]
  File "/mnt/cephfs/home/shixun2024/.cache/pypoetry/virtualenvs/training-pipeline-0MYnI07r-py3.10/lib/python3.10/site-packages/torch/utils/checkpoint.py", line 107, in forward
    outputs = run_function(*args)
  File "/mnt/cephfs/home/shixun2024/.cache/pypoetry/virtualenvs/training-pipeline-0MYnI07r-py3.10/lib/python3.10/site-packages/transformers/models/falcon/modeling_falcon.py", line 785, in custom_forward
    return module(*inputs, use_cache=use_cache, output_attentions=output_attentions)
  File "/mnt/cephfs/home/shixun2024/.cache/pypoetry/virtualenvs/training-pipeline-0MYnI07r-py3.10/lib/python3.10/site-packages/torch/nn/modules/module.py", line 1501, in _call_impl
    return forward_call(*args, **kwargs)
  File "/mnt/cephfs/home/shixun2024/.cache/pypoetry/virtualenvs/training-pipeline-0MYnI07r-py3.10/lib/python3.10/site-packages/accelerate/hooks.py", line 165, in new_forward
    output = old_forward(*args, **kwargs)
  File "/mnt/cephfs/home/shixun2024/.cache/pypoetry/virtualenvs/training-pipeline-0MYnI07r-py3.10/lib/python3.10/site-packages/transformers/models/falcon/modeling_falcon.py", line 453, in forward
    attn_outputs = self.self_attention(
  File "/mnt/cephfs/home/shixun2024/.cache/pypoetry/virtualenvs/training-pipeline-0MYnI07r-py3.10/lib/python3.10/site-packages/torch/nn/modules/module.py", line 1501, in _call_impl
    return forward_call(*args, **kwargs)
  File "/mnt/cephfs/home/shixun2024/.cache/pypoetry/virtualenvs/training-pipeline-0MYnI07r-py3.10/lib/python3.10/site-packages/accelerate/hooks.py", line 165, in new_forward
    output = old_forward(*args, **kwargs)
  File "/mnt/cephfs/home/shixun2024/.cache/pypoetry/virtualenvs/training-pipeline-0MYnI07r-py3.10/lib/python3.10/site-packages/transformers/models/falcon/modeling_falcon.py", line 307, in forward
    query_layer, key_layer = self.maybe_rotary(query_layer, key_layer, past_kv_length)
  File "/mnt/cephfs/home/shixun2024/.cache/pypoetry/virtualenvs/training-pipeline-0MYnI07r-py3.10/lib/python3.10/site-packages/torch/nn/modules/module.py", line 1501, in _call_impl
    return forward_call(*args, **kwargs)
  File "/mnt/cephfs/home/shixun2024/.cache/pypoetry/virtualenvs/training-pipeline-0MYnI07r-py3.10/lib/python3.10/site-packages/transformers/models/falcon/modeling_falcon.py", line 107, in forward
    cos, sin = self.cos_sin(seq_len, past_key_values_length, query.device, query.dtype)
  File "/mnt/cephfs/home/shixun2024/.cache/pypoetry/virtualenvs/training-pipeline-0MYnI07r-py3.10/lib/python3.10/site-packages/transformers/models/falcon/modeling_falcon.py", line 101, in cos_sin
    self.cos_cached[:, past_key_values_length : seq_len + past_key_values_length],
TypeError: 'NoneType' object is not subscriptable
```

I always run into this problem. How can I solve it?
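From the trace, the error is raised inside a `torch.nn.DataParallel` replica (see `parallel_apply` in the stack), where the Falcon rotary embedding's `cos_cached` tensor is `None`. One thing I am considering trying, not a confirmed fix, is restricting the run to a single visible GPU so the Hugging Face `Trainer` never wraps the PEFT model in `DataParallel`. A minimal sketch, assuming the crash only appears on the multi-GPU `DataParallel` path:

```python
# Hypothetical workaround sketch (unverified): expose only one GPU so the Hugging Face
# Trainer does not wrap the model in torch.nn.DataParallel, which is where the Falcon
# rotary embedding's cos_cached ends up as None in the traceback above.
import os

# Must be set before torch / transformers initialize CUDA in this process.
os.environ["CUDA_VISIBLE_DEVICES"] = "0"

import torch

# Expect 1 here; with a single visible device the Trainer skips the DataParallel wrapper.
print(torch.cuda.device_count())
```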

999lgn · Apr 22, 2024