transformer-utils
transformer-utils copied to clipboard
Getting an error running sample code
{ "name": "AttributeError", "message": "module 'transformers.file_utils' has no attribute 'hf_bucket_url'", "stack": "--------------------------------------------------------------------------- AttributeError Traceback (most recent call last) Cell In[8], line 8 5 enable_low_memory_load() 7 tok = transformers.AutoTokenizer.from_pretrained("gpt2") ----> 8 model = transformers.AutoModelForCausalLM.from_pretrained('gpt2-xl') 11 # Accessing model configuration 12 model_config = model.config
File ~/.conda/envs/memit/lib/python3.9/site-packages/transformers/models/auto/auto_factory.py:463, in _BaseAutoModelClass.from_pretrained(cls, pretrained_model_name_or_path, *model_args, **kwargs)
461 elif type(config) in cls._model_mapping.keys():
462 model_class = _get_model_class(config, cls._model_mapping)
--> 463 return model_class.from_pretrained(
464 pretrained_model_name_or_path, *model_args, config=config, **hub_kwargs, **kwargs
465 )
466 raise ValueError(
    467     f"Unrecognized configuration class {config.__class__} for this kind of AutoModel: {cls.__name__}.\n"
    468     f"Model type should be one of {', '.join(c.__name__ for c in cls._model_mapping.keys())}."
469 )
File ~/.conda/envs/memit/lib/python3.9/site-packages/transformer_utils/low_memory/enable.py:10, in low_memory_from_pretrained(pretrained_model_name_or_path, *args, **kwargs) 9 def low_memory_from_pretrained(pretrained_model_name_or_path, *args, **kwargs): ---> 10 config_path, model_path = huggingface_model_local_paths(pretrained_model_name_or_path) 12 model = low_memory_load(config_path=config_path, model_path=model_path, verbose=False) 14 return model
File ~/.conda/envs/memit/lib/python3.9/site-packages/transformer_utils/util/tfm_utils.py:39, in huggingface_model_local_paths(model_name) 38 def huggingface_model_local_paths(model_name): ---> 39 config_path = get_local_path_from_huggingface_cdn(model_name, "config.json") 41 fix_config_with_missing_model_type(model_name, config_path) 43 model_path = get_local_path_from_huggingface_cdn(model_name, "pytorch_model.bin")
File ~/.conda/envs/memit/lib/python3.9/site-packages/transformer_utils/util/tfm_utils.py:27, in get_local_path_from_huggingface_cdn(key, filename) 26 def get_local_path_from_huggingface_cdn(key, filename): ---> 27 archive_file = transformers.file_utils.hf_bucket_url( 28 key, 29 filename=filename, 30 ) 32 resolved_archive_file = transformers.file_utils.cached_path( 33 archive_file, 34 ) 35 return resolved_archive_file
AttributeError: module 'transformers.file_utils' has no attribute 'hf_bucket_url'" }
Any ideas? All I ran was
import torch
import transformers
from transformer_utils.low_memory import enable_low_memory_load
enable_low_memory_load()
tok = transformers.AutoTokenizer.from_pretrained("gpt2")
model = transformers.AutoModelForCausalLM.from_pretrained('gpt2-xl')