llm
llm copied to clipboard
gpt4all models causes traceback exit
I ran brew install llm
on a fresh machine. Then I ran llm install gpt4all
per the instructions in the "Other Models" section of the docs. When running llm models
I got the following output:
Traceback (most recent call last):
File "/opt/homebrew/bin/llm", line 8, in <module>
sys.exit(cli())
^^^^^
File "/opt/homebrew/Cellar/llm/0.13.1/libexec/lib/python3.12/site-packages/click/core.py", line 1157, in __call__
return self.main(*args, **kwargs)
^^^^^^^^^^^^^^^^^^^^^^^^^^
File "/opt/homebrew/Cellar/llm/0.13.1/libexec/lib/python3.12/site-packages/click/core.py", line 1078, in main
rv = self.invoke(ctx)
^^^^^^^^^^^^^^^^
File "/opt/homebrew/Cellar/llm/0.13.1/libexec/lib/python3.12/site-packages/click/core.py", line 1688, in invoke
return _process_result(sub_ctx.command.invoke(sub_ctx))
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
File "/opt/homebrew/Cellar/llm/0.13.1/libexec/lib/python3.12/site-packages/click/core.py", line 1688, in invoke
return _process_result(sub_ctx.command.invoke(sub_ctx))
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
File "/opt/homebrew/Cellar/llm/0.13.1/libexec/lib/python3.12/site-packages/click/core.py", line 1434, in invoke
return ctx.invoke(self.callback, **ctx.params)
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
File "/opt/homebrew/Cellar/llm/0.13.1/libexec/lib/python3.12/site-packages/click/core.py", line 783, in invoke
return __callback(*args, **kwargs)
^^^^^^^^^^^^^^^^^^^^^^^^^^^
File "/opt/homebrew/Cellar/llm/0.13.1/libexec/lib/python3.12/site-packages/llm/cli.py", line 799, in models_list
for model_with_aliases in get_models_with_aliases():
^^^^^^^^^^^^^^^^^^^^^^^^^
File "/opt/homebrew/Cellar/llm/0.13.1/libexec/lib/python3.12/site-packages/llm/__init__.py", line 80, in get_models_with_aliases
pm.hook.register_models(register=register)
File "/opt/homebrew/Cellar/llm/0.13.1/libexec/lib/python3.12/site-packages/pluggy/_hooks.py", line 501, in __call__
return self._hookexec(self.name, self._hookimpls.copy(), kwargs, firstresult)
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
File "/opt/homebrew/Cellar/llm/0.13.1/libexec/lib/python3.12/site-packages/pluggy/_manager.py", line 119, in _hookexec
return self._inner_hookexec(hook_name, methods, kwargs, firstresult)
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
File "/opt/homebrew/Cellar/llm/0.13.1/libexec/lib/python3.12/site-packages/pluggy/_callers.py", line 138, in _multicall
raise exception.with_traceback(exception.__traceback__)
File "/opt/homebrew/Cellar/llm/0.13.1/libexec/lib/python3.12/site-packages/pluggy/_callers.py", line 102, in _multicall
res = hook_impl.function(*args)
^^^^^^^^^^^^^^^^^^^^^^^^^
File "/opt/homebrew/Cellar/llm/0.13.1/libexec/lib/python3.12/site-packages/llm_gpt4all.py", line 57, in register_models
models.sort(
File "/opt/homebrew/Cellar/llm/0.13.1/libexec/lib/python3.12/site-packages/llm_gpt4all.py", line 59, in <lambda>
not model.is_installed(),
^^^^^^^^^^^^^^^^^^^^
File "/opt/homebrew/Cellar/llm/0.13.1/libexec/lib/python3.12/site-packages/llm_gpt4all.py", line 179, in is_installed
GPT4All.retrieve_model(
File "/opt/homebrew/Cellar/llm/0.13.1/libexec/lib/python3.12/site-packages/llm_gpt4all.py", line 38, in retrieve_model
return _GPT4All.retrieve_model(model_name, model_path, allow_download, verbose)
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
File "/opt/homebrew/Cellar/llm/0.13.1/libexec/lib/python3.12/site-packages/gpt4all/gpt4all.py", line 300, in retrieve_model
raise FileNotFoundError(f"Model file does not exist: {model_dest!r}")
FileNotFoundError: Model file does not exist: PosixPath('/Users/mattly/.cache/gpt4all/mistral-7b-openorca.gguf2.Q4_0.gguf')