private-gpt
Error when running "make run" on Windows 11
(privategpt) PS D:\AGI\privategpt> $env:PGPT_PROFILES="ollama"; make run
poetry run python -m private_gpt
21:29:52.426 [INFO ] private_gpt.settings.settings_loader - Starting application with profiles=['default', 'ollama']
21:29:53.085 [INFO ] numexpr.utils - Note: NumExpr detected 20 cores but "NUMEXPR_MAX_THREADS" not set, so enforcing safe limit of 8.
21:29:53.085 [INFO ] numexpr.utils - NumExpr defaulting to 8 threads.
None of PyTorch, TensorFlow >= 2.0, or Flax have been found. Models won't be available and only tokenizers, configuration and file/data utilities can be used.
21:29:57.055 [INFO ] private_gpt.components.llm.llm_component - Initializing the LLM in mode=ollama
21:29:57.603 [INFO ] private_gpt.components.embedding.embedding_component - Initializing the embedding model in mode=ollama
Traceback (most recent call last):
File "D:\AGI\pyvenv\privategpt\Lib\site-packages\injector\__init__.py", line 798, in get
return self._context[key]
~~~~~~~~~~~~~^^^^^
KeyError: <class 'private_gpt.ui.ui.PrivateGptUi'>
During handling of the above exception, another exception occurred:
Traceback (most recent call last):
File "D:\AGI\pyvenv\privategpt\Lib\site-packages\injector\__init__.py", line 798, in get
return self._context[key]
~~~~~~~~~~~~~^^^^^
KeyError: <class 'private_gpt.server.ingest.ingest_service.IngestService'>
During handling of the above exception, another exception occurred:
Traceback (most recent call last):
File "D:\AGI\pyvenv\privategpt\Lib\site-packages\injector\__init__.py", line 798, in get
return self._context[key]
~~~~~~~~~~~~~^^^^^
KeyError: <class 'private_gpt.components.embedding.embedding_component.EmbeddingComponent'>
During handling of the above exception, another exception occurred:
Traceback (most recent call last):
File "<frozen runpy>", line 198, in _run_module_as_main
File "<frozen runpy>", line 88, in _run_code
File "D:\AGI\privategpt\private_gpt\__main__.py", line 5, in <module>
from private_gpt.main import app
File "D:\AGI\privategpt\private_gpt\main.py", line 6, in <module>
app = create_app(global_injector)
^^^^^^^^^^^^^^^^^^^^^^^^^^^
File "D:\AGI\privategpt\private_gpt\launcher.py", line 63, in create_app
ui = root_injector.get(PrivateGptUi)
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
File "D:\AGI\pyvenv\privategpt\Lib\site-packages\injector\__init__.py", line 91, in wrapper
return function(*args, **kwargs)
^^^^^^^^^^^^^^^^^^^^^^^^^
File "D:\AGI\pyvenv\privategpt\Lib\site-packages\injector\__init__.py", line 974, in get
provider_instance = scope_instance.get(interface, binding.provider)
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
File "D:\AGI\pyvenv\privategpt\Lib\site-packages\injector\__init__.py", line 91, in wrapper
return function(*args, **kwargs)
^^^^^^^^^^^^^^^^^^^^^^^^^
File "D:\AGI\pyvenv\privategpt\Lib\site-packages\injector\__init__.py", line 800, in get
instance = self._get_instance(key, provider, self.injector)
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
File "D:\AGI\pyvenv\privategpt\Lib\site-packages\injector\__init__.py", line 811, in _get_instance
return provider.get(injector)
^^^^^^^^^^^^^^^^^^^^^^
File "D:\AGI\pyvenv\privategpt\Lib\site-packages\injector\__init__.py", line 264, in get
return injector.create_object(self._cls)
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
File "D:\AGI\pyvenv\privategpt\Lib\site-packages\injector\__init__.py", line 998, in create_object
self.call_with_injection(init, self_=instance, kwargs=additional_kwargs)
File "D:\AGI\pyvenv\privategpt\Lib\site-packages\injector\__init__.py", line 1031, in call_with_injection
dependencies = self.args_to_inject(
^^^^^^^^^^^^^^^^^^^^
File "D:\AGI\pyvenv\privategpt\Lib\site-packages\injector\__init__.py", line 91, in wrapper
return function(*args, **kwargs)
^^^^^^^^^^^^^^^^^^^^^^^^^
File "D:\AGI\pyvenv\privategpt\Lib\site-packages\injector\__init__.py", line 1079, in args_to_inject
instance: Any = self.get(interface)
^^^^^^^^^^^^^^^^^^^
File "D:\AGI\pyvenv\privategpt\Lib\site-packages\injector\__init__.py", line 91, in wrapper
return function(*args, **kwargs)
^^^^^^^^^^^^^^^^^^^^^^^^^
File "D:\AGI\pyvenv\privategpt\Lib\site-packages\injector\__init__.py", line 974, in get
provider_instance = scope_instance.get(interface, binding.provider)
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
File "D:\AGI\pyvenv\privategpt\Lib\site-packages\injector\__init__.py", line 91, in wrapper
return function(*args, **kwargs)
^^^^^^^^^^^^^^^^^^^^^^^^^
File "D:\AGI\pyvenv\privategpt\Lib\site-packages\injector\__init__.py", line 800, in get
instance = self._get_instance(key, provider, self.injector)
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
File "D:\AGI\pyvenv\privategpt\Lib\site-packages\injector\__init__.py", line 811, in _get_instance
return provider.get(injector)
^^^^^^^^^^^^^^^^^^^^^^
File "D:\AGI\pyvenv\privategpt\Lib\site-packages\injector\__init__.py", line 264, in get
return injector.create_object(self._cls)
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
File "D:\AGI\pyvenv\privategpt\Lib\site-packages\injector\__init__.py", line 998, in create_object
self.call_with_injection(init, self_=instance, kwargs=additional_kwargs)
File "D:\AGI\pyvenv\privategpt\Lib\site-packages\injector\__init__.py", line 1031, in call_with_injection
dependencies = self.args_to_inject(
^^^^^^^^^^^^^^^^^^^^
File "D:\AGI\pyvenv\privategpt\Lib\site-packages\injector\__init__.py", line 91, in wrapper
return function(*args, **kwargs)
^^^^^^^^^^^^^^^^^^^^^^^^^
File "D:\AGI\pyvenv\privategpt\Lib\site-packages\injector\__init__.py", line 1079, in args_to_inject
instance: Any = self.get(interface)
^^^^^^^^^^^^^^^^^^^
File "D:\AGI\pyvenv\privategpt\Lib\site-packages\injector\__init__.py", line 91, in wrapper
return function(*args, **kwargs)
^^^^^^^^^^^^^^^^^^^^^^^^^
File "D:\AGI\pyvenv\privategpt\Lib\site-packages\injector\__init__.py", line 974, in get
provider_instance = scope_instance.get(interface, binding.provider)
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
File "D:\AGI\pyvenv\privategpt\Lib\site-packages\injector\__init__.py", line 91, in wrapper
return function(*args, **kwargs)
^^^^^^^^^^^^^^^^^^^^^^^^^
File "D:\AGI\pyvenv\privategpt\Lib\site-packages\injector\__init__.py", line 800, in get
instance = self._get_instance(key, provider, self.injector)
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
File "D:\AGI\pyvenv\privategpt\Lib\site-packages\injector\__init__.py", line 811, in _get_instance
return provider.get(injector)
^^^^^^^^^^^^^^^^^^^^^^
File "D:\AGI\pyvenv\privategpt\Lib\site-packages\injector\__init__.py", line 264, in get
return injector.create_object(self._cls)
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
File "D:\AGI\pyvenv\privategpt\Lib\site-packages\injector\__init__.py", line 998, in create_object
self.call_with_injection(init, self_=instance, kwargs=additional_kwargs)
File "D:\AGI\pyvenv\privategpt\Lib\site-packages\injector\__init__.py", line 1040, in call_with_injection
return callable(*full_args, **dependencies)
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
File "D:\AGI\privategpt\private_gpt\components\embedding\embedding_component.py", line 71, in __init__
self.embedding_model = OllamaEmbedding(
^^^^^^^^^^^^^^^^
File "D:\AGI\pyvenv\privategpt\Lib\site-packages\llama_index\embeddings\ollama\base.py", line 32, in __init__
super().__init__(
File "D:\AGI\pyvenv\privategpt\Lib\site-packages\pydantic\v1\main.py", line 341, in __init__
raise validation_error
pydantic.v1.error_wrappers.ValidationError: 1 validation error for OllamaEmbedding
base_url
str type expected (type=type_error.str)
make: *** [run] error 1
I followed the installation guide: https://docs.privategpt.dev/installation/getting-started/installation. Does anyone know how to fix this?
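For anyone reading the trace: the injector KeyErrors are just dependency-injection noise, and the real failure is the last pydantic error. OllamaEmbedding requires base_url to be a string, and the embedding component is handing it something that isn't one (most likely None, i.e. no Ollama URL was resolved from the active profile). A minimal sketch of the same failure, and of a call that should go through; "nomic-embed-text" is only a placeholder model name for this sketch:

from llama_index.embeddings.ollama import OllamaEmbedding

# Passing a non-string base_url reproduces the ValidationError seen above
# ("str type expected" for base_url).
try:
    OllamaEmbedding(model_name="nomic-embed-text", base_url=None)
except Exception as err:
    print(err)  # 1 validation error for OllamaEmbedding: base_url - str type expected

# With an explicit string URL (Ollama's default local address) the object
# should initialize without the pydantic error.
embedding = OllamaEmbedding(
    model_name="nomic-embed-text",
    base_url="http://localhost:11434",
)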
Hello, same here
Hi, I got this error too.
Can you try again? I just tried with the latest changes, and it's working as expected.
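In case it helps anyone who can't update right away: the fix amounts to making sure a real string URL reaches OllamaEmbedding. Below is a hypothetical sketch of that guard, not the actual upstream change; the helper name and the settings fields embedding_api_base / api_base / embedding_model are assumptions, so check the keys in your own settings-ollama.yaml:

from llama_index.embeddings.ollama import OllamaEmbedding

def build_ollama_embedding(settings) -> OllamaEmbedding:
    # Resolve the Ollama endpoint from the active profile and fall back to the
    # default local address, so base_url is always a string.
    ollama = settings.ollama
    api_base = (
        getattr(ollama, "embedding_api_base", None)  # newer-style key (assumed)
        or getattr(ollama, "api_base", None)          # older-style key (assumed)
        or "http://localhost:11434"                   # Ollama's default endpoint
    )
    return OllamaEmbedding(model_name=ollama.embedding_model, base_url=api_base)

Setting the Ollama URL explicitly in the active profile's settings file should achieve the same thing without touching code.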