Logs, error messages, and screenshots:
(opendevin) taozhiyu@603e5f4a42f1 opendevin % make run
Running the app...
Starting backend server...
Waiting for the backend to start...
Connection to localhost port 3000 [tcp/hbci] succeeded!
Backend started successfully.
Starting frontend with npm...
➜ Local: http://localhost:3002/
➜ Network: use --host to expose
➜ press h + enter to show help
ERROR:root: File "/Users/taozhiyu/miniconda3/envs/opendevin/bin/uvicorn", line 8, in
sys.exit(main())
^^^^^^
File "/Users/taozhiyu/miniconda3/envs/opendevin/lib/python3.11/site-packages/click/core.py", line 1157, in call
return self.main(*args, **kwargs)
^^^^^^^^^^^^^^^^^^^^^^^^^^
File "/Users/taozhiyu/miniconda3/envs/opendevin/lib/python3.11/site-packages/click/core.py", line 1078, in main
rv = self.invoke(ctx)
^^^^^^^^^^^^^^^^
File "/Users/taozhiyu/miniconda3/envs/opendevin/lib/python3.11/site-packages/click/core.py", line 1434, in invoke
return ctx.invoke(self.callback, **ctx.params)
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
File "/Users/taozhiyu/miniconda3/envs/opendevin/lib/python3.11/site-packages/click/core.py", line 783, in invoke
return __callback(*args, **kwargs)
^^^^^^^^^^^^^^^^^^^^^^^^^^^
File "/Users/taozhiyu/miniconda3/envs/opendevin/lib/python3.11/site-packages/uvicorn/main.py", line 409, in main
run(
File "/Users/taozhiyu/miniconda3/envs/opendevin/lib/python3.11/site-packages/uvicorn/main.py", line 575, in run
server.run()
File "/Users/taozhiyu/miniconda3/envs/opendevin/lib/python3.11/site-packages/uvicorn/server.py", line 65, in run
return asyncio.run(self.serve(sockets=sockets))
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
File "/Users/taozhiyu/miniconda3/envs/opendevin/lib/python3.11/asyncio/runners.py", line 190, in run
return runner.run(main)
^^^^^^^^^^^^^^^^
File "/Users/taozhiyu/miniconda3/envs/opendevin/lib/python3.11/asyncio/runners.py", line 118, in run
return self._loop.run_until_complete(task)
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
File "uvloop/loop.pyx", line 1517, in uvloop.loop.Loop.run_until_complete
File "/Users/taozhiyu/miniconda3/envs/opendevin/lib/python3.11/site-packages/uvicorn/server.py", line 69, in serve
await self._serve(sockets)
File "/Users/taozhiyu/miniconda3/envs/opendevin/lib/python3.11/site-packages/uvicorn/server.py", line 76, in _serve
config.load()
File "/Users/taozhiyu/miniconda3/envs/opendevin/lib/python3.11/site-packages/uvicorn/config.py", line 433, in load
self.loaded_app = import_from_string(self.app)
^^^^^^^^^^^^^^^^^^^^^^^^^^^^
File "/Users/taozhiyu/miniconda3/envs/opendevin/lib/python3.11/site-packages/uvicorn/importer.py", line 19, in import_from_string
module = importlib.import_module(module_str)
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
File "/Users/taozhiyu/miniconda3/envs/opendevin/lib/python3.11/importlib/init.py", line 126, in import_module
return _bootstrap._gcd_import(name[level:], package, level)
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
File "", line 1204, in _gcd_import
File "", line 1176, in _find_and_load
File "", line 1147, in _find_and_load_unlocked
File "", line 690, in _load_unlocked
File "", line 940, in exec_module
File "", line 241, in _call_with_frames_removed
File "/Users/taozhiyu/Downloads/OpenDevin/opendevin/server/listen.py", line 13, in
import agenthub # noqa F401 (we import this to get the agents registered)
^^^^^^^^^^^^^^^
File "/Users/taozhiyu/Downloads/OpenDevin/agenthub/init.py", line 5, in
from . import monologue_agent # noqa: E402
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
File "/Users/taozhiyu/Downloads/OpenDevin/agenthub/monologue_agent/init.py", line 2, in
from .agent import MonologueAgent
File "/Users/taozhiyu/Downloads/OpenDevin/agenthub/monologue_agent/agent.py", line 30, in
from agenthub.monologue_agent.utils.memory import LongTermMemory
File "/Users/taozhiyu/Downloads/OpenDevin/agenthub/monologue_agent/utils/memory.py", line 39, in
embed_model = HuggingFaceEmbedding(
^^^^^^^^^^^^^^^^^^^^^
File "/Users/taozhiyu/miniconda3/envs/opendevin/lib/python3.11/site-packages/llama_index/embeddings/huggingface/base.py", line 86, in init
self._model = SentenceTransformer(
^^^^^^^^^^^^^^^^^^^^
File "/Users/taozhiyu/miniconda3/envs/opendevin/lib/python3.11/site-packages/sentence_transformers/SentenceTransformer.py", line 199, in init
modules = self._load_auto_model(
^^^^^^^^^^^^^^^^^^^^^^
File "/Users/taozhiyu/miniconda3/envs/opendevin/lib/python3.11/site-packages/sentence_transformers/SentenceTransformer.py", line 1134, in _load_auto_model
transformer_model = Transformer(
^^^^^^^^^^^^
File "/Users/taozhiyu/miniconda3/envs/opendevin/lib/python3.11/site-packages/sentence_transformers/models/Transformer.py", line 35, in init
config = AutoConfig.from_pretrained(model_name_or_path, **model_args, cache_dir=cache_dir)
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
File "/Users/taozhiyu/miniconda3/envs/opendevin/lib/python3.11/site-packages/transformers/models/auto/configuration_auto.py", line 1138, in from_pretrained
config_dict, unused_kwargs = PretrainedConfig.get_config_dict(pretrained_model_name_or_path, **kwargs)
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
File "/Users/taozhiyu/miniconda3/envs/opendevin/lib/python3.11/site-packages/transformers/configuration_utils.py", line 631, in get_config_dict
config_dict, kwargs = cls._get_config_dict(pretrained_model_name_or_path, **kwargs)
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
File "/Users/taozhiyu/miniconda3/envs/opendevin/lib/python3.11/site-packages/transformers/configuration_utils.py", line 707, in _get_config_dict
raise EnvironmentError(
ERROR:root:<class 'OSError'>: Can't load the configuration of 'BAAI/bge-small-en-v1.5'. If you were trying to load it from 'https://huggingface.co/models', make sure you don't have a local directory with the same name. Otherwise, make sure 'BAAI/bge-small-en-v1.5' is the correct path to a directory containing a config.json file
I think this is a network issue. Try `export HF_ENDPOINT=https://hf-mirror.com` and see if it helps. The model should then be saved in your HF cache, after which you can load it by model name. I also think we might want to give people a choice about which embedding model to use.
I think this is a network issue. Try `export HF_ENDPOINT=https://hf-mirror.com` and see if it helps. The model should then be saved in your HF cache, after which you can load it by model name. I also think we might want to give people a choice about which embedding model to use.
Actually, I have already set mirrors for both GitHub and HF, but the OpenDevin installation just ignores them. I have to press Ctrl+C to stop the installation and then `pip install` the packages manually over a slow connection.
(opendevin) taozhiyu@603e5f4a42f1 opendevin % make run
Running the app...
Starting backend server...
Waiting for the backend to start...
Connection to localhost port 3000 [tcp/hbci] succeeded!
Backend started successfully.
Starting frontend with npm...
➜ Local: http://localhost:3002/
➜ Network: use --host to expose
➜ press h + enter to show help
ERROR:root: File "/Users/taozhiyu/miniconda3/envs/opendevin/bin/uvicorn", line 8, in
sys.exit(main())
^^^^^^
File "/Users/taozhiyu/miniconda3/envs/opendevin/lib/python3.11/site-packages/click/core.py", line 1157, in call
return self.main(*args, **kwargs)
^^^^^^^^^^^^^^^^^^^^^^^^^^
File "/Users/taozhiyu/miniconda3/envs/opendevin/lib/python3.11/site-packages/click/core.py", line 1078, in main
rv = self.invoke(ctx)
^^^^^^^^^^^^^^^^
File "/Users/taozhiyu/miniconda3/envs/opendevin/lib/python3.11/site-packages/click/core.py", line 1434, in invoke
return ctx.invoke(self.callback, **ctx.params)
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
File "/Users/taozhiyu/miniconda3/envs/opendevin/lib/python3.11/site-packages/click/core.py", line 783, in invoke
return __callback(*args, **kwargs)
^^^^^^^^^^^^^^^^^^^^^^^^^^^
File "/Users/taozhiyu/miniconda3/envs/opendevin/lib/python3.11/site-packages/uvicorn/main.py", line 409, in main
run(
File "/Users/taozhiyu/miniconda3/envs/opendevin/lib/python3.11/site-packages/uvicorn/main.py", line 575, in run
server.run()
File "/Users/taozhiyu/miniconda3/envs/opendevin/lib/python3.11/site-packages/uvicorn/server.py", line 65, in run
return asyncio.run(self.serve(sockets=sockets))
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
File "/Users/taozhiyu/miniconda3/envs/opendevin/lib/python3.11/asyncio/runners.py", line 190, in run
return runner.run(main)
^^^^^^^^^^^^^^^^
File "/Users/taozhiyu/miniconda3/envs/opendevin/lib/python3.11/asyncio/runners.py", line 118, in run
return self._loop.run_until_complete(task)
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
File "uvloop/loop.pyx", line 1517, in uvloop.loop.Loop.run_until_complete
File "/Users/taozhiyu/miniconda3/envs/opendevin/lib/python3.11/site-packages/uvicorn/server.py", line 69, in serve
await self._serve(sockets)
File "/Users/taozhiyu/miniconda3/envs/opendevin/lib/python3.11/site-packages/uvicorn/server.py", line 76, in _serve
config.load()
File "/Users/taozhiyu/miniconda3/envs/opendevin/lib/python3.11/site-packages/uvicorn/config.py", line 433, in load
self.loaded_app = import_from_string(self.app)
^^^^^^^^^^^^^^^^^^^^^^^^^^^^
File "/Users/taozhiyu/miniconda3/envs/opendevin/lib/python3.11/site-packages/uvicorn/importer.py", line 19, in import_from_string
module = importlib.import_module(module_str)
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
File "/Users/taozhiyu/miniconda3/envs/opendevin/lib/python3.11/importlib/init.py", line 126, in import_module
return _bootstrap._gcd_import(name[level:], package, level)
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
File "", line 1204, in _gcd_import
File "", line 1176, in _find_and_load
File "", line 1147, in _find_and_load_unlocked
File "", line 690, in _load_unlocked
File "", line 940, in exec_module
File "", line 241, in _call_with_frames_removed
File "/Users/taozhiyu/Downloads/OpenDevin/opendevin/server/listen.py", line 13, in
import agenthub # noqa F401 (we import this to get the agents registered)
^^^^^^^^^^^^^^^
File "/Users/taozhiyu/Downloads/OpenDevin/agenthub/init.py", line 5, in
from . import monologue_agent # noqa: E402
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
File "/Users/taozhiyu/Downloads/OpenDevin/agenthub/monologue_agent/init.py", line 2, in
from .agent import MonologueAgent
File "/Users/taozhiyu/Downloads/OpenDevin/agenthub/monologue_agent/agent.py", line 30, in
from agenthub.monologue_agent.utils.memory import LongTermMemory
File "/Users/taozhiyu/Downloads/OpenDevin/agenthub/monologue_agent/utils/memory.py", line 39, in
embed_model = HuggingFaceEmbedding(
^^^^^^^^^^^^^^^^^^^^^
File "/Users/taozhiyu/miniconda3/envs/opendevin/lib/python3.11/site-packages/llama_index/embeddings/huggingface/base.py", line 86, in init
self._model = SentenceTransformer(
^^^^^^^^^^^^^^^^^^^^
File "/Users/taozhiyu/miniconda3/envs/opendevin/lib/python3.11/site-packages/sentence_transformers/SentenceTransformer.py", line 199, in init
modules = self._load_auto_model(
^^^^^^^^^^^^^^^^^^^^^^
File "/Users/taozhiyu/miniconda3/envs/opendevin/lib/python3.11/site-packages/sentence_transformers/SentenceTransformer.py", line 1134, in _load_auto_model
transformer_model = Transformer(
^^^^^^^^^^^^
File "/Users/taozhiyu/miniconda3/envs/opendevin/lib/python3.11/site-packages/sentence_transformers/models/Transformer.py", line 35, in init
config = AutoConfig.from_pretrained(model_name_or_path, **model_args, cache_dir=cache_dir)
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
File "/Users/taozhiyu/miniconda3/envs/opendevin/lib/python3.11/site-packages/transformers/models/auto/configuration_auto.py", line 1138, in from_pretrained
config_dict, unused_kwargs = PretrainedConfig.get_config_dict(pretrained_model_name_or_path, **kwargs)
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
File "/Users/taozhiyu/miniconda3/envs/opendevin/lib/python3.11/site-packages/transformers/configuration_utils.py", line 631, in get_config_dict
config_dict, kwargs = cls._get_config_dict(pretrained_model_name_or_path, **kwargs)
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
File "/Users/taozhiyu/miniconda3/envs/opendevin/lib/python3.11/site-packages/transformers/configuration_utils.py", line 707, in _get_config_dict
raise EnvironmentError(
ERROR:root:<class 'OSError'>: Can't load the configuration of 'BAAI/bge-small-en-v1.5'. If you were trying to load it from 'https://huggingface.co/models', make sure you don't have a local directory with the same name. Otherwise, make sure 'BAAI/bge-small-en-v1.5' is the correct path to a directory containing a config.json file
Then, in agenthub/monologue_agent/utils/memory.py (around line 43), replace model_name='BAAI/bge-small-en-v1.5' in
from llama_index.embeddings.huggingface import HuggingFaceEmbedding
embed_model = HuggingFaceEmbedding(
model_name='BAAI/bge-small-en-v1.5'
)
with model_name=<your-absolute-model-path>, e.g. /Users/taozhiyu/miniconda3/envs/opendevin/bge-small-en-v1.5
By the way, when using HuggingFaceEmbedding, the model is stored in the following directory structure, so the actual model path is /tmp/llama_index/models--BAAI--bge-small-en-v1.5/snapshots/5c38ec7c405ec4b44b94cc5a9bb96e735b38267a