phidata
phidata copied to clipboard
Ollama gives errors when an Agent uses multiple tools.
Hey, Thanks for the amazing work.
I am facing an issue with Ollama when using tools: with OpenAIChat everything works, but when I use Ollama with Agents I get the errors shown below.
I tried using an Agent with an Ollama model and 2 different tools, and both gave issues.
- WebAgent: Reproducible code:
# --- Imports ---
# NOTE(review): several of these (json, Optional, OpenAIChat, dedent, PgVector,
# PostgresTools, CSVKnowledgeBase, OpenAIEmbedder) are unused in this repro and
# could be removed for a minimal example.
import json
from typing import Optional
from phi.model.openai import OpenAIChat
from textwrap import dedent
from pathlib import Path
from phi.vectordb.pgvector import PgVector
from phi.tools.duckduckgo import DuckDuckGo
from phi.agent import Agent
from phi.tools.postgres import PostgresTools
from phi.storage.agent.sqlite import SqlAgentStorage
from phi.playground import Playground, serve_playground_app
from phi.knowledge.csv import CSVKnowledgeBase
from reranker import BGEFlagReranker
from phi.embedder.openai import OpenAIEmbedder
# from phi.vectordb.chroma import ChromaDb
from dotenv import load_dotenv
from phi.model.ollama import Ollama
# from phi.llm.ollama import OllamaTools
# Load environment variables (API keys, DB credentials, ...) from a .env file.
load_dotenv()
# Create a ./storage directory next to this script for the SQLite session DB.
cwd = Path(__file__).parent
storage_dir = cwd.joinpath("storage")
storage_dir.mkdir(parents=True, exist_ok=True)
# Storage setup
agent_storage = SqlAgentStorage(
table_name="mygpt_sessions",
db_file=str(storage_dir.joinpath("mygpt.db"))
)
# Initialize reranker
# NOTE(review): reranker is constructed but never used by the agent below.
reranker = BGEFlagReranker(model_name="BAAI/bge-reranker-base")
# Instruction presets for the agents in this demo.
web_instructions = 'Always include sources'
finance_instructions = 'Use tables to display data'

# Web-search agent: a local Ollama model with a DuckDuckGo search tool.
web_agent = Agent(
    name="Web Agent",
    role="Search the web for information",
    model=Ollama(id="qwen2.5:7b"),
    tools=[DuckDuckGo()],
    instructions=[web_instructions],
    markdown=True,
    show_tool_calls=True,
)

# Wrap the agent in a playground app so it can be served over HTTP.
app = Playground(agents=[web_agent]).get_app()
# Entry point: serve the playground app with auto-reload.
# Fix: the call below must be indented under the `if` — the pasted snippet had
# it at column 0, which is a SyntaxError.
if __name__ == "__main__":
    serve_playground_app("reproducable:app", reload=True)
No errors are reported, but in the UI I get no answer — only the raw tool-call text:
<tool_call> {"name": "duckduckgo_news", "arguments": {"query": "today", "max_results": 5}} </tool_call>
- SQLAgent:
This is a bigger problem.
Reproducible code:
# --- Imports ---
# NOTE(review): several of these (json, Optional, OpenAIChat, dedent, PgVector,
# DuckDuckGo, CSVKnowledgeBase, OpenAIEmbedder) are unused in this repro and
# could be removed for a minimal example.
import json
from typing import Optional
from phi.model.openai import OpenAIChat
from textwrap import dedent
from pathlib import Path
from phi.vectordb.pgvector import PgVector
from phi.tools.duckduckgo import DuckDuckGo
from phi.agent import Agent
from phi.tools.postgres import PostgresTools
from phi.storage.agent.sqlite import SqlAgentStorage
from phi.playground import Playground, serve_playground_app
from phi.knowledge.csv import CSVKnowledgeBase
from reranker import BGEFlagReranker
from phi.embedder.openai import OpenAIEmbedder
# from phi.vectordb.chroma import ChromaDb
from dotenv import load_dotenv
from phi.model.ollama import Ollama
# from phi.llm.ollama import OllamaTools
# Load environment variables (API keys, DB credentials, ...) from a .env file.
load_dotenv()
## PG Vector
# NOTE(review): db_url is defined but unused in this snippet.
db_url = "postgresql+psycopg://ai:ai@localhost:5432/ai"
# Create a ./storage directory next to this script for the SQLite session DB.
cwd = Path(__file__).parent
storage_dir = cwd.joinpath("storage")
storage_dir.mkdir(parents=True, exist_ok=True)
# Storage setup
agent_storage = SqlAgentStorage(
table_name="mygpt_sessions",
db_file=str(storage_dir.joinpath("mygpt.db"))
)
# Initialize reranker
# NOTE(review): reranker is constructed but never used by the agent below.
reranker = BGEFlagReranker(model_name="BAAI/bge-reranker-base")
def get_sql_agent() -> Agent:
    """Create an agent for database operations.

    Builds a PostgresTools toolkit from TARGET_DB_CONFIG and wraps it in an
    Agent backed by a local Ollama model.

    Returns:
        Agent: an agent configured to execute and analyze SQL queries.

    Fix: the function body was pasted at column 0 (a SyntaxError); it is
    re-indented here.
    """
    # NOTE(review): TARGET_DB_CONFIG is never defined in this snippet — supply
    # a dict with "dbname"/"user"/"password"/"host"/"port" keys (e.g. from
    # environment variables) or this raises NameError at call time.
    postgres_tools = PostgresTools(
        db_name=TARGET_DB_CONFIG["dbname"],
        user=TARGET_DB_CONFIG["user"],
        password=TARGET_DB_CONFIG["password"],
        host=TARGET_DB_CONFIG["host"],
        port=TARGET_DB_CONFIG["port"],
        run_queries=True,
        inspect_queries=True,
        summarize_tables=True,
    )
    return Agent(
        name="SQL Assistant",
        agent_id="sql_assistant",
        role="Database Expert",
        # model=OpenAIChat(id="gpt-4o-mini"),
        model=Ollama(id="qwen2.5:7b"),
        storage=agent_storage,
        show_tool_calls=True,
        tools=[postgres_tools],
        use_tools=True,
        debug_mode=True,
        markdown=True,
        description="I execute SQL queries and analyze results.",
        instructions=[
            "When handling queries:",
            "1. Execute the appropriate SQL query immediately",
            "2. Show the query and its results",
            "3. Analyze the actual data returned",
            "4. Present findings based only on the data",
            "Important rules:",
            "- Never suggest what queries to run",
            "- Don't ask for clarification",
            "- Always execute a query",
            "- Base analysis only on returned data",
            "- Never end with questions or suggestions",
        ],
    )
# Create the team
sqlAgent = get_sql_agent()
# sqlAgent.print_response("What are all the tables?", stream=True)

# Create the playground app around the SQL agent.
app = Playground(agents=[sqlAgent]).get_app()

# Entry point: serve the playground app with auto-reload.
# Fix: the call below must be indented under the `if` — the pasted snippet had
# it at column 0, which is a SyntaxError.
if __name__ == "__main__":
    serve_playground_app("reproducable:app", reload=True)
Error:
DEBUG *********** Session ID: 6eadf4df-dbe5-4405-b4f7-afdf3a7c488d ***********
DEBUG Debug logs enabled
INFO: Started server process [254177]
INFO: Waiting for application startup.
INFO: Application startup complete.
INFO: 127.0.0.1:58778 - "OPTIONS /v1/playground/status HTTP/1.1" 200 OK
INFO: 127.0.0.1:58778 - "GET /v1/playground/status HTTP/1.1" 200 OK
INFO: 127.0.0.1:58778 - "OPTIONS /v1/playground/agent/sessions/8aac5756-6221-4bee-8a67-0e08513b2205 HTTP/1.1" 200 OK
DEBUG AgentSessionsRequest: agent_id='5625f52e-186d-4362-a500-4cbc53fd9113' user_id=None
INFO: 127.0.0.1:58778 - "POST /v1/playground/agent/sessions/8aac5756-6221-4bee-8a67-0e08513b2205 HTTP/1.1" 404 Not Found
INFO: 127.0.0.1:58778 - "GET /v1/playground/agent/get HTTP/1.1" 200 OK
DEBUG AgentRunRequest: message='What are all my tables?' agent_id='sql_assistant' stream=True monitor=False session_id=None user_id='alaapdhall79_743d' image=None
DEBUG Creating new session
DEBUG *********** Agent ID: sql_assistant ***********
DEBUG *********** Session ID: ea436cf4-07ec-4678-80d5-3dfc9183ddca ***********
DEBUG Debug logs enabled
DEBUG Created new Agent: agent_id: sql_assistant | session_id: ea436cf4-07ec-4678-80d5-3dfc9183ddca
INFO: 127.0.0.1:42514 - "POST /v1/playground/agent/run HTTP/1.1" 200 OK
DEBUG *********** Async Agent Run Start: 00caf82d-bf27-4ab2-8433-f8e2e90c7f18 ***********
DEBUG Function show_tables from postgres_tools added to model.
DEBUG Function describe_table from postgres_tools added to model.
DEBUG Function inspect_query from postgres_tools added to model.
DEBUG Function run_query from postgres_tools added to model.
DEBUG Function summarize_table from postgres_tools added to model.
DEBUG ---------- Ollama Async Response Start ----------
DEBUG ============== system ==============
DEBUG I execute SQL queries and analyze results.
Your role is: Database Expert
## Instructions
- When handling queries:
- 1. Execute the appropriate SQL query immediately
- 2. Show the query and its results
- 3. Analyze the actual data returned
- 4. Present findings based only on the data
- Important rules:
- - Never suggest what queries to run
- - Don't ask for clarification
- - Always execute a query
- - Base analysis only on returned data
- - Never end with questions or suggestions
- Use markdown to format your answers.
DEBUG ============== user ==============
DEBUG What are all my tables?
ERROR: Exception in ASGI application
Traceback (most recent call last):
File "/home/alaap/miniconda3/envs/envPy310/lib/python3.10/site-packages/starlette/responses.py", line 259, in __call__
await wrap(partial(self.listen_for_disconnect, receive))
File "/home/alaap/miniconda3/envs/envPy310/lib/python3.10/site-packages/starlette/responses.py", line 255, in wrap
await func()
File "/home/alaap/miniconda3/envs/envPy310/lib/python3.10/site-packages/starlette/responses.py", line 232, in listen_for_disconnect
message = await receive()
File "/home/alaap/miniconda3/envs/envPy310/lib/python3.10/site-packages/uvicorn/protocols/http/httptools_impl.py", line 555, in receive
await self.message_event.wait()
File "/home/alaap/miniconda3/envs/envPy310/lib/python3.10/asyncio/locks.py", line 214, in wait
await fut
asyncio.exceptions.CancelledError: Cancelled by cancel scope 77c528696080
During handling of the above exception, another exception occurred:
+ Exception Group Traceback (most recent call last):
| File "/home/alaap/miniconda3/envs/envPy310/lib/python3.10/site-packages/uvicorn/protocols/http/httptools_impl.py", line 401, in run_asgi
| result = await app( # type: ignore[func-returns-value]
| File "/home/alaap/miniconda3/envs/envPy310/lib/python3.10/site-packages/uvicorn/middleware/proxy_headers.py", line 60, in __call__
| return await self.app(scope, receive, send)
| File "/home/alaap/miniconda3/envs/envPy310/lib/python3.10/site-packages/fastapi/applications.py", line 1054, in __call__
| await super().__call__(scope, receive, send)
| File "/home/alaap/miniconda3/envs/envPy310/lib/python3.10/site-packages/starlette/applications.py", line 113, in __call__
| await self.middleware_stack(scope, receive, send)
| File "/home/alaap/miniconda3/envs/envPy310/lib/python3.10/site-packages/starlette/middleware/errors.py", line 187, in __call__
| raise exc
| File "/home/alaap/miniconda3/envs/envPy310/lib/python3.10/site-packages/starlette/middleware/errors.py", line 165, in __call__
| await self.app(scope, receive, _send)
| File "/home/alaap/miniconda3/envs/envPy310/lib/python3.10/site-packages/starlette/middleware/cors.py", line 93, in __call__
| await self.simple_response(scope, receive, send, request_headers=headers)
| File "/home/alaap/miniconda3/envs/envPy310/lib/python3.10/site-packages/starlette/middleware/cors.py", line 144, in simple_response
| await self.app(scope, receive, send)
| File "/home/alaap/miniconda3/envs/envPy310/lib/python3.10/site-packages/starlette/middleware/exceptions.py", line 62, in __call__
| await wrap_app_handling_exceptions(self.app, conn)(scope, receive, send)
| File "/home/alaap/miniconda3/envs/envPy310/lib/python3.10/site-packages/starlette/_exception_handler.py", line 53, in wrapped_app
| raise exc
| File "/home/alaap/miniconda3/envs/envPy310/lib/python3.10/site-packages/starlette/_exception_handler.py", line 42, in wrapped_app
| await app(scope, receive, sender)
| File "/home/alaap/miniconda3/envs/envPy310/lib/python3.10/site-packages/starlette/routing.py", line 715, in __call__
| await self.middleware_stack(scope, receive, send)
| File "/home/alaap/miniconda3/envs/envPy310/lib/python3.10/site-packages/starlette/routing.py", line 735, in app
| await route.handle(scope, receive, send)
| File "/home/alaap/miniconda3/envs/envPy310/lib/python3.10/site-packages/starlette/routing.py", line 288, in handle
| await self.app(scope, receive, send)
| File "/home/alaap/miniconda3/envs/envPy310/lib/python3.10/site-packages/starlette/routing.py", line 76, in app
| await wrap_app_handling_exceptions(app, request)(scope, receive, send)
| File "/home/alaap/miniconda3/envs/envPy310/lib/python3.10/site-packages/starlette/_exception_handler.py", line 53, in wrapped_app
| raise exc
| File "/home/alaap/miniconda3/envs/envPy310/lib/python3.10/site-packages/starlette/_exception_handler.py", line 42, in wrapped_app
| await app(scope, receive, sender)
| File "/home/alaap/miniconda3/envs/envPy310/lib/python3.10/site-packages/starlette/routing.py", line 74, in app
| await response(scope, receive, send)
| File "/home/alaap/miniconda3/envs/envPy310/lib/python3.10/site-packages/starlette/responses.py", line 252, in __call__
| async with anyio.create_task_group() as task_group:
| File "/home/alaap/miniconda3/envs/envPy310/lib/python3.10/site-packages/anyio/_backends/_asyncio.py", line 736, in __aexit__
| raise BaseExceptionGroup(
| exceptiongroup.ExceptionGroup: unhandled errors in a TaskGroup (1 sub-exception)
+-+---------------- 1 ----------------
| Traceback (most recent call last):
| File "/home/alaap/miniconda3/envs/envPy310/lib/python3.10/site-packages/starlette/responses.py", line 255, in wrap
| await func()
| File "/home/alaap/miniconda3/envs/envPy310/lib/python3.10/site-packages/starlette/responses.py", line 244, in stream_response
| async for chunk in self.body_iterator:
| File "/home/alaap/miniconda3/envs/envPy310/lib/python3.10/site-packages/phi/playground/router.py", line 400, in chat_response_streamer
| async for run_response_chunk in run_response:
| File "/home/alaap/miniconda3/envs/envPy310/lib/python3.10/site-packages/phi/agent/agent.py", line 2134, in _arun
| async for model_response_chunk in model_response_stream: # type: ignore
| File "/home/alaap/miniconda3/envs/envPy310/lib/python3.10/site-packages/phi/model/ollama/chat.py", line 667, in aresponse_stream
| async for response in self.ainvoke_stream(messages=messages):
| File "/home/alaap/miniconda3/envs/envPy310/lib/python3.10/site-packages/phi/model/ollama/chat.py", line 253, in ainvoke_stream
| async_stream = await self.get_async_client().chat(
| File "/home/alaap/miniconda3/envs/envPy310/lib/python3.10/site-packages/ollama/_client.py", line 841, in chat
| tools=[tool for tool in _copy_tools(tools)],
| File "/home/alaap/miniconda3/envs/envPy310/lib/python3.10/site-packages/ollama/_client.py", line 841, in <listcomp>
| tools=[tool for tool in _copy_tools(tools)],
| File "/home/alaap/miniconda3/envs/envPy310/lib/python3.10/site-packages/ollama/_client.py", line 1133, in _copy_tools
| yield convert_function_to_tool(unprocessed_tool) if callable(unprocessed_tool) else Tool.model_validate(unprocessed_tool)
| File "/home/alaap/miniconda3/envs/envPy310/lib/python3.10/site-packages/pydantic/main.py", line 596, in model_validate
| return cls.__pydantic_validator__.validate_python(
| pydantic_core._pydantic_core.ValidationError: 1 validation error for Tool
| function.parameters.properties.table_schema.type
| Input should be a valid string [type=string_type, input_value=['string', 'null'], input_type=list]
| For further information visit https://errors.pydantic.dev/2.9/v/string_type
+------------------------------------
Is this a bug that needs to be fixed, or am I doing something wrong?
Please also review the code and let me know if I misused anything — I only got started with phidata 2 days ago.
Any help would be appreciated, thanks.