langserve
langserve copied to clipboard
Local LLM example with ollama
Hi team,
I'm trying to create an agent which uses a tool that fetches some info from a REST API. The agent takes the tool output and presents it back to the user based on a system prompt.
I looked over your examples, but I couldn't find an exact match: looking at the examples there is a local LLM example but without an agent, and I couldn't make it work.
Can you help me create a working example which runs an agent with a local LLM deployed with Ollama?
`#!/usr/bin/env python """Example LangChain Server that runs a local llm.
Attention This is OK for prototyping / dev usage, but should not be used for production cases when there might be concurrent requests from different users. As of the time of writing, Ollama is designed for single user and cannot handle concurrent requests see this issue: https://github.com/ollama/ollama/issues/358
When deploying local models, make sure you understand whether the model is able to handle concurrent requests or not. If concurrent requests are not handled properly, the server will either crash or will just not be able to handle more than one user at a time. """ from typing import Any
from fastapi import FastAPI from langchain.agents import AgentExecutor from langchain.globals import set_debug from langchain_core.output_parsers import StrOutputParser from langchain_core.prompts import ChatPromptTemplate from langchain_ollama import ChatOllama from langserve import add_routes from pydantic import BaseModel
from tool_custom import get_test_results_tool
set_debug(True)
app = FastAPI( title="LangChain Server", version="1.0", description="Spin up a simple api server using Langchain's Runnable interfaces", )
tools = [get_test_results_tool]
llm = ChatOllama(model="llama3") #or deepseek, doesnt matter llm_with_tools = llm.bind(functions=[tools])
add_routes( app, llm, path="/ollama", )
We need to add these input/output schemas because the current AgentExecutor
is lacking in schemas.
class Input(BaseModel): input: str
class Output(BaseModel): output: Any
prompt = ChatPromptTemplate.from_messages( [ ("system", "You are a helpful assistant."), # Please note that the ordering of the user input vs. # the agent_scratchpad is important. # The agent_scratchpad is a working space for the agent to think, # invoke tools, see tools outputs in order to respond to the given # user input. It has to come AFTER the user input. ("user", "{input}")] )
agent = ( { "input": lambda x: x["input"] } | prompt | llm_with_tools )
agent_executor = AgentExecutor(agent=agent, tools=tools)
add_routes( app, agent_executor.with_types(input_type=Input, output_type=Output).with_config( {"run_name": "agent"} ), path="/agent" )
if name == "main": import uvicorn
uvicorn.run(app, host="localhost", port=8000)
`
Received stacktrace:
[llm/error] [chain:/agent > chain:RunnableSequence > llm:ChatOllama] [28ms] LLM run errored with error: "TypeError(\"AsyncClient.chat() got an unexpected keyword argument 'functions'\")Traceback (most recent call last):\n\n\n File \"C:\\Users\\cdinuta\\PycharmProjects\\TestResultsResearchTeam\\.venv\\Lib\\site-packages\\langchain_core\\language_models\\chat_models.py\", line 510, in astream\n async for chunk in self._astream(\n\n\n File \"C:\\Users\\cdinuta\\PycharmProjects\\TestResultsResearchTeam\\.venv\\Lib\\site-packages\\langchain_ollama\\chat_models.py\", line 755, in _astream\n async for stream_resp in self._acreate_chat_stream(messages, stop, **kwargs):\n\n\n File \"C:\\Users\\cdinuta\\PycharmProjects\\TestResultsResearchTeam\\.venv\\Lib\\site-packages\\langchain_ollama\\chat_models.py\", line 575, in _acreate_chat_stream\n async for part in await self._async_client.chat(**chat_params):\n ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^\n\n\nTypeError: AsyncClient.chat() got an unexpected keyword argument 'functions'" [chain/error] [chain:/agent > chain:RunnableSequence] [87ms] Chain run errored with error: "TypeError(\"AsyncClient.chat() got an unexpected keyword argument 'functions'\")Traceback (most recent call last):\n\n\n File \"C:\\Users\\cdinuta\\PycharmProjects\\TestResultsResearchTeam\\.venv\\Lib\\site-packages\\langchain_core\\runnables\\base.py\", line 2308, in _atransform_stream_with_config\n chunk: Output = await asyncio.create_task( # type: ignore[call-arg]\n ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^\n\n\n File \"C:\\Users\\cdinuta\\PycharmProjects\\TestResultsResearchTeam\\.venv\\Lib\\site-packages\\langchain_core\\runnables\\base.py\", line 3392, in _atransform\n async for output in final_pipeline:\n\n\n File \"C:\\Users\\cdinuta\\PycharmProjects\\TestResultsResearchTeam\\.venv\\Lib\\site-packages\\langchain_core\\runnables\\base.py\", line 5584, in atransform\n async for item in self.bound.atransform(\n\n\n File 
\"C:\\Users\\cdinuta\\PycharmProjects\\TestResultsResearchTeam\\.venv\\Lib\\site-packages\\langchain_core\\runnables\\base.py\", line 1473, in atransform\n async for output in self.astream(final, config, **kwargs):\n\n\n File \"C:\\Users\\cdinuta\\PycharmProjects\\TestResultsResearchTeam\\.venv\\Lib\\site-packages\\langchain_core\\language_models\\chat_models.py\", line 510, in astream\n async for chunk in self._astream(\n\n\n File \"C:\\Users\\cdinuta\\PycharmProjects\\TestResultsResearchTeam\\.venv\\Lib\\site-packages\\langchain_ollama\\chat_models.py\", line 755, in _astream\n async for stream_resp in self._acreate_chat_stream(messages, stop, **kwargs):\n\n\n File \"C:\\Users\\cdinuta\\PycharmProjects\\TestResultsResearchTeam\\.venv\\Lib\\site-packages\\langchain_ollama\\chat_models.py\", line 575, in _acreate_chat_stream\n async for part in await self._async_client.chat(**chat_params):\n ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^\n\n\nTypeError: AsyncClient.chat() got an unexpected keyword argument 'functions'" [chain/error] [chain:/agent] [113ms] Chain run errored with error: "TypeError(\"AsyncClient.chat() got an unexpected keyword argument 'functions'\")Traceback (most recent call last):\n\n\n File \"C:\\Users\\cdinuta\\PycharmProjects\\TestResultsResearchTeam\\.venv\\Lib\\site-packages\\langchain\\chains\\base.py\", line 212, in ainvoke\n await self._acall(inputs, run_manager=run_manager)\n\n\n File \"C:\\Users\\cdinuta\\PycharmProjects\\TestResultsResearchTeam\\.venv\\Lib\\site-packages\\langchain\\agents\\agent.py\", line 1673, in _acall\n next_step_output = await self._atake_next_step(\n ^^^^^^^^^^^^^^^^^^^^^^^^^^^^\n\n\n File \"C:\\Users\\cdinuta\\PycharmProjects\\TestResultsResearchTeam\\.venv\\Lib\\site-packages\\langchain\\agents\\agent.py\", line 1467, in _atake_next_step\n [\n\n\n File \"C:\\Users\\cdinuta\\PycharmProjects\\TestResultsResearchTeam\\.venv\\Lib\\site-packages\\langchain\\agents\\agent.py\", line 1495, in _aiter_next_step\n output = await 
self._action_agent.aplan(\n ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^\n\n\n File \"C:\\Users\\cdinuta\\PycharmProjects\\TestResultsResearchTeam\\.venv\\Lib\\site-packages\\langchain\\agents\\agent.py\", line 504, in aplan\n async for chunk in self.runnable.astream(\n\n\n File \"C:\\Users\\cdinuta\\PycharmProjects\\TestResultsResearchTeam\\.venv\\Lib\\site-packages\\langchain_core\\runnables\\base.py\", line 3439, in astream\n async for chunk in self.atransform(input_aiter(), config, **kwargs):\n\n\n File \"C:\\Users\\cdinuta\\PycharmProjects\\TestResultsResearchTeam\\.venv\\Lib\\site-packages\\langchain_core\\runnables\\base.py\", line 3422, in atransform\n async for chunk in self._atransform_stream_with_config(\n\n\n File \"C:\\Users\\cdinuta\\PycharmProjects\\TestResultsResearchTeam\\.venv\\Lib\\site-packages\\langchain_core\\runnables\\base.py\", line 2308, in _atransform_stream_with_config\n chunk: Output = await asyncio.create_task( # type: ignore[call-arg]\n ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^\n\n\n File \"C:\\Users\\cdinuta\\PycharmProjects\\TestResultsResearchTeam\\.venv\\Lib\\site-packages\\langchain_core\\runnables\\base.py\", line 3392, in _atransform\n async for output in final_pipeline:\n\n\n File \"C:\\Users\\cdinuta\\PycharmProjects\\TestResultsResearchTeam\\.venv\\Lib\\site-packages\\langchain_core\\runnables\\base.py\", line 5584, in atransform\n async for item in self.bound.atransform(\n\n\n File \"C:\\Users\\cdinuta\\PycharmProjects\\TestResultsResearchTeam\\.venv\\Lib\\site-packages\\langchain_core\\runnables\\base.py\", line 1473, in atransform\n async for output in self.astream(final, config, **kwargs):\n\n\n File \"C:\\Users\\cdinuta\\PycharmProjects\\TestResultsResearchTeam\\.venv\\Lib\\site-packages\\langchain_core\\language_models\\chat_models.py\", line 510, in astream\n async for chunk in self._astream(\n\n\n File 
\"C:\\Users\\cdinuta\\PycharmProjects\\TestResultsResearchTeam\\.venv\\Lib\\site-packages\\langchain_ollama\\chat_models.py\", line 755, in _astream\n async for stream_resp in self._acreate_chat_stream(messages, stop, **kwargs):\n\n\n File \"C:\\Users\\cdinuta\\PycharmProjects\\TestResultsResearchTeam\\.venv\\Lib\\site-packages\\langchain_ollama\\chat_models.py\", line 575, in _acreate_chat_stream\n async for part in await self._async_client.chat(**chat_params):\n ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^\n\n\nTypeError: AsyncClient.chat() got an unexpected keyword argument 'functions'" INFO: ::1:62974 - "POST /agent/invoke HTTP/1.1" 500 Internal Server Error ERROR: Exception in ASGI application Traceback (most recent call last): File "C:\Users\cdinuta\PycharmProjects\TestResultsResearchTeam\.venv\Lib\site-packages\uvicorn\protocols\http\h11_impl.py", line 406, in run_asgi result = await app( # type: ignore[func-returns-value] ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ File "C:\Users\cdinuta\PycharmProjects\TestResultsResearchTeam\.venv\Lib\site-packages\uvicorn\middleware\proxy_headers.py", line 60, in __call__ return await self.app(scope, receive, send) ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ File "C:\Users\cdinuta\PycharmProjects\TestResultsResearchTeam\.venv\Lib\site-packages\fastapi\applications.py", line 1054, in __call__ await super().__call__(scope, receive, send) File "C:\Users\cdinuta\PycharmProjects\TestResultsResearchTeam\.venv\Lib\site-packages\starlette\applications.py", line 113, in __call__ await self.middleware_stack(scope, receive, send) File "C:\Users\cdinuta\PycharmProjects\TestResultsResearchTeam\.venv\Lib\site-packages\starlette\middleware\errors.py", line 187, in __call__ raise exc File "C:\Users\cdinuta\PycharmProjects\TestResultsResearchTeam\.venv\Lib\site-packages\starlette\middleware\errors.py", line 165, in __call__ await self.app(scope, receive, _send) File 
"C:\Users\cdinuta\PycharmProjects\TestResultsResearchTeam\.venv\Lib\site-packages\starlette\middleware\exceptions.py", line 62, in __call__ await wrap_app_handling_exceptions(self.app, conn)(scope, receive, send) File "C:\Users\cdinuta\PycharmProjects\TestResultsResearchTeam\.venv\Lib\site-packages\starlette\_exception_handler.py", line 53, in wrapped_app raise exc File "C:\Users\cdinuta\PycharmProjects\TestResultsResearchTeam\.venv\Lib\site-packages\starlette\_exception_handler.py", line 42, in wrapped_app await app(scope, receive, sender) File "C:\Users\cdinuta\PycharmProjects\TestResultsResearchTeam\.venv\Lib\site-packages\starlette\routing.py", line 715, in __call__ await self.middleware_stack(scope, receive, send) File "C:\Users\cdinuta\PycharmProjects\TestResultsResearchTeam\.venv\Lib\site-packages\starlette\routing.py", line 735, in app await route.handle(scope, receive, send) File "C:\Users\cdinuta\PycharmProjects\TestResultsResearchTeam\.venv\Lib\site-packages\starlette\routing.py", line 288, in handle await self.app(scope, receive, send) File "C:\Users\cdinuta\PycharmProjects\TestResultsResearchTeam\.venv\Lib\site-packages\starlette\routing.py", line 76, in app await wrap_app_handling_exceptions(app, request)(scope, receive, send) File "C:\Users\cdinuta\PycharmProjects\TestResultsResearchTeam\.venv\Lib\site-packages\starlette\_exception_handler.py", line 53, in wrapped_app raise exc File "C:\Users\cdinuta\PycharmProjects\TestResultsResearchTeam\.venv\Lib\site-packages\starlette\_exception_handler.py", line 42, in wrapped_app await app(scope, receive, sender) File "C:\Users\cdinuta\PycharmProjects\TestResultsResearchTeam\.venv\Lib\site-packages\starlette\routing.py", line 73, in app response = await f(request) ^^^^^^^^^^^^^^^^ File "C:\Users\cdinuta\PycharmProjects\TestResultsResearchTeam\.venv\Lib\site-packages\fastapi\routing.py", line 301, in app raw_response = await run_endpoint_function( ^^^^^^^^^^^^^^^^^^^^^^^^^^^^ File 
"C:\Users\cdinuta\PycharmProjects\TestResultsResearchTeam\.venv\Lib\site-packages\fastapi\routing.py", line 212, in run_endpoint_function return await dependant.call(**values) ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ File "C:\Users\cdinuta\PycharmProjects\TestResultsResearchTeam\.venv\Lib\site-packages\langserve\server.py", line 503, in invoke return await api_handler.invoke(request) ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ File "C:\Users\cdinuta\PycharmProjects\TestResultsResearchTeam\.venv\Lib\site-packages\langserve\api_handler.py", line 896, in invoke output = await invoke_coro ^^^^^^^^^^^^^^^^^ File "C:\Users\cdinuta\PycharmProjects\TestResultsResearchTeam\.venv\Lib\site-packages\langchain_core\runnables\base.py", line 5377, in ainvoke return await self.bound.ainvoke( ^^^^^^^^^^^^^^^^^^^^^^^^^ File "C:\Users\cdinuta\PycharmProjects\TestResultsResearchTeam\.venv\Lib\site-packages\langchain\chains\base.py", line 221, in ainvoke raise e File "C:\Users\cdinuta\PycharmProjects\TestResultsResearchTeam\.venv\Lib\site-packages\langchain\chains\base.py", line 212, in ainvoke await self._acall(inputs, run_manager=run_manager) File "C:\Users\cdinuta\PycharmProjects\TestResultsResearchTeam\.venv\Lib\site-packages\langchain\agents\agent.py", line 1673, in _acall next_step_output = await self._atake_next_step( ^^^^^^^^^^^^^^^^^^^^^^^^^^^^ File "C:\Users\cdinuta\PycharmProjects\TestResultsResearchTeam\.venv\Lib\site-packages\langchain\agents\agent.py", line 1467, in _atake_next_step [ File "C:\Users\cdinuta\PycharmProjects\TestResultsResearchTeam\.venv\Lib\site-packages\langchain\agents\agent.py", line 1495, in _aiter_next_step output = await self._action_agent.aplan( ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ File "C:\Users\cdinuta\PycharmProjects\TestResultsResearchTeam\.venv\Lib\site-packages\langchain\agents\agent.py", line 504, in aplan async for chunk in self.runnable.astream( File "C:\Users\cdinuta\PycharmProjects\TestResultsResearchTeam\.venv\Lib\site-packages\langchain_core\runnables\base.py", 
line 3439, in astream async for chunk in self.atransform(input_aiter(), config, **kwargs): File "C:\Users\cdinuta\PycharmProjects\TestResultsResearchTeam\.venv\Lib\site-packages\langchain_core\runnables\base.py", line 3422, in atransform async for chunk in self._atransform_stream_with_config( File "C:\Users\cdinuta\PycharmProjects\TestResultsResearchTeam\.venv\Lib\site-packages\langchain_core\runnables\base.py", line 2308, in _atransform_stream_with_config chunk: Output = await asyncio.create_task( # type: ignore[call-arg] ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ File "C:\Users\cdinuta\PycharmProjects\TestResultsResearchTeam\.venv\Lib\site-packages\langchain_core\runnables\base.py", line 3392, in _atransform async for output in final_pipeline: File "C:\Users\cdinuta\PycharmProjects\TestResultsResearchTeam\.venv\Lib\site-packages\langchain_core\runnables\base.py", line 5584, in atransform async for item in self.bound.atransform( File "C:\Users\cdinuta\PycharmProjects\TestResultsResearchTeam\.venv\Lib\site-packages\langchain_core\runnables\base.py", line 1473, in atransform async for output in self.astream(final, config, **kwargs): File "C:\Users\cdinuta\PycharmProjects\TestResultsResearchTeam\.venv\Lib\site-packages\langchain_core\language_models\chat_models.py", line 510, in astream async for chunk in self._astream( File "C:\Users\cdinuta\PycharmProjects\TestResultsResearchTeam\.venv\Lib\site-packages\langchain_ollama\chat_models.py", line 755, in _astream async for stream_resp in self._acreate_chat_stream(messages, stop, **kwargs): File "C:\Users\cdinuta\PycharmProjects\TestResultsResearchTeam\.venv\Lib\site-packages\langchain_ollama\chat_models.py", line 575, in _acreate_chat_stream async for part in await self._async_client.chat(**chat_params): ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ TypeError: AsyncClient.chat() got an unexpected keyword argument 'functions'