Enable tool support for ollama
Adds basic support for tools with ollama v0.3, tested with the hairbrush example:
```elixir
alias LangChain.Function
alias LangChain.Message
alias LangChain.Chains.LLMChain
alias LangChain.ChatModels.ChatOllamaAI

# map of data we want to be passed as `context` to the function when
# executed.
custom_context = %{
  "user_id" => 123,
  "hairbrush" => "drawer",
  "dog" => "backyard",
  "sandwich" => "kitchen"
}

# a custom Elixir function made available to the LLM
custom_fn =
  Function.new!(%{
    name: "custom",
    description: "Returns the location of the requested element or item.",
    parameters_schema: %{
      type: "object",
      properties: %{
        thing: %{
          type: "string",
          description: "The thing whose location is being requested."
        }
      },
      required: ["thing"]
    },
    function: fn %{"thing" => thing} = _arguments, context ->
      # our context is a pretend item/location map
      {:ok, context[thing]}
    end
  })

# create and run the chain
{:ok, updated_chain, %Message{} = message} =
  LLMChain.new!(%{
    llm: ChatOllamaAI.new!(%{model: "llama3.1", verbose: true}),
    custom_context: custom_context,
    verbose: true
  })
  |> LLMChain.add_tools(custom_fn)
  |> LLMChain.add_message(Message.new_user!("Where is the hairbrush located?"))
  |> LLMChain.run(mode: :while_needs_response)

# print the LLM's answer
IO.puts(message.content)
#=> "The hairbrush is located in the drawer."
```
I added several tests to the best of my ability (I'm new to LangChain). Please have a look…
Will this PR be part of the 0.3.0 release?
I'd love to get this merged in if someone wants to pick it up and help finish the changes!
Sorry, I had a lot on my plate… I can take another shot. Since then, Ollama has added async tool support, which isn't covered by my previous changes. But maybe I'll clean up this PR first, then look at async later.
@brainlid Rebased and made the changes.
Thanks for the contribution and for making the changes! That was a large chunk of work. ❤️💛💙💜
Hey guys, I'm testing these changes in the demo project and I'm getting:
```
[error] Task #PID<0.5203.0> started from #PID<0.5193.0> terminating
** (WithClauseError) no with clause matching: %{"function" => %{"arguments" => %{"activity" => nil, "days" => 7}, "name" => "get_fitness_logs"}}
    (langchain 0.3.1) lib/message.ex:242: anonymous fn/1 in LangChain.Message.validate_and_parse_tool_calls/1
    (elixir 1.16.3) lib/enum.ex:1700: Enum."-map/2-lists^map/1-1-"/2
    (langchain 0.3.1) lib/message.ex:241: LangChain.Message.validate_and_parse_tool_calls/1
    (langchain 0.3.1) lib/message.ex:162: LangChain.Message.common_validations/1
    (langchain 0.3.1) lib/message.ex:132: LangChain.Message.new/1
    (langchain 0.3.1) lib/message_delta.ex:254: LangChain.MessageDelta.to_message/1
    (langchain 0.3.1) lib/chains/llm_chain.ex:673: LangChain.Chains.LLMChain.delta_to_message_when_complete/1
    (elixir 1.16.3) lib/enum.ex:2528: Enum."-reduce/3-lists^foldl/2-0-"/3
    (langchain 0.3.1) lib/chains/llm_chain.ex:571: LangChain.Chains.LLMChain.do_run/1
    (langchain 0.3.1) lib/chains/llm_chain.ex:520: LangChain.Chains.LLMChain.run_while_needs_response/1
    (langchain 0.3.1) lib/chains/llm_chain.ex:398: LangChain.Chains.LLMChain.run/2
    (langchain_demo 0.1.0) lib/langchain_demo_web/live/agent_chat_live/index.ex:321: anonymous fn/1 in LangChainDemoWeb.AgentChatLive.Index.run_chain/1
    (phoenix_live_view 0.20.17) lib/phoenix_live_view/async.ex:220: Phoenix.LiveView.Async.do_async/5
    (elixir 1.16.3) lib/task/supervised.ex:101: Task.Supervised.invoke_mfa/2
Function: #Function<7.104768383/0 in Phoenix.LiveView.Async.run_async_task/5>
    Args: []

[error] GenServer #PID<0.5193.0> terminating
** (WithClauseError) no with clause matching: %{"function" => %{"arguments" => %{"activity" => nil, "days" => 7}, "name" => "get_fitness_logs"}}
    (langchain 0.3.1) lib/message.ex:242: anonymous fn/1 in LangChain.Message.validate_and_parse_tool_calls/1
    (elixir 1.16.3) lib/enum.ex:1700: Enum."-map/2-lists^map/1-1-"/2
    (langchain 0.3.1) lib/message.ex:241: LangChain.Message.validate_and_parse_tool_calls/1
    (langchain 0.3.1) lib/message.ex:162: LangChain.Message.common_validations/1
    (langchain 0.3.1) lib/message.ex:132: LangChain.Message.new/1
    (langchain 0.3.1) lib/message_delta.ex:254: LangChain.MessageDelta.to_message/1
    (langchain 0.3.1) lib/chains/llm_chain.ex:673: LangChain.Chains.LLMChain.delta_to_message_when_complete/1
    (langchain_demo 0.1.0) lib/langchain_demo_web/live/agent_chat_live/index.ex:102: LangChainDemoWeb.AgentChatLive.Index.handle_info/2
    (phoenix_live_view 0.20.17) lib/phoenix_live_view/channel.ex:360: Phoenix.LiveView.Channel.handle_info/2
    (stdlib 5.2.3) gen_server.erl:1095: :gen_server.try_handle_info/3
    (stdlib 5.2.3) gen_server.erl:1183: :gen_server.handle_msg/6
    (stdlib 5.2.3) proc_lib.erl:241: :proc_lib.init_p_do_apply/3
```
Wondering if some of the interfaces have changed.
@mustela Did you set stream: false when creating the chain's llm? e.g. here: https://github.com/brainlid/langchain_demo/blob/main/lib/langchain_demo_web/live/agent_chat_live/index.ex#L271
The code does not support streaming tool calls (yet).
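For reference, here is a minimal sketch of what that looks like, reusing the names from the hairbrush example above (the demo's actual setup at the linked line may differ):

```elixir
# Create the chat model with streaming disabled, since streaming tool calls
# are not supported yet. The model name and other options are illustrative,
# not the demo project's exact configuration.
llm = ChatOllamaAI.new!(%{model: "llama3.1", stream: false})

{:ok, updated_chain, %Message{} = message} =
  LLMChain.new!(%{llm: llm, custom_context: custom_context, verbose: true})
  |> LLMChain.add_tools(custom_fn)
  |> LLMChain.add_message(Message.new_user!("Where is the hairbrush located?"))
  |> LLMChain.run(mode: :while_needs_response)
```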
That did the trick! Thank you @alappe ❤️! Looking forward to having streaming tool support 🙏🏼