lagent
Added Ollama support
Added support for Ollama and local models. Example usage:
```python
from lagent.actions import ActionExecutor, ArxivSearch, IPythonInterpreter
from lagent.agents.react import ReAct
from lagent.llms.ollama import OllamaAPI


def main():
    # Point the agent at a locally served Ollama model.
    llm = OllamaAPI(model_type="llama3.1:8b-instruct-q8_0")

    # Register the tools the agent is allowed to call.
    arxiv_search = ArxivSearch()
    python_interpreter = IPythonInterpreter()
    action_executor = ActionExecutor(actions=[arxiv_search, python_interpreter])

    agent = ReAct(llm=llm, action_executor=action_executor)

    task = """
    Search for recent papers about computer vision on arXiv and summarize the top 3 results.
    Use the ArxivSearch tool to find papers and the IPythonInterpreter to process the results if needed.
    """

    try:
        response = agent.chat(task)
        print("Final Response:")
        print(response.response)
        print("\nIntermediate Steps:")
        for step in response.inner_steps:
            print(f"Role: {step['role']}")
            print(f"Content: {step['content']}\n")
    except Exception as e:
        print(f"An error occurred: {e}")


if __name__ == "__main__":
    main()
```
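As a quick sanity check before wiring up the full agent, the backend can be exercised directly. The sketch below assumes `OllamaAPI` follows the chat interface of lagent's other API backends (a `chat()` method taking OpenAI-style message dicts), that an Ollama server is running on the default local endpoint, and that the model has already been pulled (e.g. with `ollama pull llama3.1:8b-instruct-q8_0`):

```python
from lagent.llms.ollama import OllamaAPI

# Assumes `ollama serve` is running at the default endpoint
# (http://localhost:11434) and the model has been pulled locally.
llm = OllamaAPI(model_type="llama3.1:8b-instruct-q8_0")

# Assumption: OllamaAPI exposes the chat() interface shared by
# lagent's other API backends, taking OpenAI-style messages.
reply = llm.chat([{"role": "user", "content": "Say hello in one sentence."}])
print(reply)
```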
Thanks for your contribution! We will review it as soon as possible.