
Added Ollama support

Open msamylea opened this issue 1 year ago • 1 comment

Added support for Ollama and local models. Example usage:

```python
from lagent.actions import ActionExecutor, ArxivSearch, IPythonInterpreter
from lagent.agents.react import ReAct
from lagent.llms.ollama import OllamaAPI


def main():
    # Point lagent at a locally served Ollama model.
    llm = OllamaAPI(model_type="llama3.1:8b-instruct-q8_0")

    # Tools the agent is allowed to call.
    arxiv_search = ArxivSearch()
    python_interpreter = IPythonInterpreter()
    action_executor = ActionExecutor(actions=[arxiv_search, python_interpreter])

    agent = ReAct(llm=llm, action_executor=action_executor)

    task = """
    Search for recent papers about computer vision on arXiv and summarize the top 3 results.
    Use the ArxivSearch tool to find papers and the IPythonInterpreter to process the results if needed.
    """

    try:
        response = agent.chat(task)

        print("Final Response:")
        print(response.response)

        print("\nIntermediate Steps:")
        for step in response.inner_steps:
            print(f"Role: {step['role']}")
            print(f"Content: {step['content']}\n")
    except Exception as e:
        print(f"An error occurred: {e}")


if __name__ == "__main__":
    main()
```
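For anyone trying the example, the snippet below is a minimal pre-flight sketch that checks whether a local Ollama server is reachable and whether the target model has already been pulled. It uses only the Python standard library and Ollama's own REST endpoint (`GET /api/tags` lists locally available models); the host, port, and endpoint are Ollama defaults and are not part of this PR, so adjust them if your server is configured differently.

```python
# Sketch only: verify the Ollama server is up and the model is pulled before
# constructing OllamaAPI. Host/port/endpoint are Ollama defaults, not values
# defined by this PR.
import json
import urllib.request

OLLAMA_URL = "http://localhost:11434"        # default Ollama server address
MODEL = "llama3.1:8b-instruct-q8_0"          # same tag as in the example above


def ollama_model_available(base_url: str = OLLAMA_URL, model: str = MODEL) -> bool:
    try:
        # /api/tags returns {"models": [{"name": "...", ...}, ...]}
        with urllib.request.urlopen(f"{base_url}/api/tags", timeout=5) as resp:
            tags = json.load(resp)
    except OSError:
        return False  # server not running or unreachable
    return any(m.get("name") == model for m in tags.get("models", []))


if __name__ == "__main__":
    if ollama_model_available():
        print("Ollama is running and the model is pulled; safe to run the agent.")
    else:
        print(f"Start the Ollama server and run `ollama pull {MODEL}` first.")
```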

msamylea · Aug 11 '24 23:08

Thanks for your contribution!!! We will review it as soon as possible.

Harold-lkk · Aug 12 '24 05:08