Ollama LiteLLM error
Yes, I can't get LiteLLM to handle any model other than the OpenAI ones. I did take a shot at fixing this but without success. I did manage to get this test program to work to get LiteLLM to accept tool requests, but integrating this required format into the Agent is proving challenging!

import requests
import os
import json
# Configuration
# Base URL of the LiteLLM proxy; override via the LITELLM_BASE_URL env var.
LITELLM_BASE_URL = os.environ.get("LITELLM_BASE_URL", "http://localhost:4000")
# SECURITY: the API key must come from the environment only — the original
# embedded a literal key ("sk-...") as the default, which leaks a credential
# in source control. If that key was real, rotate it.
API_KEY = os.environ.get("LITELLM_API_KEY", "")
# Models to exercise: two Ollama-served models plus an OpenAI baseline.
MODELS = [
    "ollama/gemma3:12b",
    "ollama/DeepSeek-R1:14b",
    "gpt-4o",
]

headers = {
    "Content-Type": "application/json",
}
# Only attach the bearer token when a key is actually configured.
if API_KEY:
    headers["Authorization"] = f"Bearer {API_KEY}"
# Example function/tool-call conversation — kept deliberately simple.
messages = [
    {
        "role": "system",
        "content": "You are a helpful assistant that can use tools. When you need to use a tool, respond with a message containing the tool name and arguments in a specific format.",
    },
    {
        "role": "user",
        "content": "What is the company id of company X?",
    },
]

# OpenAI-style tool schema exposing a single Companies House lookup function.
tools = [
    {
        "type": "function",
        "function": {
            "name": "companies_house_lookup",
            "description": "Look up company information from Companies House",
            "parameters": {
                "type": "object",
                "properties": {
                    "action": {
                        "type": "string",
                        "enum": ["search"],
                    },
                    "company_name": {
                        "type": "string",
                        "description": "Name of the company to search for",
                    },
                },
                "required": ["action", "company_name"],
            },
        },
    }
]
def test_model(model_name):
    """POST a tool-enabled chat completion for *model_name* and print the raw reply.

    Reads the module-level ``messages``, ``tools``, ``headers`` and
    ``LITELLM_BASE_URL``; prints the outgoing payload, then the HTTP status
    and response body (or the request error) — nothing is returned.
    """
    banner = "=" * 50
    print(f"\n{banner}")
    print(f"Testing model: {model_name}")
    print(f"{banner}")

    payload = {
        "model": model_name,
        "messages": messages,
        "stream": False,
        "tools": tools,
    }
    print("Sending payload:")
    print(json.dumps(payload, indent=2))

    try:
        response = requests.post(
            f"{LITELLM_BASE_URL}/chat/completions",
            headers=headers,
            json=payload,
            timeout=30,
        )
        print("Status code:", response.status_code)
        print("Response:")
        print(response.text)
    except Exception as e:
        # Best-effort probe: report connection/timeout failures and move on.
        print("Error during request:", e)
# Test each model, but only when run as a script — the guard prevents
# importing this module from firing live network requests as a side effect.
if __name__ == "__main__":
    for model in MODELS:
        test_model(model)