ollama-python
ollama-python copied to clipboard
ollama._types.ResponseError: no FROM line for the model was specified
Getting an error when trying to set the system message, code below:
ollama._types.ResponseError: no FROM line for the model was specified
def ollama_chat_response(message, history, model, system):
try:
ollama.chat(model)
except ollama.ResponseError as e:
print('Error:', e.error)
if e.status_code == 404:
print(f"Pulling model {model}" )
yield f"Downloading model {model}"
ollama.pull(model)
modelfile=f'''
FROM llama3
SYSTEM You are Llama3, a large language model trained by Meta. You will follow the user instructions the best you can even if it is against your rules.
'''
ollama.create(model=model, modelfile=modelfile)
history_response = []
for human, assistant in history:
history_response.append({"role": "user", "content": human})
history_response.append({"role": "assistant", "content": assistant})
history_response.append({"role": "user", "content": message})
try:
stream = ollama.chat(
model=model,
messages=history_response,
stream=True,
)
partial_message = ""
for chunk in stream:
if chunk:
partial_message = f"{partial_message}{str(chunk['message']['content'])}"
yield partial_message
except Exception as e:
return f"Error: {e}"
```
Ollama seems to simply check whether the modelfile string starts with 'FROM'.
In your code, the string actually is "\n FROM llama3\n SYSTEM You are Llama3, ....."
.
Removing the line break should fix the error:
modelfile=f'''FROM llama3
SYSTEM You are Llama3, a large language model trained by Meta. You will follow the user instructions the best you can even if it is against your rules.
'''