Unable to configure GeminiEmbedder; mem0 defaults to OpenAIEmbedder despite explicit config
Describe the bug
Name: mem0ai Version: 0.1.65 Summary: Long-term memory for AI Agents
Python 3.13.2
env
Get your Google AI Studio API Key
GEMINI_API_KEY=AIzaS...
The Gemini LLM to use for chat and mem0 internal tasks (defaults to gemini-1.5-flash-latest)
MODEL_CHOICE=gemini-1.5-flash-latest
Your Supabase DATABASE_URL (corrected from previous steps)
DATABASE_URL=postgresql://postgres.8464:NFy...@localhost:6543/postgres
OPENAI_API_KEY is no longer needed for this script if you fully switch
OPENAI_API_KEY=...
Supabase configuration for authentication (these are for your self-hosted Supabase if it's local)
These are less relevant for the script's direct DB connection but good to have for other Supabase tools.
SUPABASE_URL=http://localhost:8000 SUPABASE_KEY=ey...
import os
from dotenv import load_dotenv
import google.generativeai as genai
from mem0 import Memory
import traceback
import json  # for pretty-printing the config

# Load .env from the project root (one directory above this script).
load_dotenv(dotenv_path=os.path.join(os.path.dirname(__file__), '..', '.env'))

# --- Google Gemini API Configuration ---
GEMINI_API_KEY = os.getenv("GEMINI_API_KEY")
if not GEMINI_API_KEY:
    raise ValueError("ā GEMINI_API_KEY is missing. Add it to your .env file.")

# It's good practice to configure the genai library once, globally.
try:
    genai.configure(api_key=GEMINI_API_KEY)
    print("ā genai library configured globally with GEMINI_API_KEY.")
except Exception as e:
    print(f"šØ Error configuring Google Gemini API (genai.configure): {e}")
    exit()

# --- Model Definitions ---
DEFAULT_GEMINI_CHAT_MODEL = os.getenv('MODEL_CHOICE', "gemini-1.5-flash-latest")
DEFAULT_GOOGLE_EMBEDDING_MODEL = "text-embedding-004"  # Confirmed from mem0 tests

# --- Mem0 Configuration ---
# BUG FIX: mem0's MemoryConfig expects the top-level key "embedder", NOT
# "embedding_model".  An unrecognized "embedding_model" key is silently
# ignored, so mem0 fell back to the default OpenAIEmbedder and then raised
# "The api_key client option must be set ..." — exactly the traceback below
# (note it reads `self.config.embedder.provider` in mem0/memory/main.py).
config = {
    "llm": {
        "provider": "gemini",
        "config": {
            "model": DEFAULT_GEMINI_CHAT_MODEL,
            "api_key": GEMINI_API_KEY,  # explicitly providing the key
        },
    },
    "embedder": {  # was "embedding_model" — the root cause of the bug
        "provider": "gemini",  # correct provider name for GeminiEmbedder
        "config": {
            "model": DEFAULT_GOOGLE_EMBEDDING_MODEL,
            "api_key": GEMINI_API_KEY,  # explicitly providing the key
        },
    },
    "vector_store": {
        "provider": "supabase",
        "config": {
            "connection_string": os.getenv('DATABASE_URL'),
            "collection_name": "memories",
        },
    },
}

# NOTE(review): this dump prints the raw API key and DB password to stdout —
# mask them before pasting logs into a public issue.
print("\nInitializing mem0 with the following configuration:")
print(json.dumps(config, indent=2))
print("-" * 30)

# Initialize mem0 (with the "embedder" key it now builds GeminiEmbedder).
try:
    memory = Memory.from_config(config)
    print("ā mem0 initialized successfully.")
except Exception as e:
    print(f"šØšØšØ CRITICAL ERROR during Memory.from_config šØšØšØ")
    print(f"Error type: {type(e)}")
    print(f"Error message: {e}")
    print("\nFull Traceback for Memory.from_config:")
    traceback.print_exc()
    print("-" * 30)
    print("Exiting due to mem0 initialization failure.")
    exit()

# Initialize the Gemini model for direct chat responses.
try:
    gemini_chat_model = genai.GenerativeModel(DEFAULT_GEMINI_CHAT_MODEL)
    print(f"ā Gemini chat model ({DEFAULT_GEMINI_CHAT_MODEL}) initialized successfully for direct chat.")
except Exception as e:
    print(f"šØ Error initializing Gemini chat model for direct chat: {e}")
    traceback.print_exc()
    print("Exiting due to Gemini chat model initialization failure.")
    exit()
print("-" * 30)
def chat_with_memories(message: str, user_id: str = "default_user") -> str:
    """Answer *message* using memories retrieved for *user_id*, then persist the turn.

    Returns the assistant's reply, or an apology string if search/generation fails.
    """
    print(f"\nš Searching memories for query: '{message}' with user_id: '{user_id}'")
    try:
        # This search should now use GeminiEmbedder.
        search_hits = memory.search(query=message, user_id=user_id, limit=3)
        print(f"Retrieved memories results: {search_hits.get('results') if search_hits else 'None'}")
    except Exception as err:
        print(f"šØšØšØ CRITICAL ERROR during memory.search šØšØšØ")
        print(f"Error type: {type(err)}")
        print(f"Error message: {err}")
        print("\nFull Traceback for memory.search:")
        traceback.print_exc()
        print("-" * 30)
        return "Sorry, I encountered an error searching my memories."

    # Flatten the retrieved memories into a bulleted context block.
    memory_lines = "\n".join(
        f"- {hit['memory']}" for hit in search_hits.get("results", [])
    )
    prompt_prefix = (
        "You are a helpful AI. Answer the question based on the user's query and the following memories "
        f"retrieved for them.\n\nUser Memories:\n{memory_lines}\n\nUser Query: "
    )
    prompt_text = f"{prompt_prefix}{message}"

    print(f"š¬ Generating chat response with Gemini ({DEFAULT_GEMINI_CHAT_MODEL})...")
    try:
        gemini_reply = gemini_chat_model.generate_content(prompt_text)
        assistant_response = gemini_reply.text
        print(f"Gemini raw response: '{assistant_response[:200]}...'")
    except Exception as err:
        print(f"šØ Error generating content with Gemini chat model: {err}")
        traceback.print_exc()
        assistant_response = "Sorry, I encountered an issue trying to respond."

    print(f"š¾ Adding conversation to memory for user_id: '{user_id}'")
    conversation_turns = [
        {"role": "user", "content": message},
        {"role": "assistant", "content": assistant_response},
    ]
    try:
        # This add will use the "gemini" LLM from the mem0 config for processing.
        memory.add(conversation_turns, user_id=user_id)
        print("ā Conversation added to memory.")
    except Exception as err:
        print(f"šØ Error adding to memory: {err}")
        traceback.print_exc()
    return assistant_response
def main():
    """Interactive chat REPL: prompt the user, answer via memories, until 'exit'."""
    print(f"\nChat with AI (LLM: {DEFAULT_GEMINI_CHAT_MODEL}, Embedder: {DEFAULT_GOOGLE_EMBEDDING_MODEL} via Gemini provider - type 'exit' to quit)")
    print("-" * 30)
    while True:
        user_input = input("You: ").strip()
        if user_input.lower() == 'exit':
            print("Goodbye!")
            break
        ai_response = chat_with_memories(user_input)
        print(f"AI: {ai_response}")
        print("-" * 30)
if name == "main": if not os.getenv("DATABASE_URL", "").strip(): raise ValueError("ā DATABASE_URL is missing or empty. Check your .env file formatting.") if not os.getenv("GEMINI_API_KEY", "").strip(): # Also check GEMINI_API_KEY raise ValueError("ā GEMINI_API_KEY is missing or empty. Check your .env file.") main()
error (venv) PS C:\Users\Mind Nudge Lab\local-ai-packaged\mem0-agent> python iterations/v2-supabase-mem0.py ā genai library configured globally with GEMINI_API_KEY.
Initializing mem0 with the following configuration: { "llm": { "provider": "gemini", "config": { "model": "gemini-1.5-flash-latest", "api_key": "AIzaSy***REDACTED***" } }, "embedding_model": { "provider": "gemini", "config": { "model": "text-embedding-004", "api_key": "AIzaSy***REDACTED***" } }, "vector_store": { "provider": "supabase", "config": { "connection_string": "postgresql://postgres.8464:***REDACTED***@localhost:6543/postgres", "collection_name": "memories" } } }
šØšØšØ CRITICAL ERROR during Memory.from_config šØšØšØ Error type: <class 'openai.OpenAIError'> Error message: The api_key client option must be set either by passing api_key to the client or by setting the OPENAI_API_KEY environment variable
Full Traceback for Memory.from_config:
Traceback (most recent call last):
File "C:\Users\Mind Nudge Lab\local-ai-packaged\mem0-agent\iterations\v2-supabase-mem0.py", line 59, in <module>
memory = Memory.from_config(config)
File "C:\Users\Mind Nudge Lab\local-ai-packaged\mem0-agent\venv\Lib\site-packages\mem0\memory\main.py", line 64, in from_config
return cls(config)
File "C:\Users\Mind Nudge Lab\local-ai-packaged\mem0-agent\venv\Lib\site-packages\mem0\memory\main.py", line 38, in __init__
self.embedding_model = EmbedderFactory.create(self.config.embedder.provider, self.config.embedder.config)
~~~~~~~~~~~~~~~~~~~~~~^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
File "C:\Users\Mind Nudge Lab\local-ai-packaged\mem0-agent\venv\Lib\site-packages\mem0\utils\factory.py", line 58, in create
return embedder_instance(base_config)
File "C:\Users\Mind Nudge Lab\local-ai-packaged\mem0-agent\venv\Lib\site-packages\mem0\embeddings\openai.py", line 19, in __init__
self.client = OpenAI(api_key=api_key, base_url=base_url)
~~~~~~^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
File "C:\Users\Mind Nudge Lab\local-ai-packaged\mem0-agent\venv\Lib\site-packages\openai_client.py", line 110, in init
raise OpenAIError(
"The api_key client option must be set either by passing api_key to the client or by setting the OPENAI_API_KEY environment variable"
)
openai.OpenAIError: The api_key client option must be set either by passing api_key to the client or by setting the OPENAI_API_KEY environment variable
Exiting due to mem0 initialization failure.