Use this file to discover all available pages before exploring further.
Agno agents are stateless by default: each conversation starts fresh. Supermemory changes that — your agents can remember users, recall past conversations, and build on previous interactions.
Fetch user context before running an agent, then store the interaction after.
from agno.agent import Agent
from agno.models.openai import OpenAIChat
from supermemory import Supermemory
from dotenv import load_dotenv

load_dotenv()

memory = Supermemory()


def get_user_context(user_id: str, query: str) -> str:
    """Pull user profile and relevant memories."""
    result = memory.profile(container_tag=user_id, q=query)

    # Static facts, recent dynamic activity, and query-relevant memories.
    static = result.profile.static or []
    dynamic = result.profile.dynamic or []
    memories = result.search_results.results if result.search_results else []

    newline = chr(10)
    background = newline.join(static) if static else 'No profile yet.'
    recent = newline.join(dynamic) if dynamic else 'Nothing recent.'
    if memories:
        related = newline.join([m.memory or m.chunk for m in memories[:5]])
    else:
        related = 'None.'

    return f"""User background:
{background}
Recent activity:
{recent}
Related memories:
{related}"""


def create_agent(user_id: str, task: str) -> Agent:
    """Create an agent with user context."""
    context = get_user_context(user_id, task)
    return Agent(
        name="assistant",
        model=OpenAIChat(id="gpt-4o"),
        # The context is baked into the system description so every turn
        # of this agent sees what we know about the user.
        description=f"""You are a helpful assistant.
Here's what you know about this user:
{context}
Use this to personalize your responses.""",
        markdown=True,
    )


def chat(user_id: str, message: str) -> str:
    """Run the agent and store the interaction."""
    agent = create_agent(user_id, message)
    response = agent.run(message)

    # Save for next time
    memory.add(
        content=f"User: {message}\nAssistant: {response.content}",
        container_tag=user_id,
    )
    return response.content
# Hybrid search over a single user's memories, capped at five hits.
results = memory.search.memories(
    q="pasta recipes we discussed",
    container_tag="user_123",
    search_mode="hybrid",
    limit=5,
)

for hit in results.results:
    # A result carries either a full memory or a chunk of one.
    print(hit.memory or hit.chunk)
Give your agent tools that can search and store memories directly.
from agno.agent import Agent
from agno.models.openai import OpenAIChat
from agno.tools import tool
from supermemory import Supermemory

memory = Supermemory()


@tool
def search_memory(query: str, user_id: str) -> str:
    """Search for information in the user's memory.

    Args:
        query: What to look for
        user_id: The user's ID
    """
    results = memory.search.memories(q=query, container_tag=user_id, limit=5)
    if not results.results:
        return "Nothing relevant found in memory."
    return "\n".join([r.memory or r.chunk for r in results.results])


@tool
def remember(content: str, user_id: str) -> str:
    """Store something important about the user.

    Args:
        content: What to remember
        user_id: The user's ID
    """
    memory.add(content=content, container_tag=user_id)
    return f"Remembered: {content}"


# The description steers the model toward using the two tools above;
# it is runtime data consumed by the LLM, so keep its wording stable.
agent = Agent(
    name="memory_agent",
    model=OpenAIChat(id="gpt-4o"),
    tools=[search_memory, remember],
    description="""You are an assistant with memory.
When users share preferences or important info, use the remember tool.
When they ask about past conversations, search your memory first.""",
    markdown=True,
)