Generative AI
Build a Hybrid-Memory Autonomous Agent with Modular Architecture and Tool Dispatch using OpenAI

class MemoryStoreTool(Tool):
    """Tool that writes a fact into the agent's long-term memory backend."""

    name = "memory_store"
    description = "Save an important fact or piece of information to long-term memory."

    def __init__(self, memory: MemoryBackend):
        # Shared memory backend; presumably the same instance the search tool uses.
        self._mem = memory

    def run(self, text: str, category: str = "general") -> str:
        """Persist *text* tagged with *category*; return a confirmation string."""
        new_id = self._mem.store(text, {"category": category})
        return f"Stored as {new_id}."

    def schema(self) -> Dict:
        """Return the OpenAI function-calling schema describing this tool."""
        parameters = {
            "type": "object",
            "properties": {
                "text": {"type": "string", "description": "The fact to remember."},
                "category": {"type": "string", "description": "Category tag, e.g. 'user_pref', 'task', 'fact'."},
            },
            # Only the fact itself is mandatory; category defaults to "general".
            "required": ["text"],
        }
        return {
            "type": "function",
            "function": {
                "name": self.name,
                "description": self.description,
                "parameters": parameters,
            },
        }
class MemorySearchTool(Tool):
    """Tool that retrieves relevant facts from the agent's long-term memory.

    Fix: the result header and the line separator used the literal letter
    "n" instead of the newline escape "\\n" (a paste-mangled escape), which
    mashed every retrieved memory onto one line.
    """

    name = "memory_search"
    description = "Search long-term memory for information relevant to a query."

    def __init__(self, memory: MemoryBackend):
        # Shared memory backend; presumably the same instance the store tool uses.
        self._mem = memory

    def run(self, query: str, top_k: int = 3) -> str:
        """Search memory for *query*; return up to *top_k* hits, one per line."""
        results = self._mem.search(query, top_k=top_k)
        if not results:
            return "No relevant memories found."
        # Each result dict is expected to carry 'id', 'rrf_score' and 'text'
        # keys — TODO confirm against MemoryBackend.search's return shape.
        lines = [f"[{r['id']}] (score={r['rrf_score']}) {r['text']}" for r in results]
        return "Relevant memories:\n" + "\n".join(lines)

    def schema(self) -> Dict:
        """Return the OpenAI function-calling schema describing this tool."""
        return {
            "type": "function",
            "function": {
                "name": self.name,
                "description": self.description,
                "parameters": {
                    "type": "object",
                    "properties": {
                        "query": {"type": "string", "description": "What to look for."},
                        "top_k": {"type": "integer", "description": "Max results (default 3)."},
                    },
                    "required": ["query"],
                },
            },
        }
class CalculatorTool(Tool):
    """Tool that evaluates arithmetic expressions in a restricted namespace."""

    name = "calculator"
    description = "Evaluate a safe mathematical expression, e.g. '2 ** 10 + sqrt(144)'."

    def run(self, expression: str) -> str:
        """Evaluate *expression*; return the result, or 'Error: ...' on failure."""
        # Expose every public name from math (functions and constants),
        # plus the two builtins the tool description implies.
        namespace = {sym: getattr(math, sym) for sym in dir(math) if not sym.startswith("_")}
        namespace["abs"] = abs
        namespace["round"] = round
        # NOTE(security): eval() with empty __builtins__ is a weak sandbox —
        # it can still be escaped via dunder attribute chains. Do not feed it
        # untrusted input; prefer an ast-based expression evaluator.
        try:
            value = eval(expression, {"__builtins__": {}}, namespace)
            return str(value)
        except Exception as exc:
            return f"Error: {exc}"

    def schema(self) -> Dict:
        """Return the OpenAI function-calling schema describing this tool."""
        parameters = {
            "type": "object",
            "properties": {
                "expression": {"type": "string", "description": "Math expression to evaluate."},
            },
            "required": ["expression"],
        }
        return {
            "type": "function",
            "function": {
                "name": self.name,
                "description": self.description,
                "parameters": parameters,
            },
        }
class WebSnippetTool(Tool):
    """Mock web-search tool backed by a tiny canned knowledge base."""

    name = "web_search"
    description = "Search the web for current information on a topic (simulated)."

    # Canned keyword -> snippet map standing in for a real search API.
    _KB = {
        "openai": "OpenAI is an AI safety company that develops the GPT family of models.",
        "rag": "Retrieval-Augmented Generation (RAG) combines a retrieval system with an LLM to ground answers in external documents.",
        "bm25": "BM25 (Best Match 25) is a probabilistic keyword ranking function used in search engines.",
    }

    def run(self, query: str) -> str:
        """Return the first canned snippet whose keyword appears in *query*."""
        lowered = query.lower()
        hit = next((snippet for keyword, snippet in self._KB.items() if keyword in lowered), None)
        if hit is not None:
            return f"Web snippet for '{query}': {hit}"
        return f"No snippet found for '{query}'. (Mock tool — integrate a real search API here.)"

    def schema(self) -> Dict:
        """Return the OpenAI function-calling schema describing this tool."""
        parameters = {
            "type": "object",
            "properties": {
                "query": {"type": "string", "description": "Search query."},
            },
            "required": ["query"],
        }
        return {
            "type": "function",
            "function": {
                "name": self.name,
                "description": self.description,
                "parameters": parameters,
            },
        }
@dataclass
class AgentPersona:
    """Declarative persona configuration compiled into an LLM system prompt.

    Fixes:
    - The forbidden-phrase f-string had mismatched quotes
      (``f"- "{p}""`` — a syntax error); restored to ``f'- "{p}"'``.
    - The final join used the literal letter "n" instead of the newline
      escape "\\n" (paste-mangled escape), collapsing the prompt onto one line.
    """

    name: str                 # persona's display name, e.g. "Aria"
    role: str                 # short role description inserted after the name
    traits: List[str]         # bullet list rendered under "## Core Traits"
    forbidden_phrases: List[str] = field(default_factory=list)  # rendered quoted
    goals: List[str] = field(default_factory=list)              # optional "## Goals" section

    def compile_system_prompt(self, extra_context: str = "") -> str:
        """Render the persona as a markdown-ish system prompt.

        Sections for goals, forbidden phrases and live context are emitted
        only when the corresponding field/argument is non-empty; the
        behaviour rules are always appended last.
        """
        lines = [
            f"You are {self.name}, {self.role}.",
            "",
            "## Core Traits",
            *[f"- {t}" for t in self.traits],
        ]
        if self.goals:
            lines += ["", "## Goals", *[f"- {g}" for g in self.goals]]
        if self.forbidden_phrases:
            lines += ["", "## Forbidden Phrases (never say these)", *[f'- "{p}"' for p in self.forbidden_phrases]]
        if extra_context:
            lines += ["", "## Live Context", extra_context]
        lines += [
            "",
            "## Behaviour",
            "- Always reason step-by-step before answering.",
            "- Use available tools proactively; never guess when you can look up.",
            "- After using memory_search, quote the retrieved ID in your answer.",
            "- Keep answers concise unless depth is explicitly requested.",
        ]
        return "\n".join(lines)
# Default demo persona. These fields are consumed by
# AgentPersona.compile_system_prompt() when the agent builds its system message.
ARIA = AgentPersona(
    name="Aria",
    role="a precise, helpful research assistant with a hybrid memory system",
    traits=["Methodical", "Curious", "Transparent about uncertainty", "Concise"],
    goals=[
        "Remember and connect information across conversations",
        "Use tools whenever they can improve accuracy",
    ],
    forbidden_phrases=["I cannot", "As an AI language model"],
)

print("✅ Tools and AgentPersona ready.")



