
Commit 9907f7a

committed
first memory prototype
1 parent eeef131 commit 9907f7a

File tree

1 file changed

+112
-0
lines changed


mesa_llm/memory.py

Lines changed: 112 additions & 0 deletions
@@ -0,0 +1,112 @@
import os
from collections import deque
from dataclasses import dataclass

from mesa_llm.llm_agent import LLMAgent
from mesa_llm.module_llm import ModuleLLM


@dataclass
class MemoryEntry:
    type: str
    content: str
    step: int
    metadata: dict


class Memory:
    """
    Create a memory object that stores the agent's short and long term memory.

    Attributes:
        agent : the agent that the memory belongs to

    Memory is composed of:
        - A short term memory that stores the n (int) most recent interactions
          (observations, planning, discussions)
        - A long term memory that is a summary of the entries removed from short
          term memory (the summary is extended/refactored as entries are consolidated)
    """

    def __init__(
        self,
        agent: LLMAgent,
        short_term_capacity: int = 5,
        consolidation_capacity: int = 2,
        api_key: str = os.getenv("OPENAI_API_KEY"),
        llm_model: str = "openai/gpt-4o-mini",
    ):
        """
        Initialize the memory

        Args:
            agent : the agent that the memory belongs to
            short_term_capacity : the number of interactions to store in the short term memory
            consolidation_capacity : the number of oldest entries summarized into long term memory at each consolidation
            api_key : the API key to use for the LLM
            llm_model : the model to use for the summarization
        """
        self.agent = agent
        self.llm = ModuleLLM(api_key=api_key, model=llm_model)
        self.capacity = short_term_capacity
        self.consolidation_capacity = consolidation_capacity
        self.short_term_memory = deque()
        self.long_term_memory = ""

        self.system_prompt = """
            You are a helpful assistant that summarizes the short term memory into a long term memory.
            The long term memory should be a summary of the short term memory that is concise and informative.
            If the short term memory is empty, return the long term memory unchanged.
            If the long term memory is not empty, update it to include the new information from the short term memory.
            """

        self.llm.set_system_prompt(self.system_prompt)

    def add_to_memory(
        self, type: str, content: str, step: int, metadata: dict | None = None
    ):
        """
        Add a new entry to the memory
        """
        metadata = metadata or {}
        new_entry = MemoryEntry(type, content, step, metadata)
        self.short_term_memory.append(new_entry)

        # Consolidate memory if the short term memory is over capacity
        if len(self.short_term_memory) > self.capacity + self.consolidation_capacity:
            memories_to_consolidate = [
                self.short_term_memory.popleft()
                for _ in range(self.consolidation_capacity)
            ]
            self.update_long_term_memory(memories_to_consolidate)

    def get_short_term_memory(self) -> list[MemoryEntry]:
        """
        Get the short term memory
        """
        return list(self.short_term_memory)

    def update_long_term_memory(self, memories_to_consolidate: list[MemoryEntry]):
        """
        Update the long term memory by summarizing the consolidated entries with an LLM
        """
        entries = [self.convert_entry_to_dict(m) for m in memories_to_consolidate]

        prompt = f"""
            Short term memory:
            {entries}
            Long term memory:
            {self.long_term_memory}
            """

        self.long_term_memory = self.llm.generate(prompt, self.system_prompt)

    def convert_entry_to_dict(self, entry: MemoryEntry) -> dict:
        """
        Convert a memory entry to a dictionary
        """
        return {
            "type": entry.type,
            "content": entry.content,
            "step": entry.step,
            "metadata": entry.metadata,
        }
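
A minimal usage sketch (not part of the commit), assuming an existing LLMAgent instance and that ModuleLLM.generate returns the summary as a string, as the code above expects; my_agent below is a hypothetical placeholder:

# Hypothetical usage of the Memory prototype added in this commit.
# `my_agent` stands in for any mesa_llm.llm_agent.LLMAgent instance.
memory = Memory(agent=my_agent, short_term_capacity=5, consolidation_capacity=2)

# Record interactions; once the deque exceeds
# short_term_capacity + consolidation_capacity (5 + 2 = 7 here),
# the two oldest entries are popped and summarized into long_term_memory.
for step in range(8):
    memory.add_to_memory(type="observation", content=f"saw neighbor at step {step}", step=step)

print(memory.get_short_term_memory())  # remaining MemoryEntry objects, newest last
print(memory.long_term_memory)         # LLM-generated summary of the evicted entries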
