Commit 315a803

clean : memory is tested and functional
1 parent d5d4bdf commit 315a803

File tree

3 files changed: +16 −11 lines


mesa_llm/llm_agent.py

Lines changed: 0 additions & 5 deletions
@@ -16,18 +16,13 @@ class LLMAgent:
         llm (ModuleLLM): The internal LLM interface used by the agent.
         memory (Memory | None): The memory module attached to this agent, if any.
 
-    Notes:
-        - Each agent can only have one memory instance associated with it.
-        - If no memory is passed at initialization, one can be attached later using `attach_memory()`.
-        - Reassigning or replacing memory after it's been attached is not allowed and will raise a ValueError.
     """
 
     def __init__(
         self,
         api_key: str,
         model: str = "openai/gpt-4o",
         system_prompt: str | None = None,
-        memory: Memory | None = None,
     ):
         self.llm = ModuleLLM(api_key=api_key, model=model, system_prompt=system_prompt)
         self._memory = Memory(
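With this change an agent no longer takes a memory= argument and the attach_memory() notes are gone from the docstring; every agent builds its own Memory in __init__. A minimal usage sketch, assuming the constructor shown above (the rest of the Memory(...) call falls outside the hunk, so its arguments are left at their defaults):

from mesa_llm.llm_agent import LLMAgent

# Placeholder credentials and prompt, purely for illustration.
agent = LLMAgent(
    api_key="sk-placeholder",
    model="openai/gpt-4o",
    system_prompt="You are a simulated shopper.",
)

# The memory is created internally; no attach_memory() step is needed anymore.
# _memory is the attribute set in __init__ (no public accessor is visible in this diff).
print(agent._memory.get_short_term_memory())  # presumably [] on a fresh agent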

mesa_llm/memory.py

Lines changed: 11 additions & 2 deletions
@@ -1,10 +1,13 @@
 import os
 from collections import deque
 from dataclasses import dataclass
+from typing import TYPE_CHECKING
 
-from mesa_llm.llm_agent import LLMAgent
 from mesa_llm.module_llm import ModuleLLM
 
+if TYPE_CHECKING:
+    from mesa_llm.llm_agent import LLMAgent
+
 
 @dataclass
 class MemoryEntry:
@@ -30,7 +33,7 @@ class Memory:
 
     def __init__(
         self,
-        agent: LLMAgent,
+        agent: "LLMAgent",
         short_term_capacity: int = 5,
         consolidation_capacity: int = 2,
         api_key: str = os.getenv("OPENAI_API_KEY"),
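The two hunks above break a circular import: llm_agent.py needs Memory at runtime, while memory.py only needs LLMAgent for a type hint, so that import moves behind TYPE_CHECKING (always False at runtime) and the parameter annotation becomes the string "LLMAgent", which type checkers resolve lazily. A minimal two-file sketch of the pattern, with illustrative module names rather than anything from the repo:

# --- a.py (plays the role of llm_agent.py): real runtime dependency on B ---
from b import B

class A:
    def __init__(self):
        self.b = B(owner=self)

# --- b.py (plays the role of memory.py): type-only dependency on A ---
from typing import TYPE_CHECKING

if TYPE_CHECKING:          # evaluated by static type checkers, never at runtime
    from a import A

class B:
    def __init__(self, owner: "A"):   # string annotation avoids the runtime import
        self.owner = owner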
@@ -85,6 +88,12 @@ def get_short_term_memory(self) -> list[MemoryEntry]:
         """
         return list(self.short_term_memory)
 
+    def get_long_term_memory(self) -> str:
+        """
+        Get the long term memory
+        """
+        return self.long_term_memory
+
     def update_long_term_memory(self, memories_to_consolidate: list[MemoryEntry]):
         """
         Update the long term memory by summarizing the short term memory with a LLM
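The last hunk adds a read accessor for the consolidated tier, so both memory tiers are now exposed the same way. A usage sketch, assuming an existing LLMAgent instance named agent and the defaults visible in __init__ (api_key falls back to OPENAI_API_KEY from the environment):

from mesa_llm.memory import Memory, MemoryEntry

memory = Memory(agent=agent, short_term_capacity=5, consolidation_capacity=2)

# Short-term memory is a deque of MemoryEntry objects, returned as a plain list.
recent: list[MemoryEntry] = memory.get_short_term_memory()

# Long-term memory is a single string, presumably built up by
# update_long_term_memory() summarizing recent entries with the LLM.
summary: str = memory.get_long_term_memory()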

mesa_llm/module_llm.py

Lines changed: 5 additions & 4 deletions
@@ -20,15 +20,15 @@ def __init__(self, api_key: str, model: str, system_prompt: str | None = None):
             system_prompt: The system prompt to use for the LLM
         """
         self.api_key = api_key
-        self.model=model
+        self.model = model
         self.system_prompt = system_prompt
         provider = self.model.split("/")[0].upper()
         os.environ[f"{provider}_API_KEY"] = self.api_key
 
     def set_system_prompt(self, system_prompt: str):
         """Set or update the system prompt."""
         self.system_prompt = system_prompt
-
+
     def set_model(self, api_key: str, model: str):
         """Set or update the model and API key."""
         self.api_key = api_key
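The constructor copies the key into a provider-specific environment variable derived from the model id's prefix, which is what lets the demo below switch providers by changing only the model string. A quick illustration of the mapping, using throwaway key values:

import os

from mesa_llm.module_llm import ModuleLLM

# "gemini/gemini-2.0-flash" -> provider "GEMINI" -> env var "GEMINI_API_KEY"
ModuleLLM(api_key="fake-gemini-key", model="gemini/gemini-2.0-flash")
print(os.environ["GEMINI_API_KEY"])   # fake-gemini-key

# "openai/gpt-4o" -> provider "OPENAI" -> env var "OPENAI_API_KEY"
ModuleLLM(api_key="fake-openai-key", model="openai/gpt-4o")
print(os.environ["OPENAI_API_KEY"])   # fake-openai-key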
@@ -53,9 +53,10 @@ def generate(self, prompt: str) -> str:
     from dotenv import load_dotenv
 
     load_dotenv()
+    print("ready to go ------------------------------")
 
-    api_key = os.getenv("OPENAI_API_KEY")  # Or simply your API key
-    llm = ModuleLLM(api_key, "openai/gpt-4o")
+    api_key = os.getenv("GEMINI_API_KEY")  # Or simply your API key
+    llm = ModuleLLM(api_key, "gemini/gemini-2.0-flash")
 
     response = llm.generate("Hello, how are you?")
     print(response.choices[0].message.content)
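Since the demo now reads GEMINI_API_KEY instead of OPENAI_API_KEY, running it requires that variable to be present, either exported or in the .env file that load_dotenv() picks up. An equivalent inline setup, with a placeholder value:

import os

# Placeholder; replace with a real key (or put GEMINI_API_KEY=... in .env).
os.environ["GEMINI_API_KEY"] = "your-gemini-api-key-here"

The response.choices[0].message.content access suggests generate() returns a litellm-style chat completion object rather than a plain string.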
