Skip to content

Structure

__all__ = ['Run', 'BaseConversationMemory', 'ConversationMemory', 'SummaryConversationMemory'] module-attribute

BaseConversationMemory

Bases: SerializableMixin, ABC

Source code in griptape/memory/structure/base_conversation_memory.py
@define
class BaseConversationMemory(SerializableMixin, ABC):
    """Abstract base for conversation memory.

    Holds an ordered list of runs and optionally persists them through a
    conversation memory driver (loaded on init, stored after every add).

    Attributes:
        driver: Optional driver used to load/store the memory.
        runs: Ordered list of recorded conversation runs.
        structure: Owning structure; assigned externally (init=False).
        autoload: When True and a driver is set, replay persisted runs on init.
        autoprune: Pruning flag; not referenced in this base class — presumably
            consumed by subclasses or callers (TODO confirm).
        max_runs: Optional cap on the number of retained runs.
    """

    driver: Optional[BaseConversationMemoryDriver] = field(default=None, kw_only=True)
    runs: list[Run] = field(factory=list, kw_only=True, metadata={"serializable": True})
    structure: Structure = field(init=False)
    autoload: bool = field(default=True, kw_only=True)
    autoprune: bool = field(default=True, kw_only=True)
    max_runs: Optional[int] = field(default=None, kw_only=True, metadata={"serializable": True})

    def __attrs_post_init__(self) -> None:
        """Replay persisted runs through add_run() once attrs init completes."""
        if self.driver and self.autoload:
            memory = self.driver.load()
            if memory is not None:
                # Plain loop: add_run() is called only for its side effects,
                # so a list comprehension would build a throwaway list.
                for run in memory.runs:
                    self.add_run(run)

    def before_add_run(self) -> None:
        """Hook invoked before a run is added; no-op by default."""

    def add_run(self, run: Run) -> BaseConversationMemory:
        """Add a run, wrapped by the before/after hooks; returns self for chaining."""
        self.before_add_run()
        self.try_add_run(run)
        self.after_add_run()

        return self

    def after_add_run(self) -> None:
        """Hook invoked after a run is added; persists memory via the driver."""
        if self.driver:
            self.driver.store(self)

    @abstractmethod
    def try_add_run(self, run: Run) -> None:
        """Store a single run; concrete subclasses define the retention policy."""
        ...

    @abstractmethod
    def to_prompt_stack(self, last_n: Optional[int] = None) -> PromptStack:
        """Render stored runs as a PromptStack, optionally capped to the last n."""
        ...

autoload: bool = field(default=True, kw_only=True) class-attribute instance-attribute

autoprune: bool = field(default=True, kw_only=True) class-attribute instance-attribute

driver: Optional[BaseConversationMemoryDriver] = field(default=None, kw_only=True) class-attribute instance-attribute

max_runs: Optional[int] = field(default=None, kw_only=True, metadata={'serializable': True}) class-attribute instance-attribute

runs: list[Run] = field(factory=list, kw_only=True, metadata={'serializable': True}) class-attribute instance-attribute

structure: Structure = field(init=False) class-attribute instance-attribute

__attrs_post_init__()

Source code in griptape/memory/structure/base_conversation_memory.py
def __attrs_post_init__(self) -> None:
    """Preload persisted runs once attrs has initialized the instance.

    When a driver is configured and autoload is enabled, every run from the
    driver's stored memory is replayed through add_run().
    """
    if self.driver and self.autoload:
        memory = self.driver.load()
        if memory is not None:
            # Plain loop: add_run() is invoked purely for its side effects,
            # so a list comprehension would build a useless list of results.
            for run in memory.runs:
                self.add_run(run)

add_run(run)

Source code in griptape/memory/structure/base_conversation_memory.py
def add_run(self, run: Run) -> BaseConversationMemory:
    """Record a single run, surrounding the insert with the lifecycle hooks.

    Calls before_add_run(), delegates storage to try_add_run(), then calls
    after_add_run(). Returns self so calls can be chained fluently.
    """
    self.before_add_run()
    self.try_add_run(run)
    self.after_add_run()
    return self

after_add_run()

Source code in griptape/memory/structure/base_conversation_memory.py
def after_add_run(self) -> None:
    """Persist the whole memory through the driver; no-op when none is set."""
    driver = self.driver
    if driver:
        driver.store(self)

before_add_run()

Source code in griptape/memory/structure/base_conversation_memory.py
def before_add_run(self) -> None:
    """Hook called before a run is added to memory; no-op by default.

    Subclasses may override to run setup logic ahead of try_add_run().
    """
    pass

to_prompt_stack(last_n=None) abstractmethod

Source code in griptape/memory/structure/base_conversation_memory.py
@abstractmethod
def to_prompt_stack(self, last_n: Optional[int] = None) -> PromptStack:
    """Render stored runs as a PromptStack.

    Args:
        last_n: When given, limit the result to the most recent runs.
    """
    ...

try_add_run(run) abstractmethod

Source code in griptape/memory/structure/base_conversation_memory.py
@abstractmethod
def try_add_run(self, run: Run) -> None:
    """Store a single run; concrete subclasses define the retention policy."""
    ...

ConversationMemory

Bases: BaseConversationMemory

Source code in griptape/memory/structure/conversation_memory.py
@define
class ConversationMemory(BaseConversationMemory):
    """Conversation memory that keeps every run verbatim, optionally capped
    at max_runs (oldest runs are evicted first)."""

    def try_add_run(self, run: Run) -> None:
        """Append the run, then prune from the front if max_runs is exceeded."""
        self.runs.append(run)

        if self.max_runs:
            # Delete all excess elements in one O(n) slice operation instead
            # of repeated pop(0) calls, each of which shifts the whole list.
            excess = len(self.runs) - self.max_runs
            if excess > 0:
                del self.runs[:excess]

    def to_prompt_stack(self, last_n: Optional[int] = None) -> PromptStack:
        """Convert runs (optionally only the last_n most recent) into a
        PromptStack of alternating user/assistant inputs."""
        prompt_stack = PromptStack()
        runs = self.runs[-last_n:] if last_n else self.runs
        for run in runs:
            prompt_stack.add_user_input(run.input)
            prompt_stack.add_assistant_input(run.output)
        return prompt_stack

to_prompt_stack(last_n=None)

Source code in griptape/memory/structure/conversation_memory.py
def to_prompt_stack(self, last_n: Optional[int] = None) -> PromptStack:
    """Build a PromptStack from stored runs.

    Args:
        last_n: When truthy, include only the most recent last_n runs.

    Returns:
        A PromptStack with one user input and one assistant input per run.
    """
    selected = self.runs if not last_n else self.runs[-last_n:]
    stack = PromptStack()
    for entry in selected:
        stack.add_user_input(entry.input)
        stack.add_assistant_input(entry.output)
    return stack

try_add_run(run)

Source code in griptape/memory/structure/conversation_memory.py
def try_add_run(self, run: Run) -> None:
    """Append the run and enforce the max_runs cap by evicting oldest runs.

    Args:
        run: The run to record.
    """
    self.runs.append(run)

    if self.max_runs:
        # Delete every excess element in a single O(n) slice deletion;
        # the original while-pop(0) loop shifted the list once per eviction.
        excess = len(self.runs) - self.max_runs
        if excess > 0:
            del self.runs[:excess]

Run

Bases: SerializableMixin

Source code in griptape/memory/structure/run.py
@define
class Run(SerializableMixin):
    """A single conversation exchange: one user input and one assistant output."""

    # Unique hex identifier, generated per instance unless supplied.
    id: str = field(default=Factory(lambda: uuid.uuid4().hex), kw_only=True, metadata={"serializable": True})
    # Text the user submitted for this exchange.
    input: str = field(kw_only=True, metadata={"serializable": True})
    # Text the assistant produced in response.
    output: str = field(kw_only=True, metadata={"serializable": True})

id: str = field(default=Factory(lambda: uuid.uuid4().hex), kw_only=True, metadata={'serializable': True}) class-attribute instance-attribute

input: str = field(kw_only=True, metadata={'serializable': True}) class-attribute instance-attribute

output: str = field(kw_only=True, metadata={'serializable': True}) class-attribute instance-attribute

SummaryConversationMemory

Bases: ConversationMemory

Source code in griptape/memory/structure/summary_conversation_memory.py
@define
class SummaryConversationMemory(ConversationMemory):
    """Conversation memory that keeps the newest runs verbatim and rolls
    older runs into a single running summary produced by a prompt driver."""

    # Number of most-recent runs kept out of the summary.
    offset: int = field(default=1, kw_only=True, metadata={"serializable": True})
    # Backing field for the prompt_driver property; exposed as "prompt_driver".
    _prompt_driver: BasePromptDriver = field(kw_only=True, default=None, alias="prompt_driver")
    # Rolling summary text covering runs before summary_index, if any.
    summary: Optional[str] = field(default=None, kw_only=True, metadata={"serializable": True})
    # Index into self.runs; runs before this index are folded into the summary.
    summary_index: int = field(default=0, kw_only=True, metadata={"serializable": True})
    # Template that renders the summary for inclusion in the prompt stack.
    summary_template_generator: J2 = field(default=Factory(lambda: J2("memory/conversation/summary.j2")), kw_only=True)
    # Template that renders the summarization request sent to the prompt driver.
    summarize_conversation_template_generator: J2 = field(
        default=Factory(lambda: J2("memory/conversation/summarize_conversation.j2")), kw_only=True
    )

    @property
    def prompt_driver(self) -> BasePromptDriver:
        """Prompt driver used for summarization.

        Lazily falls back to the structure's globally configured prompt
        driver when none was provided explicitly.

        Raises:
            ValueError: If no driver is set and structure is None.
        """
        if self._prompt_driver is None:
            # NOTE(review): structure is declared init=False on the base class;
            # if it was never assigned, this access raises AttributeError, not
            # the ValueError below — confirm intended behavior.
            if self.structure is not None:
                self._prompt_driver = self.structure.config.global_drivers.prompt_driver
            else:
                raise ValueError("Prompt Driver is not set.")
        return self._prompt_driver

    @prompt_driver.setter
    def prompt_driver(self, value: BasePromptDriver) -> None:
        # Allows callers to override the driver after construction.
        self._prompt_driver = value

    def to_prompt_stack(self, last_n: Optional[int] = None) -> PromptStack:
        """Render the summary (if any) followed by the unsummarized runs."""
        stack = PromptStack()
        if self.summary:
            # The rendered summary is injected as a synthetic user input.
            stack.add_user_input(self.summary_template_generator.render(summary=self.summary))

        for r in self.unsummarized_runs(last_n):
            stack.add_user_input(r.input)
            stack.add_assistant_input(r.output)

        return stack

    def unsummarized_runs(self, last_n: Optional[int] = None) -> list[Run]:
        """Return runs not yet folded into the summary, optionally capped at
        the most recent last_n (whichever selection is shorter wins)."""
        summary_index_runs = self.runs[self.summary_index :]

        if last_n:
            last_n_runs = self.runs[-last_n:]

            if len(summary_index_runs) > len(last_n_runs):
                return last_n_runs
            else:
                return summary_index_runs
        else:
            return summary_index_runs

    def try_add_run(self, run: Run) -> None:
        """Add the run, then summarize all but the newest `offset` pending runs."""
        super().try_add_run(run)

        unsummarized_runs = self.unsummarized_runs()
        runs_to_summarize = unsummarized_runs[: max(0, len(unsummarized_runs) - self.offset)]

        if len(runs_to_summarize) > 0:
            self.summary = self.summarize_runs(self.summary, runs_to_summarize)
            # NOTE(review): list.index finds the FIRST equal element; equal
            # duplicate runs could yield a smaller index than intended — verify.
            self.summary_index = 1 + self.runs.index(runs_to_summarize[-1])

    def summarize_runs(self, previous_summary: str | None, runs: list[Run]) -> str | None:
        """Fold `runs` into `previous_summary` via the prompt driver.

        Returns the new summary text, or the previous summary unchanged when
        `runs` is empty or summarization fails (best-effort; errors are logged).
        """
        try:
            if len(runs) > 0:
                summary = self.summarize_conversation_template_generator.render(summary=previous_summary, runs=runs)
                return self.prompt_driver.run(
                    prompt_stack=PromptStack(inputs=[PromptStack.Input(summary, role=PromptStack.USER_ROLE)])
                ).to_text()
            else:
                return previous_summary
        except Exception as e:
            logging.error(f"Error summarizing memory: {type(e).__name__}({e})")

            return previous_summary

offset: int = field(default=1, kw_only=True, metadata={'serializable': True}) class-attribute instance-attribute

prompt_driver: BasePromptDriver property writable

summarize_conversation_template_generator: J2 = field(default=Factory(lambda: J2('memory/conversation/summarize_conversation.j2')), kw_only=True) class-attribute instance-attribute

summary: Optional[str] = field(default=None, kw_only=True, metadata={'serializable': True}) class-attribute instance-attribute

summary_index: int = field(default=0, kw_only=True, metadata={'serializable': True}) class-attribute instance-attribute

summary_template_generator: J2 = field(default=Factory(lambda: J2('memory/conversation/summary.j2')), kw_only=True) class-attribute instance-attribute

summarize_runs(previous_summary, runs)

Source code in griptape/memory/structure/summary_conversation_memory.py
def summarize_runs(self, previous_summary: str | None, runs: list[Run]) -> str | None:
    """Summarize `runs` on top of `previous_summary` using the prompt driver.

    Args:
        previous_summary: Existing summary text, or None on the first pass.
        runs: Runs to fold into the summary.

    Returns:
        The updated summary text, or `previous_summary` unchanged when `runs`
        is empty or summarization raises (best-effort: errors are logged).
    """
    try:
        if len(runs) > 0:
            summary = self.summarize_conversation_template_generator.render(summary=previous_summary, runs=runs)
            return self.prompt_driver.run(
                prompt_stack=PromptStack(inputs=[PromptStack.Input(summary, role=PromptStack.USER_ROLE)])
            ).to_text()
        else:
            return previous_summary
    except Exception as e:
        # Lazy %-style args avoid f-string interpolation when this level is
        # filtered out; the emitted message text is unchanged.
        logging.error("Error summarizing memory: %s(%s)", type(e).__name__, e)

        return previous_summary

to_prompt_stack(last_n=None)

Source code in griptape/memory/structure/summary_conversation_memory.py
def to_prompt_stack(self, last_n: Optional[int] = None) -> PromptStack:
    """Render memory as a PromptStack: the running summary first (when one
    exists), followed by each not-yet-summarized run.

    Args:
        last_n: Optional cap on how many recent runs to include.
    """
    stack = PromptStack()
    if self.summary:
        rendered = self.summary_template_generator.render(summary=self.summary)
        stack.add_user_input(rendered)

    for entry in self.unsummarized_runs(last_n):
        stack.add_user_input(entry.input)
        stack.add_assistant_input(entry.output)

    return stack

try_add_run(run)

Source code in griptape/memory/structure/summary_conversation_memory.py
def try_add_run(self, run: Run) -> None:
    """Record the run, then roll every pending run except the newest
    `offset` ones into the running summary and advance summary_index.

    Args:
        run: The run to record.
    """
    super().try_add_run(run)

    pending = self.unsummarized_runs()
    cutoff = max(0, len(pending) - self.offset)
    to_summarize = pending[:cutoff]

    if to_summarize:
        self.summary = self.summarize_runs(self.summary, to_summarize)
        self.summary_index = self.runs.index(to_summarize[-1]) + 1

unsummarized_runs(last_n=None)

Source code in griptape/memory/structure/summary_conversation_memory.py
def unsummarized_runs(self, last_n: Optional[int] = None) -> list[Run]:
    """Return the runs not yet folded into the summary.

    Args:
        last_n: When truthy, also limit the result to the most recent
            last_n runs; the shorter of the two selections wins.
    """
    pending = self.runs[self.summary_index :]
    if not last_n:
        return pending

    recent = self.runs[-last_n:]
    return recent if len(recent) < len(pending) else pending