Skip to content

Summary conversation memory

SummaryConversationMemory

Bases: ConversationMemory

Source code in griptape/griptape/memory/structure/summary_conversation_memory.py
@define
class SummaryConversationMemory(ConversationMemory):
    """Conversation memory that folds older runs into a rolling text summary.

    All runs before ``summary_index`` are represented only by ``summary``;
    the most recent ``offset`` runs are kept verbatim so recent context is
    passed to the model unmodified.
    """

    # Number of most-recent runs kept out of the summary.
    offset: int = field(default=1, kw_only=True)
    # Driver used to generate updated summaries.
    prompt_driver: BasePromptDriver = field(
        default=Factory(lambda: OpenAiChatPromptDriver(model=OpenAiTokenizer.DEFAULT_OPENAI_GPT_3_CHAT_MODEL)),
        kw_only=True,
    )
    # Current rolling summary text; None until the first summarization runs.
    summary: Optional[str] = field(default=None, kw_only=True)
    # Index into self.runs of the first run NOT covered by the summary.
    summary_index: int = field(default=0, kw_only=True)
    # Template rendering the summary into a prompt input.
    summary_template_generator: J2 = field(default=Factory(lambda: J2("memory/conversation/summary.j2")), kw_only=True)
    # Template rendering (previous summary, runs) into a summarization request.
    summarize_conversation_template_generator: J2 = field(
        default=Factory(lambda: J2("memory/conversation/summarize_conversation.j2")), kw_only=True
    )

    @classmethod
    def from_dict(cls, memory_dict: dict) -> SummaryConversationMemory:
        """Deserialize a memory instance from a dict via its schema."""
        return SummaryConversationMemorySchema().load(memory_dict)

    @classmethod
    def from_json(cls, memory_json: str) -> SummaryConversationMemory:
        """Deserialize a memory instance from a JSON string."""
        return SummaryConversationMemory.from_dict(json.loads(memory_json))

    def to_prompt_stack(self, last_n: Optional[int] = None) -> PromptStack:
        """Build a PromptStack from the summary plus the unsummarized runs.

        The summary (if any) is rendered first as a user input, followed by
        each unsummarized run as a user/assistant input pair.
        """
        stack = PromptStack()
        if self.summary:
            stack.add_user_input(self.summary_template_generator.render(summary=self.summary))

        for r in self.unsummarized_runs(last_n):
            stack.add_user_input(r.input)
            stack.add_assistant_input(r.output)

        return stack

    def to_dict(self) -> dict:
        """Serialize this memory to a plain dict via its schema."""
        return dict(SummaryConversationMemorySchema().dump(self))

    def unsummarized_runs(self, last_n: Optional[int] = None) -> list[Run]:
        """Return the runs not yet covered by the summary.

        When *last_n* is given (and non-zero), the result is additionally
        capped to the last *last_n* runs — whichever slice is shorter wins.
        """
        summary_index_runs = self.runs[self.summary_index :]

        if last_n:
            last_n_runs = self.runs[-last_n:]

            if len(summary_index_runs) > len(last_n_runs):
                return last_n_runs
            else:
                return summary_index_runs
        else:
            return summary_index_runs

    def try_add_run(self, run: Run) -> None:
        """Add *run*, then fold all but the newest ``offset`` unsummarized
        runs into the summary and advance ``summary_index`` past them.
        """
        super().try_add_run(run)

        unsummarized_runs = self.unsummarized_runs()
        runs_to_summarize = unsummarized_runs[: max(0, len(unsummarized_runs) - self.offset)]

        if len(runs_to_summarize) > 0:
            self.summary = self.summarize_runs(self.summary, runs_to_summarize)
            # Next unsummarized run starts right after the last one summarized.
            self.summary_index = 1 + self.runs.index(runs_to_summarize[-1])

    def summarize_runs(self, previous_summary: Optional[str], runs: list[Run]) -> Optional[str]:
        """Ask the prompt driver for a summary of *runs*, seeded with *previous_summary*.

        *previous_summary* may be None — callers pass ``self.summary``, which
        defaults to None (the old ``str`` annotation was incorrect).  On any
        driver/template error the previous summary is returned unchanged
        (best-effort, error logged).
        """
        try:
            if len(runs) > 0:
                summary = self.summarize_conversation_template_generator.render(summary=previous_summary, runs=runs)
                return self.prompt_driver.run(
                    prompt_stack=PromptStack(inputs=[PromptStack.Input(summary, role=PromptStack.USER_ROLE)])
                ).to_text()
            else:
                return previous_summary
        except Exception as e:
            logging.error(f"Error summarizing memory: {type(e).__name__}({e})")

            return previous_summary

offset: int = field(default=1, kw_only=True) class-attribute instance-attribute

prompt_driver: BasePromptDriver = field(default=Factory(lambda : OpenAiChatPromptDriver(model=OpenAiTokenizer.DEFAULT_OPENAI_GPT_3_CHAT_MODEL)), kw_only=True) class-attribute instance-attribute

summarize_conversation_template_generator: J2 = field(default=Factory(lambda : J2('memory/conversation/summarize_conversation.j2')), kw_only=True) class-attribute instance-attribute

summary: Optional[str] = field(default=None, kw_only=True) class-attribute instance-attribute

summary_index: int = field(default=0, kw_only=True) class-attribute instance-attribute

summary_template_generator: J2 = field(default=Factory(lambda : J2('memory/conversation/summary.j2')), kw_only=True) class-attribute instance-attribute

from_dict(memory_dict) classmethod

Source code in griptape/griptape/memory/structure/summary_conversation_memory.py
@classmethod
def from_dict(cls, memory_dict: dict) -> SummaryConversationMemory:
    """Deserialize a memory instance from a dict via its schema."""
    return SummaryConversationMemorySchema().load(memory_dict)

from_json(memory_json) classmethod

Source code in griptape/griptape/memory/structure/summary_conversation_memory.py
@classmethod
def from_json(cls, memory_json: str) -> SummaryConversationMemory:
    """Deserialize a memory instance from a JSON string."""
    return SummaryConversationMemory.from_dict(json.loads(memory_json))

summarize_runs(previous_summary, runs)

Source code in griptape/griptape/memory/structure/summary_conversation_memory.py
def summarize_runs(self, previous_summary: Optional[str], runs: list[Run]) -> Optional[str]:
    """Ask the prompt driver for a summary of *runs*, seeded with *previous_summary*.

    *previous_summary* may be None (callers pass ``self.summary``, which
    defaults to None).  On any error the previous summary is returned
    unchanged and the error is logged (best-effort summarization).
    """
    try:
        if len(runs) > 0:
            summary = self.summarize_conversation_template_generator.render(summary=previous_summary, runs=runs)
            return self.prompt_driver.run(
                prompt_stack=PromptStack(inputs=[PromptStack.Input(summary, role=PromptStack.USER_ROLE)])
            ).to_text()
        else:
            return previous_summary
    except Exception as e:
        logging.error(f"Error summarizing memory: {type(e).__name__}({e})")

        return previous_summary

to_dict()

Source code in griptape/griptape/memory/structure/summary_conversation_memory.py
def to_dict(self) -> dict:
    """Serialize this memory to a plain dict via its schema."""
    return dict(SummaryConversationMemorySchema().dump(self))

to_prompt_stack(last_n=None)

Source code in griptape/griptape/memory/structure/summary_conversation_memory.py
def to_prompt_stack(self, last_n: Optional[int] = None) -> PromptStack:
    """Build a PromptStack from the summary plus the unsummarized runs.

    The summary (if any) is rendered first as a user input, followed by
    each unsummarized run as a user/assistant input pair.
    """
    stack = PromptStack()
    if self.summary:
        stack.add_user_input(self.summary_template_generator.render(summary=self.summary))

    for r in self.unsummarized_runs(last_n):
        stack.add_user_input(r.input)
        stack.add_assistant_input(r.output)

    return stack

try_add_run(run)

Source code in griptape/griptape/memory/structure/summary_conversation_memory.py
def try_add_run(self, run: Run) -> None:
    """Add *run*, then fold all but the newest ``offset`` unsummarized runs
    into the summary and advance ``summary_index`` past them.
    """
    super().try_add_run(run)

    unsummarized_runs = self.unsummarized_runs()
    runs_to_summarize = unsummarized_runs[: max(0, len(unsummarized_runs) - self.offset)]

    if len(runs_to_summarize) > 0:
        self.summary = self.summarize_runs(self.summary, runs_to_summarize)
        # Next unsummarized run starts right after the last one summarized.
        self.summary_index = 1 + self.runs.index(runs_to_summarize[-1])

unsummarized_runs(last_n=None)

Source code in griptape/griptape/memory/structure/summary_conversation_memory.py
def unsummarized_runs(self, last_n: Optional[int] = None) -> list[Run]:
    """Return the runs not yet covered by the summary.

    When *last_n* is given (and non-zero), the result is additionally
    capped to the last *last_n* runs — whichever slice is shorter wins.
    """
    summary_index_runs = self.runs[self.summary_index :]

    if last_n:
        last_n_runs = self.runs[-last_n:]

        if len(summary_index_runs) > len(last_n_runs):
            return last_n_runs
        else:
            return summary_index_runs
    else:
        return summary_index_runs