Skip to content

Structure

__all__ = ['Run', 'ConversationMemory', 'SummaryConversationMemory'] module-attribute

ConversationMemory

Source code in griptape/griptape/memory/structure/conversation_memory.py
@define
class ConversationMemory:
    """Stores an ordered sequence of conversation Runs, optionally persisted via a driver.

    Attributes:
        type: Serialized type name; defaults to the concrete class name.
        driver: Optional persistence driver used to load/store the memory.
        runs: Ordered list of conversation runs (oldest first).
        structure: Owning structure; assigned externally after init.
        autoload: When True and a driver is set, persisted runs are loaded on init.
        autoprune: Pruning flag — not referenced in this class; presumably read by
            callers or drivers (TODO confirm against the rest of the package).
        max_runs: Upper bound on retained runs; None (or 0, which is falsy) disables pruning.
    """

    type: str = field(default=Factory(lambda self: self.__class__.__name__, takes_self=True), kw_only=True)
    driver: BaseConversationMemoryDriver | None = field(default=None, kw_only=True)
    runs: list[Run] = field(factory=list, kw_only=True)
    structure: Structure = field(init=False)
    autoload: bool = field(default=True, kw_only=True)
    autoprune: bool = field(default=True, kw_only=True)
    max_runs: int | None = field(default=None, kw_only=True)

    def __attrs_post_init__(self) -> None:
        # Re-hydrate persisted runs through add_run so the before/after hooks
        # and max_runs pruning also apply to loaded data.
        if self.driver and self.autoload:
            memory = self.driver.load()
            if memory is not None:
                # Plain loop instead of a list comprehension used only for side effects.
                for run in memory.runs:
                    self.add_run(run)

    def add_run(self, run: Run) -> ConversationMemory:
        """Add a run, invoking the before/after hooks; returns self for chaining."""
        self.before_add_run()
        self.try_add_run(run)
        self.after_add_run()

        return self

    def before_add_run(self) -> None:
        """Hook invoked before a run is added; no-op by default."""
        pass

    def try_add_run(self, run: Run) -> None:
        """Append a run, then drop the oldest runs beyond max_runs (when set)."""
        self.runs.append(run)

        # NOTE: max_runs == 0 is falsy and disables pruning, same as None.
        if self.max_runs:
            while len(self.runs) > self.max_runs:
                self.runs.pop(0)

    def after_add_run(self) -> None:
        """Hook invoked after a run is added; persists memory when a driver is set."""
        if self.driver:
            self.driver.store(self)

    def to_json(self) -> str:
        """Serialize this memory to a pretty-printed JSON string."""
        return json.dumps(self.to_dict(), indent=2)

    def to_dict(self) -> dict:
        """Serialize this memory to a plain dict via its schema."""
        # Local import avoids a module-level import cycle with griptape.schemas.
        from griptape.schemas import ConversationMemorySchema

        return dict(ConversationMemorySchema().dump(self))

    def to_prompt_stack(self, last_n: int | None = None) -> PromptStack:
        """Convert the runs (optionally only the last_n) into a PromptStack.

        NOTE: last_n == 0 is falsy and includes all runs, same as None.
        """
        prompt_stack = PromptStack()
        runs = self.runs[-last_n:] if last_n else self.runs
        for run in runs:
            prompt_stack.add_user_input(run.input)
            prompt_stack.add_assistant_input(run.output)
        return prompt_stack

    @classmethod
    def from_dict(cls, memory_dict: dict) -> ConversationMemory:
        """Deserialize a memory dict, dispatching on its 'type' key.

        Raises:
            ValueError: If the type is not a registered memory class.
        """
        from griptape.schemas import ConversationMemorySchema, SummaryConversationMemorySchema

        class_registry.register("ConversationMemory", ConversationMemorySchema)
        class_registry.register("SummaryConversationMemory", SummaryConversationMemorySchema)

        try:
            return class_registry.get_class(memory_dict["type"])().load(memory_dict)
        except RegistryError as e:
            # Chain the registry error so the original cause stays visible.
            raise ValueError("Unsupported memory type") from e

    @classmethod
    def from_json(cls, memory_json: str) -> ConversationMemory:
        """Deserialize a memory JSON string (see from_dict)."""
        # Use cls (not the hard-coded class) so subclasses dispatch correctly.
        return cls.from_dict(json.loads(memory_json))

autoload: bool = field(default=True, kw_only=True) class-attribute instance-attribute

autoprune: bool = field(default=True, kw_only=True) class-attribute instance-attribute

driver: BaseConversationMemoryDriver | None = field(default=None, kw_only=True) class-attribute instance-attribute

max_runs: int | None = field(default=None, kw_only=True) class-attribute instance-attribute

runs: list[Run] = field(factory=list, kw_only=True) class-attribute instance-attribute

structure: Structure = field(init=False) class-attribute instance-attribute

type: str = field(default=Factory(lambda self: self.__class__.__name__, takes_self=True), kw_only=True) class-attribute instance-attribute

__attrs_post_init__()

Source code in griptape/griptape/memory/structure/conversation_memory.py
def __attrs_post_init__(self) -> None:
    if self.driver and self.autoload:
        memory = self.driver.load()
        if memory is not None:
            [self.add_run(r) for r in memory.runs]

add_run(run)

Source code in griptape/griptape/memory/structure/conversation_memory.py
def add_run(self, run: Run) -> ConversationMemory:
    self.before_add_run()
    self.try_add_run(run)
    self.after_add_run()

    return self

after_add_run()

Source code in griptape/griptape/memory/structure/conversation_memory.py
def after_add_run(self) -> None:
    if self.driver:
        self.driver.store(self)

before_add_run()

Source code in griptape/griptape/memory/structure/conversation_memory.py
def before_add_run(self) -> None:
    pass

from_dict(memory_dict) classmethod

Source code in griptape/griptape/memory/structure/conversation_memory.py
@classmethod
def from_dict(cls, memory_dict: dict) -> ConversationMemory:
    from griptape.schemas import ConversationMemorySchema, SummaryConversationMemorySchema

    class_registry.register("ConversationMemory", ConversationMemorySchema)
    class_registry.register("SummaryConversationMemory", SummaryConversationMemorySchema)

    try:
        return class_registry.get_class(memory_dict["type"])().load(memory_dict)
    except RegistryError:
        raise ValueError("Unsupported memory type")

from_json(memory_json) classmethod

Source code in griptape/griptape/memory/structure/conversation_memory.py
@classmethod
def from_json(cls, memory_json: str) -> ConversationMemory:
    return ConversationMemory.from_dict(json.loads(memory_json))

to_dict()

Source code in griptape/griptape/memory/structure/conversation_memory.py
def to_dict(self) -> dict:
    from griptape.schemas import ConversationMemorySchema

    return dict(ConversationMemorySchema().dump(self))

to_json()

Source code in griptape/griptape/memory/structure/conversation_memory.py
def to_json(self) -> str:
    return json.dumps(self.to_dict(), indent=2)

to_prompt_stack(last_n=None)

Source code in griptape/griptape/memory/structure/conversation_memory.py
def to_prompt_stack(self, last_n: int | None = None) -> PromptStack:
    prompt_stack = PromptStack()
    runs = self.runs[-last_n:] if last_n else self.runs
    for run in runs:
        prompt_stack.add_user_input(run.input)
        prompt_stack.add_assistant_input(run.output)
    return prompt_stack

try_add_run(run)

Source code in griptape/griptape/memory/structure/conversation_memory.py
def try_add_run(self, run: Run) -> None:
    self.runs.append(run)

    if self.max_runs:
        while len(self.runs) > self.max_runs:
            self.runs.pop(0)

Run

Source code in griptape/griptape/memory/structure/run.py
@define
class Run:
    """A single conversation exchange: one user input and the assistant's output."""

    # Unique identifier; defaults to a fresh UUID4 hex string per instance.
    id: str = field(default=Factory(lambda: uuid.uuid4().hex), kw_only=True)
    # User input text for this run.
    input: str = field(kw_only=True)
    # Assistant output text for this run.
    output: str = field(kw_only=True)

id: str = field(default=Factory(lambda : uuid.uuid4().hex), kw_only=True) class-attribute instance-attribute

input: str = field(kw_only=True) class-attribute instance-attribute

output: str = field(kw_only=True) class-attribute instance-attribute

SummaryConversationMemory

Bases: ConversationMemory

Source code in griptape/griptape/memory/structure/summary_conversation_memory.py
@define
class SummaryConversationMemory(ConversationMemory):
    """ConversationMemory that incrementally folds older runs into an LLM-generated summary.

    Runs older than the newest `offset` runs are summarized into the rolling
    `summary` string; to_prompt_stack emits the rendered summary first, then
    the still-unsummarized runs verbatim.

    Attributes:
        offset: Number of most-recent runs kept verbatim (not summarized).
        prompt_driver: Driver used to generate summaries; defaults to OpenAI chat.
        summary: Rolling summary of runs before summary_index, or None initially.
        summary_index: Index into runs; everything before it has been summarized.
        summary_template_generator: Template rendering the summary for prompts.
        summarize_conversation_template_generator: Template building the
            summarization request sent to the prompt driver.
    """

    offset: int = field(default=1, kw_only=True)
    prompt_driver: BasePromptDriver = field(
        default=Factory(lambda: OpenAiChatPromptDriver(model=OpenAiTokenizer.DEFAULT_OPENAI_GPT_3_CHAT_MODEL)),
        kw_only=True,
    )
    summary: str | None = field(default=None, kw_only=True)
    summary_index: int = field(default=0, kw_only=True)
    summary_template_generator: J2 = field(default=Factory(lambda: J2("memory/conversation/summary.j2")), kw_only=True)
    summarize_conversation_template_generator: J2 = field(
        default=Factory(lambda: J2("memory/conversation/summarize_conversation.j2")), kw_only=True
    )

    @classmethod
    def from_dict(cls, memory_dict: dict) -> SummaryConversationMemory:
        """Deserialize a dict produced by to_dict."""
        # Local import mirrors ConversationMemory.from_dict and avoids a
        # module-level import cycle with griptape.schemas.
        from griptape.schemas import SummaryConversationMemorySchema

        return SummaryConversationMemorySchema().load(memory_dict)

    @classmethod
    def from_json(cls, memory_json: str) -> SummaryConversationMemory:
        """Deserialize a JSON string produced by to_json."""
        # Use cls so further subclasses dispatch to their own from_dict.
        return cls.from_dict(json.loads(memory_json))

    def to_prompt_stack(self, last_n: int | None = None) -> PromptStack:
        """Build a PromptStack: rendered summary first, then unsummarized runs."""
        stack = PromptStack()
        if self.summary:
            stack.add_user_input(self.summary_template_generator.render(summary=self.summary))

        for r in self.unsummarized_runs(last_n):
            stack.add_user_input(r.input)
            stack.add_assistant_input(r.output)

        return stack

    def to_dict(self) -> dict:
        """Serialize via the summary-specific schema."""
        # Local import for consistency with from_dict (cycle avoidance).
        from griptape.schemas import SummaryConversationMemorySchema

        return dict(SummaryConversationMemorySchema().dump(self))

    def unsummarized_runs(self, last_n: int | None = None) -> list[Run]:
        """Return runs not yet folded into the summary, capped at last_n when smaller.

        NOTE: last_n == 0 is falsy and behaves like None (no cap).
        """
        summary_index_runs = self.runs[self.summary_index :]

        if last_n:
            last_n_runs = self.runs[-last_n:]

            # Return whichever window is shorter, so we neither exceed last_n
            # nor resurface runs that are already summarized.
            if len(summary_index_runs) > len(last_n_runs):
                return last_n_runs
            else:
                return summary_index_runs
        else:
            return summary_index_runs

    def try_add_run(self, run: Run) -> None:
        """Append the run, then summarize any runs beyond the offset window."""
        super().try_add_run(run)

        unsummarized_runs = self.unsummarized_runs()
        # Keep the newest `offset` runs verbatim; summarize everything older.
        runs_to_summarize = unsummarized_runs[: max(0, len(unsummarized_runs) - self.offset)]

        if len(runs_to_summarize) > 0:
            self.summary = self.summarize_runs(self.summary, runs_to_summarize)
            # Runs carry unique ids, so index() locates this exact run.
            self.summary_index = 1 + self.runs.index(runs_to_summarize[-1])

    def summarize_runs(self, previous_summary: str | None, runs: list[Run]) -> str | None:
        """Summarize `runs` on top of previous_summary via the prompt driver.

        Annotated str | None because this is called with self.summary (which
        starts as None) and returns it unchanged when runs is empty or the
        driver call fails — errors are logged, never raised (best-effort).
        """
        try:
            if len(runs) > 0:
                summary = self.summarize_conversation_template_generator.render(summary=previous_summary, runs=runs)
                return self.prompt_driver.run(
                    prompt_stack=PromptStack(inputs=[PromptStack.Input(summary, role=PromptStack.USER_ROLE)])
                ).to_text()
            else:
                return previous_summary
        except Exception as e:
            logging.error(f"Error summarizing memory: {type(e).__name__}({e})")

            return previous_summary

offset: int = field(default=1, kw_only=True) class-attribute instance-attribute

prompt_driver: BasePromptDriver = field(default=Factory(lambda : OpenAiChatPromptDriver(model=OpenAiTokenizer.DEFAULT_OPENAI_GPT_3_CHAT_MODEL)), kw_only=True) class-attribute instance-attribute

summarize_conversation_template_generator: J2 = field(default=Factory(lambda : J2('memory/conversation/summarize_conversation.j2')), kw_only=True) class-attribute instance-attribute

summary: str | None = field(default=None, kw_only=True) class-attribute instance-attribute

summary_index: int = field(default=0, kw_only=True) class-attribute instance-attribute

summary_template_generator: J2 = field(default=Factory(lambda : J2('memory/conversation/summary.j2')), kw_only=True) class-attribute instance-attribute

from_dict(memory_dict) classmethod

Source code in griptape/griptape/memory/structure/summary_conversation_memory.py
@classmethod
def from_dict(cls, memory_dict: dict) -> SummaryConversationMemory:
    return SummaryConversationMemorySchema().load(memory_dict)

from_json(memory_json) classmethod

Source code in griptape/griptape/memory/structure/summary_conversation_memory.py
@classmethod
def from_json(cls, memory_json: str) -> SummaryConversationMemory:
    return SummaryConversationMemory.from_dict(json.loads(memory_json))

summarize_runs(previous_summary, runs)

Source code in griptape/griptape/memory/structure/summary_conversation_memory.py
def summarize_runs(self, previous_summary: str, runs: list[Run]) -> str:
    try:
        if len(runs) > 0:
            summary = self.summarize_conversation_template_generator.render(summary=previous_summary, runs=runs)
            return self.prompt_driver.run(
                prompt_stack=PromptStack(inputs=[PromptStack.Input(summary, role=PromptStack.USER_ROLE)])
            ).to_text()
        else:
            return previous_summary
    except Exception as e:
        logging.error(f"Error summarizing memory: {type(e).__name__}({e})")

        return previous_summary

to_dict()

Source code in griptape/griptape/memory/structure/summary_conversation_memory.py
def to_dict(self) -> dict:
    return dict(SummaryConversationMemorySchema().dump(self))

to_prompt_stack(last_n=None)

Source code in griptape/griptape/memory/structure/summary_conversation_memory.py
def to_prompt_stack(self, last_n: int | None = None) -> PromptStack:
    stack = PromptStack()
    if self.summary:
        stack.add_user_input(self.summary_template_generator.render(summary=self.summary))

    for r in self.unsummarized_runs(last_n):
        stack.add_user_input(r.input)
        stack.add_assistant_input(r.output)

    return stack

try_add_run(run)

Source code in griptape/griptape/memory/structure/summary_conversation_memory.py
def try_add_run(self, run: Run) -> None:
    super().try_add_run(run)

    unsummarized_runs = self.unsummarized_runs()
    runs_to_summarize = unsummarized_runs[: max(0, len(unsummarized_runs) - self.offset)]

    if len(runs_to_summarize) > 0:
        self.summary = self.summarize_runs(self.summary, runs_to_summarize)
        self.summary_index = 1 + self.runs.index(runs_to_summarize[-1])

unsummarized_runs(last_n=None)

Source code in griptape/griptape/memory/structure/summary_conversation_memory.py
def unsummarized_runs(self, last_n: int | None = None) -> list[Run]:
    summary_index_runs = self.runs[self.summary_index :]

    if last_n:
        last_n_runs = self.runs[-last_n:]

        if len(summary_index_runs) > len(last_n_runs):
            return last_n_runs
        else:
            return summary_index_runs
    else:
        return summary_index_runs