text_artifact

TextArtifact

Bases: BaseArtifact

Source code in griptape/artifacts/text_artifact.py
@define
class TextArtifact(BaseArtifact):
    value: str = field(converter=str, metadata={"serializable": True})
    embedding: Optional[list[float]] = field(default=None, kw_only=True)

    def __add__(self, other: BaseArtifact) -> TextArtifact:
        return TextArtifact(self.value + other.value)

    def __bool__(self) -> bool:
        return bool(self.value.strip())

    def to_text(self) -> str:
        return self.value

    def generate_embedding(self, driver: BaseEmbeddingDriver) -> list[float]:
        embedding = driver.embed_string(str(self.value))

        if self.embedding is None:
            self.embedding = []
        self.embedding.clear()
        self.embedding.extend(embedding)

        return self.embedding

    def token_count(self, tokenizer: BaseTokenizer) -> int:
        return tokenizer.count_tokens(str(self.value))
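
For orientation, a minimal usage sketch (the value 123 is illustrative; converter=str coerces non-string input):

from griptape.artifacts import TextArtifact

artifact = TextArtifact(123)  # converter=str coerces the value to "123"
print(artifact.value)         # "123"
print(artifact.embedding)     # None until generate_embedding() is called
print(artifact.to_text())     # "123"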

embedding: Optional[list[float]] = field(default=None, kw_only=True)

The cached embedding for this artifact's value; None until generate_embedding() populates it.

value: str = field(converter=str, metadata={'serializable': True})

The text payload; the converter coerces any input to str, and the serializable flag includes it in serialization.

__add__(other)

Source code in griptape/artifacts/text_artifact.py
def __add__(self, other: BaseArtifact) -> TextArtifact:
    return TextArtifact(self.value + other.value)
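
Concatenation builds a new TextArtifact from both values and leaves the operands unchanged. A minimal sketch:

from griptape.artifacts import TextArtifact

a = TextArtifact("Hello, ")
b = TextArtifact("world")
c = a + b                # returns a new TextArtifact
print(c.value)           # "Hello, world"
print(a.value, b.value)  # operands are unchanged

Because other is typed as BaseArtifact, any artifact whose value concatenates with a string can be appended, not just another TextArtifact.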

__bool__()

Source code in griptape/artifacts/text_artifact.py
def __bool__(self) -> bool:
    return bool(self.value.strip())
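
Truthiness is based on the stripped value, so whitespace-only text is falsy. A quick sketch:

from griptape.artifacts import TextArtifact

print(bool(TextArtifact("text")))  # True
print(bool(TextArtifact("   ")))   # False: only whitespace
print(bool(TextArtifact("")))      # False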

generate_embedding(driver)

Source code in griptape/artifacts/text_artifact.py
def generate_embedding(self, driver: BaseEmbeddingDriver) -> list[float]:
    embedding = driver.embed_string(str(self.value))

    if self.embedding is None:
        self.embedding = []
    self.embedding.clear()
    self.embedding.extend(embedding)

    return self.embedding
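
A minimal sketch of generating and caching an embedding. OpenAiEmbeddingDriver and the OPENAI_API_KEY environment variable are assumptions here; any BaseEmbeddingDriver subclass works the same way. Note that the clear/extend pattern keeps self.embedding pointing at the same list object across repeated calls:

from griptape.artifacts import TextArtifact
from griptape.drivers import OpenAiEmbeddingDriver  # assumed driver; needs OPENAI_API_KEY set

artifact = TextArtifact("Hello, world")
vector = artifact.generate_embedding(OpenAiEmbeddingDriver())
assert vector is artifact.embedding  # the result is cached on the artifact in place
print(len(vector))                   # dimensionality depends on the driver's model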

to_text()

Source code in griptape/artifacts/text_artifact.py
def to_text(self) -> str:
    return self.value
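
to_text() returns value unchanged; it implements the common BaseArtifact conversion interface, so callers can turn any artifact into a string without caring about the concrete artifact type.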

token_count(tokenizer)

Source code in griptape/artifacts/text_artifact.py
def token_count(self, tokenizer: BaseTokenizer) -> int:
    return tokenizer.count_tokens(str(self.value))
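
A sketch of counting tokens; the tokenizer choice and model name below are assumptions, and any BaseTokenizer works:

from griptape.artifacts import TextArtifact
from griptape.tokenizers import OpenAiTokenizer  # assumed tokenizer

artifact = TextArtifact("Hello, world")
tokenizer = OpenAiTokenizer(model="gpt-4")  # model name is illustrative
print(artifact.token_count(tokenizer))      # number of tokens in the value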