# google_tokenizer

## GoogleTokenizer

Bases: `BaseTokenizer`

Source code in griptape/tokenizers/google_tokenizer.py
@define()
class GoogleTokenizer(BaseTokenizer):
    """Tokenizer backed by Google's Generative AI SDK.

    Token counting is delegated to the remote model via
    ``GenerativeModel.count_tokens``; the SDK client is created lazily on
    first access so importing this module does not require the optional
    ``google.generativeai`` dependency.
    """

    MODEL_PREFIXES_TO_MAX_INPUT_TOKENS = {"gemini-1.5-pro": 2097152, "gemini": 1048576}
    MODEL_PREFIXES_TO_MAX_OUTPUT_TOKENS = {"gemini": 8192}

    api_key: str = field(kw_only=True, metadata={"serializable": True})
    _client: Optional[GenerativeModel] = field(
        default=None, kw_only=True, alias="client", metadata={"serializable": False}
    )

    @lazy_property()
    def client(self) -> GenerativeModel:
        """Build the SDK model handle, importing the optional dependency on demand."""
        generativeai = import_optional_dependency("google.generativeai")
        generativeai.configure(api_key=self.api_key)

        return generativeai.GenerativeModel(self.model)

    def count_tokens(self, text: str) -> int:
        """Return the number of tokens ``text`` occupies for the configured model."""
        response = self.client.count_tokens(text)

        return response.total_tokens

### Attributes

- `MODEL_PREFIXES_TO_MAX_INPUT_TOKENS = {'gemini-1.5-pro': 2097152, 'gemini': 1048576}` — class attribute, instance attribute
- `MODEL_PREFIXES_TO_MAX_OUTPUT_TOKENS = {'gemini': 8192}` — class attribute, instance attribute
- `_client = field(default=None, kw_only=True, alias='client', metadata={'serializable': False})` — class attribute, instance attribute
- `api_key = field(kw_only=True, metadata={'serializable': True})` — class attribute, instance attribute

client()

Source code in griptape/tokenizers/google_tokenizer.py
@lazy_property()
def client(self) -> GenerativeModel:
    """Construct the Google ``GenerativeModel`` for this tokenizer's model name.

    The optional ``google.generativeai`` package is imported on demand and
    configured with this tokenizer's API key before the model is created.
    """
    sdk = import_optional_dependency("google.generativeai")
    sdk.configure(api_key=self.api_key)
    return sdk.GenerativeModel(self.model)

count_tokens(text)

Source code in griptape/tokenizers/google_tokenizer.py
def count_tokens(self, text: str) -> int:
    """Return the token count the configured model assigns to ``text``."""
    result = self.client.count_tokens(text)
    return result.total_tokens