google_tokenizer
Adapted from the Griptape AI Framework documentation.
Bases: BaseTokenizer
Source Code in griptape/tokenizers/google_tokenizer.py
@define()
class GoogleTokenizer(BaseTokenizer):
    """Tokenizer that counts tokens via Google's generative-AI client.

    Attributes:
        api_key: Google API key used to configure the client.
        _client: Optional pre-built GenerativeModel; built lazily when absent.
    """

    # Maximum input-context sizes, keyed by model-name prefix.
    MODEL_PREFIXES_TO_MAX_INPUT_TOKENS = {"gemini-1.5-pro": 2097152, "gemini": 1048576}
    # Maximum output sizes, keyed by model-name prefix.
    MODEL_PREFIXES_TO_MAX_OUTPUT_TOKENS = {"gemini": 8192}

    api_key: str = field(kw_only=True, metadata={"serializable": True})
    _client: Optional[GenerativeModel] = field(
        default=None, kw_only=True, alias="client", metadata={"serializable": False}
    )

    @lazy_property()
    def client(self) -> GenerativeModel:
        """Build (once) and return the configured GenerativeModel client."""
        # Imported lazily so the google dependency is only required when used.
        genai = import_optional_dependency("google.generativeai")
        genai.configure(api_key=self.api_key)
        return genai.GenerativeModel(self.model)

    def count_tokens(self, text: str) -> int:
        """Return the number of tokens in *text* as reported by the model."""
        return self.client.count_tokens(text).total_tokens
MODEL_PREFIXES_TO_MAX_INPUT_TOKENS = {'gemini-1.5-pro': 2097152, 'gemini': 1048576}
class-attribute instance-attribute
MODEL_PREFIXES_TO_MAX_OUTPUT_TOKENS = {'gemini': 8192}
class-attribute instance-attribute
_client = field(default=None, kw_only=True, alias='client', metadata={'serializable': False})
class-attribute instance-attribute
api_key = field(kw_only=True, metadata={'serializable': True})
class-attribute instance-attribute
client()
Source Code in griptape/tokenizers/google_tokenizer.py
@lazy_property()
def client(self) -> GenerativeModel:
    """Lazily construct and cache the GenerativeModel client.

    Returns:
        A GenerativeModel configured with this tokenizer's API key and model.
    """
    # google.generativeai is an optional dependency; import it on first use.
    genai_module = import_optional_dependency("google.generativeai")
    genai_module.configure(api_key=self.api_key)
    return genai_module.GenerativeModel(self.model)
count_tokens(text)
Source Code in griptape/tokenizers/google_tokenizer.py
def count_tokens(self, text: str) -> int:
    """Return the token count for *text* as reported by the Google client."""
    # Delegate counting to the remote model client; it returns an object
    # whose total_tokens field carries the count.
    response = self.client.count_tokens(text)
    return response.total_tokens
- On this page
- client()
- count_tokens(text)
Could this page be better? Report a problem or suggest an addition!