From 5feb7781f5f4412e28383a165e2487680757b8cb Mon Sep 17 00:00:00 2001
From: Ayush Agrawal
Date: Mon, 3 Nov 2025 15:28:47 -0800
Subject: [PATCH] chore: export LocalTokenizer to allow usage

PiperOrigin-RevId: 827674020
---
 .github/workflows/import.yml | 2 +-
 google/genai/__init__.py     | 3 ++-
 2 files changed, 3 insertions(+), 2 deletions(-)

diff --git a/.github/workflows/import.yml b/.github/workflows/import.yml
index 8a110616b..ec309e952 100644
--- a/.github/workflows/import.yml
+++ b/.github/workflows/import.yml
@@ -27,7 +27,7 @@ jobs:
       - name: Install dependencies
         run: |
           python -m pip install --upgrade pip
-          pip install setuptools wheel
+          pip install setuptools wheel sentencepiece protobuf
           pip install pytest
           pip install .

diff --git a/google/genai/__init__.py b/google/genai/__init__.py
index c3462ccbd..386cae784 100644
--- a/google/genai/__init__.py
+++ b/google/genai/__init__.py
@@ -18,8 +18,9 @@
 from . import types
 from . import version
 from .client import Client
+from .local_tokenizer import LocalTokenizer

 __version__ = version.__version__

-__all__ = ['Client']
+__all__ = ['Client', 'LocalTokenizer']