Code Quality Improvement - remove tokenizers/ from /llms (#7163)

* move tokenizers out of /llms

* use updated tokenizers location

* fix test_google_secret_manager_read_in_memory
Ishaan Jaff 2024-12-10 23:50:15 -08:00 committed by GitHub
parent 350cfc36f7
commit 3055d9b81c
8 changed files with 4 additions and 4 deletions

@@ -6,12 +6,12 @@ try:
     # New and recommended way to access resources
     from importlib import resources
 
-    filename = str(resources.files(litellm).joinpath("llms/tokenizers"))
+    filename = str(resources.files(litellm).joinpath("litellm_core_utils/tokenizers"))
 except (ImportError, AttributeError):
     # Old way to access resources, which setuptools deprecated some time ago
     import pkg_resources  # type: ignore
 
-    filename = pkg_resources.resource_filename(__name__, "llms/tokenizers")
+    filename = pkg_resources.resource_filename(__name__, "litellm_core_utils/tokenizers")
 
 os.environ["TIKTOKEN_CACHE_DIR"] = os.getenv(
     "CUSTOM_TIKTOKEN_CACHE_DIR", filename

@@ -135,7 +135,7 @@ from litellm.types.utils import (
     Usage,
 )
 
-with resources.open_text("litellm.llms.tokenizers", "anthropic_tokenizer.json") as f:
+with resources.open_text("litellm.litellm_core_utils.tokenizers", "anthropic_tokenizer.json") as f:
     json_data = json.load(f)
 # Convert to str (if necessary)
 claude_json_str = json.dumps(json_data)
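
For reference, JSON loaded this way is typically fed into a Hugging Face `tokenizers` tokenizer. A hedged sketch of that usage follows, assuming the `tokenizers` package is installed; the `Tokenizer.from_str` step illustrates one way the serialized string can be consumed and is not something shown in this diff.

```python
# Hedged sketch: load the relocated Anthropic tokenizer JSON from the new
# package path and build a Hugging Face tokenizers.Tokenizer from it.
# Tokenizer.from_str() is an assumed downstream consumer, not part of the hunk.
import json
from importlib import resources

from tokenizers import Tokenizer

with resources.open_text(
    "litellm.litellm_core_utils.tokenizers", "anthropic_tokenizer.json"
) as f:
    json_data = json.load(f)

claude_json_str = json.dumps(json_data)  # serialize back to a string
claude_tokenizer = Tokenizer.from_str(claude_json_str)
print(claude_tokenizer.encode("Hello, Claude!").ids)
```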

@@ -231,7 +231,7 @@ def test_google_secret_manager_read_in_memory():
     from litellm.secret_managers.google_secret_manager import GoogleSecretManager
 
-    # load_vertex_ai_credentials()
+    load_vertex_ai_credentials()
     os.environ["GOOGLE_SECRET_MANAGER_PROJECT_ID"] = "adroit-crow-413218"
     secret_manager = GoogleSecretManager()
     secret_manager.cache.cache_dict["UNIQUE_KEY"] = None