fix(utils.py): fix select tokenizer for custom tokenizer (#7599)

* fix(utils.py): fix select tokenizer for custom tokenizer

* fix(router.py): fix 'utils/token_counter' endpoint
Krish Dholakia 2025-01-07 22:37:09 -08:00 committed by GitHub
parent 04eb718f7a
commit 07c5f136f1
4 changed files with 16 additions and 7 deletions

@@ -10,13 +10,21 @@ model_list:
       api_key: os.environ/OPENAI_API_KEY
   - model_name: chatbot_actions
     litellm_params:
-      model: langfuse/openai-gpt-3.5-turbo
+      model: langfuse/azure/gpt-4o
+      api_base: "os.environ/AZURE_API_BASE"
+      api_key: "os.environ/AZURE_API_KEY"
       tpm: 1000000
       prompt_id: "jokes"
-  - model_name: openai-gpt-3.5-turbo
+  - model_name: openai-deepseek
     litellm_params:
-      model: openai/gpt-3.5-turbo
+      model: deepseek/deepseek-chat
       api_key: os.environ/OPENAI_API_KEY
+    model_info:
+      access_groups: ["restricted-models"]
+      custom_tokenizer:
+        identifier: deepseek-ai/DeepSeek-V3-Base
+        revision: main
+        auth_token: os.environ/HUGGINGFACE_API_KEY
 litellm_settings:
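
Note: the custom_tokenizer block above is the config that flows into _select_tokenizer as a CustomHuggingfaceTokenizer (see the utils.py hunk below). A minimal sketch of that shape; the field comments are assumptions inferred from the config keys shown here:

import os
from typing import Optional, TypedDict

# Sketch of the dict shape _select_tokenizer receives; mirrors the
# identifier/revision/auth_token keys in the YAML above.
class CustomHuggingfaceTokenizer(TypedDict):
    identifier: str            # Hugging Face Hub repo id
    revision: str              # git revision on the Hub, e.g. "main"
    auth_token: Optional[str]  # token for gated/private repos

custom_tokenizer: CustomHuggingfaceTokenizer = {
    "identifier": "deepseek-ai/DeepSeek-V3-Base",
    "revision": "main",
    "auth_token": os.environ.get("HUGGINGFACE_API_KEY"),
}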

@@ -5606,6 +5606,7 @@ async def token_counter(request: TokenCountRequest):
     _tokenizer_used = litellm.utils._select_tokenizer(
         model=model_to_use, custom_tokenizer=custom_tokenizer
    )
+    tokenizer_used = str(_tokenizer_used["type"])
     total_tokens = token_counter(
         model=model_to_use,

@@ -4379,7 +4379,7 @@ class Router:
             pass
         ## GET LITELLM MODEL INFO - raises exception, if model is not mapped
-        if not model.startswith(custom_llm_provider):
+        if not model.startswith("{}/".format(custom_llm_provider)):
             model_info_name = "{}/{}".format(custom_llm_provider, model)
         else:
             model_info_name = model
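
To see why the trailing slash matters: a bare prefix check false-matches any model whose name merely begins with the provider string, so the provider prefix was never added and the model-info lookup failed. A minimal sketch of the before/after behavior, using deepseek-chat as an assumed example:

custom_llm_provider = "deepseek"
model = "deepseek-chat"

# Old check: "deepseek-chat".startswith("deepseek") is True, so the
# model was left unprefixed and looked up as "deepseek-chat".
assert model.startswith(custom_llm_provider)

# New check: only skip prefixing when the model is already namespaced
# as "<provider>/<model>".
if not model.startswith("{}/".format(custom_llm_provider)):
    model_info_name = "{}/{}".format(custom_llm_provider, model)
else:
    model_info_name = model

print(model_info_name)  # -> "deepseek/deepseek-chat"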

@@ -1278,12 +1278,12 @@ def _select_tokenizer(
     model: str, custom_tokenizer: Optional[CustomHuggingfaceTokenizer] = None
 ):
     if custom_tokenizer is not None:
-        custom_tokenizer = Tokenizer.from_pretrained(
-            custom_tokenizer["identifier"],
+        _tokenizer = create_pretrained_tokenizer(
+            identifier=custom_tokenizer["identifier"],
             revision=custom_tokenizer["revision"],
             auth_token=custom_tokenizer["auth_token"],
         )
-        return {"type": "huggingface_tokenizer", "tokenizer": custom_tokenizer}
+        return _tokenizer
     return _select_tokenizer_helper(model=model)
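
After the fix, the custom-tokenizer branch delegates to create_pretrained_tokenizer, which returns the {"type": ..., "tokenizer": ...} dict shape the rest of the code expects, so callers like the token_counter endpoint can read result["type"]. A minimal usage sketch, assuming Hub access and a valid HUGGINGFACE_API_KEY:

import os
from litellm.utils import _select_tokenizer

result = _select_tokenizer(
    model="deepseek/deepseek-chat",
    custom_tokenizer={
        "identifier": "deepseek-ai/DeepSeek-V3-Base",
        "revision": "main",
        "auth_token": os.environ.get("HUGGINGFACE_API_KEY"),
    },
)
print(result["type"])  # expected: "huggingface_tokenizer"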