fix(utils.py): if openai model, don't check hf tokenizers

Krrish Dholakia 2024-08-12 16:28:22 -07:00
parent e9c88952b9
commit a8644d8a7d
2 changed files with 21 additions and 1 deletion


@@ -1610,10 +1610,17 @@ def _select_tokenizer(model: str):
     # default - tiktoken
     else:
         tokenizer = None
+        if (
+            model in litellm.open_ai_chat_completion_models
+            or model in litellm.open_ai_text_completion_models
+            or model in litellm.open_ai_embedding_models
+        ):
+            return {"type": "openai_tokenizer", "tokenizer": encoding}
+
         try:
             tokenizer = Tokenizer.from_pretrained(model)
             return {"type": "huggingface_tokenizer", "tokenizer": tokenizer}
-        except:
+        except Exception:
             return {"type": "openai_tokenizer", "tokenizer": encoding}