Mirror of https://github.com/BerriAI/litellm.git
Merge pull request #1556 from BerriAI/litellm_importlib_issue

fix(utils.py): move from pkg_resources to importlib

Commit b1cced16fc · 2 changed files with 7 additions and 11 deletions
Changed files:
- litellm/llms/tokenizers/__init__.py (new, empty file)
- litellm/utils.py
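The new, empty `__init__.py` is what makes the lookup in the diff below work: `importlib.resources` resolves resources relative to an importable package, so `litellm/llms/tokenizers/` must be a package rather than a bare data directory. A minimal sketch of the pattern, assuming a hypothetical helper `read_json_resource` that is not part of litellm:

```python
import json
from importlib import resources


def read_json_resource(package: str, name: str) -> dict:
    # Hypothetical helper: open a JSON file bundled inside `package`.
    # Requires `package` to be importable, i.e. to contain __init__.py.
    with resources.open_text(package, name) as f:
        return json.load(f)


# Mirrors the call introduced in the diff below:
# read_json_resource("litellm.llms.tokenizers", "anthropic_tokenizer.json")
```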
litellm/utils.py
@@ -2505,24 +2505,20 @@ def get_replicate_completion_pricing(completion_response=None, total_time=0.0):
 def _select_tokenizer(model: str):
-    # cohere
-    import pkg_resources
+    from importlib import resources
 
     if model in litellm.cohere_models:
+        # cohere
         tokenizer = Tokenizer.from_pretrained("Cohere/command-nightly")
         return {"type": "huggingface_tokenizer", "tokenizer": tokenizer}
     # anthropic
     elif model in litellm.anthropic_models:
-        # Read the JSON file
-        filename = pkg_resources.resource_filename(
-            __name__, "llms/tokenizers/anthropic_tokenizer.json"
-        )
-        with open(filename, "r") as f:
+        with resources.open_text(
+            "litellm.llms.tokenizers", "anthropic_tokenizer.json"
+        ) as f:
             json_data = json.load(f)
-        # Decode the JSON data from utf-8
-        json_data_decoded = json.dumps(json_data, ensure_ascii=False)
-        # Convert to str
-        json_str = str(json_data_decoded)
+        # Convert to str (if necessary)
+        json_str = json.dumps(json_data)
         # load tokenizer
         tokenizer = Tokenizer.from_str(json_str)
         return {"type": "huggingface_tokenizer", "tokenizer": tokenizer}
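For readers who want to try the new code path outside the diff, below is a minimal, self-contained sketch of `_select_tokenizer` after this change. It assumes the `tokenizers` package is installed; `COHERE_MODELS` and `ANTHROPIC_MODELS` are placeholders standing in for `litellm.cohere_models` and `litellm.anthropic_models`, which are not shown on this page.

```python
# Minimal sketch of _select_tokenizer after the pkg_resources -> importlib
# migration. Assumes `pip install tokenizers`; the model lists below are
# placeholders for litellm.cohere_models / litellm.anthropic_models.
import json
from importlib import resources

from tokenizers import Tokenizer

COHERE_MODELS = ["command-nightly"]  # placeholder
ANTHROPIC_MODELS = ["claude-2"]      # placeholder


def _select_tokenizer(model: str):
    if model in COHERE_MODELS:
        # cohere: fetch the tokenizer from the Hugging Face hub
        tokenizer = Tokenizer.from_pretrained("Cohere/command-nightly")
        return {"type": "huggingface_tokenizer", "tokenizer": tokenizer}
    elif model in ANTHROPIC_MODELS:
        # anthropic: read the tokenizer JSON bundled with the package
        with resources.open_text(
            "litellm.llms.tokenizers", "anthropic_tokenizer.json"
        ) as f:
            json_data = json.load(f)
        # re-serialize to a string, since Tokenizer.from_str expects one
        json_str = json.dumps(json_data)
        tokenizer = Tokenizer.from_str(json_str)
        return {"type": "huggingface_tokenizer", "tokenizer": tokenizer}
```

As a side note on the standard library rather than on this commit: `importlib.resources.open_text` was deprecated in Python 3.11 in favor of the `files()` API, so a later revision of this code might read the resource via `resources.files("litellm.llms.tokenizers").joinpath("anthropic_tokenizer.json")` instead.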