Mirror of https://github.com/BerriAI/litellm.git (synced 2025-04-25 10:44:24 +00:00)
fix(ollama_chat.py): fix default token counting for ollama chat
This commit is contained in:
parent: 3e59a02dfb
commit: 43f139fafd
2 changed files with 15 additions and 6 deletions
@@ -2872,8 +2872,13 @@ def token_counter(
             print_verbose(
                 f"Token Counter - using generic token counter, for model={model}"
             )
-            enc = tokenizer_json["tokenizer"].encode(text)
-            num_tokens = len(enc)
+            num_tokens = openai_token_counter(
+                text=text,  # type: ignore
+                model="gpt-3.5-turbo",
+                messages=messages,
+                is_tool_call=is_tool_call,
+                count_response_tokens=count_response_tokens,
+            )
     else:
         num_tokens = len(encoding.encode(text))  # type: ignore
     return num_tokens
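
For context, a minimal usage sketch of the code path this hunk changes. The model name and message content below are illustrative, not taken from the commit, and the exact fallback behavior depends on the installed litellm version; the point is that the generic branch now routes through the OpenAI token counter (gpt-3.5-turbo encoding) instead of returning the raw length of tokenizer.encode(text).

# Sketch only: counts tokens for a chat model that has no dedicated tokenizer,
# which after this fix falls back to the OpenAI counter with the
# gpt-3.5-turbo encoding, so message lists are counted with chat-format
# overhead rather than a bare encode() length.
import litellm

num_tokens = litellm.token_counter(
    model="ollama_chat/llama2",  # illustrative model name
    messages=[{"role": "user", "content": "Write a haiku about token counting."}],
)
print(num_tokens)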