fix(ollama_chat.py): fix default token counting for ollama chat

This commit is contained in:
Krrish Dholakia 2024-01-24 20:09:08 -08:00
parent 3e59a02dfb
commit 43f139fafd
2 changed files with 15 additions and 6 deletions

View file

@@ -2872,8 +2872,13 @@ def token_counter(
         print_verbose(
             f"Token Counter - using generic token counter, for model={model}"
         )
-        enc = tokenizer_json["tokenizer"].encode(text)
-        num_tokens = len(enc)
+        num_tokens = openai_token_counter(
+            text=text,  # type: ignore
+            model="gpt-3.5-turbo",
+            messages=messages,
+            is_tool_call=is_tool_call,
+            count_response_tokens=count_response_tokens,
+        )
     else:
         num_tokens = len(encoding.encode(text))  # type: ignore
     return num_tokens