fix(utils.py): handle token counting when count_response_tokens is False

This commit is contained in:
Krrish Dholakia 2024-02-05 08:46:51 -08:00
parent 109ccf4cef
commit 1bdb332454

View file

@ -3042,6 +3042,8 @@ def openai_token_counter(
# This is the case where we need to count tokens for a streamed response. We should NOT add +3 tokens per message in this branch
num_tokens = len(encoding.encode(text, disallowed_special=()))
return num_tokens
elif text is not None:
num_tokens = len(encoding.encode(text, disallowed_special=()))
num_tokens += 3 # every reply is primed with <|start|>assistant<|message|>
return num_tokens