bump: version 1.7.13 → 1.7.14

Krrish Dholakia 2023-11-29 15:19:18 -08:00
parent 451851e6a4
commit 6c98715b94
3 changed files with 4 additions and 2 deletions

litellm/main.py

@@ -2212,6 +2212,7 @@ def stream_chunk_builder(chunks: list, messages: Optional[list]=None):
     try:
         response["usage"]["prompt_tokens"] = token_counter(model=model, messages=messages)
     except: # don't allow this failing to block a complete streaming response from being returned
+        print_verbose(f"token_counter failed, assuming prompt tokens is 0")
         response["usage"]["prompt_tokens"] = 0
     response["usage"]["completion_tokens"] = token_counter(model=model, text=completion_output)
     response["usage"]["total_tokens"] = response["usage"]["prompt_tokens"] + response["usage"]["completion_tokens"]

litellm/utils.py

@@ -1613,6 +1613,7 @@ def token_counter(model="", text=None, messages: Optional[List] = None):
     # use tiktoken, anthropic, cohere or llama2's tokenizer depending on the model
     if text == None:
         if messages is not None:
+            print_verbose(f"token_counter messages received: {messages}")
             text = "".join([message["content"] for message in messages])
         else:
             raise ValueError("text and messages cannot both be None")
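A short sketch of the two input paths through token_counter (placeholder model and strings, not from the commit): with messages, the contents are joined into one string before tokenizing; with neither argument, the ValueError above is raised.

    from litellm import token_counter

    # messages path: contents are concatenated, then tokenized with a
    # model-appropriate tokenizer (tiktoken, anthropic, cohere, or llama2)
    n_prompt = token_counter(
        model="gpt-3.5-turbo",
        messages=[{"role": "user", "content": "Hello, world"}],
    )

    # text path: tokenize a raw string directly
    n_text = token_counter(model="gpt-3.5-turbo", text="Hello, world")

    # token_counter(model="gpt-3.5-turbo")  # raises ValueError: both inputs None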

pyproject.toml

@@ -1,6 +1,6 @@
 [tool.poetry]
 name = "litellm"
-version = "1.7.13"
+version = "1.7.14"
 description = "Library to easily interface with LLM API providers"
 authors = ["BerriAI"]
 license = "MIT License"
@@ -27,7 +27,7 @@ requires = ["poetry-core"]
 build-backend = "poetry.core.masonry.api"
 
 [tool.commitizen]
-version = "1.7.13"
+version = "1.7.14"
 version_files = [
     "pyproject.toml:^version"
 ]
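
Note on the [tool.commitizen] block: version_files lists file:regex pairs that commitizen rewrites on a bump, so "pyproject.toml:^version" updates every line starting with "version" in this file. That is why both version strings above move from 1.7.13 to 1.7.14 together (e.g. when the bump is made with commitizen's cz bump command).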