fix linting errors

Krrish Dholakia 2023-09-22 15:04:18 -07:00
parent e62b584439
commit 4f7a35cbef
2 changed files with 4 additions and 4 deletions


@@ -702,19 +702,19 @@ def get_replicate_completion_pricing(completion_response=None, total_time=0.0):
     return a100_80gb_price_per_second_public*total_time
 
-def token_counter(model="", text=None, messages: Optional[list]=None):
+def token_counter(model="", text=None, messages: Optional[List] = None):
     # Args:
     # text: raw text string passed to model
     # messages: Optional, alternative to passing in text. List of Dicts passed to completion, messages = [{"role": "user", "content": "hello"}]
     # use tiktoken or anthropic's tokenizer depending on the model
     if text == None:
-        if messages != None:
+        if messages is not None:
             text = " ".join([message["content"] for message in messages])
         else:
             raise ValueError("text and messages cannot both be None")
     num_tokens = 0
-    if model != None:
+    if model is not None:
         # cohere
         if model in litellm.cohere_models:
             tokenizer = Tokenizer.from_pretrained("Cohere/command-nightly")
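
The substance of this hunk is the lint fix: equality comparisons against None ("!= None") become identity checks ("is not None"), and the signature's annotation switches from the builtin list to typing.List (this assumes List is imported from typing elsewhere in the file). As a hedged illustration, not code from this repository, the sketch below shows why linters such as flake8 (rule E711) prefer identity checks: a class can override __eq__ so that comparing with "== None" gives a misleading answer, while "is None" / "is not None" always compare object identity.

# Minimal sketch (not from the litellm codebase) of the rule behind this lint fix:
# a custom __eq__ can make "== None" / "!= None" return misleading results,
# while "is None" / "is not None" always compare object identity.
class AlwaysEqual:
    def __eq__(self, other):
        return True  # claims equality with everything, including None

obj = AlwaysEqual()
print(obj == None)      # True  -- misleading; flake8 flags this as E711
print(obj is None)      # False -- correct identity check
print(obj is not None)  # True  -- the pattern adopted in this commit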


@@ -1,6 +1,6 @@
 [tool.poetry]
 name = "litellm"
-version = "0.1.733"
+version = "0.1.734"
 description = "Library to easily interface with LLM API providers"
 authors = ["BerriAI"]
 license = "MIT License"