fix(main.py): fix linting errors

This commit is contained in:
Krrish Dholakia 2023-11-27 19:11:38 -08:00
parent 5450fb26f4
commit 91e8ff439d
2 changed files with 3 additions and 2 deletions

View file

@@ -2108,6 +2108,7 @@ def stream_chunk_builder(chunks: list, messages: Optional[list]=None):
     # Extract the "content" strings from the nested dictionaries within "choices"
     content_list = []
     combined_content = ""
+    combined_arguments = ""
     if "tool_calls" in chunks[0]["choices"][0]["delta"] and chunks[0]["choices"][0]["delta"]["tool_calls"] is not None:
         argument_list = []

View file

@@ -87,7 +87,7 @@ class Router:
         self.routing_strategy = routing_strategy
         self.fallbacks = fallbacks or litellm.fallbacks
         self.context_window_fallbacks = context_window_fallbacks or litellm.context_window_fallbacks
-        self.model_exception_map = {} # dict to store model: list exceptions. self.exceptions = {"gpt-3.5": ["API KEY Error", "Rate Limit Error", "good morning error"]}
+        self.model_exception_map: dict = {} # dict to store model: list exceptions. self.exceptions = {"gpt-3.5": ["API KEY Error", "Rate Limit Error", "good morning error"]}
         # make Router.chat.completions.create compatible for openai.chat.completions.create
         self.chat = litellm.Chat(params=default_litellm_params)
@@ -364,7 +364,7 @@ class Router:
             original_exception = e
             ### CHECK IF RATE LIMIT / CONTEXT WINDOW ERROR
             if ((isinstance(original_exception, litellm.ContextWindowExceededError) and context_window_fallbacks is None)
-                or (openai.RateLimitError and fallbacks is not None)):
+                or (isinstance(original_exception, openai.RateLimitError) and fallbacks is not None)):
                 raise original_exception
             ### RETRY
             for current_attempt in range(num_retries):