From be9fa06da6b318ef8f00048cc29d9645864142f0 Mon Sep 17 00:00:00 2001
From: Krrish Dholakia
Date: Mon, 27 Nov 2023 19:11:38 -0800
Subject: [PATCH] fix(main.py): fix linting errors

---
 litellm/main.py   | 1 +
 litellm/router.py | 4 ++--
 2 files changed, 3 insertions(+), 2 deletions(-)

diff --git a/litellm/main.py b/litellm/main.py
index d4a3826bb6..f5777f0e0f 100644
--- a/litellm/main.py
+++ b/litellm/main.py
@@ -2108,6 +2108,7 @@ def stream_chunk_builder(chunks: list, messages: Optional[list]=None):
     # Extract the "content" strings from the nested dictionaries within "choices"
     content_list = []
     combined_content = ""
+    combined_arguments = ""
 
     if "tool_calls" in chunks[0]["choices"][0]["delta"] and chunks[0]["choices"][0]["delta"]["tool_calls"] is not None:
         argument_list = []
diff --git a/litellm/router.py b/litellm/router.py
index 24e2a50f0a..734cffce5e 100644
--- a/litellm/router.py
+++ b/litellm/router.py
@@ -87,7 +87,7 @@ class Router:
         self.routing_strategy = routing_strategy
         self.fallbacks = fallbacks or litellm.fallbacks
         self.context_window_fallbacks = context_window_fallbacks or litellm.context_window_fallbacks
-        self.model_exception_map = {} # dict to store model: list exceptions. self.exceptions = {"gpt-3.5": ["API KEY Error", "Rate Limit Error", "good morning error"]}
+        self.model_exception_map: dict = {} # dict to store model: list exceptions. self.exceptions = {"gpt-3.5": ["API KEY Error", "Rate Limit Error", "good morning error"]}
 
         # make Router.chat.completions.create compatible for openai.chat.completions.create
         self.chat = litellm.Chat(params=default_litellm_params)
@@ -364,7 +364,7 @@ class Router:
             original_exception = e
             ### CHECK IF RATE LIMIT / CONTEXT WINDOW ERROR
             if ((isinstance(original_exception, litellm.ContextWindowExceededError) and context_window_fallbacks is None)
-                or (openai.RateLimitError and fallbacks is not None)):
+                or (isinstance(original_exception, openai.RateLimitError) and fallbacks is not None)):
                 raise original_exception
             ### RETRY
             for current_attempt in range(num_retries):
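
Note on the fixes above. The main.py hunk initializes combined_arguments alongside
combined_content, presumably so the variable is bound on every code path; a linter
would otherwise flag a possible use-before-assignment when the chunks carry no tool
calls. The router.py hunk at line 364 fixes a truthiness bug: openai.RateLimitError
is a class object and is always truthy, so the old condition collapsed to
"fallbacks is not None" and re-raised every exception whenever fallbacks were
configured. A minimal standalone sketch of the pitfall (hypothetical names, not
litellm code):

    class RateLimitError(Exception):
        pass

    def should_reraise_buggy(exc: Exception, fallbacks) -> bool:
        # Bug: the class object RateLimitError is always truthy, so this
        # returns True for ANY exception type once fallbacks is set.
        return bool(RateLimitError and fallbacks is not None)

    def should_reraise_fixed(exc: Exception, fallbacks) -> bool:
        # Fix: test the type of the exception that was actually raised.
        return isinstance(exc, RateLimitError) and fallbacks is not None

    assert should_reraise_buggy(ValueError("boom"), fallbacks=["gpt-4"])      # wrong: re-raises
    assert not should_reraise_fixed(ValueError("boom"), fallbacks=["gpt-4"])  # correct: retries
    assert should_reraise_fixed(RateLimitError("429"), fallbacks=["gpt-4"])   # correct: re-raises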