fix(openai.py): fix position of invalid_params param

This commit is contained in:
Krrish Dholakia 2024-08-10 09:52:27 -07:00
parent 1553f7fa48
commit dd2ea72cb4
3 changed files with 5 additions and 8 deletions

View file

@@ -964,9 +964,9 @@ class OpenAIChatCompletion(BaseLLM):
except openai.UnprocessableEntityError as e: except openai.UnprocessableEntityError as e:
## check if body contains unprocessable params - related issue https://github.com/BerriAI/litellm/issues/4800 ## check if body contains unprocessable params - related issue https://github.com/BerriAI/litellm/issues/4800
if litellm.drop_params is True or drop_params is True: if litellm.drop_params is True or drop_params is True:
invalid_params: List[str] = []
if e.body is not None and isinstance(e.body, dict) and e.body.get("detail"): # type: ignore if e.body is not None and isinstance(e.body, dict) and e.body.get("detail"): # type: ignore
detail = e.body.get("detail") # type: ignore detail = e.body.get("detail") # type: ignore
invalid_params: List[str] = []
if ( if (
isinstance(detail, List) isinstance(detail, List)
and len(detail) > 0 and len(detail) > 0
@@ -1096,9 +1096,9 @@ class OpenAIChatCompletion(BaseLLM):
except openai.UnprocessableEntityError as e: except openai.UnprocessableEntityError as e:
## check if body contains unprocessable params - related issue https://github.com/BerriAI/litellm/issues/4800 ## check if body contains unprocessable params - related issue https://github.com/BerriAI/litellm/issues/4800
if litellm.drop_params is True or drop_params is True: if litellm.drop_params is True or drop_params is True:
invalid_params: List[str] = []
if e.body is not None and isinstance(e.body, dict) and e.body.get("detail"): # type: ignore if e.body is not None and isinstance(e.body, dict) and e.body.get("detail"): # type: ignore
detail = e.body.get("detail") # type: ignore detail = e.body.get("detail") # type: ignore
invalid_params: List[str] = []
if ( if (
isinstance(detail, List) isinstance(detail, List)
and len(detail) > 0 and len(detail) > 0
@@ -1227,9 +1227,9 @@ class OpenAIChatCompletion(BaseLLM):
except openai.UnprocessableEntityError as e: except openai.UnprocessableEntityError as e:
## check if body contains unprocessable params - related issue https://github.com/BerriAI/litellm/issues/4800 ## check if body contains unprocessable params - related issue https://github.com/BerriAI/litellm/issues/4800
if litellm.drop_params is True or drop_params is True: if litellm.drop_params is True or drop_params is True:
invalid_params: List[str] = []
if e.body is not None and isinstance(e.body, dict) and e.body.get("detail"): # type: ignore if e.body is not None and isinstance(e.body, dict) and e.body.get("detail"): # type: ignore
detail = e.body.get("detail") # type: ignore detail = e.body.get("detail") # type: ignore
invalid_params: List[str] = []
if ( if (
isinstance(detail, List) isinstance(detail, List)
and len(detail) > 0 and len(detail) > 0

View file

@@ -243,13 +243,13 @@ def mistral_api_pt(messages):
if k not in special_keys: if k not in special_keys:
extra_args[k] = v extra_args[k] = v
texts = "" texts = ""
if isinstance(m["content"], list): if m.get("content", None) is not None and isinstance(m["content"], list):
for c in m["content"]: for c in m["content"]:
if c["type"] == "image_url": if c["type"] == "image_url":
return messages return messages
elif c["type"] == "text" and isinstance(c["text"], str): elif c["type"] == "text" and isinstance(c["text"], str):
texts += c["text"] texts += c["text"]
elif isinstance(m["content"], str): elif m.get("content", None) is not None and isinstance(m["content"], str):
texts = m["content"] texts = m["content"]
new_m = {"role": m["role"], "content": texts, **extra_args} new_m = {"role": m["role"], "content": texts, **extra_args}

View file

@@ -2,6 +2,3 @@ model_list:
- model_name: "*" - model_name: "*"
litellm_params: litellm_params:
model: "*" model: "*"
litellm_settings:
callbacks: ["openmeter"]