LiteLLM Minor Fixes & Improvements (09/25/2024) (#5893)

* fix(langfuse.py): support new langfuse prompt_chat class init params

* fix(langfuse.py): handle new init values on prompt chat + prompt text templates

Fixes an error raised during Langfuse logging (a compatibility sketch follows the diff below)

* docs(openai_compatible.md): clarify that the `openai/` prefix handles correct routing for the `/v1/completions` route (usage sketch after this list)

Fixes https://github.com/BerriAI/litellm/issues/5876

* fix(utils.py): handle optional-param translation for unmapped Gemini models (see the drop_params sketch after this list)

Fixes https://github.com/BerriAI/litellm/issues/5888

* fix(o1_transformation.py): fix o1 validation so that temperature=1 no longer raises an error (example after this list)

Fixes https://github.com/BerriAI/litellm/issues/5884

* fix(prisma_client.py): refresh the IAM token (illustrative sketch after this list)

Fixes https://github.com/BerriAI/litellm/issues/5896

* fix: pass drop_params where required

* fix(utils.py): pass drop_params correctly

* fix(types/vertex_ai.py): fix generation config

* test(test_max_completion_tokens.py): fix test

* fix(vertex_and_google_ai_studio_gemini.py): fix `map_openai_params`
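
For the `openai/` routing item above, a minimal usage sketch; the model name, endpoint, and key here are placeholders, not values from this commit:

```python
import litellm

# The `openai/` prefix selects LiteLLM's OpenAI-compatible client, which
# sends text-completion models to the /v1/completions route.
response = litellm.text_completion(
    model="openai/my-text-model",         # hypothetical completions model
    prompt="Say hello",
    api_base="http://localhost:8000/v1",  # hypothetical OpenAI-compatible server
    api_key="sk-placeholder",
)
print(response.choices[0].text)
```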
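
For the Gemini optional-param and drop_params items, a sketch of the intended behavior, assuming `logit_bias` as an example of a parameter Gemini does not map:

```python
import litellm

# With drop_params=True, OpenAI-style params the target provider does not
# support (including on an unmapped Gemini model) are dropped instead of
# raising an unsupported-params error.
response = litellm.completion(
    model="gemini/gemini-1.5-flash",
    messages=[{"role": "user", "content": "hi"}],
    logit_bias={"1234": 10},  # unsupported on Gemini -> silently dropped
    drop_params=True,
)
```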
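
For the o1 validation fix, a minimal sketch of the behavior it enables:

```python
import litellm

# o1 models only support the default temperature of 1, so explicitly
# passing temperature=1 should be accepted, not rejected as unsupported.
response = litellm.completion(
    model="o1-preview",
    messages=[{"role": "user", "content": "hi"}],
    temperature=1,
)
```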
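
For the IAM token refresh, the underlying idea is that RDS IAM auth tokens are short-lived (roughly 15 minutes), so a long-lived proxy must mint a fresh token before reconnecting. A standalone boto3 sketch of that idea, not the code from prisma_client.py:

```python
import boto3

def fresh_rds_iam_token(host: str, port: int, user: str, region: str) -> str:
    # Tokens from generate_db_auth_token expire after ~15 minutes, so the
    # database client should regenerate one on each reconnect.
    client = boto3.client("rds", region_name=region)
    return client.generate_db_auth_token(
        DBHostname=host, Port=port, DBUsername=user, Region=region
    )
```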

Krish Dholakia authored 2024-09-26 16:41:44 -07:00 (committed by GitHub)
parent ed5635e9a2
commit 0a03f2f11e
22 changed files with 755 additions and 292 deletions


@@ -1,6 +1,7 @@
 #### What this does ####
 # On success, logs events to Langfuse
 import copy
+import inspect
 import os
 import traceback
@@ -676,21 +677,37 @@ def _add_prompt_to_generation_params(
         elif "version" in user_prompt and "prompt" in user_prompt:
             # prompts
             if isinstance(user_prompt["prompt"], str):
-                _prompt_obj = Prompt_Text(
-                    name=user_prompt["name"],
-                    prompt=user_prompt["prompt"],
-                    version=user_prompt["version"],
-                    config=user_prompt.get("config", None),
+                prompt_text_params = getattr(
+                    Prompt_Text, "model_fields", Prompt_Text.__fields__
                 )
+                _data = {
+                    "name": user_prompt["name"],
+                    "prompt": user_prompt["prompt"],
+                    "version": user_prompt["version"],
+                    "config": user_prompt.get("config", None),
+                }
+                if "labels" in prompt_text_params and "tags" in prompt_text_params:
+                    _data["labels"] = user_prompt.get("labels", []) or []
+                    _data["tags"] = user_prompt.get("tags", []) or []
+                _prompt_obj = Prompt_Text(**_data)  # type: ignore
                 generation_params["prompt"] = TextPromptClient(prompt=_prompt_obj)
             elif isinstance(user_prompt["prompt"], list):
-                _prompt_obj = Prompt_Chat(
-                    name=user_prompt["name"],
-                    prompt=user_prompt["prompt"],
-                    version=user_prompt["version"],
-                    config=user_prompt.get("config", None),
+                prompt_chat_params = getattr(
+                    Prompt_Chat, "model_fields", Prompt_Chat.__fields__
                 )
+                _data = {
+                    "name": user_prompt["name"],
+                    "prompt": user_prompt["prompt"],
+                    "version": user_prompt["version"],
+                    "config": user_prompt.get("config", None),
+                }
+                if "labels" in prompt_chat_params and "tags" in prompt_chat_params:
+                    _data["labels"] = user_prompt.get("labels", []) or []
+                    _data["tags"] = user_prompt.get("tags", []) or []
+                _prompt_obj = Prompt_Chat(**_data)  # type: ignore
                 generation_params["prompt"] = ChatPromptClient(prompt=_prompt_obj)
             else:
                 verbose_logger.error(
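
The `getattr(..., "model_fields", ...__fields__)` pattern above keeps the logger working across pydantic and Langfuse SDK versions: pydantic v2 exposes `model_fields`, v1 only `__fields__`, and the `labels`/`tags` kwargs are forwarded only when the installed prompt class declares them. A minimal standalone sketch of the same pattern, with `PromptModel` as a stand-in for the Langfuse classes:

```python
from pydantic import BaseModel

class PromptModel(BaseModel):  # stand-in for langfuse's Prompt_Text / Prompt_Chat
    name: str
    labels: list = []

# Look up the field map under pydantic v2 (`model_fields`) or v1
# (`__fields__`), then forward `labels` only if the class declares it.
fields = getattr(PromptModel, "model_fields", PromptModel.__fields__)
data = {"name": "greeting"}
if "labels" in fields:
    data["labels"] = ["production"]
prompt = PromptModel(**data)
print(prompt)
```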