mirror of
https://github.com/BerriAI/litellm.git
synced 2025-04-25 10:44:24 +00:00
Litellm dev 2024 12 19 p3 (#7322)
All checks were successful
Read Version from pyproject.toml / read-version (push) Successful in 13s
All checks were successful
Read Version from pyproject.toml / read-version (push) Successful in 13s
* fix(utils.py): remove unsupported optional params (if drop_params=True) before passing into map openai params Fixes https://github.com/BerriAI/litellm/issues/7242 * test: new test for langfuse prompt management hook Addresses https://github.com/BerriAI/litellm/issues/3893#issuecomment-2549080296 * feat(main.py): add 'get_chat_completion_prompt' custom logger hook allows for langfuse prompt management Addresses https://github.com/BerriAI/litellm/issues/3893#issuecomment-2549080296 * feat(langfuse_prompt_management.py): working e2e langfuse prompt management works with `langfuse/` route * feat(main.py): initial tracing for dynamic langfuse params allows admin to specify langfuse keys by model in model_list * feat(main.py): support passing langfuse credentials dynamically * fix(langfuse_prompt_management.py): create langfuse client based on dynamic callback params allows dynamic langfuse params to work * fix: fix linting errors * docs(prompt_management.md): refactor docs for sdk + proxy prompt management tutorial * docs(prompt_management.md): cleanup doc * docs: cleanup topnav * docs(prompt_management.md): update docs to be easier to use * fix: remove unused imports * docs(prompt_management.md): add architectural overview doc * fix(litellm_logging.py): fix dynamic param passing * fix(langfuse_prompt_management.py): fix linting errors * fix: fix linting errors * fix: use typing_extensions for TypeAlias to ensure python3.8 compatibility * test: use stream_options in test to account for tiktoken diff * fix: improve import error message, and run check test earlier
This commit is contained in:
parent
2c36f25ae1
commit
27a4d08604
17 changed files with 648 additions and 260 deletions
|
@ -2442,6 +2442,23 @@ def _remove_strict_from_schema(schema):
|
|||
return schema
|
||||
|
||||
|
||||
def _remove_unsupported_params(
|
||||
non_default_params: dict, supported_openai_params: Optional[List[str]]
|
||||
) -> dict:
|
||||
"""
|
||||
Remove unsupported params from non_default_params
|
||||
"""
|
||||
remove_keys = []
|
||||
if supported_openai_params is None:
|
||||
return {} # no supported params, so no optional openai params to send
|
||||
for param in non_default_params.keys():
|
||||
if param not in supported_openai_params:
|
||||
remove_keys.append(param)
|
||||
for key in remove_keys:
|
||||
non_default_params.pop(key, None)
|
||||
return non_default_params
|
||||
|
||||
|
||||
def get_optional_params( # noqa: PLR0915
|
||||
# use the openai defaults
|
||||
# https://platform.openai.com/docs/api-reference/chat/create
|
||||
|
@ -2688,11 +2705,13 @@ def get_optional_params( # noqa: PLR0915
|
|||
# Always keeps this in elif code blocks
|
||||
else:
|
||||
unsupported_params[k] = non_default_params[k]
|
||||
|
||||
if unsupported_params:
|
||||
if litellm.drop_params is True or (
|
||||
drop_params is not None and drop_params is True
|
||||
):
|
||||
pass
|
||||
for k in unsupported_params.keys():
|
||||
non_default_params.pop(k, None)
|
||||
else:
|
||||
raise UnsupportedParamsError(
|
||||
status_code=500,
|
||||
|
|
Loading…
Add table
Add a link
Reference in a new issue