HumanLoop integration for Prompt Management (#7479)

* feat(humanloop.py): initial commit for humanloop prompt management integration

Closes https://github.com/BerriAI/litellm/issues/213

* feat(humanloop.py): working e2e humanloop prompt management integration

Closes https://github.com/BerriAI/litellm/issues/213

* fix(humanloop.py): fix linting errors

* fix: fix linting error

* fix: fix test

* test: handle FileNotFoundError
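
For orientation, here is a minimal sketch of how the finished integration is typically invoked, assuming litellm's usual prompt-management convention (a `humanloop/` model prefix plus `prompt_id`/`prompt_variables` kwargs). The parameter names, placeholder prompt ID, and credentials are assumptions for illustration, not taken from this diff:

```python
# Assumed usage of the new Humanloop prompt-management integration; the
# "humanloop/" model prefix and the prompt_id / prompt_variables kwargs follow
# litellm's prompt-management convention, but are not confirmed by this diff.
import os

import litellm

os.environ["HUMANLOOP_API_KEY"] = "hl_..."  # placeholder credentials
os.environ["OPENAI_API_KEY"] = "sk-..."

response = litellm.completion(
    model="humanloop/gpt-4o-mini",    # prefix routes through Humanloop prompt management
    prompt_id="my-saved-prompt",      # hypothetical prompt stored in Humanloop
    prompt_variables={"topic": "prompt management"},  # substituted into the saved template
    messages=[{"role": "user", "content": "Hello!"}],
)
print(response.choices[0].message.content)
```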
Krish Dholakia, 2024-12-30 22:26:03 -08:00 (committed by GitHub)
parent 347779b813
commit 41e5b3aa8d
9 changed files with 310 additions and 39 deletions

@@ -923,44 +923,7 @@ def completion( # type: ignore # noqa: PLR0915
         assistant_continue_message=assistant_continue_message,
     )
     ######## end of unpacking kwargs ###########
-    openai_params = [
-        "functions",
-        "function_call",
-        "temperature",
-        "temperature",
-        "top_p",
-        "n",
-        "stream",
-        "stream_options",
-        "stop",
-        "max_completion_tokens",
-        "modalities",
-        "prediction",
-        "audio",
-        "max_tokens",
-        "presence_penalty",
-        "frequency_penalty",
-        "logit_bias",
-        "user",
-        "request_timeout",
-        "api_base",
-        "api_version",
-        "api_key",
-        "deployment_id",
-        "organization",
-        "base_url",
-        "default_headers",
-        "timeout",
-        "response_format",
-        "seed",
-        "tools",
-        "tool_choice",
-        "max_retries",
-        "parallel_tool_calls",
-        "logprobs",
-        "top_logprobs",
-        "extra_headers",
-    ]
+    openai_params = litellm.OPENAI_CHAT_COMPLETION_PARAMS
     default_params = openai_params + all_litellm_params
     litellm_params = {} # used to prevent unbound var errors
     non_default_params = {
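
The hunk's net effect: the 38-entry inline list (which included a duplicated "temperature") is replaced by the shared `litellm.OPENAI_CHAT_COMPLETION_PARAMS` constant. A runnable sketch of how that constant plausibly feeds the parameter split visible in the context lines — the `split_params` helper and the filtering logic are illustrative assumptions, not a copy of the file:

```python
# Sketch of the kwargs split in completion() after this change (assumed logic).
import litellm

def split_params(**kwargs):
    # The PR swaps the 38-entry inline list for this shared constant.
    openai_params = litellm.OPENAI_CHAT_COMPLETION_PARAMS
    # In the real function, all_litellm_params contributes litellm's own kwargs;
    # an empty list is used here purely to keep the sketch self-contained.
    all_litellm_params: list = []
    default_params = openai_params + all_litellm_params
    # Anything litellm does not recognize is treated as a provider-specific
    # param and passed straight through (assumed from the visible context).
    return {k: v for k, v in kwargs.items() if k not in default_params}

print(split_params(temperature=0.2, top_k=40))  # {'top_k': 40}: top_k is not an OpenAI chat param
```

Centralizing the list in one constant keeps the supported OpenAI chat params in sync across call sites and quietly removes the duplicate entry the old inline list carried.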