LiteLLM Minor Fixes & Improvements (10/02/2024) (#6023)

* feat(together_ai/completion): handle together ai completion calls
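
For context, a minimal sketch of the kind of together_ai completion call this handles (the model id is illustrative and TOGETHERAI_API_KEY is assumed to be set):

import litellm

# Illustrative model id; any "together_ai/"-prefixed model routes through the
# together_ai completion handler.
response = litellm.completion(
    model="together_ai/togethercomputer/llama-2-70b-chat",
    messages=[{"role": "user", "content": "Hello!"}],
)
print(response.choices[0].message.content)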

* fix: handle list of int / list of list of int for text completion calls
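
A sketch of what that covers, assuming an OpenAI-style text-completion model; the token ids are made up:

import litellm

# prompt may be a plain string, a single token-id list, or a list of token-id lists
response = litellm.text_completion(
    model="gpt-3.5-turbo-instruct",
    prompt=[[1212, 318, 257, 1332], [318, 428, 257, 1332]],
)
print(response.choices[0].text)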

* fix(utils.py): check if base model in bedrock converse model list

Fixes https://github.com/BerriAI/litellm/issues/6003
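
The behaviour this enables (see the utils.py hunk further down); the cross-region id is the one exercised in #6018, and the assumption is that _get_base_model strips the region prefix:

import litellm

base_model = litellm.AmazonConverseConfig()._get_base_model(
    "us.anthropic.claude-3-haiku-20240307-v1:0"
)
# expected: "anthropic.claude-3-haiku-20240307-v1:0", which is in the converse list
assert base_model in litellm.BEDROCK_CONVERSE_MODELS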

* test(test_optional_params.py): add unit tests for bedrock optional param mapping

Fixes https://github.com/BerriAI/litellm/issues/6003
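
A sketch of the shape of those tests (the test name, max_tokens value, and exact assertion are illustrative, not copied from the test file):

from litellm.utils import get_optional_params

def test_bedrock_cross_region_optional_params():
    optional_params = get_optional_params(
        model="us.anthropic.claude-3-haiku-20240307-v1:0",
        custom_llm_provider="bedrock",
        max_tokens=20,
    )
    # converse-style param mapping should apply to the cross-region id as well
    assert optional_params.get("maxTokens") == 20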

* feat(utils.py): enable passing dummy tool call for anthropic/bedrock calls if tool_use blocks exist

Fixes https://github.com/BerriAI/litellm/issues/5388
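
The scenario this guards against, roughly: a request whose message history contains tool_use blocks but which passes no tools itself. The tool name, call id, and model below are hypothetical:

import litellm

messages = [
    {"role": "user", "content": "What's the weather in SF?"},
    {
        "role": "assistant",
        "content": None,
        "tool_calls": [
            {
                "id": "call_1",  # hypothetical call id
                "type": "function",
                "function": {"name": "get_weather", "arguments": '{"city": "SF"}'},
            }
        ],
    },
    {"role": "tool", "tool_call_id": "call_1", "content": "64 degrees"},
    {"role": "user", "content": "Thanks, summarize that."},
]

# No `tools` passed here; litellm now injects a dummy tool so Anthropic/Bedrock
# accept the tool_use history instead of erroring.
response = litellm.completion(
    model="anthropic/claude-3-haiku-20240307",
    messages=messages,
)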

* fixed an issue with tool use of claude models with anthropic and bedrock (#6013)

* fix(utils.py): handle empty schema for anthropic/bedrock

Fixes https://github.com/BerriAI/litellm/issues/6012
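
For example, a tool defined with an empty parameters schema (the tool name and model are hypothetical) no longer breaks the anthropic/bedrock transforms:

import litellm

tools = [
    {
        "type": "function",
        "function": {
            "name": "get_current_time",  # hypothetical tool
            "description": "Returns the current time",
            "parameters": {},  # empty schema previously caused the failure
        },
    }
]

response = litellm.completion(
    model="anthropic/claude-3-haiku-20240307",
    messages=[{"role": "user", "content": "What time is it?"}],
    tools=tools,
)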

* fix: fix linting errors

* fix: fix linting errors

* fix: fix linting errors

* fix(proxy_cli.py): fix import route for app + health checks path (#6026)

* (testing): Enable testing us.anthropic.claude-3-haiku-20240307-v1:0. (#6018)
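
For reference, a minimal sketch of the call path that test exercises (AWS credentials/region are assumed to be configured via the usual env vars):

import litellm

response = litellm.completion(
    model="bedrock/us.anthropic.claude-3-haiku-20240307-v1:0",
    messages=[{"role": "user", "content": "Hey!"}],
)
print(response.choices[0].message.content)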

* fix(proxy_cli.py): fix import route for app + health checks gettysburg.wav

Fixes https://github.com/BerriAI/litellm/issues/5999

---------

Co-authored-by: David Manouchehri <david.manouchehri@ai.moda>

---------

Co-authored-by: Ved Patwardhan <54766411+vedpatwardhan@users.noreply.github.com>
Co-authored-by: David Manouchehri <david.manouchehri@ai.moda>

Krish Dholakia, 2024-10-02 22:00:28 -04:00 (committed by GitHub)
parent 8995ff49ae
commit 14165d3648
20 changed files with 443 additions and 125 deletions


@@ -80,6 +80,7 @@ from litellm.types.llms.openai import (
     AllMessageValues,
     ChatCompletionNamedToolChoiceParam,
     ChatCompletionToolParam,
+    ChatCompletionToolParamFunctionChunk,
 )
 from litellm.types.utils import FileTypes  # type: ignore
 from litellm.types.utils import (
@@ -3360,7 +3361,8 @@ def get_optional_params(
         supported_params = get_supported_openai_params(
             model=model, custom_llm_provider=custom_llm_provider
         )
-        if model in litellm.BEDROCK_CONVERSE_MODELS:
+        base_model = litellm.AmazonConverseConfig()._get_base_model(model)
+        if base_model in litellm.BEDROCK_CONVERSE_MODELS:
             _check_valid_arg(supported_params=supported_params)
             optional_params = litellm.AmazonConverseConfig().map_openai_params(
                 model=model,
@@ -9255,3 +9257,24 @@ def process_response_headers(response_headers: Union[httpx.Headers, dict]) -> dict:
         **additional_headers,
     }
     return additional_headers
+
+
+def add_dummy_tool(custom_llm_provider: str) -> List[ChatCompletionToolParam]:
+    """
+    Prevent Anthropic from raising error when tool_use block exists but no tools are provided.
+
+    Relevant Issues: https://github.com/BerriAI/litellm/issues/5388, https://github.com/BerriAI/litellm/issues/5747
+    """
+    return [
+        ChatCompletionToolParam(
+            type="function",
+            function=ChatCompletionToolParamFunctionChunk(
+                name="dummy-tool",
+                description="This is a dummy tool call",  # provided to satisfy bedrock constraint.
+                parameters={
+                    "type": "object",
+                    "properties": {},
+                },
+            ),
+        )
+    ]
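
A minimal usage sketch of the new helper, assuming it is exposed from litellm.utils as added in this hunk:

from litellm.utils import add_dummy_tool

# Providers that reject tool_use history without any tools get a placeholder
# tool definition; "bedrock" is one such provider per the commit message.
tools = add_dummy_tool(custom_llm_provider="bedrock")
print(tools[0]["function"]["name"])  # -> "dummy-tool"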