Mirror of https://github.com/BerriAI/litellm.git, synced 2025-04-25 18:54:30 +00:00
Merge pull request #5343 from BerriAI/litellm_sagemaker_chat
feat(sagemaker.py): add sagemaker messages api support
This commit is contained in: commit cd61ddc610
6 changed files with 112 additions and 17 deletions
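
With this change, an OpenAI-style messages payload can be routed to a SageMaker endpoint by prefixing the model name with "sagemaker_chat/". A minimal usage sketch follows; the endpoint name, region, and credential values are placeholders for illustration, not values taken from this PR:

    import os
    import litellm

    # boto3 reads AWS credentials from the environment (see the comment in the diff below).
    os.environ["AWS_ACCESS_KEY_ID"] = "<your-access-key>"        # placeholder
    os.environ["AWS_SECRET_ACCESS_KEY"] = "<your-secret-key>"    # placeholder
    os.environ["AWS_REGION_NAME"] = "us-west-2"                  # placeholder

    # "sagemaker_chat/..." selects the new messages-API path;
    # "sagemaker/..." keeps the original prompt-based behaviour.
    response = litellm.completion(
        model="sagemaker_chat/my-chat-endpoint",  # hypothetical endpoint name
        messages=[{"role": "user", "content": "Hello!"}],
    )
    print(response.choices[0].message.content)
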
@@ -383,6 +383,7 @@ async def acompletion(
             or custom_llm_provider == "vertex_ai_beta"
             or custom_llm_provider == "gemini"
             or custom_llm_provider == "sagemaker"
+            or custom_llm_provider == "sagemaker_chat"
             or custom_llm_provider == "anthropic"
             or custom_llm_provider == "predibase"
             or custom_llm_provider == "bedrock"
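
The custom_llm_provider string compared in this block comes from litellm's model-prefix resolution. A quick sanity-check sketch, assuming get_llm_provider handles the new prefix like other providers and returns its usual 4-tuple (this is an assumption about behaviour elsewhere in the PR, not shown in this hunk):

    from litellm import get_llm_provider

    # Resolve the provider from a hypothetical "sagemaker_chat/" model string.
    model, provider, api_key, api_base = get_llm_provider("sagemaker_chat/my-chat-endpoint")
    assert provider == "sagemaker_chat"
    assert model == "my-chat-endpoint"
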
@@ -2248,7 +2249,10 @@ def completion(
             ## RESPONSE OBJECT
             response = model_response
-        elif custom_llm_provider == "sagemaker":
+        elif (
+            custom_llm_provider == "sagemaker"
+            or custom_llm_provider == "sagemaker_chat"
+        ):
             # boto3 reads keys from .env
             model_response = sagemaker_llm.completion(
                 model=model,
@@ -2263,6 +2267,9 @@ def completion(
                 encoding=encoding,
                 logging_obj=logging,
                 acompletion=acompletion,
+                use_messages_api=(
+                    True if custom_llm_provider == "sagemaker_chat" else False
+                ),
             )
             if optional_params.get("stream", False):
                 ## LOGGING
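
The use_messages_api flag tells the SageMaker handler whether to forward the chat messages as-is or fall back to the classic single-prompt format. A rough illustration of the two request shapes; the exact field names are an assumption based on common SageMaker TGI and messages-API conventions, not taken from this diff:

    messages = [{"role": "user", "content": "Hello!"}]

    # use_messages_api=True ("sagemaker_chat"): OpenAI-style chat payload.
    chat_payload = {"messages": messages, "max_tokens": 256}

    # use_messages_api=False ("sagemaker"): flattened prompt payload.
    prompt_payload = {"inputs": "Hello!", "parameters": {"max_new_tokens": 256}}
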