Merge pull request #1952 from BerriAI/litellm_aioboto3_sagemaker

Implements aioboto3 for sagemaker
Krish Dholakia 2024-02-14 21:47:22 -08:00 committed by GitHub
commit a801728037
9 changed files with 203 additions and 75 deletions
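The change replaces the blocking boto3 call to the SageMaker Runtime with aioboto3, so an async request no longer ties up the event loop. A minimal sketch of that pattern, assuming aioboto3's async client context manager (the endpoint name, region, and payload shape are placeholders, not litellm's actual implementation):

import json
import aioboto3

async def invoke_sagemaker_async(endpoint_name: str, prompt: str) -> str:
    # aioboto3 mirrors the boto3 client API, but clients are async context managers.
    session = aioboto3.Session()
    async with session.client("sagemaker-runtime", region_name="us-west-2") as client:
        response = await client.invoke_endpoint(
            EndpointName=endpoint_name,
            ContentType="application/json",
            Body=json.dumps({"inputs": prompt}),
        )
        # With aioboto3 the response body is an async stream, so read() is awaited.
        raw = await response["Body"].read()
        return raw.decode("utf-8")

Because aioboto3 keeps the same call surface as boto3, the handler logic stays close to the synchronous version; the difference is that client creation and the body read become awaitable.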


@@ -264,6 +264,7 @@ async def acompletion(
 or custom_llm_provider == "ollama"
 or custom_llm_provider == "ollama_chat"
 or custom_llm_provider == "vertex_ai"
+or custom_llm_provider == "sagemaker"
 or custom_llm_provider in litellm.openai_compatible_providers
 ): # currently implemented aiohttp calls for just azure, openai, hf, ollama, vertex ai soon all.
 init_response = await loop.run_in_executor(None, func_with_context)
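With "sagemaker" added to this provider check, acompletion can route SageMaker calls through the async code path. A hedged usage sketch (the sagemaker/ model prefix follows litellm's provider-prefix convention; the endpoint name is a placeholder):

import asyncio
import litellm

async def main():
    # Routed through the async sagemaker path; "my-endpoint" is a placeholder.
    response = await litellm.acompletion(
        model="sagemaker/my-endpoint",
        messages=[{"role": "user", "content": "Hello"}],
    )
    print(response.choices[0].message.content)

asyncio.run(main())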
@@ -1553,6 +1554,7 @@ def completion(
 logger_fn=logger_fn,
 encoding=encoding,
 logging_obj=logging,
+acompletion=acompletion,
 )
 if (
 "stream" in optional_params and optional_params["stream"] == True
@@ -1560,7 +1562,7 @@
 print_verbose(f"ENTERS SAGEMAKER CUSTOMSTREAMWRAPPER")
 from .llms.sagemaker import TokenIterator
-tokenIterator = TokenIterator(model_response)
+tokenIterator = TokenIterator(model_response, acompletion=acompletion)
 response = CustomStreamWrapper(
 completion_stream=tokenIterator,
 model=model,
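The acompletion flag passed to TokenIterator lets the streaming wrapper pull tokens from an async response body rather than a blocking one. A simplified sketch of that dual sync/async iterator idea, not the exact litellm class:

class DualModeTokenIterator:
    """Illustrative only: yields decoded chunks from a sync or async byte stream."""

    def __init__(self, stream, acompletion: bool = False):
        self.stream = stream
        self.acompletion = acompletion

    def __iter__(self):
        # Sync path: the stream is a regular iterable of byte chunks.
        for chunk in self.stream:
            yield chunk.decode("utf-8")

    async def __aiter__(self):
        # Async path: the stream is an async iterable (e.g. an aiobotocore body).
        async for chunk in self.stream:
            yield chunk.decode("utf-8")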