(fix) proxy - streaming sagemaker

ishaan-jaff 2024-01-23 11:12:16 -08:00
parent 505544340b
commit 67dddc94d9
2 changed files with 19 additions and 9 deletions

@@ -1672,11 +1672,16 @@ async def completion(
             "stream" in data and data["stream"] == True
         ):  # use generate_responses to stream responses
             custom_headers = {"x-litellm-model-id": model_id}
-            return StreamingResponse(
-                async_data_generator(
-                    user_api_key_dict=user_api_key_dict,
-                    response=response,
-                ),
+            stream_content = async_data_generator(
+                user_api_key_dict=user_api_key_dict,
+                response=response,
+            )
+            if response.custom_llm_provider == "sagemaker":
+                stream_content = data_generator(
+                    response=response,
+                )
+            return StreamingResponse(
+                stream_content,
                 media_type="text/event-stream",
                 headers=custom_headers,
             )
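
Why the branch: at the time of this commit, litellm's SageMaker streaming wrapper only supported plain synchronous iteration, so its chunks cannot be consumed with "async for" inside async_data_generator; the fix routes that provider through the synchronous data_generator instead. A minimal sketch of the two generators' shape, assuming each chunk is emitted as a server-sent-events "data:" line (the real proxy versions also hook in logging and error handling):

    import json

    def data_generator(response):
        # Synchronous generator: for providers (here, SageMaker) whose
        # streaming response only supports plain iteration.
        for chunk in response:
            yield f"data: {json.dumps(chunk.dict())}\n\n"

    async def async_data_generator(response, user_api_key_dict):
        # Asynchronous generator: for providers whose streaming response
        # supports "async for", consumed directly on the event loop.
        async for chunk in response:
            yield f"data: {json.dumps(chunk.dict())}\n\n"
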
@@ -1834,11 +1839,16 @@ async def chat_completion(
             "stream" in data and data["stream"] == True
         ):  # use generate_responses to stream responses
             custom_headers = {"x-litellm-model-id": model_id}
-            return StreamingResponse(
-                async_data_generator(
-                    user_api_key_dict=user_api_key_dict,
-                    response=response,
-                ),
+            stream_content = async_data_generator(
+                user_api_key_dict=user_api_key_dict,
+                response=response,
+            )
+            if response.custom_llm_provider == "sagemaker":
+                stream_content = data_generator(
+                    response=response,
+                )
+            return StreamingResponse(
+                stream_content,
                 media_type="text/event-stream",
                 headers=custom_headers,
             )
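
The same StreamingResponse(stream_content, ...) call works for either branch because Starlette accepts both sync and async iterables and drives sync iterators from a threadpool. A self-contained sketch of that behavior (route names are illustrative):

    from fastapi import FastAPI
    from fastapi.responses import StreamingResponse

    app = FastAPI()

    def sync_chunks():
        # Plain generator: Starlette iterates it in a threadpool.
        for i in range(3):
            yield f"data: chunk-{i}\n\n"

    async def async_chunks():
        # Async generator: consumed directly on the event loop.
        for i in range(3):
            yield f"data: chunk-{i}\n\n"

    @app.get("/sync")
    def stream_sync():
        return StreamingResponse(sync_chunks(), media_type="text/event-stream")

    @app.get("/async")
    def stream_async():
        return StreamingResponse(async_chunks(), media_type="text/event-stream")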

@@ -4,7 +4,7 @@ const openai = require('openai');
 process.env.DEBUG=false;
 async function runOpenAI() {
   const client = new openai.OpenAI({
-    apiKey: 'sk-yPX56TDqBpr23W7ruFG3Yg',
+    apiKey: 'sk-JkKeNi6WpWDngBsghJ6B9g',
     baseURL: 'http://0.0.0.0:8000'
   });
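
The fixed path can also be exercised from Python with the OpenAI SDK pointed at the proxy; the API key and the model alias below are placeholders for whatever the proxy config maps to a SageMaker endpoint:

    from openai import OpenAI

    client = OpenAI(api_key="sk-...", base_url="http://0.0.0.0:8000")

    # stream=True sends the request down the StreamingResponse path above
    stream = client.chat.completions.create(
        model="my-sagemaker-model",  # placeholder proxy model alias
        messages=[{"role": "user", "content": "Say hi"}],
        stream=True,
    )
    for chunk in stream:
        delta = chunk.choices[0].delta.content
        if delta:
            print(delta, end="", flush=True)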