Mirror of https://github.com/BerriAI/litellm.git, synced 2025-04-27 11:43:54 +00:00
fix(huggingface_restapi.py): return streamed response correctly
Parent: a7d0f19e6c
Commit: 9c2cce56f7
2 changed files with 3 additions and 6 deletions
huggingface_restapi.py:

@@ -631,11 +631,8 @@ class Huggingface(BaseLLM):
             logging_obj=logging_obj,
         )
 
-        async def generator():
-            async for transformed_chunk in streamwrapper:
-                yield transformed_chunk
-
-        return generator()
+        async for transformed_chunk in streamwrapper:
+            yield transformed_chunk
 
     def embedding(
         self,
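For readers less familiar with async generators, here is a minimal, self-contained sketch of the pattern this hunk adopts: instead of defining a nested generator() and returning it, the streaming method itself yields the transformed chunks, so callers can `async for` over its result directly. StreamWrapper and the fake chunk format below are illustrative stand-ins, not litellm's actual CustomStreamWrapper API.

import asyncio

class StreamWrapper:
    # Illustrative stand-in for a wrapper that transforms raw chunks into
    # response-shaped dicts; not litellm's CustomStreamWrapper.
    def __init__(self, raw_chunks):
        self._raw = raw_chunks

    def __aiter__(self):
        return self._gen()

    async def _gen(self):
        for chunk in self._raw:
            yield {"choices": [{"delta": {"content": chunk}}]}

async def async_streaming(raw_chunks):
    streamwrapper = StreamWrapper(raw_chunks)
    # Yield directly from the method, mirroring the `async for ... yield`
    # form added in this commit, instead of returning a nested generator().
    async for transformed_chunk in streamwrapper:
        yield transformed_chunk

async def main():
    async for chunk in async_streaming(["Hello", ", ", "world"]):
        print(chunk["choices"][0]["delta"]["content"], end="")
    print()

asyncio.run(main())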
Test file (sagemaker streaming tests):

@@ -899,7 +899,7 @@ async def test_sagemaker_streaming_async():
         pytest.fail(f"An exception occurred - {str(e)}")
 
 
-asyncio.run(test_sagemaker_streaming_async())
+# asyncio.run(test_sagemaker_streaming_async())
 
 
 def test_completion_sagemaker_stream():
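The test-file hunk only comments out a module-level asyncio.run(...) call so the coroutine no longer runs at import time. As a hedged sketch (assuming the pytest-asyncio plugin, which this test suite may or may not use here), the async test would instead be awaited by the test runner, or driven from a synchronous wrapper; the test body below is a placeholder, not the real SageMaker test.

import asyncio
import pytest

@pytest.mark.asyncio  # assumes the pytest-asyncio plugin is installed
async def test_sagemaker_streaming_async():
    # Placeholder body standing in for the real SageMaker streaming assertions.
    try:
        await asyncio.sleep(0)  # pretend to await a streamed completion
    except Exception as e:
        pytest.fail(f"An exception occurred - {str(e)}")

def test_sagemaker_streaming_sync_wrapper():
    # Alternative: run the coroutine from a synchronous test instead of
    # calling asyncio.run(...) at module import time.
    asyncio.run(test_sagemaker_streaming_async())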