Mirror of https://github.com/BerriAI/litellm.git (synced 2025-04-27 19:54:13 +00:00)
Fix #1119, no content when streaming.
commit 3b643676d9
parent aa649a8599
1 changed file with 1 addition and 0 deletions
@@ -221,6 +221,7 @@ async def ollama_async_streaming(url, data, model_response, encoding, logging_obj):
 
 
 async def ollama_acompletion(url, data, model_response, encoding, logging_obj):
     data["stream"] = False
     try:
         timeout = aiohttp.ClientTimeout(total=600)  # 10 minutes
         async with aiohttp.ClientSession(timeout=timeout) as session:
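The hunk above only captures context lines from the neighbouring ollama_acompletion function, which disables streaming and sends the request through an aiohttp session with a 10-minute timeout. Below is a minimal, self-contained sketch of that non-streaming request pattern; the endpoint URL, payload fields, and response handling are assumptions for illustration, not litellm's actual implementation.

# Sketch of the non-streaming request pattern visible in the hunk's context
# lines. Everything beyond those lines (endpoint path, payload shape, response
# handling) is assumed for illustration and is not the real litellm code.
import asyncio
import aiohttp


async def ollama_acompletion_sketch(url: str, data: dict) -> dict:
    # As in the diff context: force non-streaming mode and cap the request
    # at 10 minutes.
    data["stream"] = False
    timeout = aiohttp.ClientTimeout(total=600)  # 10 minutes
    async with aiohttp.ClientSession(timeout=timeout) as session:
        # Assumed: with "stream" set to False, the Ollama endpoint returns a
        # single JSON body rather than a stream of chunks.
        async with session.post(url, json=data) as resp:
            resp.raise_for_status()
            return await resp.json()


if __name__ == "__main__":
    # Hypothetical local Ollama endpoint and payload, for illustration only.
    payload = {"model": "llama2", "prompt": "Hello"}
    result = asyncio.run(
        ollama_acompletion_sketch("http://localhost:11434/api/generate", payload)
    )
    print(result.get("response", ""))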