mirror of
https://github.com/BerriAI/litellm.git
synced 2025-04-25 18:54:30 +00:00
fix(utils.py): azure streaming initial format
This commit is contained in:
parent
ae7731b4dc
commit
4348fd6435
1 changed files with 23 additions and 14 deletions
|
@@ -18,7 +18,7 @@ import tiktoken
|
||||||
import uuid
|
import uuid
|
||||||
import aiohttp
|
import aiohttp
|
||||||
import logging
|
import logging
|
||||||
import asyncio
|
import asyncio, httpx
|
||||||
import copy
|
import copy
|
||||||
from tokenizers import Tokenizer
|
from tokenizers import Tokenizer
|
||||||
from dataclasses import (
|
from dataclasses import (
|
||||||
|
@@ -4089,14 +4089,22 @@ def exception_type(
|
||||||
llm_provider=custom_llm_provider,
|
llm_provider=custom_llm_provider,
|
||||||
response=original_exception.response
|
response=original_exception.response
|
||||||
)
|
)
|
||||||
else:
|
else: # ensure generic errors always return APIConnectionError
|
||||||
exception_mapping_worked = True
|
exception_mapping_worked = True
|
||||||
raise APIConnectionError(
|
if hasattr(original_exception, "request"):
|
||||||
message=f"{str(original_exception)}",
|
raise APIConnectionError(
|
||||||
llm_provider=custom_llm_provider,
|
message=f"{str(original_exception)}",
|
||||||
model=model,
|
llm_provider=custom_llm_provider,
|
||||||
request=original_exception.request
|
model=model,
|
||||||
)
|
request=original_exception.request
|
||||||
|
)
|
||||||
|
else:
|
||||||
|
raise APIConnectionError(
|
||||||
|
message=f"{str(original_exception)}",
|
||||||
|
llm_provider=custom_llm_provider,
|
||||||
|
model=model,
|
||||||
|
request= httpx.Request(method="POST", url="https://api.openai.com/v1/") # stub the request
|
||||||
|
)
|
||||||
except Exception as e:
|
except Exception as e:
|
||||||
# LOGGING
|
# LOGGING
|
||||||
exception_logging(
|
exception_logging(
|
||||||
|
@@ -4400,10 +4408,11 @@ class CustomStreamWrapper:
|
||||||
elif chunk.startswith("data:"):
|
elif chunk.startswith("data:"):
|
||||||
data_json = json.loads(chunk[5:]) # chunk.startswith("data:"):
|
data_json = json.loads(chunk[5:]) # chunk.startswith("data:"):
|
||||||
try:
|
try:
|
||||||
text = data_json["choices"][0]["delta"].get("content", "")
|
if len(data_json["choices"]) > 0:
|
||||||
if data_json["choices"][0].get("finish_reason", None):
|
text = data_json["choices"][0]["delta"].get("content", "")
|
||||||
is_finished = True
|
if data_json["choices"][0].get("finish_reason", None):
|
||||||
finish_reason = data_json["choices"][0]["finish_reason"]
|
is_finished = True
|
||||||
|
finish_reason = data_json["choices"][0]["finish_reason"]
|
||||||
print_verbose(f"text: {text}; is_finished: {is_finished}; finish_reason: {finish_reason}")
|
print_verbose(f"text: {text}; is_finished: {is_finished}; finish_reason: {finish_reason}")
|
||||||
return {"text": text, "is_finished": is_finished, "finish_reason": finish_reason}
|
return {"text": text, "is_finished": is_finished, "finish_reason": finish_reason}
|
||||||
except:
|
except:
|
||||||
|
@@ -4725,7 +4734,7 @@ class CustomStreamWrapper:
|
||||||
e.message = str(e)
|
e.message = str(e)
|
||||||
# LOG FAILURE - handle streaming failure logging in the _next_ object, remove `handle_failure` once it's deprecated
|
# LOG FAILURE - handle streaming failure logging in the _next_ object, remove `handle_failure` once it's deprecated
|
||||||
threading.Thread(target=self.logging_obj.failure_handler, args=(e, traceback_exception)).start()
|
threading.Thread(target=self.logging_obj.failure_handler, args=(e, traceback_exception)).start()
|
||||||
return exception_type(model=self.model, custom_llm_provider=self.custom_llm_provider, original_exception=e)
|
raise exception_type(model=self.model, custom_llm_provider=self.custom_llm_provider, original_exception=e)
|
||||||
|
|
||||||
## needs to handle the empty string case (even starting chunk can be an empty string)
|
## needs to handle the empty string case (even starting chunk can be an empty string)
|
||||||
def __next__(self):
|
def __next__(self):
|
||||||
|
@@ -4746,7 +4755,7 @@ class CustomStreamWrapper:
|
||||||
raise # Re-raise StopIteration
|
raise # Re-raise StopIteration
|
||||||
except Exception as e:
|
except Exception as e:
|
||||||
# Handle other exceptions if needed
|
# Handle other exceptions if needed
|
||||||
pass
|
raise e
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
|
|
Loading…
Add table
Add a link
Reference in a new issue