forked from phoenix/litellm-mirror
(fix) text_completion use correct finish reason
This commit is contained in:
parent db135aea4c
commit e70f588b87
1 changed file with 14 additions and 7 deletions
@@ -525,7 +525,7 @@ class TextChoices(OpenAIObject):
         if finish_reason:
             self.finish_reason = map_finish_reason(finish_reason)
         else:
-            self.finish_reason = "stop"
+            self.finish_reason = None
         self.index = index
         if text is not None:
             self.text = text
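
The behavioral point of this hunk: an unset finish_reason should stay None rather than silently becoming "stop", because OpenAI-style clients treat any non-null finish_reason as end-of-stream. A minimal, self-contained sketch of that contract, using plain dicts as a stand-in for litellm's TextChoices:

# Minimal sketch (assumption: plain dicts standing in for TextChoices).
# Mid-stream choices must carry finish_reason=None; only the final choice
# carries a real reason, otherwise clients would stop reading after chunk 1.
def make_choice(text, finish_reason=None):
    return {"text": text, "index": 0, "finish_reason": finish_reason}

stream = [make_choice("Hello"), make_choice(" world"), make_choice("", "stop")]
terminal = [c for c in stream if c["finish_reason"] is not None]
assert len(terminal) == 1 and terminal[0]["finish_reason"] == "stop"
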
@@ -1438,7 +1438,7 @@ class Logging:
                     )
                 if callback == "langfuse":
                     global langFuseLogger
-                    print_verbose("reaches langfuse for logging!")
+                    print_verbose("reaches Async langfuse for logging!")
                     kwargs = {}
                     for k, v in self.model_call_details.items():
                         if (
@@ -1450,7 +1450,9 @@ class Logging:
                 if "complete_streaming_response" not in kwargs:
                     return
                 else:
-                    print_verbose("reaches langfuse for streaming logging!")
+                    print_verbose(
+                        "reaches Async langfuse for streaming logging!"
+                    )
                     result = kwargs["complete_streaming_response"]
                 if langFuseLogger is None:
                     langFuseLogger = LangFuseLogger()
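
Aside from the reworded log line, this block relies on lazily constructing the global langFuseLogger the first time a langfuse callback fires. A hedged sketch of that lazy-singleton pattern, with a stub in place of the real LangFuseLogger:

# Sketch of the lazy-singleton pattern around langFuseLogger; StubLogger is
# a stand-in, not the real LangFuseLogger class.
_logger = None

class StubLogger:
    def log_event(self, **kwargs):
        print("logging:", kwargs)

def get_logger():
    global _logger
    if _logger is None:  # build once, on the first callback that needs it
        _logger = StubLogger()
    return _logger

assert get_logger() is get_logger()  # same instance reused across calls
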
@@ -1849,6 +1851,11 @@ def client(original_function):
            or call_type == CallTypes.aimage_generation.value
        ):
            messages = args[0] if len(args) > 0 else kwargs["prompt"]
+        elif (
+            call_type == CallTypes.atext_completion.value
+            or call_type == CallTypes.text_completion.value
+        ):
+            messages = args[0] if len(args) > 0 else kwargs["prompt"]
        stream = True if "stream" in kwargs and kwargs["stream"] == True else False
        logging_obj = Logging(
            model=model,
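
The new elif routes text_completion and atext_completion calls through the same prompt extraction already used above: take the first positional argument, else fall back to kwargs["prompt"]. A small sketch of that dispatch, under the assumption that CallTypes is a simple string-valued enum (the one below is a stand-in, not litellm's):

# Hedged sketch: CallTypes below is a stand-in enum, not litellm's.
from enum import Enum

class CallTypes(Enum):
    text_completion = "text_completion"
    atext_completion = "atext_completion"

def extract_prompt(call_type, args, kwargs):
    if call_type in (
        CallTypes.text_completion.value,
        CallTypes.atext_completion.value,
    ):
        # mirror the diff: positional arg wins, kwargs["prompt"] is the fallback
        return args[0] if len(args) > 0 else kwargs["prompt"]
    raise ValueError(f"unhandled call_type: {call_type}")

assert extract_prompt("text_completion", ("Say hi",), {}) == "Say hi"
assert extract_prompt("atext_completion", (), {"prompt": "Say hi"}) == "Say hi"
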
@@ -7361,16 +7368,16 @@ class TextCompletionStreamWrapper:
            response = TextCompletionResponse()
            response["id"] = chunk.get("id", None)
            response["object"] = "text_completion"
-            response["created"] = response.get("created", None)
-            response["model"] = response.get("model", None)
+            response["created"] = chunk.get("created", None)
+            response["model"] = chunk.get("model", None)
            text_choices = TextChoices()
            if isinstance(
                chunk, Choices
            ): # chunk should always be of type StreamingChoices
                raise Exception
            text_choices["text"] = chunk["choices"][0]["delta"]["content"]
-            text_choices["index"] = response["choices"][0]["index"]
-            text_choices["finish_reason"] = response["choices"][0]["finish_reason"]
+            text_choices["index"] = chunk["choices"][0]["index"]
+            text_choices["finish_reason"] = chunk["choices"][0]["finish_reason"]
            response["choices"] = [text_choices]
            return response
        except Exception as e:
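
This hunk is the core of the fix: created, model, index, and finish_reason are now read from the incoming chunk instead of the freshly created, still-empty response object, so the real finish reason finally propagates to text_completion output. A dict-based sketch of the translation (plain dicts in place of TextCompletionResponse and TextChoices):

# Hedged sketch using plain dicts instead of litellm's response classes.
def chunk_to_text_completion(chunk):
    choice = chunk["choices"][0]
    return {
        "id": chunk.get("id"),
        "object": "text_completion",
        "created": chunk.get("created"),  # previously read from the empty response
        "model": chunk.get("model"),
        "choices": [{
            "text": choice["delta"]["content"],
            "index": choice["index"],
            "finish_reason": choice["finish_reason"],  # the bug this commit fixes
        }],
    }

chunk = {
    "id": "cmpl-123",
    "created": 1700000000,
    "model": "gpt-3.5-turbo-instruct",
    "choices": [{"delta": {"content": "Hi"}, "index": 0, "finish_reason": None}],
}
assert chunk_to_text_completion(chunk)["choices"][0]["finish_reason"] is None
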