mirror of https://github.com/BerriAI/litellm.git (synced 2025-04-25 18:54:30 +00:00)
complete transform litellm to responses api
This commit is contained in:
parent f6080fcc0b
commit 4e2b04a1e0
3 changed files with 83 additions and 3 deletions
@@ -9,11 +9,13 @@ from litellm.types.llms.openai import (
     ChatCompletionSystemMessage,
     ChatCompletionUserMessage,
     GenericChatCompletionMessage,
+    ResponseAPIUsage,
     ResponseInputParam,
     ResponsesAPIOptionalRequestParams,
     ResponsesAPIResponse,
 )
-from litellm.types.utils import ModelResponse
+from litellm.types.responses.main import GenericResponseOutputItem, OutputText
+from litellm.types.utils import Choices, Message, ModelResponse, Usage
 
 
 class LiteLLMCompletionResponsesConfig:
@@ -147,7 +149,10 @@ class LiteLLMCompletionResponsesConfig:
             ),
             instructions=getattr(chat_completion_response, "instructions", None),
             metadata=getattr(chat_completion_response, "metadata", None),
-            output=getattr(chat_completion_response, "output", []),
+            output=LiteLLMCompletionResponsesConfig._transform_chat_completion_choices_to_responses_output(
+                chat_completion_response=chat_completion_response,
+                choices=getattr(chat_completion_response, "choices", []),
+            ),
             parallel_tool_calls=getattr(
                 chat_completion_response, "parallel_tool_calls", False
             ),
@@ -168,3 +173,43 @@ class LiteLLMCompletionResponsesConfig:
             usage=getattr(chat_completion_response, "usage", None),
             user=getattr(chat_completion_response, "user", None),
         )
+
+    @staticmethod
+    def _transform_chat_completion_choices_to_responses_output(
+        chat_completion_response: ModelResponse,
+        choices: List[Choices],
+    ) -> List[GenericResponseOutputItem]:
+        responses_output: List[GenericResponseOutputItem] = []
+        for choice in choices:
+            responses_output.append(
+                GenericResponseOutputItem(
+                    id=chat_completion_response.id,
+                    status=choice.finish_reason,
+                    role=choice.message.role,
+                    content=[
+                        LiteLLMCompletionResponsesConfig._transform_chat_message_to_response_output_text(
+                            choice.message
+                        )
+                    ],
+                )
+            )
+        return responses_output
+
+    @staticmethod
+    def _transform_chat_message_to_response_output_text(
+        message: Message,
+    ) -> OutputText:
+        return OutputText(
+            type="text",
+            text=message.content,
+        )
+
+    @staticmethod
+    def _transform_chat_completion_usage_to_responses_usage(
+        usage: Usage,
+    ) -> ResponseAPIUsage:
+        return ResponseAPIUsage(
+            input_tokens=usage.prompt_tokens,
+            output_tokens=usage.completion_tokens,
+            total_tokens=usage.total_tokens,
+        )
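For illustration only (not part of the commit): a minimal, dependency-free sketch of the mapping the new helpers perform. The dataclasses below are hypothetical stand-ins for litellm's GenericResponseOutputItem, OutputText, and ResponseAPIUsage models (the real types come from litellm.types.responses.main and litellm.types.llms.openai, per the imports in the diff), and plain dicts stand in for a chat-completion response; only the field names visible in the diff are assumed.

from dataclasses import dataclass
from typing import List

# Hypothetical stand-ins for litellm's models; real code would use the
# imports shown in the diff above.
@dataclass
class OutputText:
    type: str
    text: str

@dataclass
class GenericResponseOutputItem:
    id: str
    status: str
    role: str
    content: List[OutputText]

@dataclass
class ResponseAPIUsage:
    input_tokens: int
    output_tokens: int
    total_tokens: int

def choices_to_responses_output(
    response_id: str, choices: List[dict]
) -> List[GenericResponseOutputItem]:
    # Mirrors _transform_chat_completion_choices_to_responses_output: one
    # output item per choice, reusing the completion id and mapping
    # finish_reason -> status, message.role -> role, message.content -> text.
    return [
        GenericResponseOutputItem(
            id=response_id,
            status=choice["finish_reason"],
            role=choice["message"]["role"],
            content=[OutputText(type="text", text=choice["message"]["content"])],
        )
        for choice in choices
    ]

def usage_to_responses_usage(usage: dict) -> ResponseAPIUsage:
    # Mirrors _transform_chat_completion_usage_to_responses_usage:
    # prompt/completion token counts become input/output token counts.
    return ResponseAPIUsage(
        input_tokens=usage["prompt_tokens"],
        output_tokens=usage["completion_tokens"],
        total_tokens=usage["total_tokens"],
    )

if __name__ == "__main__":
    choices = [{"finish_reason": "stop",
                "message": {"role": "assistant", "content": "Hello!"}}]
    print(choices_to_responses_output("chatcmpl-123", choices))
    print(usage_to_responses_usage(
        {"prompt_tokens": 9, "completion_tokens": 3, "total_tokens": 12}))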