Mirror of https://github.com/BerriAI/litellm.git (synced 2025-04-25 02:34:29 +00:00)
fix(openai.py): initial commit adding generic event type for openai responses api streaming
Ensures handling of undocumented event types, e.g. "response.reasoning_summary_part.added".
parent ebfff975d4
commit 6581712dba
3 changed files with 32 additions and 1 deletion
litellm/types/llms/openai.py

@@ -50,7 +50,7 @@ from openai.types.responses.response_create_params import (
     ToolParam,
 )
 from openai.types.responses.response_function_tool_call import ResponseFunctionToolCall
-from pydantic import BaseModel, Discriminator, Field, PrivateAttr
+from pydantic import BaseModel, ConfigDict, Discriminator, Field, PrivateAttr
 from typing_extensions import Annotated, Dict, Required, TypedDict, override

 from litellm.types.llms.base import BaseLiteLLMOpenAIResponseObject
@@ -1013,6 +1013,9 @@ class ResponsesAPIStreamEvents(str, Enum):
     RESPONSE_FAILED = "response.failed"
     RESPONSE_INCOMPLETE = "response.incomplete"

+    # Part added
+    RESPONSE_PART_ADDED = "response.reasoning_summary_part.added"
+
     # Output item events
     OUTPUT_ITEM_ADDED = "response.output_item.added"
     OUTPUT_ITEM_DONE = "response.output_item.done"
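Because ResponsesAPIStreamEvents subclasses both str and Enum, the new member compares equal to the raw "type" string that arrives on the wire. A minimal sketch of that property, assuming the enum is importable from litellm.types.llms.openai (the same module the test file imports GenericEvent from):

from litellm.types.llms.openai import ResponsesAPIStreamEvents

# str-valued enum members compare equal to their wire representation,
# so a raw SSE "type" field can be checked against the enum directly.
assert ResponsesAPIStreamEvents.RESPONSE_PART_ADDED == "response.reasoning_summary_part.added"
assert "response.reasoning_summary_part.added" in {e.value for e in ResponsesAPIStreamEvents}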
@@ -1200,6 +1203,12 @@ class ErrorEvent(BaseLiteLLMOpenAIResponseObject):
     param: Optional[str]


+class GenericEvent(BaseLiteLLMOpenAIResponseObject):
+    type: str
+
+    model_config = ConfigDict(extra="allow", protected_namespaces=())
+
+
 # Union type for all possible streaming responses
 ResponsesAPIStreamingResponse = Annotated[
     Union[
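The commit title says the generic type backs the Responses API streaming handling in openai.py, but that handler is not part of the hunks shown here. The sketch below illustrates the fallback pattern GenericEvent enables; parse_responses_stream_chunk is a hypothetical name for this example, not LiteLLM's actual function.

from typing import Any, Dict, Union

from litellm.types.llms.openai import GenericEvent, ResponsesAPIStreamEvents


def parse_responses_stream_chunk(chunk: Dict[str, Any]) -> Union[GenericEvent, Dict[str, Any]]:
    # Route a raw Responses API SSE payload by its "type" field.
    known_types = {e.value for e in ResponsesAPIStreamEvents}
    if chunk.get("type") not in known_types:
        # Undocumented event types no longer fail; extra="allow" keeps
        # every field the API sent on the resulting model.
        return GenericEvent(**chunk)
    return chunk  # a real handler would build the matching typed event model here


unknown = {"type": "response.some_future_event", "delta": "partial text"}
event = parse_responses_stream_chunk(unknown)
assert isinstance(event, GenericEvent)
print(event.type, event.delta)  # both fields survive on the fallback model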
tests/litellm/types/llms/test_types_llms_openai.py (new file, 21 lines)

@@ -0,0 +1,21 @@
+import asyncio
+import os
+import sys
+from typing import Optional
+from unittest.mock import AsyncMock, patch
+
+import pytest
+
+sys.path.insert(0, os.path.abspath("../../.."))
+import json
+
+import litellm
+
+
+def test_generic_event():
+    from litellm.types.llms.openai import GenericEvent
+
+    event = {"type": "test", "test": "test"}
+    event = GenericEvent(**event)
+    assert event.type == "test"
+    assert event.test == "test"
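The new test can be run with pytest tests/litellm/types/llms/test_types_llms_openai.py and only covers a toy payload. The illustrative companion below (not part of the commit) exercises the same fallback with a payload shaped like the event type named in the commit message; every field other than "type" is made up for the example, not taken from the OpenAI spec.

def test_generic_event_preserves_unknown_fields():
    # Illustrative companion test: undocumented payload fields survive
    # validation because GenericEvent uses extra="allow".
    from litellm.types.llms.openai import GenericEvent

    payload = {
        "type": "response.reasoning_summary_part.added",
        "item_id": "rs_123",  # field names besides "type" are hypothetical
        "part": {"type": "summary_text", "text": ""},
    }
    event = GenericEvent(**payload)
    assert event.type == "response.reasoning_summary_part.added"
    assert event.part["type"] == "summary_text"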
@@ -470,3 +470,4 @@ class TestOpenAIGPT4OAudioTranscription(BaseLLMAudioTranscriptionTest):

     def get_custom_llm_provider(self) -> litellm.LlmProviders:
         return litellm.LlmProviders.OPENAI
+