From 6581712dba6f46c04f75be54ea63a0df59a62b4b Mon Sep 17 00:00:00 2001
From: Krrish Dholakia
Date: Tue, 22 Apr 2025 08:39:03 -0700
Subject: [PATCH] fix(openai.py): initial commit adding generic event type for
 openai responses api streaming

Ensures handling for undocumented event types - e.g.
"response.reasoning_summary_part.added"
---
 litellm/types/llms/openai.py                  | 11 +++++++++-
 .../types/llms/test_types_llms_openai.py      | 21 +++++++++++++++++++
 tests/llm_translation/test_openai.py          |  1 +
 3 files changed, 32 insertions(+), 1 deletion(-)
 create mode 100644 tests/litellm/types/llms/test_types_llms_openai.py

diff --git a/litellm/types/llms/openai.py b/litellm/types/llms/openai.py
index 24aebf12af..1c4fc8527f 100644
--- a/litellm/types/llms/openai.py
+++ b/litellm/types/llms/openai.py
@@ -50,7 +50,7 @@ from openai.types.responses.response_create_params import (
     ToolParam,
 )
 from openai.types.responses.response_function_tool_call import ResponseFunctionToolCall
-from pydantic import BaseModel, Discriminator, Field, PrivateAttr
+from pydantic import BaseModel, ConfigDict, Discriminator, Field, PrivateAttr
 from typing_extensions import Annotated, Dict, Required, TypedDict, override
 
 from litellm.types.llms.base import BaseLiteLLMOpenAIResponseObject
@@ -1013,6 +1013,9 @@ class ResponsesAPIStreamEvents(str, Enum):
     RESPONSE_FAILED = "response.failed"
     RESPONSE_INCOMPLETE = "response.incomplete"
 
+    # Part added
+    RESPONSE_PART_ADDED = "response.reasoning_summary_part.added"
+
     # Output item events
     OUTPUT_ITEM_ADDED = "response.output_item.added"
     OUTPUT_ITEM_DONE = "response.output_item.done"
@@ -1200,6 +1203,12 @@ class ErrorEvent(BaseLiteLLMOpenAIResponseObject):
     param: Optional[str]
 
 
+class GenericEvent(BaseLiteLLMOpenAIResponseObject):
+    type: str
+
+    model_config = ConfigDict(extra="allow", protected_namespaces=())
+
+
 # Union type for all possible streaming responses
 ResponsesAPIStreamingResponse = Annotated[
     Union[
diff --git a/tests/litellm/types/llms/test_types_llms_openai.py b/tests/litellm/types/llms/test_types_llms_openai.py
new file mode 100644
index 0000000000..86c2cb3f1a
--- /dev/null
+++ b/tests/litellm/types/llms/test_types_llms_openai.py
@@ -0,0 +1,21 @@
+import asyncio
+import os
+import sys
+from typing import Optional
+from unittest.mock import AsyncMock, patch
+
+import pytest
+
+sys.path.insert(0, os.path.abspath("../../.."))
+import json
+
+import litellm
+
+
+def test_generic_event():
+    from litellm.types.llms.openai import GenericEvent
+
+    event = {"type": "test", "test": "test"}
+    event = GenericEvent(**event)
+    assert event.type == "test"
+    assert event.test == "test"
diff --git a/tests/llm_translation/test_openai.py b/tests/llm_translation/test_openai.py
index 295bdb46f1..a470b53589 100644
--- a/tests/llm_translation/test_openai.py
+++ b/tests/llm_translation/test_openai.py
@@ -470,3 +470,4 @@ class TestOpenAIGPT4OAudioTranscription(BaseLLMAudioTranscriptionTest):
 
     def get_custom_llm_provider(self) -> litellm.LlmProviders:
         return litellm.LlmProviders.OPENAI
+
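
Note (not part of the patch): a minimal sketch of how a stream handler could use the
GenericEvent fallback introduced above for event types the typed models do not cover.
Only GenericEvent and ResponsesAPIStreamEvents come from the diff; the
parse_stream_event helper and KNOWN_EVENT_TYPES set are illustrative assumptions, not
litellm APIs.

from litellm.types.llms.openai import GenericEvent, ResponsesAPIStreamEvents

# Hypothetical set of the event type strings litellm knows about.
KNOWN_EVENT_TYPES = {e.value for e in ResponsesAPIStreamEvents}

def parse_stream_event(raw: dict):
    # Undocumented event types (e.g. "response.reasoning_summary_part.added"
    # before it was added to the enum) are preserved rather than raising,
    # because GenericEvent permits extra fields via ConfigDict(extra="allow").
    if raw.get("type") not in KNOWN_EVENT_TYPES:
        return GenericEvent(**raw)
    ...  # otherwise dispatch to the documented, typed event model

event = parse_stream_event({"type": "response.some_new_event", "delta": "hi"})
print(event.type, event.delta)  # extra fields stay accessible on the model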