From 69da0ed3b543820b4f7bd0f4ce26d51f23469b51 Mon Sep 17 00:00:00 2001
From: Ishaan Jaff
Date: Sat, 22 Mar 2025 10:43:35 -0700
Subject: [PATCH] feat - add openai web search

---
 .../convert_dict_to_response.py |  1 +
 litellm/types/llms/openai.py    | 22 +++++++++++++++++++
 litellm/types/utils.py          |  7 ++++++
 3 files changed, 30 insertions(+)

diff --git a/litellm/litellm_core_utils/llm_response_utils/convert_dict_to_response.py b/litellm/litellm_core_utils/llm_response_utils/convert_dict_to_response.py
index ebb1032a19..d33af2a477 100644
--- a/litellm/litellm_core_utils/llm_response_utils/convert_dict_to_response.py
+++ b/litellm/litellm_core_utils/llm_response_utils/convert_dict_to_response.py
@@ -494,6 +494,7 @@ def convert_to_model_response_object(  # noqa: PLR0915
                     provider_specific_fields=provider_specific_fields,
                     reasoning_content=reasoning_content,
                     thinking_blocks=thinking_blocks,
+                    annotations=choice["message"].get("annotations", None),
                 )
                 finish_reason = choice.get("finish_reason", None)
                 if finish_reason is None:
diff --git a/litellm/types/llms/openai.py b/litellm/types/llms/openai.py
index 4b0be9d5fe..e58f573227 100644
--- a/litellm/types/llms/openai.py
+++ b/litellm/types/llms/openai.py
@@ -382,6 +382,28 @@ class ChatCompletionThinkingBlock(TypedDict, total=False):
     cache_control: Optional[Union[dict, ChatCompletionCachedContent]]
 
 
+class ChatCompletionAnnotationURLCitation(TypedDict, total=False):
+    end_index: int
+    """The index of the last character of the URL citation in the message."""
+
+    start_index: int
+    """The index of the first character of the URL citation in the message."""
+
+    title: str
+    """The title of the web resource."""
+
+    url: str
+    """The URL of the web resource."""
+
+
+class ChatCompletionAnnotation(TypedDict, total=False):
+    type: Literal["url_citation"]
+    """The type of the URL citation. Always `url_citation`."""
+
+    url_citation: ChatCompletionAnnotationURLCitation
+    """A URL citation when using web search."""
+
+
 class OpenAIChatCompletionTextObject(TypedDict):
     type: Literal["text"]
     text: str
diff --git a/litellm/types/utils.py b/litellm/types/utils.py
index a665428561..58c78dfa29 100644
--- a/litellm/types/utils.py
+++ b/litellm/types/utils.py
@@ -7,6 +7,7 @@ from typing import Any, Dict, List, Literal, Optional, Tuple, Union
 from aiohttp import FormData
 from openai._models import BaseModel as OpenAIObject
 from openai.types.audio.transcription_create_params import FileTypes  # type: ignore
+from openai.types.chat.chat_completion import ChatCompletion
 from openai.types.completion_usage import (
     CompletionTokensDetails,
     CompletionUsage,
@@ -27,6 +28,7 @@ from ..litellm_core_utils.core_helpers import map_finish_reason
 from .guardrails import GuardrailEventHooks
 from .llms.openai import (
     Batch,
+    ChatCompletionAnnotation,
     ChatCompletionThinkingBlock,
     ChatCompletionToolCallChunk,
     ChatCompletionUsageBlock,
@@ -527,6 +529,7 @@ class Message(OpenAIObject):
     provider_specific_fields: Optional[Dict[str, Any]] = Field(
         default=None, exclude=True
    )
+    annotations: Optional[List[ChatCompletionAnnotation]] = None
 
     def __init__(
         self,
@@ -538,6 +541,7 @@
         provider_specific_fields: Optional[Dict[str, Any]] = None,
         reasoning_content: Optional[str] = None,
         thinking_blocks: Optional[List[ChatCompletionThinkingBlock]] = None,
+        annotations: Optional[List[ChatCompletionAnnotation]] = None,
         **params,
     ):
         init_values: Dict[str, Any] = {
@@ -566,6 +570,9 @@
 
         if thinking_blocks is not None:
             init_values["thinking_blocks"] = thinking_blocks
+
+        if annotations is not None:
+            init_values["annotations"] = annotations
 
         if reasoning_content is not None:
             init_values["reasoning_content"] = reasoning_content