add async assistants delete support

This commit is contained in:
Ishaan Jaff 2024-07-10 11:14:40 -07:00
parent 3480382495
commit 5587dbbd32
2 changed files with 89 additions and 13 deletions

View file

@@ -4,11 +4,12 @@ import asyncio
 import contextvars
 import os
 from functools import partial
-from typing import Any, Dict, Iterable, List, Literal, Optional, Union
+from typing import Any, Coroutine, Dict, Iterable, List, Literal, Optional, Union

 import httpx
 from openai import AsyncAzureOpenAI, AsyncOpenAI, AzureOpenAI, OpenAI
 from openai.types.beta.assistant import Assistant
+from openai.types.beta.assistant_deleted import AssistantDeleted

 import litellm
 from litellm import client
@@ -339,6 +340,44 @@ def create_assistants(
     return response


+async def adelete_assistant(
+    custom_llm_provider: Literal["openai", "azure"],
+    client: Optional[AsyncOpenAI] = None,
+    **kwargs,
+) -> AssistantDeleted:
+    loop = asyncio.get_event_loop()
+    ### PASS ARGS TO GET ASSISTANTS ###
+    kwargs["async_delete_assistants"] = True
+    try:
+        kwargs["client"] = client
+        # Use a partial function to pass your keyword arguments
+        func = partial(delete_assistant, custom_llm_provider, **kwargs)
+
+        # Add the context to the function
+        ctx = contextvars.copy_context()
+        func_with_context = partial(ctx.run, func)
+
+        _, custom_llm_provider, _, _ = get_llm_provider(  # type: ignore
+            model="", custom_llm_provider=custom_llm_provider
+        )  # type: ignore
+
+        # Await normally
+        init_response = await loop.run_in_executor(None, func_with_context)
+        if asyncio.iscoroutine(init_response):
+            response = await init_response
+        else:
+            response = init_response
+        return response  # type: ignore
+    except Exception as e:
+        raise exception_type(
+            model="",
+            custom_llm_provider=custom_llm_provider,
+            original_exception=e,
+            completion_kwargs={},
+            extra_kwargs=kwargs,
+        )
+
+
 def delete_assistant(
     custom_llm_provider: Literal["openai", "azure"],
     assistant_id: str,
@@ -347,11 +386,21 @@ def delete_assistant(
     api_base: Optional[str] = None,
     api_version: Optional[str] = None,
     **kwargs,
-) -> Dict[str, Any]:
+) -> AssistantDeleted:
     optional_params = GenericLiteLLMParams(
         api_key=api_key, api_base=api_base, api_version=api_version, **kwargs
     )
+
+    async_delete_assistants: Optional[bool] = kwargs.pop(
+        "async_delete_assistants", None
+    )
+    if async_delete_assistants is not None and not isinstance(
+        async_delete_assistants, bool
+    ):
+        raise ValueError(
+            "Invalid value passed in for async_delete_assistants. Only bool or None allowed"
+        )

     ### TIMEOUT LOGIC ###
     timeout = optional_params.timeout or kwargs.get("request_timeout", 600) or 600
     # set timeout for 10 minutes by default
@@ -368,7 +417,7 @@ def delete_assistant(
     elif timeout is None:
         timeout = 600.0

-    response: Optional[Dict[str, Any]] = None
+    response: Optional[AssistantDeleted] = None
     if custom_llm_provider == "openai":
         api_base = (
             optional_params.api_base
@@ -398,6 +447,7 @@ def delete_assistant(
             organization=organization,
             assistant_id=assistant_id,
             client=client,
+            async_delete_assistants=async_delete_assistants,
         )
     else:
         raise litellm.exceptions.BadRequestError(
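For reference, a minimal usage sketch of the new coroutine, assuming adelete_assistant is re-exported at the litellm package level like the other assistants helpers (aget_assistants, acreate_assistants). The assistant id is a placeholder, and provider credentials are expected in the usual environment variables (e.g. OPENAI_API_KEY).

import asyncio

import litellm


async def main():
    # "asst_abc123" is a placeholder id for illustration only.
    deleted = await litellm.adelete_assistant(
        custom_llm_provider="openai",
        assistant_id="asst_abc123",
    )
    # AssistantDeleted carries the id, object type, and a deleted flag.
    print(deleted.id, deleted.deleted)


asyncio.run(main())

delete_assistant remains the synchronous entry point; the coroutine simply routes through it with async_delete_assistants=True.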

View file

@@ -17,6 +17,7 @@ from typing import (
 import httpx
 import openai
 from openai import AsyncOpenAI, OpenAI
+from openai.types.beta.assistant_deleted import AssistantDeleted
 from pydantic import BaseModel
 from typing_extensions import overload, override
@@ -2440,6 +2441,30 @@ class OpenAIAssistantsAPI(BaseLLM):
         response = openai_client.beta.assistants.create(**create_assistant_data)
         return response

+    # Delete Assistant
+    async def async_delete_assistant(
+        self,
+        api_key: Optional[str],
+        api_base: Optional[str],
+        timeout: Union[float, httpx.Timeout],
+        max_retries: Optional[int],
+        organization: Optional[str],
+        client: Optional[AsyncOpenAI],
+        assistant_id: str,
+    ) -> AssistantDeleted:
+        openai_client = self.async_get_openai_client(
+            api_key=api_key,
+            api_base=api_base,
+            timeout=timeout,
+            max_retries=max_retries,
+            organization=organization,
+            client=client,
+        )
+
+        response = await openai_client.beta.assistants.delete(assistant_id=assistant_id)
+
+        return response
+
     def delete_assistant(
         self,
         api_key: Optional[str],
@@ -2449,17 +2474,18 @@ class OpenAIAssistantsAPI(BaseLLM):
         organization: Optional[str],
         assistant_id: str,
         client=None,
-        async_create_assistants=None,
+        async_delete_assistants=None,
     ):
-        # if async_create_assistants is not None and async_create_assistants == True:
-        #     return self.async_create_assistants(
-        #         api_key=api_key,
-        #         api_base=api_base,
-        #         timeout=timeout,
-        #         max_retries=max_retries,
-        #         organization=organization,
-        #         client=client,
-        #     )
+        if async_delete_assistants is not None and async_delete_assistants == True:
+            return self.async_delete_assistant(
+                api_key=api_key,
+                api_base=api_base,
+                timeout=timeout,
+                max_retries=max_retries,
+                organization=organization,
+                client=client,
+                assistant_id=assistant_id,
+            )
         openai_client = self.get_openai_client(
             api_key=api_key,
             api_base=api_base,
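The handler change above completes the flag-based dispatch: the synchronous delete_assistant accepts async_delete_assistants and, when it is truthy, returns the un-awaited coroutine from async_delete_assistant, which adelete_assistant then detects with asyncio.iscoroutine after run_in_executor and awaits. A stripped-down sketch of that pattern, using illustrative names (work, awork) that are not litellm APIs:

import asyncio
import contextvars
from functools import partial


async def _work_async() -> str:
    return "async-result"


def work(async_mode=None):
    # When the async flag is set, hand back the coroutine instead of running sync.
    if async_mode is True:
        return _work_async()
    return "sync-result"


async def awork(**kwargs):
    # Mirrors adelete_assistant: run the sync entry point in a thread,
    # then await the coroutine it returned.
    loop = asyncio.get_event_loop()
    kwargs["async_mode"] = True
    ctx = contextvars.copy_context()
    func_with_context = partial(ctx.run, partial(work, **kwargs))
    init_response = await loop.run_in_executor(None, func_with_context)
    if asyncio.iscoroutine(init_response):
        return await init_response
    return init_response


print(asyncio.run(awork()))  # prints "async-result"

Running the sync wrapper in an executor keeps a single public code path for both modes, while the async caller still ends up awaiting the provider SDK's native async client.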