Mirror of https://github.com/BerriAI/litellm.git (synced 2025-04-25 18:54:30 +00:00)
fix: add missing parameters order, limit, before, and after in get_assistants method for openai (#7537)
- Ensured that `before` and `after` parameters are only passed when provided, to avoid AttributeError.
- Implemented safe access using default values for `before` and `after` to prevent missing-attribute issues.
- Added consistent handling of `order` and `limit` to improve flexibility and robustness in API calls.
This commit is contained in:
parent 33f301ec86
commit 4b0505dffd
3 changed files with 74 additions and 5 deletions
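For readers skimming the diff below, here is a minimal standalone sketch of the pattern this commit applies: always send order and limit, and only include the before/after cursors when they are provided. It targets the official openai Python SDK (v1.x) directly rather than litellm's OpenAIAssistantsAPI wrapper; the helper name and example values are assumptions for illustration, not code from this repository.

from typing import Optional

from openai import OpenAI


def list_assistants(
    client: OpenAI,
    order: Optional[str] = "desc",
    limit: Optional[int] = 20,
    before: Optional[str] = None,
    after: Optional[str] = None,
):
    # Always send order/limit; only add the cursor params when provided,
    # mirroring the conditional handling introduced in this commit.
    request_params = {"order": order, "limit": limit}
    if before:
        request_params["before"] = before
    if after:
        request_params["after"] = after
    return client.beta.assistants.list(**request_params)


# Example (assumes OPENAI_API_KEY is set in the environment):
# page = list_assistants(OpenAI(), limit=5)
# print([assistant.id for assistant in page.data])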
@@ -1928,6 +1928,10 @@ class OpenAIAssistantsAPI(BaseLLM):
         max_retries: Optional[int],
         organization: Optional[str],
         client: Optional[AsyncOpenAI],
+        order: Optional[str] = 'desc',
+        limit: Optional[int] = 20,
+        before: Optional[str] = None,
+        after: Optional[str] = None,
     ) -> AsyncCursorPage[Assistant]:
         openai_client = self.async_get_openai_client(
             api_key=api_key,
@@ -1937,8 +1941,16 @@ class OpenAIAssistantsAPI(BaseLLM):
             organization=organization,
             client=client,
         )
 
-        response = await openai_client.beta.assistants.list()
+        request_params = {
+            "order": order,
+            "limit": limit,
+        }
+        if before:
+            request_params["before"] = before
+        if after:
+            request_params["after"] = after
+
+        response = await openai_client.beta.assistants.list(**request_params)
 
         return response
 
@@ -1953,7 +1965,11 @@ class OpenAIAssistantsAPI(BaseLLM):
         max_retries: Optional[int],
         organization: Optional[str],
         client: Optional[AsyncOpenAI],
         aget_assistants: Literal[True],
+        order: Optional[str] = 'desc',
+        limit: Optional[int] = 20,
+        before: Optional[str] = None,
+        after: Optional[str] = None,
     ) -> Coroutine[None, None, AsyncCursorPage[Assistant]]:
         ...
 
@@ -1966,7 +1982,11 @@ class OpenAIAssistantsAPI(BaseLLM):
         max_retries: Optional[int],
         organization: Optional[str],
         client: Optional[OpenAI],
         aget_assistants: Optional[Literal[False]],
+        order: Optional[str] = 'desc',
+        limit: Optional[int] = 20,
+        before: Optional[str] = None,
+        after: Optional[str] = None,
     ) -> SyncCursorPage[Assistant]:
         ...
 
@@ -1981,6 +2001,10 @@ class OpenAIAssistantsAPI(BaseLLM):
         organization: Optional[str],
         client=None,
         aget_assistants=None,
+        order: Optional[str] = 'desc',
+        limit: Optional[int] = 20,
+        before: Optional[str] = None,
+        after: Optional[str] = None,
     ):
         if aget_assistants is not None and aget_assistants is True:
             return self.async_get_assistants(
@@ -1990,6 +2014,10 @@ class OpenAIAssistantsAPI(BaseLLM):
                 max_retries=max_retries,
                 organization=organization,
                 client=client,
+                order=order,
+                limit=limit,
+                before=before,
+                after=after,
             )
         openai_client = self.get_openai_client(
             api_key=api_key,
@@ -2000,7 +2028,18 @@ class OpenAIAssistantsAPI(BaseLLM):
             client=client,
         )
 
-        response = openai_client.beta.assistants.list()
+        request_params = {
+            "order": order,
+            "limit": limit,
+        }
+
+        if before:
+            request_params["before"] = before
+        if after:
+            request_params["after"] = after
+
+        response = openai_client.beta.assistants.list(**request_params)
 
         return response
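As a usage note on why the new cursor parameters matter, the hypothetical sketch below pages through assistants with the openai SDK by passing the last returned id as after; it is illustration only, not code from this repository.

from openai import OpenAI

client = OpenAI()  # assumes OPENAI_API_KEY is set in the environment

# First page, newest assistants first.
first_page = client.beta.assistants.list(order="desc", limit=20)
assistant_ids = [assistant.id for assistant in first_page.data]

if first_page.data:
    # Cursor-based pagination: `after` points at the last assistant of the
    # previous page, so this call returns the page that follows it.
    next_page = client.beta.assistants.list(
        order="desc",
        limit=20,
        after=first_page.data[-1].id,
    )
    assistant_ids.extend(assistant.id for assistant in next_page.data)

print(assistant_ids)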