forked from phoenix/litellm-mirror
test(test_assistants.py): handle openai api instability
This commit is contained in:
parent
28a07ee1a4
commit
8d4e7f9967
1 changed files with 75 additions and 70 deletions
|
@ -220,81 +220,86 @@ async def test_aarun_thread_litellm(sync_mode, provider, is_streaming):
|
||||||
- Create thread
|
- Create thread
|
||||||
- Create run w/ Assistants + Thread
|
- Create run w/ Assistants + Thread
|
||||||
"""
|
"""
|
||||||
if sync_mode:
|
import openai
|
||||||
assistants = litellm.get_assistants(custom_llm_provider=provider)
|
|
||||||
else:
|
|
||||||
assistants = await litellm.aget_assistants(custom_llm_provider=provider)
|
|
||||||
|
|
||||||
## get the first assistant ###
|
try:
|
||||||
assistant_id = assistants.data[0].id
|
if sync_mode:
|
||||||
|
assistants = litellm.get_assistants(custom_llm_provider=provider)
|
||||||
new_thread = test_create_thread_litellm(sync_mode=sync_mode, provider=provider)
|
|
||||||
|
|
||||||
if asyncio.iscoroutine(new_thread):
|
|
||||||
_new_thread = await new_thread
|
|
||||||
else:
|
|
||||||
_new_thread = new_thread
|
|
||||||
|
|
||||||
thread_id = _new_thread.id
|
|
||||||
|
|
||||||
# add message to thread
|
|
||||||
message: MessageData = {"role": "user", "content": "Hey, how's it going?"} # type: ignore
|
|
||||||
|
|
||||||
data = {"custom_llm_provider": provider, "thread_id": _new_thread.id, **message}
|
|
||||||
|
|
||||||
if sync_mode:
|
|
||||||
added_message = litellm.add_message(**data)
|
|
||||||
|
|
||||||
if is_streaming:
|
|
||||||
run = litellm.run_thread_stream(assistant_id=assistant_id, **data)
|
|
||||||
with run as run:
|
|
||||||
assert isinstance(run, AssistantEventHandler)
|
|
||||||
print(run)
|
|
||||||
run.until_done()
|
|
||||||
else:
|
else:
|
||||||
run = litellm.run_thread(
|
assistants = await litellm.aget_assistants(custom_llm_provider=provider)
|
||||||
assistant_id=assistant_id, stream=is_streaming, **data
|
|
||||||
)
|
|
||||||
if run.status == "completed":
|
|
||||||
messages = litellm.get_messages(
|
|
||||||
thread_id=_new_thread.id, custom_llm_provider=provider
|
|
||||||
)
|
|
||||||
assert isinstance(messages.data[0], Message)
|
|
||||||
else:
|
|
||||||
pytest.fail(
|
|
||||||
"An unexpected error occurred when running the thread, {}".format(
|
|
||||||
run
|
|
||||||
)
|
|
||||||
)
|
|
||||||
|
|
||||||
else:
|
## get the first assistant ###
|
||||||
added_message = await litellm.a_add_message(**data)
|
assistant_id = assistants.data[0].id
|
||||||
|
|
||||||
if is_streaming:
|
new_thread = test_create_thread_litellm(sync_mode=sync_mode, provider=provider)
|
||||||
run = litellm.arun_thread_stream(assistant_id=assistant_id, **data)
|
|
||||||
async with run as run:
|
if asyncio.iscoroutine(new_thread):
|
||||||
print(f"run: {run}")
|
_new_thread = await new_thread
|
||||||
assert isinstance(
|
|
||||||
run,
|
|
||||||
AsyncAssistantEventHandler,
|
|
||||||
)
|
|
||||||
print(run)
|
|
||||||
await run.until_done()
|
|
||||||
else:
|
else:
|
||||||
run = await litellm.arun_thread(
|
_new_thread = new_thread
|
||||||
custom_llm_provider=provider,
|
|
||||||
thread_id=thread_id,
|
|
||||||
assistant_id=assistant_id,
|
|
||||||
)
|
|
||||||
|
|
||||||
if run.status == "completed":
|
thread_id = _new_thread.id
|
||||||
messages = await litellm.aget_messages(
|
|
||||||
thread_id=_new_thread.id, custom_llm_provider=provider
|
# add message to thread
|
||||||
)
|
message: MessageData = {"role": "user", "content": "Hey, how's it going?"} # type: ignore
|
||||||
assert isinstance(messages.data[0], Message)
|
|
||||||
|
data = {"custom_llm_provider": provider, "thread_id": _new_thread.id, **message}
|
||||||
|
|
||||||
|
if sync_mode:
|
||||||
|
added_message = litellm.add_message(**data)
|
||||||
|
|
||||||
|
if is_streaming:
|
||||||
|
run = litellm.run_thread_stream(assistant_id=assistant_id, **data)
|
||||||
|
with run as run:
|
||||||
|
assert isinstance(run, AssistantEventHandler)
|
||||||
|
print(run)
|
||||||
|
run.until_done()
|
||||||
else:
|
else:
|
||||||
pytest.fail(
|
run = litellm.run_thread(
|
||||||
"An unexpected error occurred when running the thread, {}".format(
|
assistant_id=assistant_id, stream=is_streaming, **data
|
||||||
run
|
|
||||||
)
|
|
||||||
)
|
)
|
||||||
|
if run.status == "completed":
|
||||||
|
messages = litellm.get_messages(
|
||||||
|
thread_id=_new_thread.id, custom_llm_provider=provider
|
||||||
|
)
|
||||||
|
assert isinstance(messages.data[0], Message)
|
||||||
|
else:
|
||||||
|
pytest.fail(
|
||||||
|
"An unexpected error occurred when running the thread, {}".format(
|
||||||
|
run
|
||||||
|
)
|
||||||
|
)
|
||||||
|
|
||||||
|
else:
|
||||||
|
added_message = await litellm.a_add_message(**data)
|
||||||
|
|
||||||
|
if is_streaming:
|
||||||
|
run = litellm.arun_thread_stream(assistant_id=assistant_id, **data)
|
||||||
|
async with run as run:
|
||||||
|
print(f"run: {run}")
|
||||||
|
assert isinstance(
|
||||||
|
run,
|
||||||
|
AsyncAssistantEventHandler,
|
||||||
|
)
|
||||||
|
print(run)
|
||||||
|
await run.until_done()
|
||||||
|
else:
|
||||||
|
run = await litellm.arun_thread(
|
||||||
|
custom_llm_provider=provider,
|
||||||
|
thread_id=thread_id,
|
||||||
|
assistant_id=assistant_id,
|
||||||
|
)
|
||||||
|
|
||||||
|
if run.status == "completed":
|
||||||
|
messages = await litellm.aget_messages(
|
||||||
|
thread_id=_new_thread.id, custom_llm_provider=provider
|
||||||
|
)
|
||||||
|
assert isinstance(messages.data[0], Message)
|
||||||
|
else:
|
||||||
|
pytest.fail(
|
||||||
|
"An unexpected error occurred when running the thread, {}".format(
|
||||||
|
run
|
||||||
|
)
|
||||||
|
)
|
||||||
|
except openai.APIError as e:
|
||||||
|
pass
|
||||||
|
|
Loading…
Add table
Add a link
Reference in a new issue