forked from phoenix/litellm-mirror
(test) stream - cache
This commit is contained in:
parent e49af2272d
commit e5bc7b338a
1 changed file with 4 additions and 3 deletions
@@ -110,7 +110,7 @@ def test_async_chat_openai_stream():
    except Exception as e:
        print(e)
        pytest.fail(f"An error occurred - {str(e)}")
-# test_async_chat_openai_stream()
+test_async_chat_openai_stream()

def test_completion_azure_stream_moderation_failure():
    try:
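The change above uncomments the module-level call so test_async_chat_openai_stream() actually executes when the file is run. The body of that test is not part of this diff; as a rough, hedged sketch only, an async streamed chat completion in litellm has the following shape (the model name and messages are illustrative placeholders, not values from the test):

import asyncio
import litellm


async def stream_chat():
    # Request a streamed chat completion; litellm mirrors the OpenAI chunk format.
    response = await litellm.acompletion(
        model="gpt-3.5-turbo",
        messages=[{"role": "user", "content": "hi"}],
        stream=True,
    )
    content = ""
    async for chunk in response:
        # Each chunk carries an incremental delta; the final chunk's content can be None.
        content += chunk.choices[0].delta.content or ""
    return content


print(asyncio.run(stream_chat()))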
@@ -317,11 +317,12 @@ def test_redis_cache_completion_stream():
            response_2_content += chunk.choices[0].delta.content or ""
        print("\nresponse 1", response_1_content)
        print("\nresponse 2", response_2_content)
-        assert response1.id == response2.id, f"Response 1 != Response 2. Same params, Response 1{response1.id} != Response 2{response2.id}"
+        assert response_1_content == response_2_content, f"Response 1 != Response 2. Same params, Response 1{response_1_content} != Response 2{response_2_content}"
        litellm.success_callback = []
        litellm._async_success_callback = []
        litellm.cache = None
    except Exception as e:
        print(e)
        litellm.success_callback = []
        raise e
+test_redis_cache_completion_stream()
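The updated assertion compares the accumulated text of the two streamed responses instead of their ids: when the second call is served from the cache, the replayed chunks should rebuild identical content even though the chunk ids may differ. As a hedged sketch only (not the exact body of test_redis_cache_completion_stream), a Redis-backed cache-hit check along these lines could look like the following, assuming REDIS_HOST, REDIS_PORT and REDIS_PASSWORD are available in the environment:

import os
import litellm
from litellm import Cache, completion

# Point litellm's global cache at Redis; connection details are assumed env vars.
litellm.cache = Cache(
    type="redis",
    host=os.environ["REDIS_HOST"],
    port=os.environ["REDIS_PORT"],
    password=os.environ["REDIS_PASSWORD"],
)

messages = [{"role": "user", "content": "tell me a short story"}]


def streamed_text():
    # Stream a completion and rebuild the full text from the deltas.
    chunks = completion(model="gpt-3.5-turbo", messages=messages, stream=True, caching=True)
    text = ""
    for chunk in chunks:
        text += chunk.choices[0].delta.content or ""
    return text


response_1_content = streamed_text()
response_2_content = streamed_text()  # identical params, expected to hit the cache

# Compare reconstructed content rather than response ids, matching the updated assert.
assert response_1_content == response_2_content

litellm.cache = None  # reset global state so later tests are unaffected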