Mirror of https://github.com/BerriAI/litellm.git (synced 2025-04-26 03:04:13 +00:00)
(ci/cd) test proxy custom logger
This commit is contained in:
parent 0dc7d538b5
commit c7eebf75a8
1 changed file with 2 additions and 0 deletions
@@ -141,6 +141,8 @@ def test_chat_completion_stream(client):
         print("\n\nHERE is the complete streaming response string", complete_response)
         print("\n\nHERE IS the streaming Response from callback\n\n")
         print(my_custom_logger.streaming_response_obj)
+        import time
+        time.sleep(0.5)
 
         streamed_response = my_custom_logger.streaming_response_obj
         assert complete_response == streamed_response["choices"][0]["message"]["content"]
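
The two added lines are a timing fix: for streaming requests the proxy fires the custom logger's success hook asynchronously, after the last chunk has been delivered to the client, so reading my_custom_logger.streaming_response_obj immediately after consuming the stream can race that write. Below is a minimal sketch of a logger that would populate the attribute this way; the class name MyCustomLogger and the reliance on litellm's CustomLogger.async_log_success_event hook are illustrative assumptions, not necessarily the repo's actual test helper.

# Minimal sketch (assumed shape, not the repo's test helper) of a callback
# whose streaming_response_obj is only set after the stream completes.
from litellm.integrations.custom_logger import CustomLogger


class MyCustomLogger(CustomLogger):
    def __init__(self):
        # Read by the test once the client has finished the stream.
        self.streaming_response_obj = None

    async def async_log_success_event(self, kwargs, response_obj, start_time, end_time):
        # Invoked by the proxy after a successful call; for streaming calls this
        # runs asynchronously after the final chunk, which is the race the test
        # papers over with time.sleep(0.5).
        if kwargs.get("stream") is True:
            self.streaming_response_obj = response_obj

A fixed half-second sleep keeps the CI test simple; polling the attribute with a short timeout would be the less timing-sensitive alternative.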