Mirror of https://github.com/meta-llama/llama-stack.git (synced 2025-08-06 10:42:39 +00:00)
test: Bump slow_callback_duration to 200ms to avoid flaky test_chat_completion_doesnt_block_event_loop
Signed-off-by: Yuan Tang <terrytangyuan@gmail.com>
parent 1261bc93bf
commit f2f1ffde92
1 changed file with 2 additions and 2 deletions
@@ -187,8 +187,8 @@ def test_chat_completion_doesnt_block_event_loop(caplog):
     loop.set_debug(True)
     caplog.set_level(logging.WARNING)
 
-    # Log when event loop is blocked for more than 100ms
-    loop.slow_callback_duration = 0.1
+    # Log when event loop is blocked for more than 200ms
+    loop.slow_callback_duration = 0.2
 
     # Sleep for 500ms in our delayed http response
     sleep_time = 0.5
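
For context, a minimal sketch of the pattern this test relies on, assuming a pytest-style test with the caplog fixture (the handler coroutine and names below are illustrative, not the actual llama-stack test): asyncio's debug mode emits a WARNING whenever a callback blocks the event loop for longer than loop.slow_callback_duration, so the test can assert that no such warning was captured while the delayed response was awaited. Bumping the threshold from 100ms to 200ms simply widens the margin so short scheduling hiccups on slow CI machines don't trip the assertion.

import asyncio
import logging


async def well_behaved_handler() -> str:
    # Awaits an I/O-style sleep, which yields to the loop instead of blocking it.
    await asyncio.sleep(0.5)
    return "ok"


def test_handler_doesnt_block_event_loop(caplog):
    loop = asyncio.new_event_loop()
    loop.set_debug(True)
    caplog.set_level(logging.WARNING)

    # With debug mode on, any callback that runs longer than this threshold is
    # logged at WARNING ("Executing <Handle ...> took X.XXX seconds").
    loop.slow_callback_duration = 0.2

    try:
        result = loop.run_until_complete(well_behaved_handler())
    finally:
        loop.close()

    assert result == "ok"
    # If the handler had blocked the loop (e.g. time.sleep instead of asyncio.sleep),
    # asyncio would have emitted a "took ... seconds" warning captured by caplog.
    assert not [r for r in caplog.records if "took" in r.getMessage()]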