Mirror of https://github.com/meta-llama/llama-stack.git (synced 2025-08-10 04:08:31 +00:00)
test: turn off recordable mock for now
Summary: Test Plan:
commit 3716de571c
parent 3e98402aa0
1 changed file with 2 additions and 0 deletions
@@ -52,6 +52,8 @@ def llama_stack_client_with_mocked_inference(llama_stack_client, request):
     If --record-responses is passed, it will call the real APIs and record the responses.
     """
+    # TODO: will rework this to be more stable
+    return llama_stack_client
     if not isinstance(llama_stack_client, LlamaStackAsLibraryClient):
         logging.warning(
             "llama_stack_client_with_mocked_inference is not supported for this client, returning original client without mocking"