Mirror of https://github.com/meta-llama/llama-stack.git
Minor
parent 1bcc26ccd1
commit 7a1b60fccf

2 changed files with 4 additions and 16 deletions
@@ -4,23 +4,10 @@
 # This source code is licensed under the terms described in the LICENSE file in
 # the root directory of this source tree.
 
-import sys
-import traceback
-import warnings
-
 import pytest
 from llama_stack_client.lib.inference.event_logger import EventLogger
 
 
-def warn_with_traceback(message, category, filename, lineno, file=None, line=None):
-    log = file if hasattr(file, "write") else sys.stderr
-    traceback.print_stack(file=log)
-    log.write(warnings.formatwarning(message, category, filename, lineno, line))
-
-
-warnings.showwarning = warn_with_traceback
-
-
 def test_text_chat_completion(llama_stack_client):
     # non-streaming
     available_models = [
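The deleted helper is the standard pattern for overriding `warnings.showwarning` so that each warning is printed together with the call stack that raised it. A minimal, self-contained sketch of the same technique, assuming only the Python standard library (the demo warning at the end is illustrative, not part of the repo):

# Sketch of the removed debugging hook, standalone and runnable.
import sys
import traceback
import warnings


def warn_with_traceback(message, category, filename, lineno, file=None, line=None):
    # Print the call stack that raised the warning, then the formatted warning
    # itself, so the origin of stray warnings is visible in the output.
    log = file if hasattr(file, "write") else sys.stderr
    traceback.print_stack(file=log)
    log.write(warnings.formatwarning(message, category, filename, lineno, line))


# Route every subsequent warning through the hook instead of the default printer.
warnings.showwarning = warn_with_traceback

warnings.warn("something looks off", UserWarning)  # now preceded by a stack trace

If warning origins are needed again during local debugging, a similar effect is available without re-adding module-level state to the test file, for example by running pytest with `-W error` so matching warnings are raised as exceptions and fail with a full traceback.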