Mirror of https://github.com/meta-llama/llama-stack.git (synced 2025-08-07 02:58:21 +00:00)
Update openai_compat.py
parent 34366f0b01
commit 227760d7ec

1 changed file with 1 addition and 0 deletions
@@ -175,6 +175,7 @@ def process_chat_completion_response(
    choice = response.choices[0]

    # TODO: This does not work well with tool calls for vLLM remote provider
    # Ref: https://github.com/meta-llama/llama-stack/issues/1058
    raw_message = formatter.decode_assistant_message_from_content(
        text_from_choice(choice), get_stop_reason(choice.finish_reason)
    )
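For context, the hunk above takes the first choice of an OpenAI-compatible chat completion, converts its content and finish reason into llama-style inputs, and lets the formatter decode an assistant message from the raw text. Below is a minimal, illustrative sketch of what helpers like text_from_choice and get_stop_reason plausibly do; the bodies and the string stop reasons are assumptions made for illustration, not the actual llama-stack implementation in openai_compat.py.

# Illustrative sketch only; these helper bodies are assumptions,
# not the real llama-stack code.

def text_from_choice(choice) -> str:
    # Non-streaming responses carry text on choice.message.content;
    # streaming chunks carry it on choice.delta.content.
    message = getattr(choice, "message", None)
    if message is not None and getattr(message, "content", None):
        return message.content
    delta = getattr(choice, "delta", None)
    if delta is not None and getattr(delta, "content", None):
        return delta.content
    return ""

def get_stop_reason(finish_reason: str) -> str:
    # Map OpenAI finish_reason values onto llama-style stop reasons.
    # Plain strings keep the sketch self-contained; the real helper
    # presumably returns a StopReason enum value.
    if finish_reason == "stop":
        return "end_of_turn"
    if finish_reason == "length":
        return "out_of_tokens"
    return "end_of_message"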