Mirror of https://github.com/meta-llama/llama-stack.git
update test to try multi-turn scenarios
commit cd618e9ad0
parent eafbde4e17
3 changed files with 198 additions and 64 deletions
@@ -203,13 +203,13 @@ class ChatFormat:
             tokens.extend(toks)
             images.extend(imgs)
 
-        if message.role == "assistant" and len(message.tool_calls) > 0:
-            tokens.append(self.tokenizer.special_tokens["<|python_start|>"])
+        # if message.role == "assistant" and len(message.tool_calls) > 0:
+        #     tokens.append(self.tokenizer.special_tokens["<|python_start|>"])
 
         _process_content(message.content)
 
-        if message.role == "assistant" and len(message.tool_calls) > 0:
-            tokens.append(self.tokenizer.special_tokens["<|python_end|>"])
+        # if message.role == "assistant" and len(message.tool_calls) > 0:
+        #     tokens.append(self.tokenizer.special_tokens["<|python_end|>"])
 
         if message.role == "user" and message.context is not None:
             # This is RAG context; why is it here in the chat format? I don't think
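For context, this hunk comments out the wrapping of assistant tool-call content in the <|python_start|> / <|python_end|> special tokens inside ChatFormat.encode_message. The sketch below is a self-contained approximation of that code path, not the actual llama-stack implementation: the token ids, the Message shape, and the wrap_tool_calls flag are assumptions added purely for illustration.

from dataclasses import dataclass, field

# Assumed, illustrative special-token ids; the real values live in the
# llama-stack tokenizer and are not shown in this diff.
SPECIAL_TOKENS = {"<|python_start|>": 200080, "<|python_end|>": 200081}


@dataclass
class Message:
    role: str
    content: str
    tool_calls: list = field(default_factory=list)


def encode_message(message: Message, wrap_tool_calls: bool) -> list:
    """Toy stand-in for ChatFormat.encode_message.

    wrap_tool_calls=True mirrors the removed lines (tool-call content is
    wrapped in <|python_start|>/<|python_end|>); False mirrors the
    commented-out path introduced by this commit.
    """
    tokens: list = []

    if wrap_tool_calls and message.role == "assistant" and len(message.tool_calls) > 0:
        tokens.append(SPECIAL_TOKENS["<|python_start|>"])

    # Stand-in for _process_content(message.content): the real code extends
    # `tokens`/`images` via the tokenizer; here we keep the raw text so the
    # resulting layout is easy to read.
    tokens.append(message.content)

    if wrap_tool_calls and message.role == "assistant" and len(message.tool_calls) > 0:
        tokens.append(SPECIAL_TOKENS["<|python_end|>"])

    return tokens


if __name__ == "__main__":
    msg = Message(role="assistant", content='[get_weather(city="SF")]', tool_calls=["call_1"])
    print(encode_message(msg, wrap_tool_calls=True))
    # -> [200080, '[get_weather(city="SF")]', 200081]
    print(encode_message(msg, wrap_tool_calls=False))
    # -> ['[get_weather(city="SF")]']

In short: with the removed lines active, an assistant message that carries tool calls encodes as <|python_start|> ... <|python_end|> around the tool-call text; with the commented-out version, that text is encoded like ordinary assistant content.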