Mirror of https://github.com/meta-llama/llama-stack.git (synced 2025-12-27 23:22:01 +00:00)
chore: more code-interpreter removal
Final piece of the code-interpreter provider removal.

Signed-off-by: Sébastien Han <seb@redhat.com>
parent e3ad17ec5e
commit c1f53ddc16

25 changed files with 7 additions and 346 deletions
Excerpt from one of the changed files:

@@ -30,7 +30,6 @@ from llama_stack.models.llama.llama4.tokenizer import Tokenizer
 from .llama3.interface import LLama31Interface
 from .llama3.template_data import (
-    system_message_builtin_code_only,
     system_message_builtin_tools_only,
     system_message_custom_tools_only,
 )
 
@@ -164,17 +163,6 @@ def llama3_1_builtin_tool_call_dialog(tool_prompt_format=ToolPromptFormat.json):
     return messages
 
 
-def llama3_1_builtin_code_interpreter_dialog(tool_prompt_format=ToolPromptFormat.json):
-    interface = LLama31Interface(tool_prompt_format)
-
-    messages = interface.system_messages(**system_message_builtin_code_only())
-    messages += interface.user_message(
-        content="Write code to check if number is prime. Use it to verify if number 7 is prime"
-    )
-
-    return messages
-
-
 def llama3_1_builtin_tool_call_with_image_dialog(
     tool_prompt_format=ToolPromptFormat.json,
 ):
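For context only, and not part of this commit: the dialog builders that remain in this file follow the construction pattern visible in the context lines above. An LLama31Interface is built for a tool prompt format, system messages are assembled from a template_data helper, and a user message is appended. The sketch below is a hypothetical illustration of that pattern using the surviving system_message_builtin_tools_only helper; the function name and the user prompt are invented, and ToolPromptFormat is assumed to be imported elsewhere in the module, as it is in the original file.

# Hypothetical sketch, not code from this commit. It mirrors the shape of the
# removed llama3_1_builtin_code_interpreter_dialog, but uses the builtin-tools
# system message that remains after this change. Assumes it lives in the same
# module as the diff above, so the relative imports and ToolPromptFormat
# (imported elsewhere in that module) resolve.
from .llama3.interface import LLama31Interface
from .llama3.template_data import system_message_builtin_tools_only


def example_builtin_tools_dialog(tool_prompt_format=ToolPromptFormat.json):
    # Interface wrapper that renders Llama 3.1 chat messages for the
    # given tool prompt format.
    interface = LLama31Interface(tool_prompt_format)

    # System messages come from a surviving template_data helper; the
    # code-interpreter-only variant is gone after this commit.
    messages = interface.system_messages(**system_message_builtin_tools_only())

    # Illustrative user turn; the prompt text is made up for this example.
    messages += interface.user_message(content="What is the current time in Tokyo?")

    return messages

In the actual file, llama3_1_builtin_tool_call_dialog (visible in the second hunk header) appears to play this role; the sketch is only meant to show the shape of the API that the removal leaves untouched.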