Mirror of https://github.com/meta-llama/llama-stack.git, synced 2025-10-04 12:07:34 +00:00
re-work tool definitions, fix FastAPI issues, fix tool regressions
This commit is contained in:
parent 8d14d4228b
commit 8efe614719
11 changed files with 144 additions and 104 deletions
@@ -22,6 +22,7 @@ from .ipython_tool.code_execution import (
)

from llama_toolchain.inference.api import *  # noqa: F403
from llama_toolchain.agentic_system.api import *  # noqa: F403

from .base import BaseTool
@@ -55,9 +56,6 @@ class SingleMessageBuiltinTool(BaseTool):
            tool_name=tool_call.tool_name,
            content=response,
        )
        if attachment := interpret_content_as_attachment(response):
            message.content = attachment

        return [message]

    @abstractmethod
@@ -316,7 +314,4 @@ class CodeInterpreterTool(BaseTool):
            tool_name=tool_call.tool_name,
            content="\n".join(pieces),
        )
        if attachment := interpret_content_as_attachment(res["stdout"]):
            message.content = attachment

        return [message]
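
Both hunks end with the same attachment-handling pattern: the tool wraps its raw output in a ToolResponseMessage, and if interpret_content_as_attachment recognizes that output as an attachment, the message content is swapped for the attachment before the message list is returned. The sketch below illustrates that flow in isolation; the Attachment, ToolResponseMessage, and interpret_content_as_attachment definitions here are simplified stand-ins for illustration, not the actual llama_toolchain types.

# Minimal sketch of the attachment-handling pattern visible in both hunks.
# The types and the interpret function are simplified stand-ins, not the
# real llama_toolchain definitions.
from dataclasses import dataclass
from typing import List, Optional, Union


@dataclass
class Attachment:
    url: str
    mime_type: str


@dataclass
class ToolResponseMessage:
    call_id: str
    tool_name: str
    content: Union[str, Attachment]


def interpret_content_as_attachment(content: str) -> Optional[Attachment]:
    # Stand-in heuristic: treat output that looks like a generated file path
    # as an attachment; the real helper does proper detection.
    text = content.strip()
    if text.startswith("/tmp/"):
        return Attachment(url=f"file://{text}", mime_type="application/octet-stream")
    return None


def build_tool_response(call_id: str, tool_name: str, response: str) -> List[ToolResponseMessage]:
    message = ToolResponseMessage(
        call_id=call_id,
        tool_name=tool_name,
        content=response,
    )
    # If the raw output can be interpreted as an attachment (e.g. a file
    # produced by the code interpreter), return that instead of plain text.
    if attachment := interpret_content_as_attachment(response):
        message.content = attachment
    return [message]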