Mirror of https://github.com/meta-llama/llama-stack.git (synced 2025-06-28 02:53:30 +00:00)
make default tool prompt format none in agent config (#863)
# What does this PR do?

Previously the tests hard-coded the tool prompt format to json, which causes failures with the 3.2/3.3 family of models. This change makes the default `None` in the agent config and removes the explicit setting from the tests.

## Test Plan

LLAMA_STACK_BASE_URL=http://localhost:8321 pytest -v tests/client-sdk/agents/test_agents.py
This commit is contained in:
parent 94ffaf468c
commit a78f1fc70d
2 changed files with 1 addition and 4 deletions
@@ -155,9 +155,7 @@ class AgentConfigCommon(BaseModel):
     toolgroups: Optional[List[AgentToolGroup]] = Field(default_factory=list)
     client_tools: Optional[List[ToolDef]] = Field(default_factory=list)
     tool_choice: Optional[ToolChoice] = Field(default=ToolChoice.auto)
-    tool_prompt_format: Optional[ToolPromptFormat] = Field(
-        default=ToolPromptFormat.json
-    )
+    tool_prompt_format: Optional[ToolPromptFormat] = Field(default=None)

     max_infer_iters: int = 10

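With the default now `None`, the inference side is free to pick a tool prompt format that matches the model instead of always receiving JSON. The sketch below only illustrates that idea: the helper name, the enum members, and the 3.2/3.3-to-`python_list` mapping are assumptions for the sketch, not the actual llama-stack resolution code.

```python
from enum import Enum
from typing import Optional


class ToolPromptFormat(Enum):
    # The exact members here are an assumption for the sketch.
    json = "json"
    function_tag = "function_tag"
    python_list = "python_list"


def resolve_tool_prompt_format(
    configured: Optional[ToolPromptFormat], model_id: str
) -> ToolPromptFormat:
    """Hypothetical helper: honor an explicit choice, otherwise fall back to a
    per-model default instead of hard-coding JSON for every model."""
    if configured is not None:
        return configured
    # Assumption for illustration: 3.2/3.3-family models prefer a non-JSON
    # format, so a None default lets them get it automatically.
    if "3.2" in model_id or "3.3" in model_id:
        return ToolPromptFormat.python_list
    return ToolPromptFormat.json
```

The point is only that `None` means "decide per model", whereas the old `ToolPromptFormat.json` default forced JSON everywhere.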
@@ -98,7 +98,6 @@ def agent_config(llama_stack_client, text_model_id):
         },
         toolgroups=[],
         tool_choice="auto",
-        tool_prompt_format="json",
         input_shields=available_shields,
         output_shields=available_shields,
         enable_session_persistence=False,
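On the client side, the fixture now simply omits `tool_prompt_format`. Below is a minimal sketch of an equivalent config, assuming the `llama_stack_client` SDK's `AgentConfig` type from around the time of this commit; the import path, model id, instructions, sampling parameters, and shield list are assumptions or placeholders.

```python
from llama_stack_client.types.agent_create_params import AgentConfig

text_model_id = "meta-llama/Llama-3.3-70B-Instruct"  # placeholder model id
available_shields = []  # placeholder: no shields configured

agent_config = AgentConfig(
    model=text_model_id,
    instructions="You are a helpful assistant.",
    sampling_params={
        "strategy": {"type": "top_p", "temperature": 1.0, "top_p": 0.9},
    },
    toolgroups=[],
    tool_choice="auto",
    # tool_prompt_format is intentionally omitted: the default is now None,
    # so each model family gets a format that works for it.
    input_shields=available_shields,
    output_shields=available_shields,
    enable_session_persistence=False,
)
```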