Mirror of https://github.com/meta-llama/llama-stack.git, synced 2025-10-03 19:57:35 +00:00
chore: fix agents tests for non-ollama providers, provide max_tokens
This commit is contained in:

parent 24ee577cb0
commit b4a1a15aa9

1 changed file with 3 additions and 0 deletions
@@ -68,6 +68,7 @@ def agent_config(llama_stack_client, text_model_id):
                 "temperature": 0.0001,
                 "top_p": 0.9,
             },
+            "max_tokens": 512,
         },
         tools=[],
         input_shields=available_shields,
@@ -88,6 +89,7 @@ def agent_config_without_safety(text_model_id):
                 "temperature": 0.0001,
                 "top_p": 0.9,
             },
+            "max_tokens": 512,
         },
         tools=[],
         enable_session_persistence=False,
@@ -198,6 +200,7 @@ def test_tool_config(agent_config):
                 "temperature": 1.0,
                 "top_p": 0.9,
             },
+            "max_tokens": 512,
         },
         toolgroups=[],
         enable_session_persistence=False,
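For context, a minimal sketch of what the agent_config fixture plausibly looks like after this commit. Only the keys visible in the diff ("temperature", "top_p", "max_tokens", tools, input_shields, enable_session_persistence) are confirmed; the surrounding structure (model, instructions, output_shields, the "top_p" strategy type, and the shield lookup) is an assumption about the test file's shape, not part of the change itself.

import pytest

# Sketch of the fixture after this commit; assumed structure, only the
# diffed keys are confirmed by the commit.
@pytest.fixture
def agent_config(llama_stack_client, text_model_id):
    # Assumed: shields are discovered from the running stack.
    available_shields = [s.identifier for s in llama_stack_client.shields.list()]
    return dict(
        model=text_model_id,
        instructions="You are a helpful assistant",  # assumed placeholder
        sampling_params={
            "strategy": {
                "type": "top_p",  # assumed strategy type
                "temperature": 0.0001,
                "top_p": 0.9,
            },
            # Newly added in this commit: an explicit generation cap, presumably
            # because some non-ollama providers misbehave without a max_tokens value.
            "max_tokens": 512,
        },
        tools=[],
        input_shields=available_shields,
        output_shields=available_shields,  # assumed
        enable_session_persistence=False,
    )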