Mirror of https://github.com/meta-llama/llama-stack.git, synced 2025-06-28 19:04:19 +00:00
many fixes
This commit is contained in:
  parent 021976713b
  commit 2d40ce2271

4 changed files with 11 additions and 12 deletions
@@ -291,14 +291,10 @@ def test_response_non_streaming_mcp_tool(request, openai_client, model, provider
     call = response.output[1]
     assert call.type == "mcp_call"
     assert call.name == "get_boiling_point"
-    assert json.loads(call.arguments) == {"liquid_name": "polyjuice", "celcius": True}
+    assert json.loads(call.arguments) == {"liquid_name": "myawesomeliquid", "celsius": True}
     assert call.error is None
     assert "-100" in call.output
-
-    from rich.pretty import pprint
-
-    pprint(response)
 
     # sometimes the model will call the tool again, so we need to get the last message
     message = response.output[-1]
     text_content = message.content[0].text
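
For readers without the rest of the file in view, here is a minimal, self-contained sketch of the kind of tool this test exercises. The function body, its return values, and the usage line are assumptions inferred from the assertions in the hunk above (the real get_boiling_point tool is defined elsewhere in the llama-stack test fixtures), not the actual implementation:

# Hypothetical stand-in for the MCP tool the test calls; the real tool
# lives elsewhere in the test suite. The -100 return value is inferred
# from the `assert "-100" in call.output` check above.
def get_boiling_point(liquid_name: str, celsius: bool = True) -> int:
    """Return the (fictional) boiling point of a liquid in degrees."""
    if liquid_name == "myawesomeliquid":
        return -100 if celsius else -148  # -100 C == -148 F
    return -1  # unknown liquid


if __name__ == "__main__":
    # Prints -100, matching the substring the test looks for in call.output.
    print(get_boiling_point("myawesomeliquid", celsius=True))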
|
Loading…
Add table
Add a link
Reference in a new issue