forked from phoenix-oss/llama-stack-mirror
[bugfix] fix client-sdk tests for v1 (#777)
# What does this PR do? - as title; the APIs have been updated, so the client-sdk tests are updated to match. ## Test Plan ``` LLAMA_STACK_BASE_URL="http://localhost:5000" pytest -v tests/client-sdk/ ``` ## Sources Please link relevant resources if necessary. ## Before submitting - [ ] This PR fixes a typo or improves the docs (you can dismiss the other checks if that's the case). - [ ] Ran pre-commit to handle lint / formatting issues. - [ ] Read the [contributor guideline](https://github.com/meta-llama/llama-stack/blob/main/CONTRIBUTING.md), Pull Request section? - [ ] Updated relevant documentation. - [ ] Wrote necessary unit or integration tests.
This commit is contained in:
parent
8fd9bcb8cd
commit
965644ce68
3 changed files with 19 additions and 18 deletions
|
@ -39,7 +39,6 @@ def text_model_id(llama_stack_client):
|
|||
for model in llama_stack_client.models.list().data
|
||||
if model.identifier.startswith("meta-llama") and "405" not in model.identifier
|
||||
]
|
||||
print(available_models)
|
||||
assert len(available_models) > 0
|
||||
return available_models[0]
|
||||
|
||||
|
@ -268,10 +267,6 @@ def test_text_chat_completion_with_tool_calling_and_streaming(
|
|||
stream=True,
|
||||
)
|
||||
tool_invocation_content = extract_tool_invocation_content(response)
|
||||
print(
|
||||
"!!!!tool_invocation_content",
|
||||
tool_invocation_content,
|
||||
)
|
||||
assert tool_invocation_content == "[get_weather, {'location': 'San Francisco, CA'}]"
|
||||
|
||||
|
||||
|
|
Loading…
Add table
Add a link
Reference in a new issue