Forked from phoenix-oss/llama-stack-mirror

Commit a54d757ade ("merge")
197 changed files with 9392 additions and 3089 deletions
@@ -8,9 +8,7 @@ from typing import Any, Dict
 from uuid import uuid4

 import pytest
-from llama_stack_client.lib.agents.agent import Agent
-from llama_stack_client.lib.agents.event_logger import EventLogger
-from llama_stack_client.types.agents.turn_create_params import Document
+from llama_stack_client import Agent, AgentEventLogger, Document
 from llama_stack_client.types.shared_params.agent_config import AgentConfig, ToolConfig

 from llama_stack.apis.agents.agents import (
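The remaining hunks in this file apply the same rename: EventLogger becomes AgentEventLogger, and the client-side Agent, event logger, and Document are imported from the llama_stack_client package root. A minimal usage sketch under the new import style; the base URL, model id, instructions, and the keyword form of the Agent constructor are assumptions for illustration, not values taken from this diff:

from llama_stack_client import Agent, AgentEventLogger, LlamaStackClient

# Placeholder endpoint and model; substitute whatever your stack actually serves.
client = LlamaStackClient(base_url="http://localhost:8321")
agent = Agent(
    client,
    model="meta-llama/Llama-3.1-8B-Instruct",  # placeholder model id
    instructions="You are a helpful assistant.",
)
session_id = agent.create_session("demo-session")

response = agent.create_turn(
    messages=[{"role": "user", "content": "Say hello."}],
    session_id=session_id,
)

# Same log-collection pattern the tests switch to in the hunks below.
logs = [str(log) for log in AgentEventLogger().log(response) if log is not None]
print("".join(logs))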
@@ -92,7 +90,7 @@ def test_agent_simple(llama_stack_client_with_mocked_inference, agent_config):
         session_id=session_id,
     )

-    logs = [str(log) for log in EventLogger().log(simple_hello) if log is not None]
+    logs = [str(log) for log in AgentEventLogger().log(simple_hello) if log is not None]
     logs_str = "".join(logs)

     assert "hello" in logs_str.lower()
@@ -111,7 +109,7 @@ def test_agent_simple(llama_stack_client_with_mocked_inference, agent_config):
         session_id=session_id,
     )

-    logs = [str(log) for log in EventLogger().log(bomb_response) if log is not None]
+    logs = [str(log) for log in AgentEventLogger().log(bomb_response) if log is not None]
     logs_str = "".join(logs)
     assert "I can't" in logs_str

@@ -192,7 +190,7 @@ def test_builtin_tool_web_search(llama_stack_client_with_mocked_inference, agent_config):
         session_id=session_id,
     )

-    logs = [str(log) for log in EventLogger().log(response) if log is not None]
+    logs = [str(log) for log in AgentEventLogger().log(response) if log is not None]
     logs_str = "".join(logs)

     assert "tool_execution>" in logs_str
@@ -221,7 +219,7 @@ def test_builtin_tool_code_execution(llama_stack_client_with_mocked_inference, agent_config):
         ],
         session_id=session_id,
     )
-    logs = [str(log) for log in EventLogger().log(response) if log is not None]
+    logs = [str(log) for log in AgentEventLogger().log(response) if log is not None]
     logs_str = "".join(logs)

     assert "541" in logs_str
@@ -262,7 +260,7 @@ def test_code_interpreter_for_attachments(llama_stack_client_with_mocked_inference, agent_config):
             session_id=session_id,
             documents=input.get("documents", None),
         )
-        logs = [str(log) for log in EventLogger().log(response) if log is not None]
+        logs = [str(log) for log in AgentEventLogger().log(response) if log is not None]
         logs_str = "".join(logs)
        assert "Tool:code_interpreter" in logs_str

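For reference, the documents= argument above takes the Document type that this change re-exports from the package root. A hedged sketch of attaching a document to a turn, reusing the agent and session_id names from the earlier sketch; the URL and mime type are illustrative only:

from llama_stack_client import Document

attachment = Document(
    content="https://www.example.com/numbers.csv",  # illustrative URL
    mime_type="text/csv",
)
response = agent.create_turn(
    messages=[{"role": "user", "content": "Load this file and describe its columns."}],
    session_id=session_id,
    documents=[attachment],
)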
@@ -287,7 +285,7 @@ def test_custom_tool(llama_stack_client_with_mocked_inference, agent_config):
         session_id=session_id,
     )

-    logs = [str(log) for log in EventLogger().log(response) if log is not None]
+    logs = [str(log) for log in AgentEventLogger().log(response) if log is not None]
     logs_str = "".join(logs)
     assert "-100" in logs_str
     assert "get_boiling_point" in logs_str
@@ -96,7 +96,7 @@ def test_evaluate_benchmark(llama_stack_client, text_model_id, scoring_fn_id):
     )
     assert response.job_id == "0"
     job_status = llama_stack_client.eval.jobs.status(job_id=response.job_id, benchmark_id=benchmark_id)
-    assert job_status and job_status == "completed"
+    assert job_status and job_status.status == "completed"

     eval_response = llama_stack_client.eval.jobs.retrieve(job_id=response.job_id, benchmark_id=benchmark_id)
     assert eval_response is not None
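The status call now returns a job object rather than a bare string, hence the comparison against job_status.status. A hedged polling sketch built on the same calls and the response/benchmark_id names from the test above; any status value other than "completed" is an assumption:

import time

while True:
    job = llama_stack_client.eval.jobs.status(job_id=response.job_id, benchmark_id=benchmark_id)
    if job and job.status == "completed":
        results = llama_stack_client.eval.jobs.retrieve(job_id=response.job_id, benchmark_id=benchmark_id)
        break
    if job and job.status == "failed":  # assumed failure status value
        raise RuntimeError("eval job failed")
    time.sleep(1)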
@@ -12,6 +12,12 @@ from llama_stack import LlamaStackAsLibraryClient

 class TestProviders:
     @pytest.mark.asyncio
-    def test_list(self, llama_stack_client: LlamaStackAsLibraryClient | LlamaStackClient):
+    def test_providers(self, llama_stack_client: LlamaStackAsLibraryClient | LlamaStackClient):
         provider_list = llama_stack_client.providers.list()
         assert provider_list is not None
+        assert len(provider_list) > 0
+
+        for provider in provider_list:
+            pid = provider.provider_id
+            provider = llama_stack_client.providers.retrieve(pid)
+            assert provider is not None
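The renamed test now exercises both the list and retrieve endpoints. A standalone sketch of the same flow outside pytest; the base URL is a placeholder:

from llama_stack_client import LlamaStackClient

client = LlamaStackClient(base_url="http://localhost:8321")  # placeholder URL

# List every registered provider, then fetch each one by its id.
for provider in client.providers.list():
    detail = client.providers.retrieve(provider.provider_id)
    print(provider.provider_id, detail)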
tests/integration/tools/test_tools.py (new file, 12 lines)
@@ -0,0 +1,12 @@
+# Copyright (c) Meta Platforms, Inc. and affiliates.
+# All rights reserved.
+#
+# This source code is licensed under the terms described in the LICENSE file in
+# the root directory of this source tree.
+
+
+def test_toolsgroups_unregister(llama_stack_client):
+    client = llama_stack_client
+    client.toolgroups.unregister(
+        toolgroup_id="builtin::websearch",
+    )
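A hedged follow-up to the new test: confirm the unregister took effect by listing the remaining toolgroups. The identifier attribute on the listed entries is an assumption about the response shape:

client = llama_stack_client
client.toolgroups.unregister(toolgroup_id="builtin::websearch")

remaining = [tg.identifier for tg in client.toolgroups.list()]  # identifier field assumed
assert "builtin::websearch" not in remaining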