From 45ffe87d7c75c1b9fad6b3074882521cc71367a4 Mon Sep 17 00:00:00 2001
From: Ashwin Bharambe
Date: Fri, 21 Feb 2025 15:37:23 -0800
Subject: [PATCH] Kill noise from test output

---
 .../providers/inline/agents/meta_reference/agents.py     | 9 +--------
 llama_stack/providers/utils/inference/embedding_mixin.py | 4 +++-
 tests/client-sdk/agents/test_agents.py                   | 2 --
 3 files changed, 4 insertions(+), 11 deletions(-)

diff --git a/llama_stack/providers/inline/agents/meta_reference/agents.py b/llama_stack/providers/inline/agents/meta_reference/agents.py
index 8a4d91238..72c1a0f34 100644
--- a/llama_stack/providers/inline/agents/meta_reference/agents.py
+++ b/llama_stack/providers/inline/agents/meta_reference/agents.py
@@ -11,8 +11,6 @@ import tempfile
 import uuid
 from typing import AsyncGenerator, List, Optional, Union
 
-from termcolor import colored
-
 from llama_stack.apis.agents import (
     AgentConfig,
     AgentCreateResponse,
@@ -69,12 +67,7 @@ class MetaReferenceAgentsImpl(Agents):
 
         # check if "bwrap" is available
        if not shutil.which("bwrap"):
-            print(
-                colored(
-                    "Warning: `bwrap` is not available. Code interpreter tool will not work correctly.",
-                    "yellow",
-                )
-            )
+            logger.warning("Warning: `bwrap` is not available. Code interpreter tool will not work correctly.")
 
     async def create_agent(
         self,
diff --git a/llama_stack/providers/utils/inference/embedding_mixin.py b/llama_stack/providers/utils/inference/embedding_mixin.py
index ac421475f..f43475554 100644
--- a/llama_stack/providers/utils/inference/embedding_mixin.py
+++ b/llama_stack/providers/utils/inference/embedding_mixin.py
@@ -35,7 +35,9 @@ class SentenceTransformerEmbeddingMixin:
     ) -> EmbeddingsResponse:
         model = await self.model_store.get_model(model_id)
         embedding_model = self._load_sentence_transformer_model(model.provider_resource_id)
-        embeddings = embedding_model.encode([interleaved_content_as_str(content) for content in contents])
+        embeddings = embedding_model.encode(
+            [interleaved_content_as_str(content) for content in contents], show_progress_bar=False
+        )
         return EmbeddingsResponse(embeddings=embeddings)
 
     def _load_sentence_transformer_model(self, model: str) -> "SentenceTransformer":
diff --git a/tests/client-sdk/agents/test_agents.py b/tests/client-sdk/agents/test_agents.py
index 7ede5e517..c03a2a874 100644
--- a/tests/client-sdk/agents/test_agents.py
+++ b/tests/client-sdk/agents/test_agents.py
@@ -90,7 +90,6 @@ class TestClientTool(ClientTool):
 def agent_config(llama_stack_client, text_model_id):
     available_shields = [shield.identifier for shield in llama_stack_client.shields.list()]
     available_shields = available_shields[:1]
-    print(f"Using shield: {available_shields}")
     agent_config = AgentConfig(
         model=text_model_id,
         instructions="You are a helpful assistant",
@@ -489,7 +488,6 @@ def test_rag_agent(llama_stack_client, agent_config):
         ),
     ]
     for prompt, expected_kw in user_prompts:
-        print(f"User> {prompt}")
         response = rag_agent.create_turn(
             messages=[{"role": "user", "content": prompt}],
             session_id=session_id,