Mirror of https://github.com/meta-llama/llama-stack.git (synced 2025-12-22 22:19:49 +00:00)
chore(tests): fix responses and vector_io tests
parent 1721aafc1f
commit 1c2ece229c
12 changed files with 41 additions and 49 deletions
@@ -65,7 +65,7 @@ from llama_stack.providers.datatypes import HealthResponse, HealthStatus, Routin
 from llama_stack.providers.utils.inference.inference_store import InferenceStore
 from llama_stack.providers.utils.telemetry.tracing import get_current_span
 
-logger = get_logger(name=__name__, category="core")
+logger = get_logger(name=__name__, category="inference")
 
 
 class InferenceRouter(Inference):
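This hunk moves the router's logger from the generic "core" category to an "inference"-specific one. A minimal sketch of what a category-scoped get_logger can look like, assuming it simply namespaces a standard-library logger under its category (the real llama_stack.log helper may differ):

# Minimal sketch of a category-scoped logger factory; the actual
# llama_stack get_logger may be implemented differently.
import logging


def get_logger(name: str, category: str = "uncategorized") -> logging.Logger:
    # Namespacing by category lets log levels be tuned per subsystem,
    # e.g. raising verbosity for "inference" without touching "core".
    return logging.getLogger(f"llama_stack.{category}.{name}")


logging.basicConfig(level=logging.INFO)
logging.getLogger("llama_stack.inference").setLevel(logging.DEBUG)

logger = get_logger(name=__name__, category="inference")
logger.debug("debug output now visible for the inference category")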
@@ -854,4 +854,5 @@ class InferenceRouter(Inference):
             model=model.identifier,
             object="chat.completion",
         )
+        logger.debug(f"InferenceRouter.completion_response: {final_response}")
         await self.store.store_chat_completion(final_response, messages)
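The second hunk adds a debug log of the assembled chat completion right before it is handed to the inference store. A toy sketch of that log-then-persist tail, using a hypothetical InMemoryInferenceStore and finish_chat_completion helper in place of the real InferenceStore and router method:

# Toy stand-in for the log-then-store tail of the chat completion path;
# the real InferenceStore persists to a backend, this one keeps a list.
import asyncio
import logging
from dataclasses import dataclass, field

logger = logging.getLogger("llama_stack.inference")


@dataclass
class InMemoryInferenceStore:
    rows: list = field(default_factory=list)

    async def store_chat_completion(self, response: dict, messages: list) -> None:
        self.rows.append({"response": response, "messages": messages})


async def finish_chat_completion(store, final_response: dict, messages: list) -> dict:
    # Log the full response at debug level, then persist it.
    logger.debug(f"InferenceRouter.completion_response: {final_response}")
    await store.store_chat_completion(final_response, messages)
    return final_response


async def main() -> None:
    store = InMemoryInferenceStore()
    response = {"model": "example-model", "object": "chat.completion"}
    await finish_chat_completion(store, response, [{"role": "user", "content": "hi"}])
    assert len(store.rows) == 1


asyncio.run(main())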
@@ -63,6 +63,8 @@ class ModelsRoutingTable(CommonRoutingTableImpl, Models):
 
     async def get_provider_impl(self, model_id: str) -> Any:
         model = await lookup_model(self, model_id)
+        if model.provider_id not in self.impls_by_provider_id:
+            raise ValueError(f"Provider {model.provider_id} not found in the routing table")
         return self.impls_by_provider_id[model.provider_id]
 
     async def register_model(
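The routing-table hunk guards get_provider_impl so a model whose provider is missing from the table raises a clear ValueError rather than a bare KeyError. A self-contained sketch of that lookup-and-validate pattern, using a hypothetical ToyModelsRoutingTable in place of the real ModelsRoutingTable and lookup_model:

# Simplified sketch of routing-table dispatch: resolve the model,
# validate that its provider is registered, return the provider impl.
import asyncio
from dataclasses import dataclass
from typing import Any


@dataclass
class Model:
    identifier: str
    provider_id: str


class ToyModelsRoutingTable:
    def __init__(self, models: dict, impls_by_provider_id: dict):
        self.models = models
        self.impls_by_provider_id = impls_by_provider_id

    async def lookup_model(self, model_id: str) -> Model:
        return self.models[model_id]

    async def get_provider_impl(self, model_id: str) -> Any:
        model = await self.lookup_model(model_id)
        if model.provider_id not in self.impls_by_provider_id:
            raise ValueError(f"Provider {model.provider_id} not found in the routing table")
        return self.impls_by_provider_id[model.provider_id]


async def main() -> None:
    table = ToyModelsRoutingTable(
        models={"llama3": Model("llama3", provider_id="ollama")},
        impls_by_provider_id={"ollama": object()},
    )
    impl = await table.get_provider_impl("llama3")
    print(type(impl))
    # A model whose provider was never registered now fails with a clear error.
    table.models["orphan"] = Model("orphan", provider_id="missing")
    try:
        await table.get_provider_impl("orphan")
    except ValueError as err:
        print(err)


asyncio.run(main())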