chore: Stack server no longer depends on llama-stack-client (#4094)

This dependency has been bothering folks for a long time (cc @leseb). We
only really needed it for the "library client", which is primarily used by our
tests and is not part of the Stack server. Anyone who needs to use the
library client can certainly install `llama-stack-client` in their
environment to make that work.

Updated the notebook references to additionally install `llama-stack-client`
when setting things up.
This commit is contained in:
Ashwin Bharambe 2025-11-07 09:54:09 -08:00 committed by GitHub
parent 68c976a2d8
commit f49cb0b717
No known key found for this signature in database
GPG key ID: B5690EEEBB952194
23 changed files with 53 additions and 31 deletions

View file

@@ -16,7 +16,6 @@ from unittest.mock import AsyncMock, patch
import pytest
from llama_stack import LlamaStackAsLibraryClient
from llama_stack.apis.datatypes import Api
from llama_stack.apis.inference.inference import (
OpenAIAssistantMessageParam,
@@ -24,6 +23,7 @@ from llama_stack.apis.inference.inference import (
OpenAIChatCompletionUsage,
OpenAIChoice,
)
from llama_stack.core.library_client import LlamaStackAsLibraryClient
from llama_stack.core.telemetry.telemetry import MetricEvent

View file

@@ -13,7 +13,7 @@ import json
import pytest
from llama_stack import LlamaStackAsLibraryClient
from llama_stack.core.library_client import LlamaStackAsLibraryClient
from llama_stack.models.llama.datatypes import ToolDefinition
from tests.common.mcp import make_mcp_server