provider_id => provider_type, adapter_id => adapter_type

Ashwin Bharambe 2024-10-02 14:05:59 -07:00
parent df68db644b
commit fe4aabd690
21 changed files with 83 additions and 85 deletions
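In short: provider specs, adapter specs, and provider configs are now keyed by a *_type field instead of an *_id field. A minimal sketch of the post-rename shape, assuming these models are importable from llama_stack.distribution.datatypes (as the config_class strings below suggest); the field values are illustrative and taken from the registry changes in this commit:

from llama_stack.distribution.datatypes import AdapterSpec, GenericProviderConfig

# Adapters are now identified by adapter_type ...
ollama_adapter = AdapterSpec(
    adapter_type="ollama",
    pip_packages=["ollama"],
    module="llama_stack.providers.adapters.inference.ollama",
)

# ... and provider configs carry provider_type plus the provider-specific config dict.
inference_config = GenericProviderConfig(
    provider_type="meta-reference",
    config={},
)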

View file

@@ -28,7 +28,7 @@ class Api(Enum):
@json_schema_type
class ProviderSpec(BaseModel):
    api: Api
-    provider_id: str
+    provider_type: str
    config_class: str = Field(
        ...,
        description="Fully-qualified classname of the config for this provider",
@@ -56,7 +56,7 @@ class RoutableProvider(Protocol):
class GenericProviderConfig(BaseModel):
-    provider_id: str
+    provider_type: str
    config: Dict[str, Any]
@@ -76,7 +76,7 @@ class RoutableProviderConfig(GenericProviderConfig):
# Example: /inference, /safety
@json_schema_type
class AutoRoutedProviderSpec(ProviderSpec):
-    provider_id: str = "router"
+    provider_type: str = "router"
    config_class: str = ""
    docker_image: Optional[str] = None
@@ -101,7 +101,7 @@ class AutoRoutedProviderSpec(ProviderSpec):
# Example: /models, /shields
@json_schema_type
class RoutingTableProviderSpec(ProviderSpec):
-    provider_id: str = "routing_table"
+    provider_type: str = "routing_table"
    config_class: str = ""
    docker_image: Optional[str] = None
@@ -119,7 +119,7 @@ class RoutingTableProviderSpec(ProviderSpec):
@json_schema_type
class AdapterSpec(BaseModel):
-    adapter_id: str = Field(
+    adapter_type: str = Field(
        ...,
        description="Unique identifier for this adapter",
    )
@@ -179,8 +179,8 @@ class RemoteProviderConfig(BaseModel):
        return f"http://{self.host}:{self.port}"


-def remote_provider_id(adapter_id: str) -> str:
-    return f"remote::{adapter_id}"
+def remote_provider_type(adapter_type: str) -> str:
+    return f"remote::{adapter_type}"


@json_schema_type
@@ -226,8 +226,8 @@ def remote_provider_spec(
        if adapter and adapter.config_class
        else "llama_stack.distribution.datatypes.RemoteProviderConfig"
    )
-    provider_id = remote_provider_id(adapter.adapter_id) if adapter else "remote"
+    provider_type = remote_provider_type(adapter.adapter_type) if adapter else "remote"

    return RemoteProviderSpec(
-        api=api, provider_id=provider_id, config_class=config_class, adapter=adapter
+        api=api, provider_type=provider_type, config_class=config_class, adapter=adapter
    )
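A quick, hedged usage sketch of the renamed helper and of remote_provider_spec() as changed above (import path assumed to be llama_stack.distribution.datatypes, matching the config_class string in this hunk):

from llama_stack.distribution.datatypes import (
    AdapterSpec,
    Api,
    remote_provider_spec,
    remote_provider_type,
)

# The helper maps an adapter_type into the "remote::" provider_type namespace.
assert remote_provider_type("ollama") == "remote::ollama"

# remote_provider_spec() propagates that value onto the resulting spec.
spec = remote_provider_spec(
    api=Api.inference,
    adapter=AdapterSpec(
        adapter_type="ollama",
        pip_packages=["ollama"],
        module="llama_stack.providers.adapters.inference.ollama",
    ),
)
assert spec.provider_type == "remote::ollama"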

View file

@@ -14,7 +14,7 @@ def available_providers() -> List[ProviderSpec]:
    return [
        InlineProviderSpec(
            api=Api.agents,
-            provider_id="meta-reference",
+            provider_type="meta-reference",
            pip_packages=[
                "matplotlib",
                "pillow",
@@ -33,7 +33,7 @@ def available_providers() -> List[ProviderSpec]:
        remote_provider_spec(
            api=Api.agents,
            adapter=AdapterSpec(
-                adapter_id="sample",
+                adapter_type="sample",
                pip_packages=[],
                module="llama_stack.providers.adapters.agents.sample",
                config_class="llama_stack.providers.adapters.agents.sample.SampleConfig",

View file

@@ -13,7 +13,7 @@ def available_providers() -> List[ProviderSpec]:
    return [
        InlineProviderSpec(
            api=Api.inference,
-            provider_id="meta-reference",
+            provider_type="meta-reference",
            pip_packages=[
                "accelerate",
                "blobfile",
@@ -30,7 +30,7 @@ def available_providers() -> List[ProviderSpec]:
        remote_provider_spec(
            api=Api.inference,
            adapter=AdapterSpec(
-                adapter_id="sample",
+                adapter_type="sample",
                pip_packages=[],
                module="llama_stack.providers.adapters.inference.sample",
                config_class="llama_stack.providers.adapters.inference.sample.SampleConfig",
@@ -39,7 +39,7 @@ def available_providers() -> List[ProviderSpec]:
        remote_provider_spec(
            api=Api.inference,
            adapter=AdapterSpec(
-                adapter_id="ollama",
+                adapter_type="ollama",
                pip_packages=["ollama"],
                module="llama_stack.providers.adapters.inference.ollama",
            ),
@@ -47,7 +47,7 @@ def available_providers() -> List[ProviderSpec]:
        remote_provider_spec(
            api=Api.inference,
            adapter=AdapterSpec(
-                adapter_id="tgi",
+                adapter_type="tgi",
                pip_packages=["huggingface_hub", "aiohttp"],
                module="llama_stack.providers.adapters.inference.tgi",
                config_class="llama_stack.providers.adapters.inference.tgi.TGIImplConfig",
@@ -56,7 +56,7 @@ def available_providers() -> List[ProviderSpec]:
        remote_provider_spec(
            api=Api.inference,
            adapter=AdapterSpec(
-                adapter_id="hf::serverless",
+                adapter_type="hf::serverless",
                pip_packages=["huggingface_hub", "aiohttp"],
                module="llama_stack.providers.adapters.inference.tgi",
                config_class="llama_stack.providers.adapters.inference.tgi.InferenceAPIImplConfig",
@@ -65,7 +65,7 @@ def available_providers() -> List[ProviderSpec]:
        remote_provider_spec(
            api=Api.inference,
            adapter=AdapterSpec(
-                adapter_id="hf::endpoint",
+                adapter_type="hf::endpoint",
                pip_packages=["huggingface_hub", "aiohttp"],
                module="llama_stack.providers.adapters.inference.tgi",
                config_class="llama_stack.providers.adapters.inference.tgi.InferenceEndpointImplConfig",
@@ -74,7 +74,7 @@ def available_providers() -> List[ProviderSpec]:
        remote_provider_spec(
            api=Api.inference,
            adapter=AdapterSpec(
-                adapter_id="fireworks",
+                adapter_type="fireworks",
                pip_packages=[
                    "fireworks-ai",
                ],
@@ -85,7 +85,7 @@ def available_providers() -> List[ProviderSpec]:
        remote_provider_spec(
            api=Api.inference,
            adapter=AdapterSpec(
-                adapter_id="together",
+                adapter_type="together",
                pip_packages=[
                    "together",
                ],
@@ -97,10 +97,8 @@ def available_providers() -> List[ProviderSpec]:
        remote_provider_spec(
            api=Api.inference,
            adapter=AdapterSpec(
-                adapter_id="bedrock",
-                pip_packages=[
-                    "boto3"
-                ],
+                adapter_type="bedrock",
+                pip_packages=["boto3"],
                module="llama_stack.providers.adapters.inference.bedrock",
                config_class="llama_stack.providers.adapters.inference.bedrock.BedrockConfig",
            ),
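With every entry above carrying provider_type, the natural lookup key for a registry is that string: a plain name for inline providers ("meta-reference") and a namespaced "remote::<adapter_type>" for remote ones. A hedged sketch, assuming the registry module path llama_stack.providers.registry.inference (the filename is not shown in this diff):

from llama_stack.providers.registry.inference import available_providers

# Index the inference provider specs by their (renamed) provider_type.
specs_by_type = {spec.provider_type: spec for spec in available_providers()}

print(sorted(specs_by_type))
# e.g. ['meta-reference', 'remote::bedrock', 'remote::fireworks', 'remote::hf::endpoint', ...]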

View file

@@ -34,7 +34,7 @@ def available_providers() -> List[ProviderSpec]:
    return [
        InlineProviderSpec(
            api=Api.memory,
-            provider_id="meta-reference",
+            provider_type="meta-reference",
            pip_packages=EMBEDDING_DEPS + ["faiss-cpu"],
            module="llama_stack.providers.impls.meta_reference.memory",
            config_class="llama_stack.providers.impls.meta_reference.memory.FaissImplConfig",
@@ -42,7 +42,7 @@ def available_providers() -> List[ProviderSpec]:
        remote_provider_spec(
            Api.memory,
            AdapterSpec(
-                adapter_id="chromadb",
+                adapter_type="chromadb",
                pip_packages=EMBEDDING_DEPS + ["chromadb-client"],
                module="llama_stack.providers.adapters.memory.chroma",
            ),
@@ -50,7 +50,7 @@ def available_providers() -> List[ProviderSpec]:
        remote_provider_spec(
            Api.memory,
            AdapterSpec(
-                adapter_id="pgvector",
+                adapter_type="pgvector",
                pip_packages=EMBEDDING_DEPS + ["psycopg2-binary"],
                module="llama_stack.providers.adapters.memory.pgvector",
                config_class="llama_stack.providers.adapters.memory.pgvector.PGVectorConfig",
@@ -59,7 +59,7 @@ def available_providers() -> List[ProviderSpec]:
        remote_provider_spec(
            api=Api.memory,
            adapter=AdapterSpec(
-                adapter_id="sample",
+                adapter_type="sample",
                pip_packages=[],
                module="llama_stack.providers.adapters.memory.sample",
                config_class="llama_stack.providers.adapters.memory.sample.SampleConfig",

View file

@@ -19,7 +19,7 @@ def available_providers() -> List[ProviderSpec]:
    return [
        InlineProviderSpec(
            api=Api.safety,
-            provider_id="meta-reference",
+            provider_type="meta-reference",
            pip_packages=[
                "codeshield",
                "transformers",
@@ -34,7 +34,7 @@ def available_providers() -> List[ProviderSpec]:
        remote_provider_spec(
            api=Api.safety,
            adapter=AdapterSpec(
-                adapter_id="sample",
+                adapter_type="sample",
                pip_packages=[],
                module="llama_stack.providers.adapters.safety.sample",
                config_class="llama_stack.providers.adapters.safety.sample.SampleConfig",
@@ -43,7 +43,7 @@ def available_providers() -> List[ProviderSpec]:
        remote_provider_spec(
            api=Api.safety,
            adapter=AdapterSpec(
-                adapter_id="bedrock",
+                adapter_type="bedrock",
                pip_packages=["boto3"],
                module="llama_stack.providers.adapters.safety.bedrock",
                config_class="llama_stack.providers.adapters.safety.bedrock.BedrockSafetyConfig",
@@ -52,7 +52,7 @@ def available_providers() -> List[ProviderSpec]:
        remote_provider_spec(
            api=Api.safety,
            adapter=AdapterSpec(
-                adapter_id="together",
+                adapter_type="together",
                pip_packages=[
                    "together",
                ],

View file

@@ -13,7 +13,7 @@ def available_providers() -> List[ProviderSpec]:
    return [
        InlineProviderSpec(
            api=Api.telemetry,
-            provider_id="meta-reference",
+            provider_type="meta-reference",
            pip_packages=[],
            module="llama_stack.providers.impls.meta_reference.telemetry",
            config_class="llama_stack.providers.impls.meta_reference.telemetry.ConsoleConfig",
@@ -21,7 +21,7 @@ def available_providers() -> List[ProviderSpec]:
        remote_provider_spec(
            api=Api.telemetry,
            adapter=AdapterSpec(
-                adapter_id="sample",
+                adapter_type="sample",
                pip_packages=[],
                module="llama_stack.providers.adapters.telemetry.sample",
                config_class="llama_stack.providers.adapters.telemetry.sample.SampleConfig",
@@ -30,7 +30,7 @@ def available_providers() -> List[ProviderSpec]:
        remote_provider_spec(
            api=Api.telemetry,
            adapter=AdapterSpec(
-                adapter_id="opentelemetry-jaeger",
+                adapter_type="opentelemetry-jaeger",
                pip_packages=[
                    "opentelemetry-api",
                    "opentelemetry-sdk",