provider_id => provider_type, adapter_id => adapter_type
parent df68db644b · commit fe4aabd690

21 changed files with 83 additions and 85 deletions. The hunks captured below cover the five provider registry files (paths inferred from the hunk contents).
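The rename is mechanical but repo-wide: the `provider_id` field on provider specs becomes `provider_type`, and the `adapter_id` field on `AdapterSpec` becomes `adapter_type`. A minimal sketch of the models after this commit, reconstructed from the hunks alone (Pydantic, the defaults, and the exact field set are assumptions, not shown in this diff):

    from enum import Enum
    from typing import List

    from pydantic import BaseModel


    class Api(Enum):
        # API surfaces observed in the hunks below.
        agents = "agents"
        inference = "inference"
        memory = "memory"
        safety = "safety"
        telemetry = "telemetry"


    class AdapterSpec(BaseModel):
        adapter_type: str             # was `adapter_id` before this commit
        pip_packages: List[str] = []
        module: str
        config_class: str = ""


    class ProviderSpec(BaseModel):
        api: Api
        provider_type: str            # was `provider_id` before this commit
        pip_packages: List[str] = []
        module: str = ""
        config_class: str = ""


    class InlineProviderSpec(ProviderSpec):
        pass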
llama_stack/providers/registry/agents.py

@@ -14,7 +14,7 @@ def available_providers() -> List[ProviderSpec]:
     return [
         InlineProviderSpec(
             api=Api.agents,
-            provider_id="meta-reference",
+            provider_type="meta-reference",
             pip_packages=[
                 "matplotlib",
                 "pillow",
@@ -33,7 +33,7 @@ def available_providers() -> List[ProviderSpec]:
         remote_provider_spec(
             api=Api.agents,
             adapter=AdapterSpec(
-                adapter_id="sample",
+                adapter_type="sample",
                 pip_packages=[],
                 module="llama_stack.providers.adapters.agents.sample",
                 config_class="llama_stack.providers.adapters.agents.sample.SampleConfig",
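With the agents hunks applied, callers key registry entries by `provider_type`. A hypothetical lookup (the import path matches the file above; field names come from the sketch models earlier):

    from llama_stack.providers.registry.agents import available_providers

    # Index the registry by the renamed field.
    by_type = {spec.provider_type: spec for spec in available_providers()}
    print(by_type["meta-reference"].pip_packages)  # ["matplotlib", "pillow", ...]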
llama_stack/providers/registry/inference.py

@@ -13,7 +13,7 @@ def available_providers() -> List[ProviderSpec]:
     return [
         InlineProviderSpec(
             api=Api.inference,
-            provider_id="meta-reference",
+            provider_type="meta-reference",
             pip_packages=[
                 "accelerate",
                 "blobfile",
@@ -30,7 +30,7 @@ def available_providers() -> List[ProviderSpec]:
         remote_provider_spec(
             api=Api.inference,
             adapter=AdapterSpec(
-                adapter_id="sample",
+                adapter_type="sample",
                 pip_packages=[],
                 module="llama_stack.providers.adapters.inference.sample",
                 config_class="llama_stack.providers.adapters.inference.sample.SampleConfig",
@@ -39,7 +39,7 @@ def available_providers() -> List[ProviderSpec]:
         remote_provider_spec(
             api=Api.inference,
             adapter=AdapterSpec(
-                adapter_id="ollama",
+                adapter_type="ollama",
                 pip_packages=["ollama"],
                 module="llama_stack.providers.adapters.inference.ollama",
             ),
@@ -47,7 +47,7 @@ def available_providers() -> List[ProviderSpec]:
         remote_provider_spec(
             api=Api.inference,
             adapter=AdapterSpec(
-                adapter_id="tgi",
+                adapter_type="tgi",
                 pip_packages=["huggingface_hub", "aiohttp"],
                 module="llama_stack.providers.adapters.inference.tgi",
                 config_class="llama_stack.providers.adapters.inference.tgi.TGIImplConfig",
@@ -56,7 +56,7 @@ def available_providers() -> List[ProviderSpec]:
         remote_provider_spec(
             api=Api.inference,
             adapter=AdapterSpec(
-                adapter_id="hf::serverless",
+                adapter_type="hf::serverless",
                 pip_packages=["huggingface_hub", "aiohttp"],
                 module="llama_stack.providers.adapters.inference.tgi",
                 config_class="llama_stack.providers.adapters.inference.tgi.InferenceAPIImplConfig",
@@ -65,7 +65,7 @@ def available_providers() -> List[ProviderSpec]:
         remote_provider_spec(
             api=Api.inference,
             adapter=AdapterSpec(
-                adapter_id="hf::endpoint",
+                adapter_type="hf::endpoint",
                 pip_packages=["huggingface_hub", "aiohttp"],
                 module="llama_stack.providers.adapters.inference.tgi",
                 config_class="llama_stack.providers.adapters.inference.tgi.InferenceEndpointImplConfig",
@@ -74,7 +74,7 @@ def available_providers() -> List[ProviderSpec]:
         remote_provider_spec(
             api=Api.inference,
             adapter=AdapterSpec(
-                adapter_id="fireworks",
+                adapter_type="fireworks",
                 pip_packages=[
                     "fireworks-ai",
                 ],
@@ -85,7 +85,7 @@ def available_providers() -> List[ProviderSpec]:
         remote_provider_spec(
             api=Api.inference,
             adapter=AdapterSpec(
-                adapter_id="together",
+                adapter_type="together",
                 pip_packages=[
                     "together",
                 ],
@@ -97,10 +97,8 @@ def available_providers() -> List[ProviderSpec]:
         remote_provider_spec(
             api=Api.inference,
             adapter=AdapterSpec(
-                adapter_id="bedrock",
-                pip_packages=[
-                    "boto3"
-                ],
+                adapter_type="bedrock",
+                pip_packages=["boto3"],
                 module="llama_stack.providers.adapters.inference.bedrock",
                 config_class="llama_stack.providers.adapters.inference.bedrock.BedrockConfig",
             ),
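The bedrock hunk above is the one place the commit does more than rename: the multi-line `pip_packages` list is also collapsed onto one line, which is what makes the totals uneven (85 deletions vs. 83 additions). For the remote entries, a plausible sketch of what `remote_provider_spec` does with the renamed field (the `remote::<adapter_type>` key derivation is an assumption; this diff does not show the helper itself):

    # Builds on the sketch models earlier; illustrative only.
    class RemoteProviderSpec(ProviderSpec):
        adapter: AdapterSpec


    def remote_provider_spec(api: Api, adapter: AdapterSpec) -> RemoteProviderSpec:
        return RemoteProviderSpec(
            api=api,
            provider_type=f"remote::{adapter.adapter_type}",  # keyed off adapter_type now
            pip_packages=adapter.pip_packages,
            module=adapter.module,
            config_class=adapter.config_class,
            adapter=adapter,
        )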
llama_stack/providers/registry/memory.py

@@ -34,7 +34,7 @@ def available_providers() -> List[ProviderSpec]:
     return [
         InlineProviderSpec(
             api=Api.memory,
-            provider_id="meta-reference",
+            provider_type="meta-reference",
             pip_packages=EMBEDDING_DEPS + ["faiss-cpu"],
             module="llama_stack.providers.impls.meta_reference.memory",
             config_class="llama_stack.providers.impls.meta_reference.memory.FaissImplConfig",
@@ -42,7 +42,7 @@ def available_providers() -> List[ProviderSpec]:
         remote_provider_spec(
             Api.memory,
             AdapterSpec(
-                adapter_id="chromadb",
+                adapter_type="chromadb",
                 pip_packages=EMBEDDING_DEPS + ["chromadb-client"],
                 module="llama_stack.providers.adapters.memory.chroma",
             ),
@@ -50,7 +50,7 @@ def available_providers() -> List[ProviderSpec]:
         remote_provider_spec(
             Api.memory,
             AdapterSpec(
-                adapter_id="pgvector",
+                adapter_type="pgvector",
                 pip_packages=EMBEDDING_DEPS + ["psycopg2-binary"],
                 module="llama_stack.providers.adapters.memory.pgvector",
                 config_class="llama_stack.providers.adapters.memory.pgvector.PGVectorConfig",
@@ -59,7 +59,7 @@ def available_providers() -> List[ProviderSpec]:
         remote_provider_spec(
             api=Api.memory,
             adapter=AdapterSpec(
-                adapter_id="sample",
+                adapter_type="sample",
                 pip_packages=[],
                 module="llama_stack.providers.adapters.memory.sample",
                 config_class="llama_stack.providers.adapters.memory.sample.SampleConfig",
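Note the memory registry passes `Api.memory` and the `AdapterSpec` positionally where the other registries use keyword arguments; the rename applies identically either way. Since `provider_type` is now the lookup key, a small illustrative check (not shipped by the repo) that a registry stays collision-free:

    from typing import Iterable


    def assert_unique_provider_types(specs: Iterable[ProviderSpec]) -> None:
        # Each (api, provider_type) pair must identify exactly one spec.
        seen = set()
        for spec in specs:
            key = (spec.api, spec.provider_type)
            if key in seen:
                raise ValueError(f"duplicate provider_type: {key}")
            seen.add(key)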
llama_stack/providers/registry/safety.py

@@ -19,7 +19,7 @@ def available_providers() -> List[ProviderSpec]:
     return [
         InlineProviderSpec(
             api=Api.safety,
-            provider_id="meta-reference",
+            provider_type="meta-reference",
             pip_packages=[
                 "codeshield",
                 "transformers",
@@ -34,7 +34,7 @@ def available_providers() -> List[ProviderSpec]:
         remote_provider_spec(
             api=Api.safety,
             adapter=AdapterSpec(
-                adapter_id="sample",
+                adapter_type="sample",
                 pip_packages=[],
                 module="llama_stack.providers.adapters.safety.sample",
                 config_class="llama_stack.providers.adapters.safety.sample.SampleConfig",
@@ -43,7 +43,7 @@ def available_providers() -> List[ProviderSpec]:
         remote_provider_spec(
             api=Api.safety,
             adapter=AdapterSpec(
-                adapter_id="bedrock",
+                adapter_type="bedrock",
                 pip_packages=["boto3"],
                 module="llama_stack.providers.adapters.safety.bedrock",
                 config_class="llama_stack.providers.adapters.safety.bedrock.BedrockSafetyConfig",
@@ -52,7 +52,7 @@ def available_providers() -> List[ProviderSpec]:
         remote_provider_spec(
             api=Api.safety,
             adapter=AdapterSpec(
-                adapter_id="together",
+                adapter_type="together",
                 pip_packages=[
                     "together",
                 ],
llama_stack/providers/registry/telemetry.py

@@ -13,7 +13,7 @@ def available_providers() -> List[ProviderSpec]:
     return [
         InlineProviderSpec(
             api=Api.telemetry,
-            provider_id="meta-reference",
+            provider_type="meta-reference",
             pip_packages=[],
             module="llama_stack.providers.impls.meta_reference.telemetry",
             config_class="llama_stack.providers.impls.meta_reference.telemetry.ConsoleConfig",
@@ -21,7 +21,7 @@ def available_providers() -> List[ProviderSpec]:
         remote_provider_spec(
             api=Api.telemetry,
             adapter=AdapterSpec(
-                adapter_id="sample",
+                adapter_type="sample",
                 pip_packages=[],
                 module="llama_stack.providers.adapters.telemetry.sample",
                 config_class="llama_stack.providers.adapters.telemetry.sample.SampleConfig",
@@ -30,7 +30,7 @@ def available_providers() -> List[ProviderSpec]:
         remote_provider_spec(
             api=Api.telemetry,
             adapter=AdapterSpec(
-                adapter_id="opentelemetry-jaeger",
+                adapter_type="opentelemetry-jaeger",
                 pip_packages=[
                     "opentelemetry-api",
                     "opentelemetry-sdk",
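Out-of-tree code and any parsed configs still using the old key names need to move with this commit. A hypothetical one-off migration over a config tree parsed from YAML or JSON (illustrative; no such tool ships with the repo):

    RENAMES = {"provider_id": "provider_type", "adapter_id": "adapter_type"}


    def migrate_keys(node):
        # Recursively rename the old keys anywhere in a dict/list tree.
        if isinstance(node, dict):
            return {RENAMES.get(k, k): migrate_keys(v) for k, v in node.items()}
        if isinstance(node, list):
            return [migrate_keys(v) for v in node]
        return node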