mirror of
https://github.com/meta-llama/llama-stack.git
synced 2025-06-28 02:53:30 +00:00
chore: remove torch dep from sentence-transformers
Since the capability moved to the inference provider backends we don't need to do the transformation locally. Thus no deps are needed. Signed-off-by: Sébastien Han <seb@redhat.com>
This commit is contained in:
parent
3c9a10d2fe
commit
73ca0fb37a
1 changed file with 1 addition and 4 deletions
|
@@ -48,10 +48,7 @@ def available_providers() -> list[ProviderSpec]:
|
|||
InlineProviderSpec(
|
||||
api=Api.inference,
|
||||
provider_type="inline::sentence-transformers",
|
||||
pip_packages=[
|
||||
"torch torchvision --index-url https://download.pytorch.org/whl/cpu",
|
||||
"sentence-transformers --no-deps",
|
||||
],
|
||||
pip_packages=[],
|
||||
module="llama_stack.providers.inline.inference.sentence_transformers",
|
||||
config_class="llama_stack.providers.inline.inference.sentence_transformers.config.SentenceTransformersInferenceConfig",
|
||||
),
|
||||
|
|
Loading…
Add table
Add a link
Reference in a new issue