mirror of https://github.com/meta-llama/llama-stack.git (synced 2025-12-17 19:02:37 +00:00)
sentence transformers had a subtle dep on torchao
This commit is contained in:
parent bd28427d61
commit 104c66f099

2 changed files with 2 additions and 2 deletions
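For context, a minimal sketch of the failure this commit guards against. The model id, the exception type, and the exact point of failure are illustrative assumptions; the only facts taken from the commit are that sentence-transformers is installed with `--no-deps` and that `CrossEncoder` depends on `torchao.quantization`.

```python
# Hypothetical reproduction sketch (not part of the commit).
# The provider installs "sentence-transformers --no-deps", so transitive
# requirements such as torchao are not installed automatically.
try:
    from sentence_transformers import CrossEncoder  # cross-encoder / reranker path

    # Illustrative checkpoint; any cross-encoder model exercises the same path.
    CrossEncoder("cross-encoder/ms-marco-MiniLM-L-6-v2")
except ModuleNotFoundError as exc:
    # Assumed to surface roughly as "No module named 'torchao'" when torchao
    # is missing, since CrossEncoder pulls in torchao.quantization.
    print(f"missing transitive dependency: {exc.name}")
```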
@@ -9,7 +9,6 @@ This section contains documentation for all available providers for the **post_training**
 ```{toctree}
 :maxdepth: 1
 
-inline_huggingface-cpu
 inline_huggingface-gpu
 inline_torchtune-cpu
 inline_torchtune-gpu

@@ -40,8 +40,9 @@ def available_providers() -> list[ProviderSpec]:
         InlineProviderSpec(
             api=Api.inference,
             provider_type="inline::sentence-transformers",
+            # CrossEncoder depends on torchao.quantization
             pip_packages=[
-                "torch torchvision --index-url https://download.pytorch.org/whl/cpu",
+                "torch torchvision torchao --index-url https://download.pytorch.org/whl/cpu",
                 "sentence-transformers --no-deps",
             ],
             module="llama_stack.providers.inline.inference.sentence_transformers",
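To make the one-word change concrete, here is a small sketch of how the `pip_packages` strings expand into install commands. Only the two list entries come from the diff; the expansion loop is an assumption about how a build step might consume these plain pip argument strings, not a llama-stack internal. Because `sentence-transformers` is installed with `--no-deps`, `torchao` is never installed unless it is named explicitly on the torch line.

```python
# Sketch: expanding the registry's pip_packages entries into pip invocations.
# Assumption: each entry is a whitespace-separated pip argument string passed
# to "pip install" as-is; the loop is illustrative only.
import shlex

pip_packages = [
    "torch torchvision torchao --index-url https://download.pytorch.org/whl/cpu",
    "sentence-transformers --no-deps",
]

for entry in pip_packages:
    print(" ".join(["pip", "install", *shlex.split(entry)]))

# pip install torch torchvision torchao --index-url https://download.pytorch.org/whl/cpu
# pip install sentence-transformers --no-deps
```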