# forked from phoenix-oss/llama-stack-mirror
accelerate
black==24.4.2
blobfile
codeshield
fairscale
fastapi
fire
flake8
huggingface-hub
httpx
hydra-core
hydra-zen
json-strong-typing
matplotlib
omegaconf
pandas
Pillow
pre-commit
pydantic==1.10.13
pydantic_core==2.18.2
python-dotenv
python-openapi
requests
tiktoken
torch
transformers
ufmt==2.7.0
usort==1.0.8
uvicorn
zmq

llama_models[llama3_1] @ git+ssh://git@github.com/meta-llama/llama-models.git