mirror of
https://github.com/meta-llama/llama-stack.git
synced 2025-07-15 01:26:10 +00:00
distributions readme typos
This commit is contained in:
parent
44c05c6e7d
commit
46bb8884a7
4 changed files with 6 additions and 6 deletions
|
@ -14,7 +14,7 @@ The `llamastack/distribution-` distribution consists of the following provider c
|
||||||
> This assumes you have a hosted endpoint at Fireworks with an API Key.
|
> This assumes you have a hosted endpoint at Fireworks with an API Key.
|
||||||
|
|
||||||
```
|
```
|
||||||
$ cd llama-stack/distribution/fireworks
|
$ cd distributions/fireworks
|
||||||
$ ls
|
$ ls
|
||||||
compose.yaml run.yaml
|
compose.yaml run.yaml
|
||||||
$ docker compose up
|
$ docker compose up
|
||||||
|
|
|
@ -13,7 +13,7 @@ The `llamastack/distribution-ollama` distribution consists of the following prov
|
||||||
> This assumes you have access to a GPU to start an Ollama server with access to your GPU.
|
> This assumes you have access to a GPU to start an Ollama server with access to your GPU.
|
||||||
|
|
||||||
```
|
```
|
||||||
$ cd llama-stack/distribution/ollama/gpu
|
$ cd distributions/ollama/gpu
|
||||||
$ ls
|
$ ls
|
||||||
compose.yaml run.yaml
|
compose.yaml run.yaml
|
||||||
$ docker compose up
|
$ docker compose up
|
||||||
|
@ -44,7 +44,7 @@ docker compose down
|
||||||
> This will start an ollama server with CPU only, please see the [Ollama Documentation](https://github.com/ollama/ollama) for serving models on CPU only.
|
> This will start an ollama server with CPU only, please see the [Ollama Documentation](https://github.com/ollama/ollama) for serving models on CPU only.
|
||||||
|
|
||||||
```
|
```
|
||||||
$ cd llama-stack/distribution/ollama/cpu
|
$ cd distributions/ollama/cpu
|
||||||
$ ls
|
$ ls
|
||||||
compose.yaml run.yaml
|
compose.yaml run.yaml
|
||||||
$ docker compose up
|
$ docker compose up
|
||||||
|
|
|
@ -15,7 +15,7 @@ The `llamastack/distribution-tgi` distribution consists of the following provide
|
||||||
|
|
||||||
|
|
||||||
```
|
```
|
||||||
$ cd llama_stack/distribution/docker/tgi
|
$ cd distributions/tgi/gpu
|
||||||
$ ls
|
$ ls
|
||||||
compose.yaml tgi-run.yaml
|
compose.yaml tgi-run.yaml
|
||||||
$ docker compose up
|
$ docker compose up
|
||||||
|
@ -43,7 +43,7 @@ docker compose down
|
||||||
> This assumes you have a hosted endpoint compatible with the TGI server.
|
> This assumes you have a hosted endpoint compatible with the TGI server.
|
||||||
|
|
||||||
```
|
```
|
||||||
$ cd llama-stack/distribution/tgi/cpu
|
$ cd distributions/tgi/cpu
|
||||||
$ ls
|
$ ls
|
||||||
compose.yaml run.yaml
|
compose.yaml run.yaml
|
||||||
$ docker compose up
|
$ docker compose up
|
||||||
|
|
|
@ -17,7 +17,7 @@ The `llamastack/distribution-together` distribution consists of the following pr
|
||||||
> This assumes you have a hosted endpoint at Together with an API Key.
|
> This assumes you have a hosted endpoint at Together with an API Key.
|
||||||
|
|
||||||
```
|
```
|
||||||
$ cd llama-stack/distribution/together
|
$ cd distributions/together
|
||||||
$ ls
|
$ ls
|
||||||
compose.yaml run.yaml
|
compose.yaml run.yaml
|
||||||
$ docker compose up
|
$ docker compose up
|
||||||
|
|
Loading…
Add table
Add a link
Reference in a new issue