mirror of https://github.com/meta-llama/llama-stack.git
synced 2025-08-02 08:44:44 +00:00

This commit is contained in:
parent d9db9a01bf
commit d0854a48b2

4 changed files with 1645 additions and 0 deletions
COMMANDS.md (new file, 14 lines)
@@ -0,0 +1,14 @@
```bash
source ~/miniconda3/bin/activate
conda create --prefix ./envs python=3.10

source ~/miniconda3/bin/activate
conda activate ./envs

pip install -e . \
  && llama stack build --config ./build.yaml --image-type conda \
  && llama stack run ./run.yaml \
  --port 5001

pytest llama_stack/providers/tests/inference/test_text_inference.py -v -k groq --lf -s
```
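Note: the run config below resolves the Groq credential from the environment via `${env.GROQ_API_KEY}`, so the key has to be exported before `llama stack run` (the groq-marked pytest run presumably needs the same key). A minimal sketch; the key value is a placeholder:

```bash
# Sketch: export the Groq API key that run.yaml reads via ${env.GROQ_API_KEY}.
export GROQ_API_KEY="your-groq-api-key"   # placeholder value

llama stack run ./run.yaml --port 5001
```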
build.yaml (new file, 23 lines)
@@ -0,0 +1,23 @@
version: '2'
name: groq
distribution_spec:
  description: Use (an external) Groq server for running LLM inference
  docker_image: null
  providers:
    inference:
    - remote::groq
    memory:
    - inline::faiss
    safety:
    - inline::llama-guard
    agents:
    - inline::meta-reference
    telemetry:
    - inline::meta-reference
    eval:
    - inline::meta-reference
    datasetio:
    - inline::localfs
    scoring:
    - inline::basic
image_type: conda
hello.ipynb (new file, 1528 lines)
File diff suppressed because one or more lines are too long
run.yaml (new file, 80 lines)
@@ -0,0 +1,80 @@
version: '2'
image_name: groq
docker_image: null
conda_env: groq
apis:
- agents
- datasetio
- eval
- inference
- memory
- safety
- scoring
- telemetry
providers:
  inference:
  - provider_id: groq
    provider_type: remote::groq
    config:
      api_key: ${env.GROQ_API_KEY}
  memory:
  - provider_id: faiss
    provider_type: inline::faiss
    config:
      kvstore:
        type: sqlite
        namespace: null
        db_path: ${env.SQLITE_STORE_DIR:~/.llama/distributions/groq}/faiss_store.db
  safety:
  - provider_id: llama-guard
    provider_type: inline::llama-guard
    config: {}
  agents:
  - provider_id: meta-reference
    provider_type: inline::meta-reference
    config:
      persistence_store:
        type: sqlite
        namespace: null
        db_path: ${env.SQLITE_STORE_DIR:~/.llama/distributions/groq}/agents_store.db
  telemetry:
  - provider_id: meta-reference
    provider_type: inline::meta-reference
    config:
      service_name: ${env.OTEL_SERVICE_NAME:llama-stack}
      sinks: ${env.TELEMETRY_SINKS:console,sqlite}
      sqlite_db_path: ${env.SQLITE_DB_PATH:~/.llama/distributions/groq/trace_store.db}
  eval:
  - provider_id: meta-reference
    provider_type: inline::meta-reference
    config: {}
  datasetio:
  - provider_id: localfs
    provider_type: inline::localfs
    config: {}
  scoring:
  - provider_id: basic
    provider_type: inline::basic
    config: {}
metadata_store:
  namespace: null
  type: sqlite
  db_path: ${env.SQLITE_STORE_DIR:~/.llama/distributions/groq}/registry.db
models:
- metadata: {}
  model_id: Llama3.2-3B-Instruct
  provider_id: groq
  provider_model_id: null
- metadata: {}
  model_id: Llama3.1-8B-Instruct
  provider_id: groq
  provider_model_id: null
- metadata: {}
  model_id: Llama-3-70B-Instruct
  provider_id: groq
  provider_model_id: null
shields: []
memory_banks: []
datasets: []
scoring_fns: []
eval_tasks: []
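The `${env.VAR:default}` entries above fall back to the defaults shown when the variables are unset. A hypothetical override sketch before starting the server; the variable names are taken from run.yaml and the values are placeholders or the documented defaults:

```bash
# Hypothetical overrides for the ${env.VAR:default} entries in run.yaml.
export GROQ_API_KEY="your-groq-api-key"                    # required; no default in run.yaml
export SQLITE_STORE_DIR="$HOME/.llama/distributions/groq"  # kvstore/registry location (default)
export OTEL_SERVICE_NAME="llama-stack"                     # telemetry service name (default)
export TELEMETRY_SINKS="console,sqlite"                    # telemetry sinks (default)

llama stack run ./run.yaml --port 5001
```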