mirror of
https://github.com/meta-llama/llama-stack.git
synced 2025-12-25 13:21:59 +00:00
setup env vars, fix ollama model reference
This commit is contained in:
parent
522d62d631
commit
12ea3cb442
4 changed files with 28 additions and 15 deletions
15
tests/containers/ollama-with-models.containerfile
Normal file
15
tests/containers/ollama-with-models.containerfile
Normal file
|
|
@ -0,0 +1,15 @@
|
|||
# Containerfile used to build our all in one ollama image to run tests in CI
#
# podman build --platform linux/amd64 -f ./ollama-with-models.containerfile -t ollama-with-models .
#
# Base tag is parameterized so CI can pin a specific ollama release for
# reproducible builds; the default preserves the previous behavior.
ARG OLLAMA_TAG=latest
FROM --platform=linux/amd64 ollama/ollama:${OLLAMA_TAG}

# Start ollama and pull models in a single layer so the model blobs are baked
# into the image. Poll the server for readiness (up to 30s) instead of a fixed
# `sleep 5`, which races and fails intermittently on slow CI hosts.
RUN ollama serve & \
    i=0; until ollama list >/dev/null 2>&1; do \
        i=$((i + 1)); \
        [ "$i" -le 30 ] || { echo "ollama server did not start" >&2; exit 1; }; \
        sleep 1; \
    done && \
    ollama pull llama3.2:3b-instruct-fp16 && \
    ollama pull all-minilm:l6-v2 && \
    ollama pull llama-guard3:1b

# Exec-form entrypoint: ollama runs as PID 1 and receives SIGTERM on stop.
ENTRYPOINT ["ollama", "serve"]
|
||||
14
tests/containers/ollama-with-vision-model.containerfile
Normal file
14
tests/containers/ollama-with-vision-model.containerfile
Normal file
|
|
@ -0,0 +1,14 @@
|
|||
# Containerfile used to build our Ollama image with vision model to run tests in CI
#
# podman build --platform linux/amd64 -f ./ollama-with-vision-model.containerfile -t ollama-with-vision-model .
#
# Base tag is parameterized so CI can pin a specific ollama release for
# reproducible builds; the default preserves the previous behavior.
ARG OLLAMA_TAG=latest
FROM --platform=linux/amd64 ollama/ollama:${OLLAMA_TAG}

# Start ollama and pull models in a single layer so the model blobs are baked
# into the image. Poll the server for readiness (up to 30s) instead of a fixed
# `sleep 5`, which races and fails intermittently on slow CI hosts.
RUN ollama serve & \
    i=0; until ollama list >/dev/null 2>&1; do \
        i=$((i + 1)); \
        [ "$i" -le 30 ] || { echo "ollama server did not start" >&2; exit 1; }; \
        sleep 1; \
    done && \
    ollama pull llama3.2-vision:11b && \
    ollama pull all-minilm:l6-v2

# Exec-form entrypoint: ollama runs as PID 1 and receives SIGTERM on stop.
ENTRYPOINT ["ollama", "serve"]
|
||||
Loading…
Add table
Add a link
Reference in a new issue