Adding docker-compose.yaml, starting to simplify

Ashwin Bharambe 2024-11-16 10:56:38 -08:00
parent e4509cb568
commit f38e76ee98
14 changed files with 516 additions and 386 deletions

View file

@@ -4,37 +4,10 @@
 # This source code is licensed under the terms described in the LICENSE file in
 # the root directory of this source tree.
-from typing import Optional
-
-from llama_stack.distribution.datatypes import RemoteProviderConfig
-from llama_stack.providers.utils.docker.service_config import DockerComposeServiceConfig
+from .config import OllamaImplConfig
-
-DEFAULT_OLLAMA_PORT = 11434
-
-
-class OllamaImplConfig(RemoteProviderConfig):
-    port: int = DEFAULT_OLLAMA_PORT
-
-    @classmethod
-    def sample_docker_compose_config(cls) -> Optional[DockerComposeServiceConfig]:
-        return DockerComposeServiceConfig(
-            image="ollama/ollama:latest",
-            volumes=["$HOME/.ollama:/root/.ollama"],
-            devices=["nvidia.com/gpu=all"],
-            deploy={
-                "resources": {
-                    "reservations": {
-                        "devices": [{"driver": "nvidia", "capabilities": ["gpu"]}]
-                    }
-                }
-            },
-            runtime="nvidia",
-            ports=[f"{DEFAULT_OLLAMA_PORT}:{DEFAULT_OLLAMA_PORT}"],
-        )
-
-async def get_adapter_impl(config: RemoteProviderConfig, _deps):
+
+async def get_adapter_impl(config: OllamaImplConfig, _deps):
     from .ollama import OllamaInferenceAdapter
 
     impl = OllamaInferenceAdapter(config.url)
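For context, a minimal sketch of exercising the new signature. The import path, the host/port fields inherited from RemoteProviderConfig, and the assumption that get_adapter_impl initializes and returns the adapter (the hunk is truncated above) are all assumptions, not part of this commit:

import asyncio

# Import path is assumed; adjust to wherever this adapter package lives.
from llama_stack.providers.adapters.inference.ollama import (
    OllamaImplConfig,
    get_adapter_impl,
)


async def main():
    # RemoteProviderConfig is assumed to expose host/port and derive config.url.
    config = OllamaImplConfig(host="localhost", port=11434)
    impl = await get_adapter_impl(config, {})  # _deps appears unused by this adapter
    print(type(impl).__name__)


asyncio.run(main())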

View file

@@ -0,0 +1,65 @@
# Copyright (c) Meta Platforms, Inc. and affiliates.
# All rights reserved.
#
# This source code is licensed under the terms described in the LICENSE file in
# the root directory of this source tree.

from typing import List

from llama_stack.distribution.datatypes import RemoteProviderConfig
from llama_stack.providers.utils.docker.service_config import DockerComposeServiceConfig

DEFAULT_OLLAMA_PORT = 11434


class OllamaImplConfig(RemoteProviderConfig):
    port: int = DEFAULT_OLLAMA_PORT

    @classmethod
    def sample_docker_compose_services(cls) -> List[DockerComposeServiceConfig]:
        return [
            # The Ollama server itself, pinned to the NVIDIA runtime.
            DockerComposeServiceConfig(
                service_name="ollama",
                image="ollama/ollama:latest",
                volumes=["$HOME/.ollama:/root/.ollama"],
                devices=["nvidia.com/gpu=all"],
                deploy={
                    "resources": {
                        "reservations": {
                            "devices": [{"driver": "nvidia", "capabilities": ["gpu"]}]
                        }
                    }
                },
                runtime="nvidia",
                ports=[f"{DEFAULT_OLLAMA_PORT}:{DEFAULT_OLLAMA_PORT}"],
                healthcheck={
                    "test": ["CMD", "curl", "-f", "http://ollama:11434"],
                    "interval": "10s",
                    "timeout": "5s",
                    "retries": 5,
                },
            ),
            # One-shot init container that blocks until the server answers.
            DockerComposeServiceConfig(
                service_name="ollama-init",
                image="ollama/ollama",
                depends_on={"ollama": {"condition": "service_healthy"}},
                environment={
                    "OLLAMA_HOST": "ollama",
                    "OLLAMA_MODELS": "${OLLAMA_MODELS}",
                },
                volumes=["ollama_data:/root/.ollama"],
                # Adjacent Python string literals concatenate with no separator,
                # so each fragment carries a trailing space to stay valid shell.
                entrypoint=(
                    'sh -c \'max_attempts=30; attempt=0; echo "Waiting for Ollama server..."; '
                    "until curl -s http://ollama:11434 > /dev/null; do "
                    "attempt=$((attempt + 1)); "
                    "if [ $attempt -ge $max_attempts ]; then "
                    'echo "Timeout waiting for Ollama server"; '
                    "exit 1; "
                    "fi; "
                    'echo "Attempt $attempt: Server not ready yet..."; '
                    "sleep 5; "
                    "done'"
                ),
            ),
        ]
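Presumably some tooling elsewhere in the tree turns this list into a compose file. A hypothetical sketch of that rendering, assuming DockerComposeServiceConfig is a pydantic-style model whose remaining fields map one-to-one onto compose service keys (render_compose is invented for illustration):

from typing import List

import yaml

from llama_stack.providers.utils.docker.service_config import DockerComposeServiceConfig


def render_compose(services: List[DockerComposeServiceConfig]) -> str:
    # service_name becomes the mapping key; everything else is assumed to
    # serialize directly into the service body.
    body = {
        svc.service_name: svc.dict(exclude_none=True, exclude={"service_name"})
        for svc in services
    }
    return yaml.safe_dump({"services": body}, sort_keys=False)

Feeding sample_docker_compose_services() through a renderer like this would be expected to produce YAML close to the hand-written compose file below.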

View file

@@ -0,0 +1,55 @@
services:
  ${SERVICE_NAME:-ollama}:
    image: ollama/ollama:latest
    ports:
      - ${OLLAMA_PORT:-11434}:${OLLAMA_PORT:-11434}
    volumes:
      - $HOME/.ollama:/root/.ollama
    devices:
      - nvidia.com/gpu=all
    runtime: nvidia
    healthcheck:
      test: ["CMD", "curl", "-f", "http://ollama:11434"]
      interval: 10s
      timeout: 5s
      retries: 5

  ${SERVICE_NAME:-ollama}-init:
    image: ollama/ollama
    depends_on:
      ${SERVICE_NAME:-ollama}:
        condition: service_healthy
    environment:
      - OLLAMA_HOST=ollama
      - OLLAMA_MODELS=${OLLAMA_MODELS}
    volumes:
      - $HOME/.ollama:/root/.ollama
    # "$$" keeps the shell's own variables out of compose's ${...} interpolation.
    entrypoint: >
      sh -c '
      max_attempts=30;
      attempt=0;
      echo "Waiting for Ollama server...";
      until curl -s http://ollama:11434 > /dev/null; do
        attempt=$$((attempt + 1));
        if [ $$attempt -ge $$max_attempts ]; then
          echo "Timeout waiting for Ollama server";
          exit 1;
        fi;
        echo "Attempt $$attempt: Server not ready yet...";
        sleep 5;
      done;
      echo "Server ready. Pulling models...";
      models="${OLLAMA_MODELS}";
      for model in $$models; do
        echo "Pulling $$model...";
        if ! ollama pull "$$model"; then
          echo "Failed to pull $$model";
          exit 1;
        fi;
      done;
      echo "All models pulled successfully"
      '
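Saved as docker-compose.yaml, something like OLLAMA_MODELS="llama3.1:8b" docker compose up (the model tag is illustrative) should bring up the server and have the init container pull each whitespace-separated model once the healthcheck reports healthy; OLLAMA_PORT falls back to 11434 when unset.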