mirror of
https://github.com/meta-llama/llama-stack.git
synced 2025-10-04 04:04:14 +00:00
The starter distribution now comes with all the required packages to support persistent stores—like the agent store, metadata, and inference—using PostgreSQL. We’ve added a new run YAML file, run-with-postgres-store.yaml, to make this setup easy. The file is included in the container image, so users can simply override the entrypoint to use the Postgres-specific config. The documentation has been updated with instructions on how to do that. Closes: #2619 Signed-off-by: Sébastien Han <seb@redhat.com>
38 lines
991 B
Bash
Executable file
38 lines
991 B
Bash
Executable file
#!/usr/bin/env bash
|
|
|
|
# Copyright (c) Meta Platforms, Inc. and affiliates.
|
|
# All rights reserved.
|
|
#
|
|
# This source code is licensed under the terms described in the LICENSE file in
|
|
# the root directory of this source tree.
|
|
|
|
set -euo pipefail
|
|
|
|
#######################################
# Resolve the argv used to launch the Llama Stack server.
# Globals:
#   ENABLE_POSTGRES_STORE (read) - when non-empty, forces the
#     Postgres-backed run config regardless of any argument.
# Arguments:
#   $1 - optional run config name; used only when ENABLE_POSTGRES_STORE
#        is unset/empty.
# Outputs:
#   Writes the space-joined command to stdout. Output is empty when
#   neither the env variable nor an argument is provided (caller treats
#   that as an error).
#######################################
determine_server_command() {
    local server_args=()

    # The env variable takes precedence over the config file argument.
    if [[ -n "${ENABLE_POSTGRES_STORE:-}" ]]; then
        server_args=("python3" "-m" "llama_stack.core.server.server" "starter::run-with-postgres-store")
    elif [[ -n "${1:-}" ]]; then
        # ${1:-} (not bare $1): the script runs under `set -u`, so referencing
        # $1 when no argument was passed would abort with "unbound variable".
        server_args=("python3" "-m" "llama_stack.core.server.server" "$1")
    fi

    echo "${server_args[@]}"
}
|
|
|
|
#######################################
# Entry point: resolve the server command and exec into it.
# Arguments:
#   $@ - forwarded to determine_server_command (optional run config name).
# Outputs:
#   Progress messages to stdout; error diagnostics to stderr.
# Returns:
#   Does not return on success (replaced by the server via exec);
#   exits 1 when no server command could be determined.
#######################################
main() {
    echo "Starting Llama Stack server..."

    local server_command
    server_command=$(determine_server_command "$@")

    if [[ -z "$server_command" ]]; then
        # Diagnostics go to stderr so they are not mixed into stdout.
        echo "Error: Could not determine server command" >&2
        exit 1
    fi

    printf "Executing: %s\n" "$server_command"
    # Intentionally unquoted: the command is a space-joined string that must
    # word-split back into argv. None of the fixed tokens contain whitespace
    # or glob characters, and the config name is not expected to either.
    # shellcheck disable=SC2086
    exec $server_command
}
|
|
|
|
# Script entry point: forward all CLI arguments to main().
main "$@"
|