Mirror of https://github.com/meta-llama/llama-stack.git (synced 2025-12-21 16:08:40 +00:00)
fix: Remove unused TELEMETRY_SINKS and add OTEL_EXPORTER_OTLP_PROTOCOL (#4406)
Changes:
- Remove the TELEMETRY_SINKS environment variable from the scripts (unused)
- Replace it with OTEL_EXPORTER_OTLP_PROTOCOL in the install scripts

The TELEMETRY_SINKS variable is no longer used by the Python code and has been replaced with the standard OpenTelemetry environment variable OTEL_EXPORTER_OTLP_PROTOCOL=http/protobuf.
This commit is contained in:
parent bd35aa4d78
commit b6043bd53b
2 changed files with 3 additions and 9 deletions
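For context, OTEL_EXPORTER_OTLP_PROTOCOL is the standard OpenTelemetry SDK variable that selects the OTLP wire format; http/protobuf is the OTLP-over-HTTP encoding that pairs with the collector's port 4318 used in these scripts (4317 is the usual gRPC port). A minimal sketch of the resulting exporter configuration, using the values the diffs below set:

# Standard OpenTelemetry exporter settings now used instead of TELEMETRY_SINKS.
export OTEL_EXPORTER_OTLP_ENDPOINT=http://otel-collector:4318   # OTLP/HTTP listens on 4318
export OTEL_EXPORTER_OTLP_PROTOCOL=http/protobuf                # other spec-defined values: grpc, http/json
export OTEL_SERVICE_NAME=llama-stack                            # service name attached to traces and metrics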
Install script (first changed file):

@@ -20,7 +20,6 @@ WAIT_TIMEOUT=30
 TEMP_LOG=""
 WITH_TELEMETRY=true
 TELEMETRY_SERVICE_NAME="llama-stack"
-TELEMETRY_SINKS="otel_trace,otel_metric"
 OTEL_EXPORTER_OTLP_ENDPOINT="http://otel-collector:4318"
 TEMP_TELEMETRY_DIR=""

@@ -412,7 +411,6 @@ Options:
   --no-telemetry, --without-telemetry
                             Skip provisioning the telemetry stack
   --telemetry-service NAME  Service name reported to telemetry (default: ${TELEMETRY_SERVICE_NAME})
-  --telemetry-sinks SINKS   Comma-separated telemetry sinks (default: ${TELEMETRY_SINKS})
   --otel-endpoint URL       OTLP endpoint provided to Llama Stack (default: ${OTEL_EXPORTER_OTLP_ENDPOINT})
   -h, --help                Show this help message
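To illustrate the option set that remains after this change, a hedged example invocation of the install script; the script's actual name and path are not shown in this diff and are assumed here:

./install.sh --telemetry-service llama-stack \
             --otel-endpoint http://otel-collector:4318
# --telemetry-sinks is no longer a recognized option after this change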
@@ -464,10 +462,6 @@ while [[ $# -gt 0 ]]; do
       TELEMETRY_SERVICE_NAME="$2"
       shift 2
       ;;
-    --telemetry-sinks)
-      TELEMETRY_SINKS="$2"
-      shift 2
-      ;;
     --otel-endpoint)
       OTEL_EXPORTER_OTLP_ENDPOINT="$2"
       shift 2

@@ -630,8 +624,8 @@ fi
 server_env_opts=()
 if [ "$WITH_TELEMETRY" = true ]; then
   server_env_opts+=(
-    -e TELEMETRY_SINKS="${TELEMETRY_SINKS}"
     -e OTEL_EXPORTER_OTLP_ENDPOINT="${OTEL_EXPORTER_OTLP_ENDPOINT}"
+    -e OTEL_EXPORTER_OTLP_PROTOCOL=http/protobuf
     -e OTEL_SERVICE_NAME="${TELEMETRY_SERVICE_NAME}"
   )
 fi
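The server_env_opts array above is the usual bash pattern for building up container flags conditionally; a minimal sketch of how such an array is typically expanded into the run command follows. The actual invocation is outside this diff, so the podman call and image variable below are assumptions:

# Build the telemetry-related -e flags only when telemetry is enabled.
server_env_opts=()
if [ "$WITH_TELEMETRY" = true ]; then
  server_env_opts+=(
    -e OTEL_EXPORTER_OTLP_ENDPOINT="${OTEL_EXPORTER_OTLP_ENDPOINT}"
    -e OTEL_EXPORTER_OTLP_PROTOCOL=http/protobuf
    -e OTEL_SERVICE_NAME="${TELEMETRY_SERVICE_NAME}"
  )
fi
# "${server_env_opts[@]}" expands each element as a separate argument, so the
# container receives every -e NAME=value pair intact (or no extra flags at all
# when telemetry is disabled).
podman run -d --name llama-stack "${server_env_opts[@]}" "$SERVER_IMAGE"   # hypothetical invocation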
Telemetry stack setup script (second changed file):

@@ -10,7 +10,7 @@
 # This script sets up Jaeger, OpenTelemetry Collector, Prometheus, and Grafana using Podman
 # For whoever is interested in testing the telemetry stack, you can run this script to set up the stack.
 # export OTEL_EXPORTER_OTLP_ENDPOINT=http://localhost:4318
-# export TELEMETRY_SINKS=otel_trace,otel_metric
+# export OTEL_EXPORTER_OTLP_PROTOCOL=http/protobuf
 # export OTEL_SERVICE_NAME=my-llama-app
 # Then run the distro server

@@ -158,7 +158,7 @@ echo "   OTEL Collector: http://localhost:4318 (OTLP endpoint)"
 echo ""
 echo "🔧 Environment variables for Llama Stack:"
 echo "   export OTEL_EXPORTER_OTLP_ENDPOINT=http://localhost:4318"
-echo "   export TELEMETRY_SINKS=otel_trace,otel_metric"
+echo "   export OTEL_EXPORTER_OTLP_PROTOCOL=http/protobuf"
 echo "   export OTEL_SERVICE_NAME=my-llama-app"
 echo ""
 echo "📊 Next steps:"
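Taken together, the setup script's comments and printed instructions describe the following local workflow; the script path and the final launch step are assumptions for illustration, not part of this diff:

./setup_telemetry.sh                                      # hypothetical path to the script above
export OTEL_EXPORTER_OTLP_ENDPOINT=http://localhost:4318
export OTEL_EXPORTER_OTLP_PROTOCOL=http/protobuf
export OTEL_SERVICE_NAME=my-llama-app
# Then run the distro server, as the script's comments instruct.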