(fix) remove litellm.telemetry

This commit is contained in:
Ishaan Jaff 2024-03-26 11:21:09 -07:00
parent 4d81df3d6f
commit 6b4b05b58f
3 changed files with 1 additions and 44 deletions

View file

@ -222,16 +222,14 @@ def run_server(
ssl_keyfile_path,
ssl_certfile_path,
):
global feature_telemetry
args = locals()
if local:
from proxy_server import app, save_worker_config, usage_telemetry, ProxyConfig
from proxy_server import app, save_worker_config, ProxyConfig
else:
try:
from .proxy_server import (
app,
save_worker_config,
usage_telemetry,
ProxyConfig,
)
except ImportError as e:
@ -243,10 +241,8 @@ def run_server(
from proxy_server import (
app,
save_worker_config,
usage_telemetry,
ProxyConfig,
)
feature_telemetry = usage_telemetry
if version == True:
pkg_version = importlib.metadata.version("litellm")
click.echo(f"\nLiteLLM: Current Version = {pkg_version}\n")

View file

@ -305,16 +305,6 @@ celery_fn = None # Redis Queue for handling requests
### logger ###
def usage_telemetry(
    feature: str,
):  # helps us know if people are using this feature. Set `litellm --telemetry False` to your cli call to turn this off
    """Fire-and-forget usage ping for *feature* (e.g. "local_proxy_server").

    Respects the module-level ``user_telemetry`` opt-out flag; when telemetry
    is enabled, the ping runs on a daemon thread so it never blocks or
    outlives the server process.
    """
    if not user_telemetry:
        return
    payload = {"feature": feature}  # "local_proxy_server"
    worker = threading.Thread(
        target=litellm.utils.litellm_telemetry,
        args=(payload,),
        daemon=True,
    )
    worker.start()
def _get_bearer_token(
api_key: str,
):
@ -2574,7 +2564,6 @@ async def initialize(
if experimental:
pass
user_telemetry = telemetry
usage_telemetry(feature="local_proxy_server")
# for streaming

View file

@ -8301,34 +8301,6 @@ def get_or_generate_uuid():
return uuid_value
def litellm_telemetry(data):
    """Best-effort anonymous telemetry POST to the litellm logging API.

    Attaches a stable per-machine UUID (falling back to a fresh random one)
    and the installed ``litellm`` package version to *data*, then POSTs the
    payload. Every failure path is swallowed so telemetry can never break
    the caller.

    Args:
        data (dict): feature-usage payload. Mutated in place: a ``"model"``
            key defaulting to ``None`` is added if absent.
    """
    # Load or generate the persistent UUID; fall back to a throwaway one.
    try:
        uuid_value = get_or_generate_uuid()
    except Exception:  # narrowed from bare except: don't trap SystemExit/KeyboardInterrupt
        uuid_value = str(uuid.uuid4())
    try:
        # Prepare the data to send to litellm logging api
        try:
            pkg_version = importlib.metadata.version("litellm")
        except Exception:  # package metadata unavailable (e.g. editable install)
            pkg_version = None
        if "model" not in data:
            data["model"] = None
        # NOTE(review): the "version:" key (trailing colon) looks like a typo,
        # but the logging endpoint may depend on it — left unchanged on purpose.
        payload = {"uuid": uuid_value, "data": data, "version:": pkg_version}
        # Make the POST request to litellm logging api
        response = requests.post(
            "https://litellm-logging.onrender.com/logging",
            headers={"Content-Type": "application/json"},
            json=payload,
        )
        response.raise_for_status()  # routes HTTP errors into the handler below
    except Exception:
        # [Non-Blocking Error] telemetry must never raise to the caller
        return
######### Secret Manager ############################
# checks if user has passed in a secret manager client
# if passed in then checks the secret there