forked from phoenix/litellm-mirror
(fix) remove litellm.telemetry
parent 4d81df3d6f
commit 6b4b05b58f
3 changed files with 1 addition and 44 deletions
@@ -222,16 +222,14 @@ def run_server(
     ssl_keyfile_path,
     ssl_certfile_path,
 ):
-    global feature_telemetry
     args = locals()
     if local:
-        from proxy_server import app, save_worker_config, usage_telemetry, ProxyConfig
+        from proxy_server import app, save_worker_config, ProxyConfig
     else:
         try:
             from .proxy_server import (
                 app,
                 save_worker_config,
-                usage_telemetry,
                 ProxyConfig,
             )
         except ImportError as e:
@@ -243,10 +241,8 @@ def run_server(
             from proxy_server import (
                 app,
                 save_worker_config,
-                usage_telemetry,
                 ProxyConfig,
             )
-        feature_telemetry = usage_telemetry
     if version == True:
         pkg_version = importlib.metadata.version("litellm")
         click.echo(f"\nLiteLLM: Current Version = {pkg_version}\n")
@@ -305,16 +305,6 @@ celery_fn = None  # Redis Queue for handling requests
 ### logger ###


-def usage_telemetry(
-    feature: str,
-):  # helps us know if people are using this feature. Set `litellm --telemetry False` to your cli call to turn this off
-    if user_telemetry:
-        data = {"feature": feature}  # "local_proxy_server"
-        threading.Thread(
-            target=litellm.utils.litellm_telemetry, args=(data,), daemon=True
-        ).start()
-
-
 def _get_bearer_token(
     api_key: str,
 ):
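For context, the helper deleted in the hunk above only ever fired telemetry from a daemon thread, so its removal does not touch any request path. Below is a minimal, self-contained sketch of the pre-removal behavior; the `user_telemetry` flag and the `litellm_telemetry` target lived elsewhere in the real module and are stubbed here as assumptions.

import threading

# Stand-ins for module-level state in the original file (assumption).
user_telemetry = True


def litellm_telemetry(data):
    # Stub for the real sender that lived in litellm.utils.
    print(f"would send telemetry payload: {data}")


def usage_telemetry(
    feature: str,
):  # fire-and-forget: never blocks proxy startup
    if user_telemetry:
        data = {"feature": feature}  # e.g. "local_proxy_server"
        threading.Thread(
            target=litellm_telemetry, args=(data,), daemon=True
        ).start()


usage_telemetry(feature="local_proxy_server")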
@@ -2574,7 +2564,6 @@ async def initialize(
     if experimental:
         pass
     user_telemetry = telemetry
-    usage_telemetry(feature="local_proxy_server")


 # for streaming
@@ -8301,34 +8301,6 @@ def get_or_generate_uuid():
     return uuid_value


-def litellm_telemetry(data):
-    # Load or generate the UUID
-    uuid_value = ""
-    try:
-        uuid_value = get_or_generate_uuid()
-    except:
-        uuid_value = str(uuid.uuid4())
-    try:
-        # Prepare the data to send to litellm logging api
-        try:
-            pkg_version = importlib.metadata.version("litellm")
-        except:
-            pkg_version = None
-        if "model" not in data:
-            data["model"] = None
-        payload = {"uuid": uuid_value, "data": data, "version:": pkg_version}
-        # Make the POST request to litellm logging api
-        response = requests.post(
-            "https://litellm-logging.onrender.com/logging",
-            headers={"Content-Type": "application/json"},
-            json=payload,
-        )
-        response.raise_for_status()  # Raise an exception for HTTP errors
-    except:
-        # [Non-Blocking Error]
-        return
-
-
 ######### Secret Manager ############################
 # checks if user has passed in a secret manager client
 # if passed in then checks the secret there
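Similarly, the `litellm_telemetry` helper removed above just POSTed an anonymous payload and swallowed every error. A rough standalone sketch of what it did before this commit, with imports added and a fresh UUID standing in for the persisted `get_or_generate_uuid()` value (an assumption made only to keep the sketch self-contained):

import importlib.metadata
import uuid

import requests


def litellm_telemetry(data: dict) -> None:
    # The original looked up a persisted anonymous id via get_or_generate_uuid();
    # a fresh UUID stands in for it here (assumption).
    uuid_value = str(uuid.uuid4())
    try:
        try:
            pkg_version = importlib.metadata.version("litellm")
        except Exception:
            pkg_version = None
        if "model" not in data:
            data["model"] = None
        payload = {"uuid": uuid_value, "data": data, "version:": pkg_version}
        # POST to the (now removed) telemetry endpoint; failures were non-blocking.
        response = requests.post(
            "https://litellm-logging.onrender.com/logging",
            headers={"Content-Type": "application/json"},
            json=payload,
            timeout=5,
        )
        response.raise_for_status()
    except Exception:
        # [Non-Blocking Error] telemetry errors were always swallowed.
        return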