use client opt-in

This commit is contained in:
Krrish Dholakia 2023-08-26 10:38:39 -07:00
parent 1192640fb9
commit ff7b4ffcf1
3 changed files with 8 additions and 7 deletions

View file

@@ -22,6 +22,7 @@ vertex_project: Optional[str] = None
vertex_location: Optional[str] = None
togetherai_api_key: Optional[str] = None
baseten_key: Optional[str] = None
use_client = False
caching = False
caching_with_models = False # if you want the caching key to be model + prompt
model_alias_map: Dict[str, str] = {}

View file

@@ -338,12 +338,12 @@ def client(original_function):
litellm.input_callback.append("lite_debugger")
litellm.success_callback.append("lite_debugger")
litellm.failure_callback.append("lite_debugger")
# else:
# # create a litellm token for users
# litellm.token = get_or_generate_uuid()
# litellm.input_callback.append("lite_debugger")
# litellm.success_callback.append("lite_debugger")
# litellm.failure_callback.append("lite_debugger")
elif litellm.use_client:
# create a litellm token for users
litellm.token = get_or_generate_uuid()
litellm.input_callback.append("lite_debugger")
litellm.success_callback.append("lite_debugger")
litellm.failure_callback.append("lite_debugger")
if (
len(litellm.input_callback) > 0
or len(litellm.success_callback) > 0

View file

@@ -1,6 +1,6 @@
[tool.poetry]
name = "litellm"
version = "0.1.485"
version = "0.1.486"
description = "Library to easily interface with LLM API providers"
authors = ["BerriAI"]
license = "MIT License"