update testing

This commit is contained in:
Krrish Dholakia 2023-09-09 16:35:36 -07:00
parent 4dcd008a9a
commit beecb60f51
7 changed files with 9 additions and 17 deletions

View file

@@ -945,7 +945,7 @@ def embedding(
try:
response = None
logging = litellm_logging_obj
logging.update_environment_variables(model=model, optional_params={}, litellm_params={"force_timeout": force_timeout, "azure": azure, "litellm_call_id": litellm_call_id, "logger_fn": logger_fn})
logging.update_environment_variables(model=model, user="", optional_params={}, litellm_params={"force_timeout": force_timeout, "azure": azure, "litellm_call_id": litellm_call_id, "logger_fn": logger_fn})
if azure == True:
# azure configs
openai.api_type = "azure"

View file

@@ -12,7 +12,7 @@ import litellm
from litellm import embedding, completion
litellm.success_callback = ["posthog", "helicone"]
litellm.failure_callback = ["slack", "sentry", "posthog"]
litellm.failure_callback = ["sentry", "posthog"]
litellm.set_verbose = True

View file

@@ -7,10 +7,8 @@ sys.path.insert(
) # Adds the parent directory to the system path
import litellm
from litellm import embedding, completion
from infisical import InfisicalClient
litellm.set_verbose = True
# litellm.secret_manager_client = InfisicalClient(token=os.environ["INFISICAL_TOKEN"])
def test_openai_embedding():

View file

@@ -11,7 +11,7 @@ import litellm
from litellm import embedding, completion
litellm.success_callback = ["posthog"]
litellm.failure_callback = ["slack", "sentry", "posthog"]
litellm.failure_callback = ["sentry", "posthog"]
litellm.set_verbose = True

View file

@@ -438,12 +438,6 @@ def exception_logging(
def client(original_function):
global liteDebuggerClient, get_all_keys
def check_args(*args, **kwargs):
try:
model = args[0] if len(args) > 0 else kwargs["model"]
except:
raise ValueError("model param not passed in.")
def function_setup(
start_time, *args, **kwargs
): # just run once to check if user wants to send their data anywhere - PostHog/Sentry/Slack/etc.
@@ -494,8 +488,9 @@ def client(original_function):
stream = True if "stream" in kwargs and kwargs["stream"] == True else False
logging_obj = Logging(model=model, messages=messages, stream=stream, litellm_call_id=kwargs["litellm_call_id"], function_id=function_id, call_type=call_type, start_time=start_time)
return logging_obj
except: # DO NOT BLOCK running the function because of this
except Exception as e: # DO NOT BLOCK running the function because of this
print_verbose(f"[Non-Blocking] {traceback.format_exc()}; args - {args}; kwargs - {kwargs}")
print(e)
pass
def crash_reporting(*args, **kwargs):
@@ -522,11 +517,10 @@ def client(original_function):
result = None
litellm_call_id = str(uuid.uuid4())
kwargs["litellm_call_id"] = litellm_call_id
# check_args(*args, **kwargs)
try:
model = args[0] if len(args) > 0 else kwargs["model"]
except:
raise ValueError("model param not passed in.")
# try:
# model = args[0] if len(args) > 0 else kwargs["model"]
# except:
# raise ValueError("model param not passed in.")
try:
logging_obj = function_setup(start_time, *args, **kwargs)