fix promptlayer logging

commit 1913d36e05
parent 508a2c1daf
Author: ishaan-jaff
Date: 2023-09-13 13:22:43 -07:00
3 changed files with 40 additions and 30 deletions

litellm/integrations/prompt_layer.py

@@ -17,8 +17,11 @@ class PromptLayerLogger:
     def log_event(self, kwargs, response_obj, start_time, end_time, print_verbose):
         # Method definition
         try:
+            if 'litellm_logging_obj' in kwargs:
+                kwargs.pop('litellm_logging_obj')
+
             print_verbose(
-                f"Prompt Layer Logging - Enters logging function for model {kwargs}"
+                f"Prompt Layer Logging - Enters logging function for model kwargs: {kwargs}\n, response: {response_obj}"
             )
 
             request_response = requests.post(
@@ -27,9 +30,7 @@ class PromptLayerLogger:
                     "function_name": "openai.ChatCompletion.create",
                     "kwargs": kwargs,
                     "tags": ["hello", "world"],
-                    "request_response": dict(
-                        response_obj
-                    ),  # TODO: Check if we need a dict
+                    "request_response": dict(response_obj),
                     "request_start_time": int(start_time.timestamp()),
                     "request_end_time": int(end_time.timestamp()),
                     "api_key": self.key,
@@ -39,11 +40,9 @@ class PromptLayerLogger:
                     # "prompt_version":1,
                 },
             )
-
             print_verbose(
-                f"Prompt Layer Logging - final response object: {request_response}"
+                f"Prompt Layer Logging: success - final response object: {request_response}"
             )
         except:
-            # traceback.print_exc()
-            print_verbose(f"Prompt Layer Error - {traceback.format_exc()}")
+            print_verbose(f"error: Prompt Layer Error - {traceback.format_exc()}")
             pass
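
Note: the substantive change here is popping `litellm_logging_obj` out of `kwargs` before the request body is built. `requests.post(json=...)` serializes the body with the standard `json` encoder, and litellm's internal logging object is an arbitrary Python instance rather than JSON data, so the likely failure mode is a serialization `TypeError` that the bare `except` then swallowed, silently dropping the PromptLayer log. A minimal sketch of that failure and the fix, with `FakeLoggingObj` as a stand-in for the real internal object:

import json

class FakeLoggingObj:  # stand-in for litellm's internal logging object
    pass

kwargs = {"model": "claude-instant-1.2", "litellm_logging_obj": FakeLoggingObj()}

try:
    json.dumps(kwargs)  # fails: FakeLoggingObj is not JSON serializable
except TypeError as e:
    print(f"without pop: {e}")

kwargs.pop("litellm_logging_obj", None)  # the fix applied in this commit
print(json.dumps(kwargs))  # now serializes cleanly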

litellm/tests/test_promptlayer_integration.py

@@ -1,37 +1,48 @@
-#### What this tests ####
-# This tests if logging to the llmonitor integration actually works
-# Adds the parent directory to the system path
-# import sys
-# import os
-# sys.path.insert(0, os.path.abspath('../..'))
-# from litellm import completion, embedding
-# import litellm
-# litellm.success_callback = ["promptlayer"]
+import sys
+import os
+import io
 
-# litellm.set_verbose = True
+sys.path.insert(0, os.path.abspath('../..'))
+from litellm import completion
+import litellm
 
-# def test_chat_openai():
-#     try:
-#         response = completion(model="gpt-3.5-turbo",
-#                               messages=[{
-#                                   "role": "user",
-#                                   "content": "Hi 👋 - i'm openai"
-#                               }])
+litellm.success_callback = ["promptlayer"]
+litellm.set_verbose = True
 
-#         print(response)
+import time
 
-# except Exception as e:
-#     print(e)
+def test_promptlayer_logging():
+    try:
+        # Redirect stdout
+        old_stdout = sys.stdout
+        sys.stdout = new_stdout = io.StringIO()
+
+        response = completion(model="claude-instant-1.2",
+                              messages=[{
+                                  "role": "user",
+                                  "content": "Hi 👋 - i'm openai"
+                              }])
+
+        # Restore stdout
+        time.sleep(1)
+        sys.stdout = old_stdout
+        output = new_stdout.getvalue().strip()
+        print(output)
+
+        if "LiteLLM: Prompt Layer Logging: success" not in output:
+            raise Exception("Required log message not found!")
+
+    except Exception as e:
+        print(e)
+
+test_promptlayer_logging()
 
 # def test_chat_openai():
+#     litellm.success_callback = ["langfuse"]
 #     try:
 #         response = completion(model="replicate/llama-2-70b-chat:2c1608e18606fad2812020dc541930f2d0495ce32eee50074220b87300bc16e1",
 #                               messages=[{
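
The rewritten test asserts on the integration's verbose output rather than on the PromptLayer API response: it swaps sys.stdout for an io.StringIO buffer, runs a completion with the promptlayer success callback enabled, waits briefly for the callback to fire, restores stdout, and checks that the success log line landed in the captured text. A generic sketch of that capture-and-assert pattern, where emit_logs is a hypothetical stand-in for any code under test that reports success by printing:

import io
import sys

def emit_logs():  # hypothetical stand-in for the code under test
    print("LiteLLM: Prompt Layer Logging: success")

def assert_log_emitted(needle: str) -> None:
    old_stdout = sys.stdout
    sys.stdout = captured = io.StringIO()  # redirect prints into a buffer
    try:
        emit_logs()
    finally:
        sys.stdout = old_stdout  # always restore, even if the call raises
    if needle not in captured.getvalue():
        raise Exception("Required log message not found!")

assert_log_emitted("Prompt Layer Logging: success")

Restoring stdout in a finally block is a small hardening over the committed version, where an exception between the redirect and the restore would leave stdout captured for the rest of the test run.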

pyproject.toml

@@ -1,6 +1,6 @@
 [tool.poetry]
 name = "litellm"
-version = "0.1.615"
+version = "0.1.618"
 description = "Library to easily interface with LLM API providers"
 authors = ["BerriAI"]
 license = "MIT License"