(feat) add langfuse logging tests to ci/cd

ishaan-jaff 2023-12-26 09:16:13 +05:30
parent 7346b1638c
commit 2d5801b69e


@@ -18,7 +18,7 @@ import time
 import pytest
 
 
-def search_logs(log_file_path):
+def search_logs(log_file_path, num_good_logs=1):
     """
     Searches the given log file for logs containing the "/api/public" string.
@@ -48,12 +48,17 @@ def search_logs(log_file_path):
                     print(line.strip())
                     print("\n\n")
                     match = re.search(
-                        r"receive_response_headers.complete return_value=\(b\'HTTP/1.1\', (\d+),",
+                        r'"POST /api/public/ingestion HTTP/1.1" (\d+) (\d+)',
                         line,
                     )
                     if match:
                         status_code = int(match.group(1))
-                        if status_code != 200 and status_code != 201:
+                        print("STATUS CODE", status_code)
+                        if (
+                            status_code != 200
+                            and status_code != 201
+                            and status_code != 207
+                        ):
                             print("got a BAD log")
                             bad_logs.append(line.strip())
                         else:
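For context, the new pattern looks for the Langfuse ingestion endpoint in the captured HTTP log line and accepts 200, 201, and 207 (HTTP multi-status, which batched ingestion can return) as good logs. A minimal standalone sketch of that matching logic; the sample log line below is illustrative only, not taken from the commit:

import re

# Hypothetical log line; the exact text written to langfuse.log is an assumption here.
sample = 'HTTP Request: POST https://cloud.langfuse.com/api/public/ingestion "POST /api/public/ingestion HTTP/1.1" 207 1234'

match = re.search(r'"POST /api/public/ingestion HTTP/1.1" (\d+) (\d+)', sample)
if match:
    status_code = int(match.group(1))
    # mirror the test's acceptance set: anything outside 200/201/207 counts as a bad log
    print("good log" if status_code in (200, 201, 207) else "bad log")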
@@ -62,7 +67,9 @@ def search_logs(log_file_path):
         print(bad_logs)
         if len(bad_logs) > 0:
             raise Exception(f"bad logs, Bad logs = {bad_logs}")
+        assert (
+            len(good_logs) == num_good_logs
+        ), f"Did not get expected number of good logs, expected {num_good_logs}, got {len(good_logs)}. All logs \n {all_logs}"
         print("\nGood Logs")
         print(good_logs)
         if len(good_logs) <= 0:
@@ -92,28 +99,28 @@ def pre_langfuse_setup():
     return
 
 
-@pytest.mark.skip(reason="beta test - checking langfuse output")
 def test_langfuse_logging_async():
     try:
         pre_langfuse_setup()
         litellm.set_verbose = True
 
         async def _test_langfuse():
-            return await litellm.acompletion(
-                model="gpt-3.5-turbo",
+            response = await litellm.acompletion(
+                model="azure/chatgpt-v-2",
                 messages=[{"role": "user", "content": "This is a test"}],
                 max_tokens=100,
                 temperature=0.7,
                 timeout=5,
                 user="test_user",
             )
+            asyncio.sleep(1)
+            return response
 
         response = asyncio.run(_test_langfuse())
         print(f"response: {response}")
 
-        # time.sleep(2)
         # # check langfuse.log to see if there was a failed response
-        # search_logs("langfuse.log")
+        search_logs("langfuse.log")
     except litellm.Timeout as e:
         pass
     except Exception as e:
@@ -297,4 +304,4 @@ def test_langfuse_logging_tool_calling():
     tool_calls = response.choices[0].message.tool_calls
 
 
-test_langfuse_logging_tool_calling()
+# test_langfuse_logging_tool_calling()
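For reference, the general shape of the reworked async test is: run the completion inside asyncio.run, pause briefly, then check langfuse.log. A minimal sketch with a stand-in coroutine in place of litellm.acompletion; the stub name and payload are illustrative, not from the commit:

import asyncio

async def _fake_completion():
    # stand-in for litellm.acompletion(...); returns a canned payload immediately
    return {"choices": [{"message": {"content": "ok"}}]}

async def _run():
    response = await _fake_completion()
    # brief awaited pause before the log check
    await asyncio.sleep(1)
    return response

response = asyncio.run(_run())
print(f"response: {response}")
# search_logs("langfuse.log")  # with the default num_good_logs=1, this asserts exactly one good ingestion log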