(test) proxy custom logger

ishaan-jaff 2023-12-11 17:50:31 -08:00
parent 1f18093b63
commit 2eb66642c4
2 changed files with 30 additions and 28 deletions


@@ -19,24 +19,22 @@ from litellm import RateLimitError
 from fastapi.testclient import TestClient
 from fastapi import FastAPI
 from litellm.proxy.proxy_server import router, save_worker_config, initialize  # Replace with the actual module where your FastAPI router is defined
-filepath = os.path.dirname(os.path.abspath(__file__))
-config_fp = f"{filepath}/test_configs/test_config_custom_auth.yaml"
-save_worker_config(config=config_fp, model=None, alias=None, api_base=None, api_version=None, debug=False, temperature=None, max_tokens=None, request_timeout=600, max_budget=None, telemetry=False, drop_params=True, add_function_to_prompt=False, headers=None, save=False, use_queue=False)
-app = FastAPI()
-app.include_router(router)  # Include your router in the test app
-@app.on_event("startup")
-async def wrapper_startup_event():
-    initialize(config=config_fp, model=None, alias=None, api_base=None, api_version=None, debug=False, temperature=None, max_tokens=None, request_timeout=600, max_budget=None, telemetry=False, drop_params=True, add_function_to_prompt=False, headers=None, save=False, use_queue=False)
 
 # Here you create a fixture that will be used by your tests
 # Make sure the fixture returns TestClient(app)
-@pytest.fixture(autouse=True)
-def client():
-    with TestClient(app) as client:
-        yield client
+def get_client(config_fp):
+    filepath = os.path.dirname(os.path.abspath(__file__))
+    config_fp = f"{filepath}/test_configs/{config_fp}"
+    initialize(config=config_fp)
+    app = FastAPI()
+    app.include_router(router)  # Include your router in the test app
+    return TestClient(app)
 
 def test_custom_auth(client):
     try:
+        client = get_client(config_fp="test_config_custom_auth.yaml")
        # Your test data
        test_data = {
            "model": "openai-model",

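Both changed files follow the same refactor: the module-level FastAPI app, its @app.on_event("startup") initializer, and the pytest client fixture are replaced by a get_client helper that each test calls with the config it needs. A minimal sketch of the resulting test shape follows; it assumes the get_client defined in this diff lives in the same module, and the /chat/completions route, message payload, and Authorization header are illustrative assumptions rather than lines from this commit.

# Illustrative sketch only -- the route, payload, and auth header are assumptions,
# not lines from this commit. Relies on the get_client helper added in the diff above.
import os


def test_custom_auth_sketch():
    client = get_client(config_fp="test_config_custom_auth.yaml")  # helper from this diff
    test_data = {
        "model": "openai-model",
        "messages": [{"role": "user", "content": "hi"}],
    }
    headers = {"Authorization": f"Bearer {os.getenv('PROXY_MASTER_KEY')}"}  # assumed auth scheme
    response = client.post("/chat/completions", json=test_data, headers=headers)
    assert response.status_code == 200

Each test now builds and initializes its own TestClient, so a test can pick its own proxy config instead of sharing one module-wide startup hook.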

@@ -20,23 +20,24 @@ import importlib, inspect
 from fastapi.testclient import TestClient
 from fastapi import FastAPI
 from litellm.proxy.proxy_server import router, save_worker_config, initialize  # Replace with the actual module where your FastAPI router is defined
 filepath = os.path.dirname(os.path.abspath(__file__))
-config_fp = f"{filepath}/test_configs/test_custom_logger.yaml"
 python_file_path = f"{filepath}/test_configs/custom_callbacks.py"
-save_worker_config(config=config_fp, model=None, alias=None, api_base=None, api_version=None, debug=False, temperature=None, max_tokens=None, request_timeout=600, max_budget=None, telemetry=False, drop_params=True, add_function_to_prompt=False, headers=None, save=False, use_queue=False)
-app = FastAPI()
-app.include_router(router)  # Include your router in the test app
-@app.on_event("startup")
-async def wrapper_startup_event():
-    initialize(config=config_fp)
+# @app.on_event("startup")
+# async def wrapper_startup_event():
+#     initialize(config=config_fp)
 
 # Use the app fixture in your client fixture
-@pytest.fixture()
-def client():
-    with TestClient(app) as client:
-        yield client
+def get_client(config_fp):
+    filepath = os.path.dirname(os.path.abspath(__file__))
+    config_fp = f"{filepath}/test_configs/{config_fp}"
+    initialize(config=config_fp)
+    app = FastAPI()
+    app.include_router(router)  # Include your router in the test app
+    return TestClient(app)
 
 # Your bearer token
 token = os.getenv("PROXY_MASTER_KEY")
@@ -48,8 +49,9 @@ headers = {
 print("Testing proxy custom logger")
 
-def test_embedding(client):
+def test_embedding():
     try:
+        client = get_client(config_fp="test_custom_logger.yaml")
         litellm.set_verbose=False
         from litellm.proxy.utils import get_instance_fn
         my_custom_logger = get_instance_fn(
@@ -94,9 +96,10 @@ def test_embedding(client):
         pytest.fail(f"LiteLLM Proxy test failed. Exception {str(e)}")
 
-def test_chat_completion(client):
+def test_chat_completion():
     try:
         # Your test data
+        client = get_client(config_fp="test_custom_logger.yaml")
         print("initialized proxy")
         litellm.set_verbose=False
         from litellm.proxy.utils import get_instance_fn
@@ -157,9 +160,10 @@ def test_chat_completion(client):
         pytest.fail(f"LiteLLM Proxy test failed. Exception {str(e)}")
 
-def test_chat_completion_stream(client):
+def test_chat_completion_stream():
     try:
         # Your test data
+        client = get_client(config_fp="test_custom_logger.yaml")
         litellm.set_verbose=False
         from litellm.proxy.utils import get_instance_fn
         my_custom_logger = get_instance_fn(
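The three custom-logger tests all resolve a handler out of test_configs/custom_callbacks.py via get_instance_fn, but that file is not touched by this commit. For orientation, here is a rough sketch of what such a callbacks module can look like, assuming litellm's CustomLogger base class; the class name, the tracked attributes, and the my_custom_logger instance name are illustrative, not copied from the actual test fixture.

# Hypothetical custom_callbacks.py -- NOT the file used by these tests.
# Assumes litellm's CustomLogger callback interface; attribute names are illustrative.
from litellm.integrations.custom_logger import CustomLogger


class MyCustomHandler(CustomLogger):
    def __init__(self):
        super().__init__()
        # State the tests could assert on after a request goes through the proxy.
        self.success = False
        self.async_success = False
        self.logged_kwargs = None

    def log_success_event(self, kwargs, response_obj, start_time, end_time):
        self.success = True
        self.logged_kwargs = kwargs

    async def async_log_success_event(self, kwargs, response_obj, start_time, end_time):
        self.async_success = True
        self.logged_kwargs = kwargs


# Module-level instance that get_instance_fn (or a proxy config entry such as
# "custom_callbacks.my_custom_logger") can resolve to.
my_custom_logger = MyCustomHandler()

Because get_instance_fn returns a module-level instance, the handler the proxy invokes is presumably the same object the tests inspect for logged data after each request.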