fix(utils.py): fix cached responses - translate dict to objects

Krrish Dholakia 2023-11-10 10:38:20 -08:00
parent 84460b8222
commit a4c9e6bd46
4 changed files with 108 additions and 21 deletions
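Note: the utils.py change referenced in the commit title is not part of the hunks shown below (these cover the router tests only). The caching test depends on cache hits behaving like live completions, i.e. a cached dict being translated back into a response object that supports the same choices/message access. A minimal sketch of that idea, using a generic recursive converter with a hypothetical helper name (dict_to_obj), not litellm's actual implementation:

from types import SimpleNamespace

def dict_to_obj(value):
    # Illustrative only: recursively wrap cached dicts so a cache hit can be
    # read with attribute access, like a live response object.
    if isinstance(value, dict):
        return SimpleNamespace(**{k: dict_to_obj(v) for k, v in value.items()})
    if isinstance(value, list):
        return [dict_to_obj(v) for v in value]
    return value

cached = {"choices": [{"message": {"role": "assistant", "content": "hi"}}]}
response = dict_to_obj(cached)
assert response.choices[0].message.content == "hi"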


@@ -134,7 +134,7 @@ Who among the mentioned figures from Ancient Greece contributed to the domain of
     print(results)
-test_multiple_deployments()
+# test_multiple_deployments()
 ### FUNCTION CALLING
 def test_function_calling():
@@ -228,6 +228,7 @@ def test_function_calling():
 def test_acompletion_on_router():
     try:
+        litellm.set_verbose = True
         model_list = [
             {
                 "model_name": "gpt-3.5-turbo",
@@ -245,16 +246,69 @@ def test_acompletion_on_router():
         ]
         async def get_response():
-            router = Router(model_list=model_list)
-            response = await router.acompletion(model="gpt-3.5-turbo", messages=messages)
-            return response
-        response = asyncio.run(get_response())
-        assert isinstance(response['choices'][0]['message']['content'], str)
+            router = Router(model_list=model_list, redis_host=os.environ["REDIS_HOST"], redis_password=os.environ["REDIS_PASSWORD"], redis_port=os.environ["REDIS_PORT"], cache_responses=True)
+            response1 = await router.acompletion(model="gpt-3.5-turbo", messages=messages)
+            print(f"response1: {response1}")
+            response2 = await router.acompletion(model="gpt-3.5-turbo", messages=messages)
+            print(f"response2: {response2}")
+            assert response1["choices"][0]["message"]["content"] == response2["choices"][0]["message"]["content"]
+        asyncio.run(get_response())
     except Exception as e:
         traceback.print_exc()
         pytest.fail(f"Error occurred: {e}")
 test_acompletion_on_router()
+def test_function_calling_on_router():
+    try:
+        model_list = [
+            {
+                "model_name": "gpt-3.5-turbo",
+                "litellm_params": {
+                    "model": "gpt-3.5-turbo-0613",
+                    "api_key": os.getenv("OPENAI_API_KEY"),
+                },
+            },
+        ]
+        function1 = [
+            {
+                "name": "get_current_weather",
+                "description": "Get the current weather in a given location",
+                "parameters": {
+                    "type": "object",
+                    "properties": {
+                        "location": {
+                            "type": "string",
+                            "description": "The city and state, e.g. San Francisco, CA",
+                        },
+                        "unit": {"type": "string", "enum": ["celsius", "fahrenheit"]},
+                    },
+                    "required": ["location"],
+                },
+            }
+        ]
+        router = Router(
+            model_list=model_list,
+            redis_host=os.getenv("REDIS_HOST"),
+            redis_password=os.getenv("REDIS_PASSWORD"),
+            redis_port=os.getenv("REDIS_PORT")
+        )
+        async def get_response():
+            messages = [
+                {
+                    "role": "user",
+                    "content": "what's the weather in boston"
+                }
+            ]
+            response1 = await router.acompletion(model="gpt-3.5-turbo", messages=messages, functions=function1)
+            print(f"response1: {response1}")
+            return response1
+        response = asyncio.run(get_response())
+        assert "function_call" in response["choices"][0]["message"]
+    except Exception as e:
+        print(f"An exception occurred: {e}")
+# test_function_calling_on_router()
 def test_aembedding_on_router():
     try: