(test) proxy server, add new caching test

This commit is contained in:
ishaan-jaff 2023-10-25 14:07:45 -07:00
parent 4b5429f836
commit 4d40a54edf


@@ -56,6 +56,25 @@ try:
except:
    print(f"response: {response3}")
openai.api_key = os.getenv("ANTHROPIC_API_KEY") # this gets passed as a header
# switch caching off using cache flag
response4 = openai.ChatCompletion.create(
model = "claude-instant-1",
messages = [
{
"role": "user",
"content": "write a short poem about litellm"
}
],
caching = False,
)
try:
    print(f"response: {response4['choices'][0]['message']['content']}")
except:
    print(f"response: {response4}")
assert response1["choices"][0]["message"]["content"] == response2["choices"][0]["message"]["content"]
assert response1["choices"][0]["message"]["content"] != response4["choices"][0]["message"]["content"]
assert response1["choices"][0]["message"]["content"] != response3["choices"][0]["message"]["content"]
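
For context, the assertions above compare response4 against responses created earlier in this test file (not shown in this hunk). Below is a minimal sketch of what those earlier calls presumably look like, assuming the test points the openai client at a locally running litellm proxy and that response1 and response2 send the same prompt with caching enabled; the proxy base URL and the caching=True flag are assumptions inferred from this diff, not confirmed by it:

import os
import openai

openai.api_base = "http://0.0.0.0:8000"  # assumed: local litellm proxy endpoint
openai.api_key = os.getenv("ANTHROPIC_API_KEY")  # forwarded to the proxy as a header

common_kwargs = dict(
    model = "claude-instant-1",
    messages = [
        {
            "role": "user",
            "content": "write a short poem about litellm"
        }
    ],
)

# assumed: the first call populates the proxy cache and the second identical call
# is served from it, which is why the test expects response1 and response2 to match
response1 = openai.ChatCompletion.create(caching = True, **common_kwargs)
response2 = openai.ChatCompletion.create(caching = True, **common_kwargs)

With caching = False (the new code in this commit), the request bypasses the cache and the model is called again, so response4 is expected to differ from the cached response1.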