forked from phoenix/litellm-mirror
(test) proxy server, add new caching test
parent 4b5429f836
commit 4d40a54edf
1 changed file with 19 additions and 0 deletions
@@ -56,6 +56,25 @@ try:
 except:
     print(f"response: {response3}")
 
+openai.api_key = os.getenv("ANTHROPIC_API_KEY") # this gets passed as a header
+
+# switch caching off using cache flag
+response4 = openai.ChatCompletion.create(
+    model = "claude-instant-1",
+    messages = [
+        {
+            "role": "user",
+            "content": "write a short poem about litellm"
+        }
+    ],
+    caching = False,
+)
+
+try:
+    print(f"response: {response4['choices'][0]['message']['content']}")
+except:
+    print(f"response: {response4}")
+
 assert response1["choices"][0]["message"]["content"] == response2["choices"][0]["message"]["content"]
+assert response1["choices"][0]["message"]["content"] != response4["choices"][0]["message"]["content"]
 
 assert response1["choices"][0]["message"]["content"] != response3["choices"][0]["message"]["content"]
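For context, the new assertions rely on repeated identical requests being served from the proxy's cache (response1 == response2), while a request sent with caching = False should bypass the cache and produce a fresh completion (response1 != response4). A minimal standalone sketch of that flow, assuming the pre-1.0 OpenAI Python SDK is pointed at a locally running litellm proxy; the api_base URL below is illustrative and not taken from this commit:

import os
import openai

# Assumption: a litellm proxy is running locally; this URL is illustrative.
openai.api_base = "http://0.0.0.0:8000"
openai.api_key = os.getenv("ANTHROPIC_API_KEY")  # forwarded to the proxy as a header

common = {
    "model": "claude-instant-1",
    "messages": [{"role": "user", "content": "write a short poem about litellm"}],
}

# Two identical requests: the second should be answered from the proxy's cache.
response1 = openai.ChatCompletion.create(**common)
response2 = openai.ChatCompletion.create(**common)

# Same request with caching disabled: expected to bypass the cache, so the
# text should differ from the cached result (assuming non-deterministic output).
response4 = openai.ChatCompletion.create(**common, caching=False)

assert response1["choices"][0]["message"]["content"] == response2["choices"][0]["message"]["content"]
assert response1["choices"][0]["message"]["content"] != response4["choices"][0]["message"]["content"]

The extra caching keyword is simply forwarded in the request body by the pre-1.0 SDK, which is presumably how the proxy reads the flag and decides whether to consult its cache.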