Mirror of https://github.com/BerriAI/litellm.git, synced 2025-04-24 10:14:26 +00:00
(test) proxy - azure/chat/completion test
This commit is contained in:
parent ca852e1dcd
commit 39072bd196
1 changed file with 52 additions and 0 deletions
@@ -46,3 +46,55 @@ def test_chat_completion():

# Run the test
test_chat_completion()


def test_chat_completion_azure():
    try:
        # Test request data for an Azure-backed deployment
        test_data = {
            "model": "azure/chatgpt-v-2",
            "messages": [
                {
                    "role": "user",
                    "content": "hi"
                },
            ],
            "max_tokens": 10,
        }
        print("testing proxy server with Azure Request")
        response = client.post("/v1/chat/completions", json=test_data)

        assert response.status_code == 200
        result = response.json()
        print(f"Received response: {result}")
    except Exception as e:
        pytest.fail(f"LiteLLM Proxy test failed. Exception: {e}")


# Run the test
test_chat_completion_azure()


# def test_embedding():
#     try:
#         # Your test data
#         test_data = {
#             "model": "",
#             "messages": [
#                 {
#                     "role": "user",
#                     "content": "hi"
#                 },
#             ],
#             "max_tokens": 10,
#         }
#         print("testing proxy server with OpenAI embedding")
#         response = client.post("/v1/embeddings", json=test_data)
#
#         assert response.status_code == 200
#         result = response.json()
#         print(f"Received response: {result}")
#     except Exception as e:
#         pytest.fail(f"LiteLLM Proxy test failed. Exception: {e}")


# # Run the test
# test_embedding()
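For context, the hunk above relies on `pytest` and a `client` object defined earlier in the test module, outside this diff. A minimal sketch of that assumed preamble, treating the proxy as a FastAPI app exercised through FastAPI's TestClient (the import path `litellm.proxy.proxy_server` is an assumption, not shown in this commit):

import pytest
from fastapi.testclient import TestClient

# Assumed location of the proxy's FastAPI app; not part of this diff.
from litellm.proxy.proxy_server import app

# Synchronous test client used by the test functions above.
client = TestClient(app)

With that in place, the new test can be run on its own with `pytest -k test_chat_completion_azure -s`.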