forked from phoenix/litellm-mirror
(test) test cloudflare completion
This commit is contained in:
parent
8fcfb7df22
commit
5fc9524a46
1 changed file with 17 additions and 1 deletion
|
@ -1916,7 +1916,7 @@ def test_completion_gemini():
|
|||
pytest.fail(f"Error occurred: {e}")
|
||||
|
||||
|
||||
test_completion_gemini()
|
||||
# test_completion_gemini()
|
||||
|
||||
|
||||
# Palm tests
|
||||
|
@ -2002,6 +2002,22 @@ def test_completion_together_ai_stream():
|
|||
# test_completion_together_ai_stream()
|
||||
|
||||
|
||||
# Cloudflare AI tests
def test_completion_cloudflare():
    """Smoke-test a Cloudflare Workers AI chat model through litellm.

    Issues a live `completion` call against the hosted llama-2 int8 chat
    model; any exception raised along the way is converted into an explicit
    test failure via ``pytest.fail``.
    """
    litellm.set_verbose = True
    try:
        result = completion(
            model="cloudflare/@cf/meta/llama-2-7b-chat-int8",
            messages=[{"content": "what llm are you", "role": "user"}],
        )
    except Exception as e:
        pytest.fail(f"Error occurred: {e}")
    else:
        # Printed so the raw provider response is visible in verbose test runs.
        print(result)
|
# NOTE(review): invocation commented out — pytest already collects and runs
# this test, and calling it here fires a live network request at import time.
# This also matches the file's convention for other provider tests
# (cf. `# test_completion_gemini()`).
# test_completion_cloudflare()
|
||||
|
||||
# async def get_response(generator):
|
||||
# async for elem in generator:
|
||||
# print(elem)
|
||||
|
|
Loading…
Add table
Add a link
Reference in a new issue