forked from phoenix/litellm-mirror
(test) async cloudflare
This commit is contained in:
parent
dde6bc4fb6
commit
a999e80b46
2 changed files with 25 additions and 7 deletions
|
@@ -2009,6 +2009,7 @@ def test_completion_cloudflare():
|
|||
response = completion(
|
||||
model="cloudflare/@cf/meta/llama-2-7b-chat-int8",
|
||||
messages=[{"content": "what llm are you", "role": "user"}],
|
||||
max_tokens=15,
|
||||
)
|
||||
print(response)
|
||||
|
||||
|
@@ -2018,13 +2019,6 @@ def test_completion_cloudflare():
|
|||
|
||||
test_completion_cloudflare()
|
||||
|
||||
# async def get_response(generator):
|
||||
# async for elem in generator:
|
||||
# print(elem)
|
||||
# return
|
||||
|
||||
# test_completion_together_ai_stream()
|
||||
|
||||
|
||||
def test_moderation():
|
||||
import openai
|
||||
|
|
Loading…
Add table
Add a link
Reference in a new issue