forked from phoenix/litellm-mirror
test_proxy_stream
This commit is contained in:
parent 33f31eb38a
commit a2ea39818d
2 changed files with 17 additions and 17 deletions
@@ -1,21 +1,21 @@
-import openai
-import os
+# import openai
+# import os
 
-os.environ["OPENAI_API_KEY"] = ""
+# os.environ["OPENAI_API_KEY"] = ""
 
-openai.api_key = os.environ["OPENAI_API_KEY"]
-openai.api_base ="http://localhost:5000"
+# openai.api_key = os.environ["OPENAI_API_KEY"]
+# openai.api_base ="http://localhost:5000"
 
-messages = [
-    {
-        "role": "user",
-        "content": "write a 1 pg essay in liteLLM"
-    }
-]
+# messages = [
+#     {
+#         "role": "user",
+#         "content": "write a 1 pg essay in liteLLM"
+#     }
+# ]
 
-response = openai.ChatCompletion.create(model="gpt-3.5-turbo", messages=messages, stream=True)
-print("got response", response)
-# response is a generator
+# response = openai.ChatCompletion.create(model="gpt-3.5-turbo", messages=messages, stream=True)
+# print("got response", response)
+# # response is a generator
 
-for chunk in response:
-    print(chunk)
+# for chunk in response:
+#     print(chunk)
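For context, the script toggled in this hunk exercises streaming through a local LiteLLM proxy via the pre-1.0 openai SDK. A minimal runnable sketch of the same flow, which also accumulates the streamed deltas into the full reply (assuming openai<1.0 is installed, a proxy is listening on http://localhost:5000, and the proxy returns OpenAI-format streaming chunks):

import os
import openai

# Point the pre-1.0 openai SDK at the local LiteLLM proxy used in the diff above.
openai.api_key = os.environ.get("OPENAI_API_KEY", "anything")
openai.api_base = "http://localhost:5000"

messages = [{"role": "user", "content": "write a 1 pg essay in liteLLM"}]

# With stream=True, create() returns a generator of incremental chunks.
response = openai.ChatCompletion.create(
    model="gpt-3.5-turbo", messages=messages, stream=True
)

full_reply = ""
for chunk in response:
    # Each chunk carries an incremental delta; the final chunk may have no "content" key.
    delta = chunk["choices"][0].get("delta", {})
    full_reply += delta.get("content", "")

print(full_reply)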