forked from phoenix/litellm-mirror
fix liteLLM proxy
This commit is contained in:
parent ce4ec195a3
commit bdfcff3078
3 changed files with 149 additions and 100 deletions
21  cookbook/proxy-server/test_proxy_stream.py  Normal file
@@ -0,0 +1,21 @@
import openai
import os

os.environ["OPENAI_API_KEY"] = ""

openai.api_key = os.environ["OPENAI_API_KEY"]
openai.api_base = "http://localhost:5000"

messages = [
    {
        "role": "user",
        "content": "write a 1 pg essay in liteLLM"
    }
]

response = openai.ChatCompletion.create(model="gpt-3.5-turbo", messages=messages, stream=True)
print("got response", response)
# response is a generator

for chunk in response:
    print(chunk)
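Because the test only prints raw chunks, it can help to see how the stream is reassembled into a full reply. Each chunk should follow the OpenAI ChatCompletion streaming delta format, since the proxy is consumed through the openai SDK. Below is a minimal sketch of accumulating the deltas, assuming the same pre-1.0 openai SDK and localhost:5000 proxy as the test above; the accumulation loop is illustrative and not part of this commit:

import os
import openai

# same local liteLLM proxy as test_proxy_stream.py (assumed reachable)
openai.api_key = os.environ.get("OPENAI_API_KEY", "")
openai.api_base = "http://localhost:5000"

response = openai.ChatCompletion.create(
    model="gpt-3.5-turbo",
    messages=[{"role": "user", "content": "write a 1 pg essay in liteLLM"}],
    stream=True,
)

# each chunk carries choices[0].delta with an optional "content" piece;
# concatenating the pieces rebuilds the full completion text
full_text = ""
for chunk in response:
    delta = chunk["choices"][0].get("delta", {})
    full_text += delta.get("content", "")
print(full_text)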