fix liteLLM proxy

This commit is contained in:
ishaan-jaff 2023-08-16 18:30:08 -07:00
parent ce4ec195a3
commit bdfcff3078
3 changed files with 149 additions and 100 deletions

View file

@ -0,0 +1,21 @@
"""Smoke-test script for a locally running liteLLM proxy.

Points the openai SDK (pre-1.0 API surface) at http://localhost:5000 and
sends one streaming ChatCompletion request, printing each chunk as it
arrives.  Requires the proxy to be up before running.
"""
import openai
import os

# Fall back to an empty placeholder only when no key is already exported.
# The local proxy does not validate the key, but the original code
# unconditionally overwrote OPENAI_API_KEY with "", clobbering any real
# key the user had set in their shell.
os.environ.setdefault("OPENAI_API_KEY", "")
openai.api_key = os.environ["OPENAI_API_KEY"]
# Route all SDK traffic through the local liteLLM proxy instead of
# api.openai.com.
openai.api_base = "http://localhost:5000"

messages = [
    {
        "role": "user",
        "content": "write a 1 pg essay in liteLLM"
    }
]

# stream=True makes the SDK return a generator that yields incremental
# delta chunks rather than one finished completion object.
response = openai.ChatCompletion.create(
    model="gpt-3.5-turbo", messages=messages, stream=True
)
print("got response", response)
# response is a generator
for chunk in response:
    print(chunk)