Mirror of https://github.com/BerriAI/litellm.git,
synced 2025-04-25 10:44:24 +00:00.
moving proxy server to the top of repo
This commit is contained in:
parent
8543d89418
commit
8ef47524bf
10 changed files with 0 additions and 0 deletions
21
proxy-server/test_proxy_stream.py
Normal file
21
proxy-server/test_proxy_stream.py
Normal file
|
@ -0,0 +1,21 @@
# NOTE: this entire script is intentionally commented out — it is a manual
# smoke test for streaming through the local liteLLM proxy. To run it,
# uncomment the lines below and set a real OPENAI_API_KEY.

# import openai
# import os

# os.environ["OPENAI_API_KEY"] = ""

# openai.api_key = os.environ["OPENAI_API_KEY"]
# openai.api_base ="http://localhost:5000"

# messages = [
#     {
#         "role": "user",
#         "content": "write a 1 pg essay in liteLLM"
#     }
# ]

# response = openai.ChatCompletion.create(model="gpt-3.5-turbo", messages=messages, stream=True)
# print("got response", response)
# # response is a generator

# for chunk in response:
#     print(chunk)
Loading…
Add table
Add a link
Reference in a new issue