build(openai_proxy/main.py): add support for routing between multiple azure deployments

This commit is contained in:
Krrish Dholakia 2023-10-25 11:15:53 -07:00
parent f208a1231b
commit b9a4bfc054
15 changed files with 159 additions and 1 deletions

View file

@@ -0,0 +1,39 @@
"""Manual smoke test: send one chat completion through a local proxy.

Points the openai SDK at a proxy listening on localhost:8000 and issues a
single Bedrock (Claude Instant) request. Run by hand; not part of any suite.
"""
import openai

# Route all SDK traffic through the local proxy instead of the default host.
openai.api_base = "http://0.0.0.0:8000"
print("making request")
# The proxy only forwards this value as a header, so any string works.
openai.api_key = "anything"

response = openai.ChatCompletion.create(
    model="bedrock/anthropic.claude-instant-v1",
    messages=[
        {"role": "user", "content": "this is a test message, what model / llm are you"},
    ],
    # Bedrock credentials are passed through to the proxy; intentionally blank here.
    aws_access_key_id="",
    aws_secret_access_key="",
    aws_region_name="us-west-2",
    max_tokens=10,
)
print(response)

# Streaming variant against gpt-3.5-turbo, kept for reference:
# response = openai.ChatCompletion.create(
#     model="gpt-3.5-turbo",
#     messages=[
#         {"role": "user", "content": "this is a test message, what model / llm are you"},
#     ],
#     max_tokens=10,
#     stream=True,
# )
# for chunk in response:
#     print(chunk)