forked from phoenix/litellm-mirror
(feat) proxy added tests
This commit is contained in:
parent
2bd9b4acd8
commit
e5e82c7474
5 changed files with 116 additions and 311 deletions
39
litellm-proxy/tests/test_bedrock.py
Normal file
39
litellm-proxy/tests/test_bedrock.py
Normal file
|
@ -0,0 +1,39 @@
|
|||
"""Manual smoke test: send a chat completion through a locally running
LiteLLM proxy to an AWS Bedrock Claude model.

Requires the proxy to be listening at http://127.0.0.1:8000. Run directly
with `python test_bedrock.py` and inspect the printed response.
"""
import openai

# Point the OpenAI SDK at the local LiteLLM proxy instead of api.openai.com.
openai.api_base = "http://127.0.0.1:8000"

print("making request")
openai.api_key = "anything"  # this gets passed as a header

response = openai.ChatCompletion.create(
    model="bedrock/anthropic.claude-instant-v1",
    messages=[
        {
            "role": "user",
            "content": "this is a test message, what model / llm are you",
        }
    ],
    # Empty credentials on purpose: the proxy is expected to supply real AWS
    # auth; only the region hint is forwarded — TODO confirm proxy behavior.
    aws_access_key_id="",
    aws_secret_access_key="",
    aws_region_name="us-west-2",
    max_tokens=10,
)

print(response)
|
Loading…
Add table
Add a link
Reference in a new issue