Mirror of https://github.com/BerriAI/litellm.git, synced 2025-04-25 18:54:30 +00:00
refactor: add black formatting
parent b87d630b0a
commit 4905929de3
156 changed files with 19723 additions and 10869 deletions
@@ -2,7 +2,7 @@ import sys
 import os
 import io
 
-sys.path.insert(0, os.path.abspath('../..'))
+sys.path.insert(0, os.path.abspath("../.."))
 
 from litellm import completion
 import litellm
@@ -12,7 +12,6 @@ litellm.set_verbose = True
 import time
 
 
-
 # def test_promptlayer_logging():
 #     try:
 #         # Redirect stdout
@@ -46,14 +45,13 @@ def test_promptlayer_logging_with_metadata():
     old_stdout = sys.stdout
     sys.stdout = new_stdout = io.StringIO()
 
-    response = completion(model="gpt-3.5-turbo",
-                          messages=[{
-                              "role": "user",
-                              "content": "Hi 👋 - i'm ai21"
-                          }],
-                          temperature=0.2,
-                          max_tokens=20,
-                          metadata={"model": "ai21"})
+    response = completion(
+        model="gpt-3.5-turbo",
+        messages=[{"role": "user", "content": "Hi 👋 - i'm ai21"}],
+        temperature=0.2,
+        max_tokens=20,
+        metadata={"model": "ai21"},
+    )
 
     # Restore stdout
     time.sleep(1)
@@ -66,11 +64,10 @@ def test_promptlayer_logging_with_metadata():
     except Exception as e:
         print(e)
 
+
 test_promptlayer_logging_with_metadata()
 
 
-
-
 # def test_chat_openai():
 #     try:
 #         response = completion(model="replicate/llama-2-70b-chat:2c1608e18606fad2812020dc541930f2d0495ce32eee50074220b87300bc16e1",
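The reformatted lines above are black's default output: a call that no longer fits in 88 characters is exploded one argument per line with a trailing comma, single quotes become double quotes, and runs of more than two blank lines at module level are collapsed. Below is a minimal sketch of reproducing the change on one snippet in isolation, assuming the black package is installed and using its documented format_str API; the sample source string is adapted from the removed lines above (de-indented to module level), not copied verbatim from the repository.

import black

# Pre-black version of the completion() call from the hunk above.
src = '''response = completion(model="gpt-3.5-turbo",
                      messages=[{
                          "role": "user",
                          "content": "Hi 👋 - i'm ai21"
                      }],
                      temperature=0.2,
                      max_tokens=20,
                      metadata={"model": "ai21"})
'''

# black.Mode() carries the defaults visible in this diff: 88-character line
# length and double-quote normalization; exploded calls get a trailing comma.
print(black.format_str(src, mode=black.Mode()))

Running black . from the repository root applies the same formatting in place to every tracked Python file, which is presumably how a 156-file commit like this one was produced.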