Merge branch 'main' into litellm_bedrock_command_r_support

This commit is contained in:
Krish Dholakia 2024-05-11 21:24:42 -07:00 committed by GitHub
commit 1d651c6049
No known key found for this signature in database
GPG key ID: B5690EEEBB952194
82 changed files with 3661 additions and 605 deletions

View file

@ -1305,7 +1305,7 @@ def test_hf_classifier_task():
########################### End of Hugging Face Tests ##############################################
# def test_completion_hf_api():
# # failing on circle ci commenting out
# # failing on circle-ci commenting out
# try:
# user_message = "write some code to find the sum of two numbers"
# messages = [{ "content": user_message,"role": "user"}]
@ -3300,6 +3300,25 @@ def test_completion_watsonx():
pytest.fail(f"Error occurred: {e}")
def test_completion_stream_watsonx():
    """Smoke-test synchronous streaming completion against the watsonx granite chat model.

    Iterates the streamed chunks to force the full response to be consumed;
    tolerates transient provider API errors so CI is not flaky.
    """
    litellm.set_verbose = True
    model_name = "watsonx/ibm/granite-13b-chat-v2"
    try:
        chunks = completion(
            model=model_name,
            messages=messages,
            stop=["stop"],
            max_tokens=20,
            stream=True,
        )
        for part in chunks:
            print(part)
    except litellm.APIError:
        # Provider-side API errors are expected occasionally; don't fail the run.
        pass
    except Exception as e:
        pytest.fail(f"Error occurred: {e}")
@pytest.mark.parametrize(
"provider, model, project, region_name, token",
[
@ -3364,6 +3383,26 @@ async def test_acompletion_watsonx():
pytest.fail(f"Error occurred: {e}")
@pytest.mark.asyncio
async def test_acompletion_stream_watsonx():
    """Smoke-test async streaming completion against the watsonx granite chat model.

    Consumes the async chunk stream to exercise the full response path.
    Mirrors test_completion_stream_watsonx: transient provider API errors
    are tolerated so CI does not flake on watsonx outages.
    """
    litellm.set_verbose = True
    model_name = "watsonx/ibm/granite-13b-chat-v2"
    print("testing watsonx")
    try:
        response = await litellm.acompletion(
            model=model_name,
            messages=messages,
            temperature=0.2,
            max_tokens=80,
            stream=True,
        )
        # Add any assertions here to check the response
        async for chunk in response:
            print(chunk)
    except litellm.APIError:
        # Consistent with the sync streaming test: provider API errors
        # are environmental, not a regression — skip silently.
        pass
    except Exception as e:
        pytest.fail(f"Error occurred: {e}")
# test_completion_palm_stream()
# test_completion_deep_infra()