fix(utils.py): mapping stop sequences for palm

Krrish Dholakia 2023-11-01 14:00:45 -07:00
parent a46d6a2dc9
commit f9ff03d5af
2 changed files with 5 additions and 5 deletions


@@ -705,7 +705,7 @@ def test_completion_azure3():
     except Exception as e:
         pytest.fail(f"Error occurred: {e}")
-test_completion_azure3()
+# test_completion_azure3()
 # new azure test for using litellm. vars,
 # use the following vars in this test and make an azure_api_call
@@ -1149,7 +1149,7 @@ def test_completion_with_fallbacks():
     except Exception as e:
         pytest.fail(f"Error occurred: {e}")
-test_completion_with_fallbacks()
+# test_completion_with_fallbacks()
 def test_completion_anyscale_api():
     try:
         # litellm.set_verbose=True
@@ -1329,13 +1329,13 @@ def test_completion_palm():
     # litellm.set_verbose = True
     model_name = "palm/chat-bison"
     try:
-        response = completion(model=model_name, messages=messages)
+        response = completion(model=model_name, messages=messages, stop=["stop"])
         # Add any assertions here to check the response
         print(response)
         print(response.response_ms)
     except Exception as e:
         pytest.fail(f"Error occurred: {e}")
-# test_completion_palm()
+test_completion_palm()
 # test_completion_deep_infra()
 # test_completion_ai21()
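For reference, the same call path outside pytest — a minimal usage sketch, not part of this commit. It assumes a valid PALM_API_KEY is configured and uses an illustrative messages list; the model name and stop word mirror the test above.

import os
from litellm import completion

# Hypothetical standalone usage mirroring test_completion_palm above.
# Assumption: PALM_API_KEY is the env var litellm reads for the palm provider;
# replace the placeholder with a real key before running.
os.environ["PALM_API_KEY"] = "your-api-key"
messages = [{"role": "user", "content": "Hey, how's it going?"}]
response = completion(model="palm/chat-bison", messages=messages, stop=["stop"])
print(response)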


@@ -1421,7 +1421,7 @@ def get_optional_params( # use the openai defaults
         if n:
             optional_params["candidate_count"] = n
         if stop:
-            optional_params["stopSequences"] = stop
+            optional_params["stop_sequences"] = stop
         if max_tokens:
             optional_params["max_output_tokens"] = max_tokens
     elif (
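The utils.py change swaps the camelCase key for the snake_case one, presumably to match the parameter name the PaLM Python client expects. A minimal sketch of the mapping in that branch — the helper name and signature below are illustrative, not litellm's actual get_optional_params:

# Hypothetical helper mirroring the palm branch shown above: OpenAI-style
# kwargs are renamed to the parameter names the PaLM provider expects.
def map_openai_params_for_palm(n=None, stop=None, max_tokens=None):
    optional_params = {}
    if n:
        optional_params["candidate_count"] = n            # OpenAI n -> candidate_count
    if stop:
        optional_params["stop_sequences"] = stop          # snake_case, not stopSequences
    if max_tokens:
        optional_params["max_output_tokens"] = max_tokens # max_tokens -> max_output_tokens
    return optional_params

print(map_openai_params_for_palm(stop=["stop"], max_tokens=256))
# {'stop_sequences': ['stop'], 'max_output_tokens': 256}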