mirror of
https://github.com/BerriAI/litellm.git
synced 2025-04-27 03:34:10 +00:00
test(base_llm_unit_tests.py): add test to ensure drop params is respected (#8224)
* test(base_llm_unit_tests.py): add test to ensure drop params is respected * fix(types/prometheus.py): use typing_extensions for python3.8 compatibility * build: add cherry picked commits
This commit is contained in:
parent
d60d3ee970
commit
c8494abdea
15 changed files with 250 additions and 71 deletions
|
@ -715,3 +715,42 @@ class BaseOSeriesModelsTest(ABC): # test across azure/openai
|
|||
request_body["messages"][0]["role"] == "developer"
|
||||
), "Got={} instead of system".format(request_body["messages"][0]["role"])
|
||||
assert request_body["messages"][0]["content"] == "Be a good bot!"
|
||||
|
||||
def test_completion_o_series_models_temperature(self):
    """
    Verify that `temperature` is dropped from the outgoing request for
    O-series models when drop_params=True is set.
    """
    try:
        from litellm import completion

        client = self.get_client()
        base_args = self.get_base_completion_call_args()

        with patch.object(
            client.chat.completions.with_raw_response, "create"
        ) as mocked_create:
            # The patched client makes the call fail downstream; that is
            # fine — we only care about the kwargs `create` received.
            try:
                completion(
                    **base_args,
                    temperature=0.0,
                    messages=[{"role": "user", "content": "Hello, world!"}],
                    drop_params=True,
                    client=client,
                )
            except Exception as e:
                print(f"Error: {e}")

            mocked_create.assert_called_once()
            sent_kwargs = mocked_create.call_args.kwargs
            print("request_body: ", sent_kwargs)
            # drop_params=True must strip temperature before the request
            # reaches the underlying client.
            assert (
                "temperature" not in sent_kwargs
            ), "temperature should not be in the request body"
    except Exception as e:
        pytest.fail(f"Error occurred: {e}")
|
||||
|
|
Loading…
Add table
Add a link
Reference in a new issue