fix(main.py): fix passing openrouter specific params (#8184)

* fix(main.py): fix passing openrouter specific params

Fixes https://github.com/BerriAI/litellm/issues/8130

* test(test_get_model_info.py): add check for region name w/ cris model

Resolves https://github.com/BerriAI/litellm/issues/8115
This commit is contained in:
Krish Dholakia 2025-02-02 22:23:14 -08:00 committed by GitHub
parent 2b3a93590c
commit 905aee57b1
3 changed files with 39 additions and 1 deletion

View file

@@ -2206,7 +2206,7 @@ def completion( # type: ignore # noqa: PLR0915
data = {"model": model, "messages": messages, **optional_params}
## COMPLETION CALL
- response = openai_chat_completions.completion(
+ response = openai_like_chat_completion.completion(
model=model,
messages=messages,
headers=headers,
@@ -2221,6 +2221,8 @@ def completion( # type: ignore # noqa: PLR0915
acompletion=acompletion,
timeout=timeout, # type: ignore
custom_llm_provider="openrouter",
+ custom_prompt_dict=custom_prompt_dict,
+ encoding=encoding,
)
## LOGGING
logging.post_call(

View file

@@ -2605,6 +2605,21 @@ def test_completion_openrouter1():
pytest.fail(f"Error occurred: {e}")
def test_completion_openrouter_reasoning_effort():
    """Smoke-test an OpenRouter DeepSeek-R1 call with ``include_reasoning``.

    Turns on verbose logging, sends a tiny (max_tokens=5) request through the
    OpenRouter provider path, and converts any raised exception into an
    explicit test failure. NOTE(review): despite the "reasoning_effort" name,
    the call exercises the ``include_reasoning`` flag — confirm intent.
    """
    try:
        litellm.set_verbose = True
        resp = completion(
            model="openrouter/deepseek/deepseek-r1",
            messages=messages,
            include_reasoning=True,
            max_tokens=5,
        )
        # Print the response so verbose runs show what came back.
        print(resp)
    except Exception as e:
        pytest.fail(f"Error occurred: {e}")
# test_completion_openrouter1()

View file

@@ -376,3 +376,24 @@ def test_get_model_info_huggingface_models(monkeypatch):
providers=["huggingface"],
**info,
)
@pytest.mark.parametrize(
    "model, provider",
    [
        # Region-prefixed CRIS model, provider inferred from the model string.
        ("bedrock/us-east-2/us.anthropic.claude-3-haiku-20240307-v1:0", None),
        # Same model with the provider passed explicitly.
        ("bedrock/us-east-2/us.anthropic.claude-3-haiku-20240307-v1:0", "bedrock"),
    ],
)
def test_get_model_info_cost_calculator_bedrock_region_cris_stripped(model, provider):
    """Verify the region prefix is stripped from cross-region (CRIS) bedrock models.

    The model-info lookup key should be the bare CRIS model id, and the
    provider should resolve to ``bedrock`` in both parametrized cases.
    Relevant Issue: https://github.com/BerriAI/litellm/issues/8115
    """
    model_info = get_model_info(model=model, custom_llm_provider=provider)
    print("info", model_info)
    assert model_info["key"] == "us.anthropic.claude-3-haiku-20240307-v1:0"
    assert model_info["litellm_provider"] == "bedrock"