mirror of
https://github.com/BerriAI/litellm.git
synced 2025-04-26 11:14:04 +00:00
fix(main.py): fix passing openrouter specific params (#8184)
* fix(main.py): fix passing openrouter-specific params. Fixes https://github.com/BerriAI/litellm/issues/8130
* test(test_get_model_info.py): add check for region name with a CRIS model. Resolves https://github.com/BerriAI/litellm/issues/8115
This commit is contained in:
parent
2b3a93590c
commit
905aee57b1
3 changed files with 39 additions and 1 deletions
|
@ -2206,7 +2206,7 @@ def completion( # type: ignore # noqa: PLR0915
|
|||
data = {"model": model, "messages": messages, **optional_params}
|
||||
|
||||
## COMPLETION CALL
|
||||
response = openai_chat_completions.completion(
|
||||
response = openai_like_chat_completion.completion(
|
||||
model=model,
|
||||
messages=messages,
|
||||
headers=headers,
|
||||
|
@ -2221,6 +2221,8 @@ def completion( # type: ignore # noqa: PLR0915
|
|||
acompletion=acompletion,
|
||||
timeout=timeout, # type: ignore
|
||||
custom_llm_provider="openrouter",
|
||||
custom_prompt_dict=custom_prompt_dict,
|
||||
encoding=encoding,
|
||||
)
|
||||
## LOGGING
|
||||
logging.post_call(
|
||||
|
|
|
@ -2605,6 +2605,21 @@ def test_completion_openrouter1():
|
|||
pytest.fail(f"Error occurred: {e}")
|
||||
|
||||
|
||||
def test_completion_openrouter_reasoning_effort():
    """Smoke test: an openrouter deepseek-r1 call accepts the
    provider-specific ``include_reasoning`` flag without raising.

    Any exception from the call is reported as a test failure.
    """
    try:
        litellm.set_verbose = True
        # Keep the request tiny: 5 tokens is enough to prove the
        # openrouter-specific param is forwarded rather than rejected.
        request_kwargs = {
            "model": "openrouter/deepseek/deepseek-r1",
            "messages": messages,
            "include_reasoning": True,
            "max_tokens": 5,
        }
        response = completion(**request_kwargs)
        # A non-exception response is a pass; print it for debugging.
        print(response)
    except Exception as e:
        pytest.fail(f"Error occurred: {e}")
|
||||
|
||||
|
||||
# test_completion_openrouter1()
|
||||
|
||||
|
||||
|
|
|
@ -376,3 +376,24 @@ def test_get_model_info_huggingface_models(monkeypatch):
|
|||
providers=["huggingface"],
|
||||
**info,
|
||||
)
|
||||
|
||||
|
||||
@pytest.mark.parametrize(
    "model, provider",
    [
        ("bedrock/us-east-2/us.anthropic.claude-3-haiku-20240307-v1:0", None),
        (
            "bedrock/us-east-2/us.anthropic.claude-3-haiku-20240307-v1:0",
            "bedrock",
        ),
    ],
)
def test_get_model_info_cost_calculator_bedrock_region_cris_stripped(model, provider):
    """
    ensure cross region inferencing model is used correctly

    Relevant Issue: https://github.com/BerriAI/litellm/issues/8115
    """
    expected_key = "us.anthropic.claude-3-haiku-20240307-v1:0"
    info = get_model_info(model=model, custom_llm_provider=provider)
    print("info", info)
    # The region segment ("us-east-2/") must be stripped from the lookup
    # key, while the cross-region "us." inference prefix is kept.
    assert info["key"] == expected_key
    assert info["litellm_provider"] == "bedrock"
|
||||
|
|
Loading…
Add table
Add a link
Reference in a new issue