LiteLLM Minor Fixes & Improvements (01/10/2025) - p1 (#7670)

* test(test_get_model_info.py): add unit test confirming router deployment updates global 'get_model_info'

* fix(get_supported_openai_params.py): fix custom llm provider 'get_supported_openai_params'

Fixes https://github.com/BerriAI/litellm/issues/7668

* docs(azure.md): clarify how azure ad token refresh on proxy works

Closes https://github.com/BerriAI/litellm/issues/7665
This commit is contained in:
Krish Dholakia 2025-01-10 17:49:05 -08:00 committed by GitHub
parent 8576ca8ccb
commit a3e65c9bcb
No known key found for this signature in database
GPG key ID: B5690EEEBB952194
6 changed files with 112 additions and 5 deletions

View file

@@ -397,3 +397,58 @@ async def test_image_generation_async_additional_params():
mock_client.call_args.kwargs["optional_params"] == {
"my_custom_param": "my-custom-param"
}
def test_get_supported_openai_params():
    """A registered custom provider's own `get_supported_openai_params` is used.

    Registers a handler under the provider name ``my-custom-llm`` and checks
    that the global ``litellm.get_supported_openai_params`` resolves params
    through it instead of returning nothing.
    """

    class MyCustomLLM(CustomLLM):
        # This is what `get_supported_openai_params` should be returning:
        def get_supported_openai_params(self, model: str) -> list[str]:
            return [
                "tools",
                "tool_choice",
                "temperature",
                "top_p",
                "top_k",
                "min_p",
                "typical_p",
                "stop",
                "seed",
                "response_format",
                "max_tokens",
                "presence_penalty",
                "frequency_penalty",
                "repeat_penalty",
                "tfs_z",
                "mirostat_mode",
                "mirostat_tau",
                "mirostat_eta",
                "logit_bias",
            ]

        def completion(self, *args, **kwargs) -> litellm.ModelResponse:
            # Delegate to a mocked OpenAI-style call — no network traffic.
            return litellm.completion(
                model="gpt-3.5-turbo",
                messages=[{"role": "user", "content": "Hello world"}],
                mock_response="Hi!",
            )  # type: ignore

    handler = MyCustomLLM()
    litellm.custom_provider_map = [  # 👈 KEY STEP - REGISTER HANDLER
        {"provider": "my-custom-llm", "custom_handler": handler}
    ]

    # Sanity check: the custom handler serves completions for its provider.
    resp = completion(
        model="my-custom-llm/my-fake-model",
        messages=[{"role": "user", "content": "Hello world!"}],
    )
    assert resp.choices[0].message.content == "Hi!"

    # Get supported openai params
    from litellm import get_supported_openai_params

    supported = get_supported_openai_params(model="my-custom-llm/my-fake-model")
    assert supported is not None