mirror of
https://github.com/BerriAI/litellm.git
synced 2025-04-25 10:44:24 +00:00
Improved O3 + Azure O3 support (#8181)
All checks were successful
Read Version from pyproject.toml / read-version (push) Successful in 13s
* fix: support azure o3 model family for fake streaming workaround (#8162) * fix: support azure o3 model family for fake streaming workaround * refactor: rename helper to is_o_series_model for clarity * update function calling parameters for o3 models (#8178) * refactor(o1_transformation.py): refactor o1 config to be o series config, expand o series model check to o3 ensures max_tokens is correctly translated for o3 * feat(openai/): refactor o1 files to be 'o_series' files expands naming to cover o3 * fix(azure/chat/o1_handler.py): azure openai is an instance of openai - was causing resets * test(test_azure_o_series.py): assert stream faked for azure o3 mini Resolves https://github.com/BerriAI/litellm/pull/8162 * fix(o1_transformation.py): fix o1 transformation logic to handle explicit o1_series routing * docs(azure.md): update doc with `o_series/` model name --------- Co-authored-by: byrongrogan <47910641+byrongrogan@users.noreply.github.com> Co-authored-by: Low Jian Sheng <15527690+lowjiansheng@users.noreply.github.com>
This commit is contained in:
parent
91ed05df29
commit
23f458d2da
14 changed files with 211 additions and 37 deletions
|
@ -3485,7 +3485,7 @@ def get_optional_params( # noqa: PLR0915
|
|||
),
|
||||
)
|
||||
elif custom_llm_provider == "azure":
|
||||
if litellm.AzureOpenAIO1Config().is_o1_model(model=model):
|
||||
if litellm.AzureOpenAIO1Config().is_o_series_model(model=model):
|
||||
optional_params = litellm.AzureOpenAIO1Config().map_openai_params(
|
||||
non_default_params=non_default_params,
|
||||
optional_params=optional_params,
|
||||
|
@ -5918,9 +5918,9 @@ class ProviderConfigManager:
|
|||
"""
|
||||
if (
|
||||
provider == LlmProviders.OPENAI
|
||||
and litellm.openAIO1Config.is_model_o1_reasoning_model(model=model)
|
||||
and litellm.openaiOSeriesConfig.is_model_o_series_model(model=model)
|
||||
):
|
||||
return litellm.OpenAIO1Config()
|
||||
return litellm.openaiOSeriesConfig
|
||||
elif litellm.LlmProviders.DEEPSEEK == provider:
|
||||
return litellm.DeepSeekChatConfig()
|
||||
elif litellm.LlmProviders.GROQ == provider:
|
||||
|
@ -5993,7 +5993,7 @@ class ProviderConfigManager:
|
|||
):
|
||||
return litellm.AI21ChatConfig()
|
||||
elif litellm.LlmProviders.AZURE == provider:
|
||||
if litellm.AzureOpenAIO1Config().is_o1_model(model=model):
|
||||
if litellm.AzureOpenAIO1Config().is_o_series_model(model=model):
|
||||
return litellm.AzureOpenAIO1Config()
|
||||
return litellm.AzureOpenAIConfig()
|
||||
elif litellm.LlmProviders.AZURE_AI == provider:
|
||||
|
|
Loading…
Add table
Add a link
Reference in a new issue