Mirror of https://github.com/BerriAI/litellm.git, synced 2025-04-26 03:04:13 +00:00
feat(custom_logger.py): expose new async_dataset_hook for modifying/rejecting argilla items before logging (#6331)
* feat(custom_logger.py): expose new `async_dataset_hook` for modifying/rejecting argilla items before logging. Gives users more control over what gets logged to argilla for annotation (a hedged usage sketch follows this list).
* feat(google_ai_studio_endpoints.py): add new `/azure/*` pass-through route, enabling pass-through for the azure provider (see the sketch after the commit metadata below).
* feat(utils.py): support checking the ollama `/api/show` endpoint for retrieving ollama model info. Fixes https://github.com/BerriAI/litellm/issues/6322
* fix(user_api_key_auth.py): add `/key/delete` to allowed_ui_routes. Fixes https://github.com/BerriAI/litellm/issues/6236
* fix(user_api_key_auth.py): remove type ignore
* fix(user_api_key_auth.py): route UI vs. API token checks differently. Fixes https://github.com/BerriAI/litellm/issues/6238
* feat(internal_user_endpoints.py): support setting models as a default internal user param. Closes https://github.com/BerriAI/litellm/issues/6239
* fix(user_api_key_auth.py): fix exception string
* fix(user_api_key_auth.py): fix error string
* fix: fix test
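A minimal sketch of how the new hook might be used, assuming it is overridden on a CustomLogger subclass, receives the candidate argilla item plus the standard logging payload, and returns either a (possibly modified) item or None to reject it; the parameter names and the string-based sensitive-data check are illustrative assumptions, not taken from this diff:

    from typing import Optional

    from litellm.integrations.custom_logger import CustomLogger


    class FilteredArgillaLogger(CustomLogger):
        # Assumed contract: return None to skip logging this item to
        # argilla, or return the (optionally modified) item to log it.
        async def async_dataset_hook(
            self, logged_item, standard_logging_payload
        ) -> Optional[dict]:
            if "ssn" in str(logged_item).lower():  # reject sensitive items
                return None
            return logged_item

Registering it would follow the usual callback pattern, e.g. litellm.callbacks = [FilteredArgillaLogger()].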
This commit is contained in:
Parent: 7cc12bd5c6
Commit: 905ebeb924
16 changed files with 422 additions and 153 deletions
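For the new /azure/* pass-through route, a hedged sketch of what a request through a locally running litellm proxy might look like; the URL shape, port, api-version, and proxy key below are illustrative assumptions, not confirmed by this commit:

    import requests

    # Hypothetical: forward an Azure OpenAI chat-completions call through
    # the proxy's /azure/* pass-through route. Deployment path, port,
    # api-version, and the proxy key are placeholders.
    resp = requests.post(
        "http://0.0.0.0:4000/azure/openai/deployments/my-deployment/chat/completions",
        params={"api-version": "2024-02-15-preview"},
        headers={"Authorization": "Bearer sk-1234"},
        json={"messages": [{"role": "user", "content": "Hello"}]},
    )
    print(resp.status_code, resp.json())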
@@ -1821,6 +1821,7 @@ def supports_function_calling(
         model=model, custom_llm_provider=custom_llm_provider
     )

     ## CHECK IF MODEL SUPPORTS FUNCTION CALLING ##
     model_info = litellm.get_model_info(
         model=model, custom_llm_provider=custom_llm_provider
     )
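As context for the hunk above, a small usage sketch; it assumes supports_function_calling accepts an optional custom_llm_provider argument that bypasses provider inference, which the diff suggests but does not show in full:

    import litellm

    # Provider inferred from the model name.
    print(litellm.supports_function_calling(model="gpt-3.5-turbo"))

    # Assumed: an explicit provider skips the get_llm_provider() lookup.
    print(
        litellm.supports_function_calling(
            model="claude-3-5-sonnet-20240620", custom_llm_provider="anthropic"
        )
    )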
@@ -4768,6 +4769,8 @@ def get_model_info( # noqa: PLR0915
             supports_assistant_prefill=None,
             supports_prompt_caching=None,
         )
+    elif custom_llm_provider == "ollama" or custom_llm_provider == "ollama_chat":
+        return litellm.OllamaConfig().get_model_info(model)
     else:
         """
         Check if: (in order of specificity)
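This hunk is the ollama /api/show support noted in the commit message: for ollama/ollama_chat models, get_model_info now delegates to OllamaConfig().get_model_info(model), which can ask a running ollama server about the model instead of requiring an entry in model_prices_and_context_window.json. A usage sketch, assuming a local ollama server with the model already pulled (the printed keys are illustrative):

    import litellm

    # Assumes `ollama serve` is running locally and llama3 has been pulled.
    info = litellm.get_model_info(model="ollama/llama3")
    print(info.get("max_tokens"), info.get("supports_function_calling"))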
@@ -4964,7 +4967,9 @@ def get_model_info( # noqa: PLR0915
                 supports_audio_input=_model_info.get("supports_audio_input", False),
                 supports_audio_output=_model_info.get("supports_audio_output", False),
             )
-    except Exception:
+    except Exception as e:
+        if "OllamaError" in str(e):
+            raise e
         raise Exception(
             "This model isn't mapped yet. model={}, custom_llm_provider={}. Add it here - https://github.com/BerriAI/litellm/blob/main/model_prices_and_context_window.json.".format(
                 model, custom_llm_provider
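A sketch of what the reworked except block buys a caller: ollama lookup failures now surface as-is instead of being swallowed by the generic mapping error. The model name is a placeholder:

    import litellm

    try:
        litellm.get_model_info(model="some-unmapped-model")
    except Exception as e:
        # Unmapped models still raise the generic "isn't mapped yet" error;
        # errors from the ollama /api/show lookup (OllamaError) are re-raised
        # unchanged, so the two failure modes stay distinguishable.
        print(type(e).__name__, e)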