mirror of
https://github.com/BerriAI/litellm.git
synced 2025-04-25 18:54:30 +00:00
(feat) add NVIDIA NIM embeddings (#6032)

* nvidia nim support embedding config
* add nvidia config in init
* nvidia nim embeddings
* docs nvidia nim embeddings
* docs embeddings on nvidia nim
* fix llm translation test
This commit is contained in:
parent
57563b1f3a
commit
2d8b7ca3a6
8 changed files with 238 additions and 9 deletions
|
@ -2552,9 +2552,9 @@ def get_optional_params_image_gen(
|
|||
def get_optional_params_embeddings(
|
||||
# 2 optional params
|
||||
model: str,
|
||||
user=None,
|
||||
encoding_format=None,
|
||||
dimensions=None,
|
||||
user: Optional[str] = None,
|
||||
encoding_format: Optional[str] = None,
|
||||
dimensions: Optional[int] = None,
|
||||
custom_llm_provider="",
|
||||
drop_params: Optional[bool] = None,
|
||||
additional_drop_params: Optional[bool] = None,
|
||||
|
@ -2595,7 +2595,6 @@ def get_optional_params_embeddings(
|
|||
default_params=default_params,
|
||||
additional_drop_params=additional_drop_params,
|
||||
)
|
||||
|
||||
## raise exception if non-default value passed for non-openai/azure embedding calls
|
||||
if custom_llm_provider == "openai":
|
||||
# 'dimensions` is only supported in `text-embedding-3` and later models
|
||||
|
@ -2627,6 +2626,17 @@ def get_optional_params_embeddings(
|
|||
)
|
||||
final_params = {**optional_params, **kwargs}
|
||||
return final_params
|
||||
elif custom_llm_provider == "nvidia_nim":
|
||||
supported_params = get_supported_openai_params(
|
||||
model=model or "",
|
||||
custom_llm_provider="nvidia_nim",
|
||||
request_type="embeddings",
|
||||
)
|
||||
_check_valid_arg(supported_params=supported_params)
|
||||
optional_params = litellm.nvidiaNimEmbeddingConfig.map_openai_params(
|
||||
non_default_params=non_default_params, optional_params={}, kwargs=kwargs
|
||||
)
|
||||
return optional_params
|
||||
elif custom_llm_provider == "vertex_ai":
|
||||
supported_params = get_supported_openai_params(
|
||||
model=model,
|
||||
|
@ -4308,7 +4318,10 @@ def get_supported_openai_params(
|
|||
else:
|
||||
return litellm.FireworksAIConfig().get_supported_openai_params()
|
||||
elif custom_llm_provider == "nvidia_nim":
|
||||
return litellm.NvidiaNimConfig().get_supported_openai_params(model=model)
|
||||
if request_type == "chat_completion":
|
||||
return litellm.nvidiaNimConfig.get_supported_openai_params(model=model)
|
||||
elif request_type == "embeddings":
|
||||
return litellm.nvidiaNimEmbeddingConfig.get_supported_openai_params()
|
||||
elif custom_llm_provider == "cerebras":
|
||||
return litellm.CerebrasConfig().get_supported_openai_params(model=model)
|
||||
elif custom_llm_provider == "ai21_chat":
|
||||
|
|
Loading…
Add table
Add a link
Reference in a new issue