feat(databricks.py): add embedding model support

Krrish Dholakia 2024-05-23 18:22:03 -07:00
parent d2229dcd21
commit 43353c28b3
7 changed files with 310 additions and 18 deletions

@@ -766,7 +766,13 @@ class EmbeddingResponse(OpenAIObject):
     _hidden_params: dict = {}
 
     def __init__(
-        self, model=None, usage=None, stream=False, response_ms=None, data=None
+        self,
+        model=None,
+        usage=None,
+        stream=False,
+        response_ms=None,
+        data=None,
+        **params,
     ):
         object = "list"
         if response_ms:
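The widened signature adds a **params catch-all, so callers can pass extra keyword arguments without hitting a TypeError. A minimal sketch of the difference; the model name and extra field below are illustrative, not part of the library API:

import litellm

response = litellm.EmbeddingResponse(
    model="databricks/databricks-bge-large-en",
    data=[],
    response_ms=42,
    extra_field="accepted",  # absorbed by the new **params; the old fixed signature raised TypeError here
)
print(response.model)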
@@ -5033,6 +5039,19 @@ def get_optional_params_embeddings(
     default_params = {"user": None, "encoding_format": None, "dimensions": None}
 
+    def _check_valid_arg(supported_params: Optional[list]):
+        if supported_params is None:
+            return
+        unsupported_params = {}
+        for k in non_default_params.keys():
+            if k not in supported_params:
+                unsupported_params[k] = non_default_params[k]
+        if unsupported_params and not litellm.drop_params:
+            raise UnsupportedParamsError(
+                status_code=500,
+                message=f"{custom_llm_provider} does not support parameters: {unsupported_params}, for model={model}. To drop these, set `litellm.drop_params=True` or for proxy:\n\n`litellm_settings:\n drop_params: true`\n",
+            )
+
     non_default_params = {
         k: v
         for k, v in passed_params.items()
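The helper above closes over non_default_params, model, and custom_llm_provider from the enclosing function, and consults the global litellm.drop_params flag. A self-contained sketch of the same rule, with illustrative names that are not part of the library:

from typing import Optional

def check_valid_arg(non_default_params: dict,
                    supported_params: Optional[list],
                    drop_params: bool) -> None:
    # None means no restriction list is registered for the provider: accept everything.
    if supported_params is None:
        return
    unsupported = {k: v for k, v in non_default_params.items()
                   if k not in supported_params}
    # Unsupported params are fatal unless the caller opted in to dropping them.
    if unsupported and not drop_params:
        raise ValueError(f"unsupported parameters: {unsupported}")

check_valid_arg({"dimensions": 512}, ["user"], drop_params=False)  # raises ValueError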
@@ -5058,6 +5077,18 @@ def get_optional_params_embeddings(
             non_default_params.pop(k, None)
         final_params = {**non_default_params, **kwargs}
         return final_params
+    if custom_llm_provider == "databricks":
+        supported_params = get_supported_openai_params(
+            model=model or "",
+            custom_llm_provider="databricks",
+            request_type="embeddings",
+        )
+        _check_valid_arg(supported_params=supported_params)
+        optional_params = litellm.DatabricksEmbeddingConfig().map_openai_params(
+            non_default_params=non_default_params, optional_params={}
+        )
+        final_params = {**optional_params, **kwargs}
+        return final_params
     if custom_llm_provider == "vertex_ai":
         if len(non_default_params.keys()) > 0:
             if litellm.drop_params is True:  # drop the unsupported non-default values
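With this branch in place, Databricks embeddings are reachable through the standard entry point. A hedged usage sketch; the model name and environment variable names follow the litellm provider convention, but verify them against your workspace before relying on this:

import os
import litellm

os.environ["DATABRICKS_API_KEY"] = "dapi-..."  # assumed env var name
os.environ["DATABRICKS_API_BASE"] = "https://<workspace>.cloud.databricks.com/serving-endpoints"  # assumed

response = litellm.embedding(
    model="databricks/databricks-bge-large-en",
    input=["good morning from litellm"],
)
print(response)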
@@ -5844,6 +5875,14 @@ def get_optional_params(
         optional_params = litellm.MistralConfig().map_openai_params(
             non_default_params=non_default_params, optional_params=optional_params
         )
+    elif custom_llm_provider == "databricks":
+        supported_params = get_supported_openai_params(
+            model=model, custom_llm_provider=custom_llm_provider
+        )
+        _check_valid_arg(supported_params=supported_params)
+        optional_params = litellm.DatabricksConfig().map_openai_params(
+            non_default_params=non_default_params, optional_params=optional_params
+        )
     elif custom_llm_provider == "groq":
         supported_params = get_supported_openai_params(
             model=model, custom_llm_provider=custom_llm_provider
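The chat-completion path mirrors the embeddings path: supported params are looked up, validated with _check_valid_arg, and mapped through DatabricksConfig. A sketch with an illustrative model name:

import litellm

response = litellm.completion(
    model="databricks/databricks-dbrx-instruct",
    messages=[{"role": "user", "content": "Say hello in one word."}],
    max_tokens=16,       # validated and mapped via DatabricksConfig.map_openai_params
    temperature=0.2,
)
print(response.choices[0].message.content)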
@@ -6331,7 +6370,11 @@ def get_first_chars_messages(kwargs: dict) -> str:
     return ""
 
 
-def get_supported_openai_params(model: str, custom_llm_provider: str) -> Optional[list]:
+def get_supported_openai_params(
+    model: str,
+    custom_llm_provider: str,
+    request_type: Literal["chat_completion", "embeddings"] = "chat_completion",
+) -> Optional[list]:
     """
     Returns the supported openai params for a given model + provider
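Because request_type defaults to "chat_completion", every existing call site keeps its old behavior; only embedding-aware callers need the new argument. For example (model names are illustrative):

from litellm import get_supported_openai_params

chat_params = get_supported_openai_params(
    model="databricks/databricks-dbrx-instruct",
    custom_llm_provider="databricks",
)  # request_type defaults to "chat_completion"

embedding_params = get_supported_openai_params(
    model="databricks/databricks-bge-large-en",
    custom_llm_provider="databricks",
    request_type="embeddings",
)
print(chat_params, embedding_params)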
@@ -6504,6 +6547,11 @@ def get_supported_openai_params(model: str, custom_llm_provider: str) -> Optional[list]:
             "frequency_penalty",
             "presence_penalty",
         ]
+    elif custom_llm_provider == "databricks":
+        if request_type == "chat_completion":
+            return litellm.DatabricksConfig().get_supported_openai_params()
+        elif request_type == "embeddings":
+            return litellm.DatabricksEmbeddingConfig().get_supported_openai_params()
     elif custom_llm_provider == "palm" or custom_llm_provider == "gemini":
         return ["temperature", "top_p", "stream", "n", "stop", "max_tokens"]
     elif custom_llm_provider == "vertex_ai":