mirror of
https://github.com/BerriAI/litellm.git
synced 2025-04-26 03:04:13 +00:00
feat(databricks/chat): support structured outputs on databricks
Closes https://github.com/BerriAI/litellm/pull/6978 — handles content as a list for dbrx, and handles streaming with response_format for dbrx.
This commit is contained in:
parent
12aea45447
commit
0caf804f4c
18 changed files with 538 additions and 193 deletions
50
litellm/llms/databricks/embed/handler.py
Normal file
50
litellm/llms/databricks/embed/handler.py
Normal file
|
@ -0,0 +1,50 @@
|
|||
"""
|
||||
Calling logic for Databricks embeddings
|
||||
"""
|
||||
|
||||
from typing import Optional
|
||||
|
||||
import litellm
|
||||
from litellm.utils import EmbeddingResponse
|
||||
|
||||
from ...openai_like.embedding.handler import OpenAILikeEmbeddingHandler
|
||||
from ..common_utils import DatabricksBase
|
||||
|
||||
|
||||
class DatabricksEmbeddingHandler(OpenAILikeEmbeddingHandler, DatabricksBase):
    """Databricks embeddings handler.

    Resolves the Databricks endpoint URL and auth headers, then delegates
    the actual embedding request to the OpenAI-compatible handler.
    """

    def embedding(
        self,
        model: str,
        input: list,
        timeout: float,
        logging_obj,
        api_key: Optional[str],
        api_base: Optional[str],
        optional_params: dict,
        model_response: Optional[EmbeddingResponse] = None,
        client=None,
        aembedding=None,
        custom_endpoint: Optional[bool] = None,
        headers: Optional[dict] = None,
    ) -> EmbeddingResponse:
        """Call the Databricks embeddings endpoint.

        Validates/derives ``api_base`` and request ``headers`` for
        Databricks, then forwards the call to
        :meth:`OpenAILikeEmbeddingHandler.embedding`.

        Args:
            model: Databricks model/endpoint name.
            input: Items to embed (presumably a list of strings; verify
                against callers).
            timeout: Request timeout in seconds.
            logging_obj: LiteLLM logging object passed through to the
                OpenAI-like handler.
            api_key: Databricks API key; may be ``None`` if resolvable by
                ``databricks_validate_environment``.
            api_base: Databricks base URL; may be ``None`` if resolvable by
                ``databricks_validate_environment``.
            optional_params: Provider-specific optional parameters.
            model_response: Optional pre-allocated response object to
                populate.
            client: Optional HTTP client to reuse.
            aembedding: Truthy when the async code path is requested.
            custom_endpoint: Passed to environment validation; NOTE(review)
                looks like it signals ``api_base`` is already a full
                endpoint URL — confirm in ``DatabricksBase``.
            headers: Optional extra request headers.

        Returns:
            The populated ``EmbeddingResponse``.
        """
        # Resolve the final endpoint URL and auth headers for Databricks
        # before handing off to the generic OpenAI-like implementation.
        api_base, headers = self.databricks_validate_environment(
            api_base=api_base,
            api_key=api_key,
            endpoint_type="embeddings",
            custom_endpoint=custom_endpoint,
            headers=headers,
        )
        # custom_endpoint=True: api_base has just been fully resolved above,
        # so the parent handler should use it as-is rather than re-deriving.
        return super().embedding(
            model=model,
            input=input,
            timeout=timeout,
            logging_obj=logging_obj,
            api_key=api_key,
            api_base=api_base,
            optional_params=optional_params,
            model_response=model_response,
            client=client,
            aembedding=aembedding,
            custom_endpoint=True,
            headers=headers,
        )
|
Loading…
Add table
Add a link
Reference in a new issue