feat(databricks.py): adds databricks support - completion, async, streaming
Closes https://github.com/BerriAI/litellm/issues/2160
commit d2229dcd21 (parent 54591e3920)
9 changed files with 691 additions and 5 deletions
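A minimal usage sketch of the new provider, assuming the standard `litellm.completion` entry point and a `databricks/` model prefix; the model name and environment variable names below are illustrative, not taken from this commit:

```python
# Hypothetical usage sketch -- model name and env vars are assumptions,
# not confirmed by this commit's diff.
import os
import litellm

os.environ["DATABRICKS_API_KEY"] = "dapi-..."   # assumed credential env var
os.environ["DATABRICKS_API_BASE"] = "https://<workspace>.cloud.databricks.com/serving-endpoints"

# Blocking completion call routed through the databricks provider.
response = litellm.completion(
    model="databricks/databricks-dbrx-instruct",  # provider prefix + endpoint name (illustrative)
    messages=[{"role": "user", "content": "Hello from litellm"}],
)
print(response.choices[0].message.content)

# Streaming: stream=True yields response chunks incrementally.
for chunk in litellm.completion(
    model="databricks/databricks-dbrx-instruct",
    messages=[{"role": "user", "content": "Stream this reply"}],
    stream=True,
):
    print(chunk.choices[0].delta.content or "", end="")
```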
@@ -404,6 +404,7 @@ class OpenAIChatCompletion(BaseLLM):
        self,
        model_response: ModelResponse,
        timeout: Union[float, httpx.Timeout],
        optional_params: dict,
        model: Optional[str] = None,
        messages: Optional[list] = None,
        print_verbose: Optional[Callable] = None,
@@ -411,7 +412,6 @@ class OpenAIChatCompletion(BaseLLM):
        api_base: Optional[str] = None,
        acompletion: bool = False,
        logging_obj=None,
        optional_params=None,
        litellm_params=None,
        logger_fn=None,
        headers: Optional[dict] = None,
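The commit message also calls out async and streaming support. A hedged sketch of the async path, assuming `litellm.acompletion` mirrors the sync `completion` signature and returns an async iterator when `stream=True`:

```python
# Hedged async sketch -- the model name is illustrative, and the behavior of
# acompletion(stream=True) is assumed to match litellm's usual async streaming.
import asyncio
import litellm

async def main():
    # Non-streaming async call.
    response = await litellm.acompletion(
        model="databricks/databricks-dbrx-instruct",  # illustrative model name
        messages=[{"role": "user", "content": "Hello, async Databricks"}],
    )
    print(response.choices[0].message.content)

    # Async streaming: iterate chunks as they arrive.
    stream = await litellm.acompletion(
        model="databricks/databricks-dbrx-instruct",
        messages=[{"role": "user", "content": "Stream this asynchronously"}],
        stream=True,
    )
    async for chunk in stream:
        print(chunk.choices[0].delta.content or "", end="")

asyncio.run(main())
```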