Mirror of https://github.com/BerriAI/litellm.git, synced 2025-04-26 03:04:13 +00:00
LiteLLM Minor Fixes & Improvements (11/19/2024) (#6820)
* fix(anthropic/chat/transformation.py): send the pydantic model's JSON schema as anthropic's json_schema value, fixing passing a pydantic obj to anthropic (Fixes https://github.com/BerriAI/litellm/issues/6766) — see the sketch below
* feat: add timestamp_granularities parameter to the transcription API (#6457)
* add param to the local test
* fix(databricks/chat.py): handle the optional max_retries param for openai-like calls; fixes calling finetuned vertex ai models via the databricks route
* build(ui/): add team admins via the proxy UI
* fix: fix linting error
* test: fix test
* docs(vertex.md): refactor docs
* test: handle overloaded anthropic model error
* test: remove duplicate test
* test: fix test
* test: update test to handle model overloaded error

Co-authored-by: Show <35062952+BrunooShow@users.noreply.github.com>
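For context on the anthropic fix, here is a minimal sketch of the call it targets: passing a pydantic class as response_format to an anthropic model through litellm, whose schema the transformation now forwards as anthropic's json_schema value instead of failing on a non-dict object. The model name and the CalendarEvent fields are illustrative, not taken from this commit.

# Sketch only: assumes ANTHROPIC_API_KEY is set; model name is illustrative.
from pydantic import BaseModel

import litellm


class CalendarEvent(BaseModel):
    name: str
    date: str
    participants: list[str]


# The pydantic class is passed directly as response_format; litellm derives
# its JSON schema and hands it to the anthropic transformation.
response = litellm.completion(
    model="anthropic/claude-3-5-sonnet-20240620",
    messages=[{"role": "user", "content": "Alice and Bob meet on Friday."}],
    response_format=CalendarEvent,
)
print(response.choices[0].message.content)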
This commit is contained in:
Parent: 7a060fcc22
Commit: 777ed61d0b
15 changed files with 200 additions and 193 deletions
@@ -4728,6 +4728,7 @@ def transcription(
     response_format: Optional[
         Literal["json", "text", "srt", "verbose_json", "vtt"]
     ] = None,
+    timestamp_granularities: Optional[List[Literal["word", "segment"]]] = None,
     temperature: Optional[int] = None,  # openai defaults this to 0
     ## LITELLM PARAMS ##
     user: Optional[str] = None,
@@ -4777,6 +4778,7 @@ def transcription(
             language=language,
             prompt=prompt,
             response_format=response_format,
+            timestamp_granularities=timestamp_granularities,
             temperature=temperature,
             custom_llm_provider=custom_llm_provider,
             drop_params=drop_params,
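A usage sketch for the new parameter, assuming an OpenAI Whisper deployment: the file name and model are placeholders, and response_format="verbose_json" is set because OpenAI requires it when requesting word-level timestamps. The words/segments attributes follow the OpenAI verbose_json response shape.

import litellm

with open("sample.wav", "rb") as audio_file:
    transcript = litellm.transcription(
        model="whisper-1",
        file=audio_file,
        response_format="verbose_json",
        timestamp_granularities=["word", "segment"],
    )

print(transcript.words)      # word-level timestamps
print(transcript.segments)   # segment-level timestamps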