Mirror of https://github.com/BerriAI/litellm.git (synced 2025-04-26 03:04:13 +00:00)
Add OpenAI gpt-4o-transcribe support (#9517)
* refactor: introduce new transformation config for gpt-4o-transcribe models
* refactor: expose new transformation configs for audio transcription
* ci: fix config yml
* feat(openai/transcriptions): support provider config transformation on openai audio transcriptions (allows gpt-4o and whisper audio transformation to work as expected)
* refactor: migrate fireworks ai + deepgram to new transform request pattern
* feat(openai/): working support for gpt-4o-audio-transcribe
* build(model_prices_and_context_window.json): add gpt-4o-transcribe to model cost map
* build(model_prices_and_context_window.json): specify what endpoints are supported for `/audio/transcriptions`
* fix(get_supported_openai_params.py): fix return
* refactor(deepgram/): migrate unit test to deepgram handler
* refactor: cleanup unused imports
* fix(get_supported_openai_params.py): fix linting error
* test: update test
Parent commit: f2df53771c
This commit: d58fe5a9f9
20 changed files with 402 additions and 92 deletions
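For context, the feature this commit adds is used roughly as in the sketch below. The parameter names (`model`, `file`) match the `litellm.transcription` signature visible in the diff, but the file name and environment setup are assumptions; check the litellm docs for the authoritative call shape.

# Usage sketch only: assumes OPENAI_API_KEY is set and speech.mp3 exists locally.
import litellm

with open("speech.mp3", "rb") as audio_file:
    # After this commit, "gpt-4o-transcribe" can be routed through /audio/transcriptions
    # in the same way as "whisper-1".
    response = litellm.transcription(
        model="gpt-4o-transcribe",
        file=audio_file,
    )

print(response.text)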
@@ -5095,6 +5095,12 @@ def transcription(
     response: Optional[
         Union[TranscriptionResponse, Coroutine[Any, Any, TranscriptionResponse]]
     ] = None
+
+    provider_config = ProviderConfigManager.get_provider_audio_transcription_config(
+        model=model,
+        provider=LlmProviders(custom_llm_provider),
+    )
+
     if custom_llm_provider == "azure":
         # azure configs
         api_base = api_base or litellm.api_base or get_secret_str("AZURE_API_BASE")

@@ -5161,12 +5167,15 @@ def transcription(
             max_retries=max_retries,
             api_base=api_base,
             api_key=api_key,
+            provider_config=provider_config,
+            litellm_params=litellm_params_dict,
         )
     elif custom_llm_provider == "deepgram":
         response = base_llm_http_handler.audio_transcriptions(
             model=model,
             audio_file=file,
             optional_params=optional_params,
+            litellm_params=litellm_params_dict,
             model_response=model_response,
             atranscription=atranscription,
             client=(

@@ -5185,6 +5194,7 @@ def transcription(
             api_key=api_key,
             custom_llm_provider="deepgram",
             headers={},
+            provider_config=provider_config,
         )
     if response is None:
         raise ValueError("Unmapped provider passed in. Unable to get the response.")
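To illustrate the dispatch pattern the diff relies on, here is a minimal, self-contained sketch of a provider-keyed transcription config: a base class that transforms OpenAI-style params into a provider request, and a lookup that returns the right config (or None). The class and function names below mirror the idea, not litellm's actual interfaces, so treat them as illustrative assumptions.

# Illustrative sketch only: names and signatures are assumptions, not litellm's real API.
from abc import ABC, abstractmethod
from typing import Any, Dict, Optional


class BaseAudioTranscriptionConfig(ABC):
    """Hypothetical base class: each provider maps OpenAI-style params to its own request body."""

    @abstractmethod
    def transform_audio_transcription_request(
        self, model: str, optional_params: Dict[str, Any]
    ) -> Dict[str, Any]:
        ...


class GPT4oTranscribeConfig(BaseAudioTranscriptionConfig):
    """In this sketch, gpt-4o-transcribe accepts a narrower parameter set than whisper."""

    def transform_audio_transcription_request(
        self, model: str, optional_params: Dict[str, Any]
    ) -> Dict[str, Any]:
        supported = {"language", "prompt", "response_format", "temperature"}
        data = {k: v for k, v in optional_params.items() if k in supported}
        data["model"] = model
        return data


def get_provider_audio_transcription_config(
    model: str, provider: str
) -> Optional[BaseAudioTranscriptionConfig]:
    # Dispatch on provider and model; returning None means "no transformation needed".
    if provider == "openai" and model.startswith("gpt-4o"):
        return GPT4oTranscribeConfig()
    return None


if __name__ == "__main__":
    config = get_provider_audio_transcription_config("gpt-4o-transcribe", "openai")
    if config is not None:
        print(
            config.transform_audio_transcription_request(
                "gpt-4o-transcribe",
                {"temperature": 0.2, "timestamp_granularities": ["word"]},
            )
        )

Returning a single config object up front, then threading it into each handler call (as the diff does with provider_config=provider_config), keeps the per-provider branching in one place instead of scattering parameter-mapping logic across handlers.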