Mirror of https://github.com/BerriAI/litellm.git (synced 2025-04-25 18:54:30 +00:00)
(Feat) Add StructuredOutputs support for Fireworks.AI (#7085)
* fix model cost map fireworks ai: "supports_response_schema": true
* fix supports_response_schema
* fix map openai params fireworks ai
* test_map_response_format
parent: c33cebbed0
commit: aeb561213f
5 changed files with 88 additions and 2 deletions
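As context for what this change enables end to end, below is a hedged sketch of requesting a JSON-schema-constrained response from a Fireworks AI model through litellm.completion. The model path and schema are illustrative assumptions, not taken from this commit, and it assumes a Fireworks AI API key is configured in the environment.

```python
import litellm

# Sketch only: assumes FIREWORKS_AI_API_KEY is set in the environment and
# that the chosen model path exists on Fireworks AI.
response = litellm.completion(
    model="fireworks_ai/accounts/fireworks/models/llama-v3p1-70b-instruct",  # illustrative model path
    messages=[{"role": "user", "content": "Return France and its capital as JSON."}],
    # OpenAI-style structured-output parameter; this commit is about mapping
    # response_format / response schema support for the fireworks_ai provider.
    response_format={
        "type": "json_schema",
        "json_schema": {
            "name": "capital_answer",
            "schema": {
                "type": "object",
                "properties": {
                    "country": {"type": "string"},
                    "capital": {"type": "string"},
                },
                "required": ["country", "capital"],
            },
        },
    },
)
print(response.choices[0].message.content)
```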
@@ -1763,9 +1763,14 @@ def supports_response_schema(model: str, custom_llm_provider: Optional[str]) ->
         model=model, custom_llm_provider=custom_llm_provider
     )
 
-    if custom_llm_provider == "predibase":  # predibase supports this globally
-        return True
+    # providers that globally support response schema
+    PROVIDERS_GLOBALLY_SUPPORT_RESPONSE_SCHEMA = [
+        litellm.LlmProviders.PREDIBASE,
+        litellm.LlmProviders.FIREWORKS_AI,
+    ]
+
+    if custom_llm_provider in PROVIDERS_GLOBALLY_SUPPORT_RESPONSE_SCHEMA:
+        return True
     try:
         ## GET MODEL INFO
         model_info = litellm.get_model_info(
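For readers exercising the change locally, here is a minimal sketch of calling the modified helper with the signature shown in the hunk header. The model path is a placeholder, and the top-level export of supports_response_schema is assumed to match current litellm releases.

```python
import litellm

# With FIREWORKS_AI in PROVIDERS_GLOBALLY_SUPPORT_RESPONSE_SCHEMA, the check
# short-circuits to True for any Fireworks AI model.
supported = litellm.supports_response_schema(
    model="accounts/fireworks/models/llama-v3p1-70b-instruct",  # placeholder model path
    custom_llm_provider="fireworks_ai",
)
print(supported)  # expected: True after this change
```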