forked from phoenix/litellm-mirror
fix supports vision
This commit is contained in:
parent 6e86e4291a
commit 341f88d191
3 changed files with 10 additions and 5 deletions
@@ -4,6 +4,9 @@ model_list:
       model: openai/fake
       api_key: fake-key
       api_base: https://exampleopenaiendpoint-production.up.railway.app/
+  - model_name: gemini-flash
+    litellm_params:
+      model: gemini/gemini-1.5-flash
 
 general_settings:
   master_key: sk-1234
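The config change above registers a Gemini deployment on the proxy under the public name gemini-flash. Below is a minimal sketch of how a client might exercise it, assuming the proxy is running locally on its default port 4000 with the master key from this config; the base URL and image URL are illustrative assumptions, not part of the commit.

```python
# Sketch only: the proxy URL, port, and image URL are assumptions.
# The litellm proxy speaks the OpenAI chat completions API, so the
# standard openai client can be pointed at it directly.
from openai import OpenAI

client = OpenAI(base_url="http://localhost:4000", api_key="sk-1234")

response = client.chat.completions.create(
    model="gemini-flash",  # matches model_name in the config above
    messages=[
        {
            "role": "user",
            "content": [
                {"type": "text", "text": "What is in this image?"},
                {
                    "type": "image_url",
                    "image_url": {"url": "https://example.com/cat.png"},
                },
            ],
        }
    ],
)
print(response.choices[0].message.content)
```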
@@ -73,6 +73,7 @@ class ModelInfo(TypedDict, total=False):
     supported_openai_params: Required[Optional[List[str]]]
     supports_system_messages: Optional[bool]
     supports_response_schema: Optional[bool]
+    supports_vision: Optional[bool]
 
 
 class GenericStreamingChunk(TypedDict):
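Because ModelInfo is a TypedDict declared with total=False, the new supports_vision key is optional and may simply be absent from a given model's entry. The sketch below (the dict literal is illustrative, not taken from the commit) shows why callers read it with .get rather than indexing.

```python
# Sketch: optional TypedDict keys may be absent, so .get() avoids KeyError.
from typing import List, Optional
from typing_extensions import Required, TypedDict


class ModelInfo(TypedDict, total=False):
    supported_openai_params: Required[Optional[List[str]]]
    supports_system_messages: Optional[bool]
    supports_response_schema: Optional[bool]
    supports_vision: Optional[bool]


info: ModelInfo = {"supported_openai_params": ["temperature"]}
print(info.get("supports_vision"))      # None -- the key was never set
print(info["supported_openai_params"])  # Required key, safe to index
```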
@@ -4829,6 +4829,7 @@ def get_model_info(model: str, custom_llm_provider: Optional[str] = None) -> Mod
             supports_response_schema=_model_info.get(
                 "supports_response_schema", None
             ),
+            supports_vision=_model_info.get("supports_vision", None),
         )
     except Exception:
         raise Exception(
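With the field wired into get_model_info, callers can see vision support in the returned metadata. A minimal sketch, assuming the public litellm.get_model_info helper and using gemini/gemini-1.5-flash purely as an example model:

```python
# Sketch: reading the new field from the model metadata. The exact value
# depends on litellm's model cost map; treat a missing key as "unknown".
import litellm

info = litellm.get_model_info(model="gemini/gemini-1.5-flash")
if info.get("supports_vision"):
    print("model accepts image inputs")
else:
    print("no vision support recorded for this model")
```

litellm also exposes a supports_vision(model=...) convenience helper built on the same metadata, which may be simpler when only a boolean check is needed.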