Mirror of https://github.com/BerriAI/litellm.git (synced 2025-04-26 19:24:27 +00:00)
QA: ensure all bedrock regional models have same supported_ as base + Anthropic nested pydantic object support (#7844)
* build: ensure all regional bedrock models have same supported values as base bedrock model (prevents drift)
* test(base_llm_unit_tests.py): add testing for nested pydantic objects
* fix(test_utils.py): add test_get_potential_model_names
* fix(anthropic/chat/transformation.py): support nested pydantic objects

Fixes https://github.com/BerriAI/litellm/issues/7755
parent 37ed49fe72
commit 6eb2346fd6
12 changed files with 259 additions and 62 deletions
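For context on the last bullet of the commit message: issue #7755 concerns passing a nested Pydantic model as `response_format` when calling an Anthropic model. The sketch below shows the kind of call this fix is meant to support; the model name and the `Step`/`MathSolution` classes are illustrative assumptions, not taken from the diff.

```python
# Illustrative sketch of nested pydantic `response_format` usage (issue #7755).
# The model name and the Step/MathSolution classes are assumptions, not from the diff.
from typing import List

from pydantic import BaseModel

import litellm


class Step(BaseModel):
    explanation: str
    output: str


class MathSolution(BaseModel):
    # Nested pydantic object: a list of Step models inside the top-level model.
    steps: List[Step]
    final_answer: str


response = litellm.completion(
    model="anthropic/claude-3-5-sonnet-20240620",
    messages=[{"role": "user", "content": "Solve 2x + 3 = 11 and show your steps."}],
    response_format=MathSolution,  # nested models previously failed on Anthropic
)
print(response.choices[0].message.content)
```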
@@ -4,13 +4,25 @@ Common base config for all LLM providers
 import types
 from abc import ABC, abstractmethod
-from typing import TYPE_CHECKING, Any, AsyncIterator, Iterator, List, Optional, Union
+from typing import (
+    TYPE_CHECKING,
+    Any,
+    AsyncIterator,
+    Iterator,
+    List,
+    Optional,
+    Type,
+    Union,
+)
 
 import httpx
+from pydantic import BaseModel
 
 from litellm.types.llms.openai import AllMessageValues
 from litellm.types.utils import ModelResponse
 
+from ..base_utils import type_to_response_format_param
+
 if TYPE_CHECKING:
     from litellm.litellm_core_utils.litellm_logging import Logging as _LiteLLMLoggingObj
 
 
@@ -71,6 +83,11 @@ class BaseConfig(ABC):
             and v is not None
         }
 
+    def get_json_schema_from_pydantic_object(
+        self, response_format: Optional[Union[Type[BaseModel], dict]]
+    ) -> Optional[dict]:
+        return type_to_response_format_param(response_format=response_format)
+
     def should_fake_stream(
         self,
         model: Optional[str],
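The new `get_json_schema_from_pydantic_object` hook delegates to `type_to_response_format_param` so provider configs can turn a (possibly nested) Pydantic class into a response-format dict. A minimal sketch of exercising it directly follows; it assumes `litellm.AnthropicConfig` subclasses `BaseConfig` and therefore inherits the hook, and the exact shape of the returned dict is not guaranteed by this diff.

```python
# Sketch only: calling the new BaseConfig hook through a concrete provider config.
# Assumes litellm.AnthropicConfig inherits BaseConfig; the LineItem/Order classes
# are illustrative, and the printed dict shape is an assumption (OpenAI-style
# json_schema response_format params), not asserted by the diff above.
from typing import List

from pydantic import BaseModel

import litellm


class LineItem(BaseModel):
    name: str
    quantity: int


class Order(BaseModel):
    # Nested pydantic object handled by the new conversion path.
    items: List[LineItem]
    total: float


config = litellm.AnthropicConfig()
schema_param = config.get_json_schema_from_pydantic_object(response_format=Order)
print(schema_param)  # expected: a dict such as {"type": "json_schema", "json_schema": {...}}
```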