Mirror of https://github.com/BerriAI/litellm.git (synced 2025-04-25 10:44:24 +00:00)
Litellm remove circular imports (#7232)
* fix(utils.py): initial commit to remove circular imports - moves LlmProviders to utils.py
* fix(router.py): fix 'litellm.EmbeddingResponse' import from router.py
* refactor: fix litellm.ModelResponse import on pass through endpoints
* refactor(litellm_logging.py): fix circular import for custom callbacks literal
* fix(factory.py): fix circular imports inside prompt factory
* fix(cost_calculator.py): fix circular import for 'litellm.Usage'
* fix(proxy_server.py): fix potential circular import with `litellm.Router`
* fix(proxy/utils.py): fix potential circular import in `litellm.Router`
* fix: remove circular imports in 'auth_checks' and 'guardrails/'
* fix(prompt_injection_detection.py): fix router import
* fix(vertex_passthrough_logging_handler.py): fix potential circular imports in vertex pass through
* fix(anthropic_pass_through_logging_handler.py): fix potential circular imports
* fix(slack_alerting.py-+-ollama_chat.py): fix modelresponse import
* fix(base.py): fix potential circular import
* fix(handler.py): fix potential circular ref in codestral + cohere handlers
* fix(azure.py): fix potential circular imports
* fix(gpt_transformation.py): fix modelresponse import
* fix(litellm_logging.py): add logging base class - simplifies typing so other files can type-check the logging obj without introducing circular imports
* fix(azure_ai/embed): fix potential circular import on handler.py
* fix(databricks/): fix potential circular imports in databricks/
* fix(vertex_ai/): fix potential circular imports on vertex ai embeddings
* fix(vertex_ai/image_gen): fix import
* fix(watsonx-+-bedrock): cleanup imports
* refactor(anthropic-pass-through-+-petals): cleanup imports
* refactor(huggingface/): cleanup imports
* fix(ollama-+-clarifai): cleanup circular imports
* fix(openai_like/): fix import
* fix(openai_like/): fix embedding handler, clean up imports
* refactor(openai.py): cleanup imports
* fix(sagemaker/transformation.py): fix import
* ci(config.yml): add circular import test to ci/cd
Parent: 0dbf71291e
Commit: 516c2a6a70
48 changed files with 489 additions and 256 deletions
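Only the litellm/utils.py hunks of this change are reproduced in the excerpt below. The last bullet of the commit message also mentions a circular-import test added to CI; that test is not shown here, but a minimal sketch of such a check (function and skip logic are illustrative, not the actual CI job) could look like:

import importlib
import pkgutil

import litellm


def test_no_circular_imports() -> None:
    # Importing every submodule in one process surfaces ImportError /
    # AttributeError failures caused by partially initialized (circularly
    # imported) modules.
    for module_info in pkgutil.walk_packages(litellm.__path__, prefix="litellm."):
        # Modules guarded by optional dependencies may still fail for unrelated
        # reasons; a real check would likely skip or whitelist those.
        importlib.import_module(module_info.name)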
@@ -126,6 +126,7 @@ from litellm.types.utils import (
     EmbeddingResponse,
     Function,
     ImageResponse,
+    LlmProviders,
     Message,
     ModelInfo,
     ModelResponse,
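The hunk above pulls LlmProviders in from litellm.types.utils instead of reaching it through the package root. The general idea, sketched here with illustrative enum members rather than litellm's full provider list, is to keep shared enums and typed models in a leaf module that imports nothing from the package root, so any module can import it without closing a cycle:

# litellm/types/utils.py (sketch): depends only on the standard library, so both
# litellm/__init__.py and individual provider modules can import it freely.
from enum import Enum


class LlmProviders(str, Enum):
    OPENAI = "openai"
    ANTHROPIC = "anthropic"
    VERTEX_AI = "vertex_ai"


# Any other module then does:
#     from litellm.types.utils import LlmProviders
# with no dependency on the top-level litellm package being initialized.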
@@ -147,6 +148,7 @@ claude_json_str = json.dumps(json_data)
 import importlib.metadata
 from concurrent.futures import ThreadPoolExecutor
 from typing import (
+    TYPE_CHECKING,
     Any,
     Callable,
     Dict,
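TYPE_CHECKING appears in the typing import here; the standard way it breaks import cycles (a sketch of the pattern, not the exact litellm code) is to import a module only for the type checker and reference it through a string annotation at runtime:

from typing import TYPE_CHECKING

if TYPE_CHECKING:
    # Evaluated only by type checkers (mypy/pyright), so it cannot trigger a
    # circular import when this module is loaded at runtime.
    from litellm.router import Router


def warm_up_router(router: "Router") -> None:
    # The quoted annotation keeps the runtime path free of the litellm.router import.
    ...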
@@ -163,6 +165,8 @@ from typing import (
 
 from openai import OpenAIError as OriginalError
 
+from litellm.llms.base_llm.transformation import BaseConfig
+
 from ._logging import verbose_logger
 from .caching.caching import (
     Cache,
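This hunk adds the BaseConfig import to the module's top-level import block, while the final hunk below drops the copy that previously sat just above ProviderConfigManager. Hoisting an import like this is only safe when the imported module is a "leaf" that does not import back into the package root, roughly like this sketch (the class body is illustrative, not litellm's actual BaseConfig):

# litellm/llms/base_llm/transformation.py (sketch): stdlib-only imports mean the
# abstract base can be imported from anywhere in the package without a cycle.
from abc import ABC, abstractmethod
from typing import Any, Dict


class BaseConfig(ABC):
    @abstractmethod
    def transform_request(self, request: Dict[str, Any]) -> Dict[str, Any]:
        """Map a generic request dict into the provider-specific payload."""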
@@ -235,7 +239,6 @@ last_fetched_at = None
 last_fetched_at_keys = None
 ######## Model Response #########################
 
-
# All liteLLM Model responses will be in this format, Follows the OpenAI Format
 # https://docs.litellm.ai/docs/completion/output
 # {
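The truncated comment above describes the shape every litellm model response follows; per the linked docs, an OpenAI-style chat completion output looks roughly like this (field values are illustrative):

# Rough OpenAI-format completion output, as referenced by the comment above.
example_response = {
    "id": "chatcmpl-123",
    "object": "chat.completion",
    "created": 1677652288,
    "model": "gpt-3.5-turbo",
    "choices": [
        {
            "index": 0,
            "message": {"role": "assistant", "content": "Hello!"},
            "finish_reason": "stop",
        }
    ],
    "usage": {"prompt_tokens": 9, "completion_tokens": 2, "total_tokens": 11},
}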
@@ -6205,13 +6208,10 @@ def validate_chat_completion_user_messages(messages: List[AllMessageValues]):
     return messages
 
 
-from litellm.llms.base_llm.transformation import BaseConfig
-
-
 class ProviderConfigManager:
     @staticmethod
     def get_provider_chat_config( # noqa: PLR0915
-        model: str, provider: litellm.LlmProviders
+        model: str, provider: LlmProviders
    ) -> BaseConfig:
         """
         Returns the provider config for a given provider.
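The signature change in this last hunk swaps the `litellm.LlmProviders` attribute lookup for the LlmProviders name imported directly from litellm.types.utils (see the first hunk), so utils.py no longer needs a fully initialized litellm package just to annotate this parameter. The method body is not part of the excerpt; the sketch below only illustrates the annotation change, with the body left unimplemented:

# Before (sketch): the annotation is evaluated when the def executes, so the
# top-level litellm package must already expose the LlmProviders attribute:
#     def get_provider_chat_config(model: str, provider: litellm.LlmProviders) -> BaseConfig: ...
#
# After: reference the enum imported from the leaf types module directly.
from litellm.llms.base_llm.transformation import BaseConfig
from litellm.types.utils import LlmProviders


def get_provider_chat_config(model: str, provider: LlmProviders) -> BaseConfig:
    # Body omitted in the excerpt above; the real method returns the
    # provider-specific BaseConfig subclass for `provider`.
    raise NotImplementedError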