Litellm remove circular imports (#7232)

* fix(utils.py): initial commit to remove circular imports - moves llmproviders to utils.py

* fix(router.py): fix 'litellm.EmbeddingResponse' import from router.py

* refactor: fix litellm.ModelResponse import on pass through endpoints

* refactor(litellm_logging.py): fix circular import for custom callbacks literal

* fix(factory.py): fix circular imports inside prompt factory

* fix(cost_calculator.py): fix circular import for 'litellm.Usage'

* fix(proxy_server.py): fix potential circular import with `litellm.Router'

* fix(proxy/utils.py): fix potential circular import in `litellm.Router`

* fix: remove circular imports in 'auth_checks' and 'guardrails/'

* fix(prompt_injection_detection.py): fix router import

* fix(vertex_passthrough_logging_handler.py): fix potential circular imports in vertex pass through

* fix(anthropic_pass_through_logging_handler.py): fix potential circular imports

* fix(slack_alerting.py-+-ollama_chat.py): fix modelresponse import

* fix(base.py): fix potential circular import

* fix(handler.py): fix potential circular ref in codestral + cohere handler's

* fix(azure.py): fix potential circular imports

* fix(gpt_transformation.py): fix modelresponse import

* fix(litellm_logging.py): add logging base class - simplify typing

makes it easy for other files to type check the logging obj without introducing circular imports

* fix(azure_ai/embed): fix potential circular import on handler.py

* fix(databricks/): fix potential circular imports in databricks/

* fix(vertex_ai/): fix potential circular imports on vertex ai embeddings

* fix(vertex_ai/image_gen): fix import

* fix(watsonx-+-bedrock): cleanup imports

* refactor(anthropic-pass-through-+-petals): cleanup imports

* refactor(huggingface/): cleanup imports

* fix(ollama-+-clarifai): cleanup circular imports

* fix(openai_like/): fix import

* fix(openai_like/): fix embedding handler

cleanup imports

* refactor(openai.py): cleanup imports

* fix(sagemaker/transformation.py): fix import

* ci(config.yml): add circular import test to ci/cd
This commit is contained in:
Krish Dholakia 2024-12-14 16:28:34 -08:00 committed by GitHub
parent 0dbf71291e
commit 516c2a6a70
No known key found for this signature in database
GPG key ID: B5690EEEBB952194
48 changed files with 489 additions and 256 deletions

View file

@@ -89,7 +89,7 @@ class AzureAIEmbedding(OpenAIChatCompletion):
embedding_response = response.json()
embedding_headers = dict(response.headers)
returned_response: litellm.EmbeddingResponse = convert_to_model_response_object( # type: ignore
returned_response: EmbeddingResponse = convert_to_model_response_object( # type: ignore
response_object=embedding_response,
model_response_object=model_response,
response_type="embedding",
@@ -104,7 +104,7 @@ class AzureAIEmbedding(OpenAIChatCompletion):
data: ImageEmbeddingRequest,
timeout: float,
logging_obj,
model_response: litellm.EmbeddingResponse,
model_response: EmbeddingResponse,
optional_params: dict,
api_key: Optional[str],
api_base: Optional[str],
@@ -132,7 +132,7 @@ class AzureAIEmbedding(OpenAIChatCompletion):
embedding_response = response.json()
embedding_headers = dict(response.headers)
returned_response: litellm.EmbeddingResponse = convert_to_model_response_object( # type: ignore
returned_response: EmbeddingResponse = convert_to_model_response_object( # type: ignore
response_object=embedding_response,
model_response_object=model_response,
response_type="embedding",
@@ -213,14 +213,14 @@ class AzureAIEmbedding(OpenAIChatCompletion):
input: List,
timeout: float,
logging_obj,
model_response: litellm.EmbeddingResponse,
model_response: EmbeddingResponse,
optional_params: dict,
api_key: Optional[str] = None,
api_base: Optional[str] = None,
client=None,
aembedding=None,
max_retries: Optional[int] = None,
) -> litellm.EmbeddingResponse:
) -> EmbeddingResponse:
"""
- Separate image url from text
-> route image url call to `/image/embeddings`