(code quality) run ruff rule to ban unused imports (#7313)

* remove unused imports

* fix AmazonConverseConfig

* fix test

* fix import

* ruff check fixes

* test fixes

* fix testing

* fix imports
Ishaan Jaff 2024-12-19 12:33:42 -08:00 committed by GitHub
parent 7e01805caa
commit 62a1cdec47
347 changed files with 5473 additions and 7207 deletions
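
The rule referenced in the title is, presumably, Ruff's F401 (unused-import) check: it deletes imports that are never referenced and trims unused names out of multi-name imports. A minimal sketch of the kind of edit the autofix makes, using a hypothetical module rather than any file actually touched by this commit:

# What `ruff check --select F401 --fix` would do to this module:
#
#   import json                        # never referenced -> whole import removed
#   from typing import Any, Optional   # Optional never referenced -> name trimmed
#
# leaving only the import that is still used below.
from typing import Any


def describe(value: Any) -> str:
    # `Any` is the only typing name referenced, so it is the only one kept.
    return f"value={value!r}"

Applying that autofix across the repository is what produces a change of this size (347 files, with far more deletions than additions).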

@@ -1,8 +1,5 @@
import hashlib
import json
import os
import time
import traceback
import types
from typing import (
Any,
@@ -22,32 +19,18 @@
from openai import AsyncOpenAI, OpenAI
from openai.types.beta.assistant_deleted import AssistantDeleted
from openai.types.file_deleted import FileDeleted
from pydantic import BaseModel
from typing_extensions import overload, override
from typing_extensions import overload
import litellm
from litellm import LlmProviders
from litellm._logging import verbose_logger
from litellm.litellm_core_utils.litellm_logging import Logging as LiteLLMLoggingObj
from litellm.litellm_core_utils.prompt_templates.factory import (
custom_prompt,
prompt_factory,
)
from litellm.llms.base_llm.chat.transformation import BaseConfig, BaseLLMException
from litellm.llms.bedrock.chat.invoke_handler import MockResponseIterator
from litellm.llms.custom_httpx.http_handler import _DEFAULT_TTL_FOR_HTTPX_CLIENTS
from litellm.secret_managers.main import get_secret_str
from litellm.types.utils import (
EmbeddingResponse,
ImageResponse,
ModelResponse,
ProviderField,
TextCompletionResponse,
Usage,
)
from litellm.types.utils import EmbeddingResponse, ImageResponse, ModelResponse
from litellm.utils import (
Choices,
CustomStreamWrapper,
Message,
ProviderConfigManager,
convert_to_model_response_object,
)