"""
|
|
Translates from OpenAI's `/v1/chat/completions` to DeepSeek's `/v1/chat/completions`
|
|
"""
|
|
|
|
from typing import List, Optional, Tuple
|
|
|
|
from litellm.litellm_core_utils.prompt_templates.common_utils import (
|
|
handle_messages_with_content_list_to_str_conversion,
|
|
)
|
|
from litellm.secret_managers.main import get_secret_str
|
|
from litellm.types.llms.openai import AllMessageValues
|
|
|
|
from ...openai.chat.gpt_transformation import OpenAIGPTConfig
|
|
|
|
|
|
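# DeepSeek's chat API is OpenAI-compatible, so this config subclasses the
# OpenAI GPT transformation and only overrides the pieces that differ:
# message flattening, credential/endpoint resolution, and URL construction.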
class DeepSeekChatConfig(OpenAIGPTConfig):
    def _transform_messages(
        self, messages: List[AllMessageValues], model: str
    ) -> List[AllMessageValues]:
        """
        DeepSeek does not support content in list format.
        """
        messages = handle_messages_with_content_list_to_str_conversion(messages)
        return super()._transform_messages(messages=messages, model=model)
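
    # Illustration (hypothetical message, not from the original source): a
    # content-list message such as
    #     {"role": "user", "content": [{"type": "text", "text": "Hi"}]}
    # is flattened to
    #     {"role": "user", "content": "Hi"}
    # before the parent OpenAI-style transformation runs.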

    def _get_openai_compatible_provider_info(
        self, api_base: Optional[str], api_key: Optional[str]
    ) -> Tuple[Optional[str], Optional[str]]:
        api_base = (
            api_base
            or get_secret_str("DEEPSEEK_API_BASE")
            or "https://api.deepseek.com/beta"
        )  # type: ignore
        dynamic_api_key = api_key or get_secret_str("DEEPSEEK_API_KEY")
        return api_base, dynamic_api_key
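
    # Illustration (hypothetical values, assuming DEEPSEEK_API_BASE is unset
    # and DEEPSEEK_API_KEY="sk-example" is exported):
    #     _get_openai_compatible_provider_info(None, None)
    #     -> ("https://api.deepseek.com/beta", "sk-example")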

    def get_complete_url(
        self,
        api_base: Optional[str],
        model: str,
        optional_params: dict,
        litellm_params: dict,
        stream: Optional[bool] = None,
    ) -> str:
        """
        If api_base is not provided, use the default DeepSeek /chat/completions endpoint.
        """
        if not api_base:
            api_base = "https://api.deepseek.com/beta"

        if not api_base.endswith("/chat/completions"):
            api_base = f"{api_base}/chat/completions"

        return api_base
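

# Usage sketch (not part of the original module): shows how the URL helper
# behaves. Assumes the OpenAIGPTConfig base class can be instantiated with no
# arguments; the model name "deepseek-chat" is only illustrative.
if __name__ == "__main__":
    config = DeepSeekChatConfig()

    # With no api_base supplied, the default beta endpoint is used and the
    # /chat/completions suffix is appended.
    url = config.get_complete_url(
        api_base=None,
        model="deepseek-chat",
        optional_params={},
        litellm_params={},
    )
    print(url)  # https://api.deepseek.com/beta/chat/completions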