mirror of
https://github.com/BerriAI/litellm.git
synced 2025-04-25 18:54:30 +00:00
* feat(proxy_cli.py): add new 'log_config' cli param Allows passing logging.conf to uvicorn on startup * docs(cli.md): add logging conf to uvicorn cli docs * fix(get_llm_provider_logic.py): fix default api base for litellm_proxy Fixes https://github.com/BerriAI/litellm/issues/6332 * feat(openai_like/embedding): Add support for jina ai embeddings Closes https://github.com/BerriAI/litellm/issues/6337 * docs(deploy.md): update entrypoint.sh filepath post-refactor Fixes outdated docs * feat(prometheus.py): emit time_to_first_token metric on prometheus Closes https://github.com/BerriAI/litellm/issues/6334 * fix(prometheus.py): only emit time to first token metric if stream is True enables more accurate ttft usage * test: handle vertex api instability * fix(get_llm_provider_logic.py): fix import * fix(openai.py): fix deepinfra default api base * fix(anthropic/transformation.py): remove anthropic beta header (#6361)
45 lines
1.6 KiB
Python
"""
|
|
Translate from OpenAI's `/v1/chat/completions` to VLLM's `/v1/chat/completions`
|
|
"""
|
|
|
|
import types
|
|
from typing import List, Optional, Tuple, Union
|
|
|
|
from pydantic import BaseModel
|
|
|
|
import litellm
|
|
from litellm.secret_managers.main import get_secret_str
|
|
from litellm.types.llms.openai import AllMessageValues, ChatCompletionAssistantMessage
|
|
|
|
from ....utils import _remove_additional_properties, _remove_strict_from_schema
|
|
from ...OpenAI.chat.gpt_transformation import OpenAIGPTConfig
|
|
|
|
|
|
class HostedVLLMChatConfig(OpenAIGPTConfig):
    """
    Transformation config for hosted VLLM's OpenAI-compatible
    `/v1/chat/completions` endpoint.

    Extends the generic OpenAI GPT transformation with hosted-vllm
    specifics: tool JSON schemas are sanitized before being forwarded,
    and provider credentials fall back to `HOSTED_VLLM_*` secrets.
    """

    def map_openai_params(
        self,
        non_default_params: dict,
        optional_params: dict,
        model: str,
        drop_params: bool,
    ) -> dict:
        """
        Map OpenAI request params to hosted-vllm params.

        Strips `additionalProperties` and `strict` from any tool schemas
        (keys the target endpoint is not expected to accept), then
        delegates to the base OpenAI mapping.

        Args:
            non_default_params: caller-supplied params differing from defaults.
            optional_params: accumulator dict of provider-ready params.
            model: model name (passed through to the base mapping).
            drop_params: whether unsupported params should be dropped.

        Returns:
            The mapped optional params dict from the base implementation.
        """
        _tools = non_default_params.pop("tools", None)
        # NOTE: the original code re-checked `_tools is not None` a second
        # time before writing back; the helpers never turn a non-None value
        # into None here, so a single guard suffices.
        if _tools is not None:
            # remove 'additionalProperties' from tool schemas
            _tools = _remove_additional_properties(_tools)
            # remove 'strict' from tool schemas
            _tools = _remove_strict_from_schema(_tools)
            non_default_params["tools"] = _tools
        return super().map_openai_params(
            non_default_params, optional_params, model, drop_params
        )

    def _get_openai_compatible_provider_info(
        self, api_base: Optional[str], api_key: Optional[str]
    ) -> Tuple[Optional[str], Optional[str]]:
        """
        Resolve the api base and api key for hosted vllm.

        Explicit arguments win; otherwise the `HOSTED_VLLM_API_BASE` /
        `HOSTED_VLLM_API_KEY` secrets are consulted.

        Args:
            api_base: explicit api base, or None to use the secret.
            api_key: explicit api key, or None to use the secret.

        Returns:
            Tuple of (api_base, dynamic_api_key). The key falls back to ""
            because vllm does not require an api key.
        """
        api_base = api_base or get_secret_str("HOSTED_VLLM_API_BASE")  # type: ignore
        dynamic_api_key = (
            api_key or get_secret_str("HOSTED_VLLM_API_KEY") or ""
        )  # vllm does not require an api key
        return api_base, dynamic_api_key