Add /vllm/* and /mistral/* passthrough endpoints (enables Mistral OCR via passthrough)

* feat(llm_passthrough_endpoints.py): support mistral passthrough

Closes https://github.com/BerriAI/litellm/issues/9051

* feat(llm_passthrough_endpoints.py): initial commit for adding vllm passthrough route

* feat(vllm/common_utils.py): add new vllm model info route

makes it possible to use the vLLM passthrough route via a factory function

* fix(llm_passthrough_endpoints.py): add all methods to vllm passthrough route

* fix: fix linting error

* fix: fix linting error

* fix: fix ruff check

* fix(proxy/_types.py): add new passthrough routes

* docs(config_settings.md): add mistral env vars to docs
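
For context, a minimal sketch of how the new passthrough routes are expected to be called, assuming a LiteLLM proxy running at http://localhost:4000 with VLLM_API_BASE and MISTRAL_API_KEY set in its environment (the virtual key and OCR request shape below are illustrative assumptions, not part of this diff):

import httpx

PROXY_BASE = "http://localhost:4000"
headers = {"Authorization": "Bearer sk-1234"}  # illustrative proxy virtual key

# vLLM passthrough: {LITELLM_API_BASE}/vllm/{endpoint} forwards to VLLM_API_BASE
resp = httpx.get(f"{PROXY_BASE}/vllm/v1/models", headers=headers)
resp.raise_for_status()
print(resp.json())

# Mistral passthrough: {LITELLM_API_BASE}/mistral/{endpoint} forwards to Mistral's API,
# e.g. the OCR use case this PR targets (payload shape shown is an assumption)
resp = httpx.post(
    f"{PROXY_BASE}/mistral/v1/ocr",
    headers=headers,
    json={
        "model": "mistral-ocr-latest",
        "document": {"type": "document_url", "document_url": "https://example.com/sample.pdf"},
    },
)
print(resp.status_code)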
Krish Dholakia 2025-04-14 22:06:33 -07:00 committed by GitHub
parent 8faf56922c
commit 9b0f871129
12 changed files with 450 additions and 176 deletions

vllm/common_utils.py
@@ -0,0 +1,75 @@
from typing import List, Optional, Union

import httpx

import litellm
from litellm.llms.base_llm.base_utils import BaseLLMModelInfo
from litellm.llms.base_llm.chat.transformation import BaseLLMException
from litellm.secret_managers.main import get_secret_str
from litellm.types.llms.openai import AllMessageValues
from litellm.utils import _add_path_to_api_base


class VLLMError(BaseLLMException):
    pass


class VLLMModelInfo(BaseLLMModelInfo):
    def validate_environment(
        self,
        headers: dict,
        model: str,
        messages: List[AllMessageValues],
        optional_params: dict,
        litellm_params: dict,
        api_key: Optional[str] = None,
        api_base: Optional[str] = None,
    ) -> dict:
        """vLLM does not require an API key, so headers are returned unchanged."""
        return headers

    @staticmethod
    def get_api_base(api_base: Optional[str] = None) -> Optional[str]:
        api_base = api_base or get_secret_str("VLLM_API_BASE")
        if api_base is None:
            raise ValueError(
                "VLLM_API_BASE is not set. Please set the environment variable to use vLLM's pass-through - `{LITELLM_API_BASE}/vllm/{endpoint}`."
            )
        return api_base

    @staticmethod
    def get_api_key(api_key: Optional[str] = None) -> Optional[str]:
        # vLLM's passthrough does not use an API key.
        return None

    @staticmethod
    def get_base_model(model: str) -> Optional[str]:
        return model

    def get_models(
        self, api_key: Optional[str] = None, api_base: Optional[str] = None
    ) -> List[str]:
        api_base = VLLMModelInfo.get_api_base(api_base)
        endpoint = "/v1/models"
        if api_base is None:
            raise ValueError(
                "VLLM_API_BASE is not set. Please set the environment variable to query vLLM's `/v1/models` endpoint."
            )
        url = _add_path_to_api_base(api_base, endpoint)
        response = litellm.module_level_client.get(url=url)
        response.raise_for_status()
        models = response.json()["data"]
        return [model["id"] for model in models]

    def get_error_class(
        self, error_message: str, status_code: int, headers: Union[dict, httpx.Headers]
    ) -> BaseLLMException:
        return VLLMError(
            status_code=status_code, message=error_message, headers=headers
        )
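
A quick sketch of exercising this helper directly, assuming a vLLM server reachable at http://localhost:8000 (the address and the model id in the comment are illustrative):

import os

os.environ["VLLM_API_BASE"] = "http://localhost:8000"  # illustrative vLLM server address

info = VLLMModelInfo()
model_ids = info.get_models()  # GET {VLLM_API_BASE}/v1/models
print(model_ids)  # e.g. ["meta-llama/Llama-3.1-8B-Instruct"]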