mirror of
https://github.com/BerriAI/litellm.git
synced 2025-04-27 11:43:54 +00:00
* fix: add lm_studio support * fix(cohere_transformation.py): fix transformation logic for azure cohere embedding model name Fixes https://github.com/BerriAI/litellm/issues/6540 * fix(utils.py): require base64 str to begin with `data:` Fixes https://github.com/BerriAI/litellm/issues/6541 * fix: cleanup tests * docs(guardrails.md): fix typo * fix(opentelemetry.py): move to `.exception` and update 'response_obj' value to handle 'None' case Fixes https://github.com/BerriAI/litellm/issues/6510 * fix: fix linting noqa placement
26 lines
949 B
Python
26 lines
949 B
Python
"""
|
|
Translate from OpenAI's `/v1/chat/completions` to LM Studio's `/chat/completions`
|
|
"""
|
|
|
|
import types
|
|
from typing import List, Optional, Tuple, Union
|
|
|
|
from pydantic import BaseModel
|
|
|
|
import litellm
|
|
from litellm.secret_managers.main import get_secret_str
|
|
from litellm.types.llms.openai import AllMessageValues, ChatCompletionAssistantMessage
|
|
|
|
from ....utils import _remove_additional_properties, _remove_strict_from_schema
|
|
from ...OpenAI.chat.gpt_transformation import OpenAIGPTConfig
|
|
|
|
|
|
class LMStudioChatConfig(OpenAIGPTConfig):
    """Config for LM Studio's OpenAI-compatible `/chat/completions` endpoint.

    Inherits the OpenAI GPT transformation behavior and only customizes how
    the provider's API base URL and key are resolved.
    """

    def _get_openai_compatible_provider_info(
        self, api_base: Optional[str], api_key: Optional[str]
    ) -> Tuple[Optional[str], Optional[str]]:
        """Resolve the API base URL and key for LM Studio.

        Explicit caller-supplied values win; otherwise fall back to the
        `LM_STUDIO_API_BASE` / `LM_STUDIO_API_KEY` secrets.

        Args:
            api_base: Explicit API base URL, or None to use the secret store.
            api_key: Explicit API key, or None to use the secret store.

        Returns:
            Tuple of `(api_base, dynamic_api_key)`. `api_base` may be None if
            neither an argument nor a secret is set.
        """
        api_base = api_base or get_secret_str("LM_STUDIO_API_BASE")
        # LM Studio does not require an API key, so default to "" rather than
        # None when neither the caller nor the secret store provides one.
        dynamic_api_key = api_key or get_secret_str("LM_STUDIO_API_KEY") or ""
        return api_base, dynamic_api_key