litellm-mirror/litellm/llms/lm_studio/chat/transformation.py
Krish Dholakia 22b8f93f53
LiteLLM Minor Fixes & Improvements (11/01/2024) (#6551)
* fix: add lm_studio support

* fix(cohere_transformation.py): fix transformation logic for azure cohere embedding model name

Fixes https://github.com/BerriAI/litellm/issues/6540

* fix(utils.py): require base64 str to begin with `data:`

Fixes https://github.com/BerriAI/litellm/issues/6541

* fix: cleanup tests

* docs(guardrails.md): fix typo

* fix(opentelemetry.py): move to `.exception` and update 'response_obj' value to handle 'None' case

Fixes https://github.com/BerriAI/litellm/issues/6510

* fix: fix linting noqa placement
2024-11-02 02:09:31 +05:30

26 lines
949 B
Python

"""
Translate from OpenAI's `/v1/chat/completions` to LM Studio's `/chat/completions`
"""
import types
from typing import List, Optional, Tuple, Union
from pydantic import BaseModel
import litellm
from litellm.secret_managers.main import get_secret_str
from litellm.types.llms.openai import AllMessageValues, ChatCompletionAssistantMessage
from ....utils import _remove_additional_properties, _remove_strict_from_schema
from ...OpenAI.chat.gpt_transformation import OpenAIGPTConfig
class LMStudioChatConfig(OpenAIGPTConfig):
    """OpenAI-compatible provider config for LM Studio's `/chat/completions`."""

    def _get_openai_compatible_provider_info(
        self, api_base: Optional[str], api_key: Optional[str]
    ) -> Tuple[Optional[str], Optional[str]]:
        """Resolve the API base URL and key to use for LM Studio.

        Explicit caller-supplied values win; otherwise fall back to the
        `LM_STUDIO_API_BASE` / `LM_STUDIO_API_KEY` secrets.

        Args:
            api_base: Explicit API base URL, or None to read `LM_STUDIO_API_BASE`.
            api_key: Explicit API key, or None to read `LM_STUDIO_API_KEY`.

        Returns:
            Tuple of (api_base, dynamic_api_key). `api_base` may be None when
            neither the caller nor the secret provides one; `dynamic_api_key`
            falls back to "" because LM Studio does not require an API key.
        """
        api_base = api_base or get_secret_str("LM_STUDIO_API_BASE")  # type: ignore
        dynamic_api_key = (
            api_key or get_secret_str("LM_STUDIO_API_KEY") or ""
        )  # LM Studio does not require an api key
        return api_base, dynamic_api_key