litellm/tests/llm_translation/test_azure_ai.py

# What is this?
## Unit tests for Azure AI integration
import asyncio
import os
import sys
import traceback
from dotenv import load_dotenv
import litellm.types
import litellm.types.utils
from litellm.llms.anthropic.chat import ModelResponseIterator
load_dotenv()
import io
import os
sys.path.insert(
    0, os.path.abspath("../..")
)  # Adds the parent directory to the system path
from typing import Optional
from unittest.mock import MagicMock, patch
import pytest
import litellm


@pytest.mark.parametrize(
    "model_group_header, expected_model",
    [
        ("offer-cohere-embed-multili-paygo", "Cohere-embed-v3-multilingual"),
        ("offer-cohere-embed-english-paygo", "Cohere-embed-v3-english"),
    ],
)
def test_map_azure_model_group(model_group_header, expected_model):
    """Azure AI model-group header values should map to the correct Cohere embedding model names."""
    from litellm.llms.azure_ai.embed.cohere_transformation import AzureAICohereConfig

    config = AzureAICohereConfig()
    assert config._map_azure_model_group(model_group_header) == expected_model
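

# A minimal usage sketch (added for illustration, not part of the original test
# file): it reuses only the AzureAICohereConfig._map_azure_model_group helper
# exercised above to resolve a deployed model name from an Azure AI model-group
# value pulled out of a headers dict. The "azureml-model-group" header key below
# is an assumption used purely for illustration, not an asserted litellm contract.
def test_map_azure_model_group_from_headers_sketch():
    from litellm.llms.azure_ai.embed.cohere_transformation import AzureAICohereConfig

    config = AzureAICohereConfig()
    # Hypothetical response headers; the mapping only consumes the header value.
    response_headers = {"azureml-model-group": "offer-cohere-embed-multili-paygo"}
    assert (
        config._map_azure_model_group(response_headers["azureml-model-group"])
        == "Cohere-embed-v3-multilingual"
    )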