(fix) OpenAI's optional messages[].name does not work with Mistral API (#6701)

* use helper for _transform_messages mistral

* add test_message_with_name to base LLMChat test

* fix linting
Ishaan Jaff 2024-11-11 18:03:41 -08:00 committed by GitHub
parent c3bc9e6b12
commit 9d20c19e0c
4 changed files with 100 additions and 38 deletions
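
Note: the provider-side change itself is not shown in the excerpt below (only the test files are). As a rough sketch of the idea behind the commit message, the _transform_messages helper presumably has to drop the OpenAI-only optional `name` field before the request reaches Mistral. The function below is a hypothetical illustration of that transform, not litellm's actual code:

from typing import Any, Dict, List


def drop_unsupported_name_field(messages: List[Dict[str, Any]]) -> List[Dict[str, Any]]:
    """Hypothetical sketch: remove the OpenAI-style optional `name` key,
    which the Mistral API does not accept, leaving everything else untouched."""
    cleaned = []
    for message in messages:
        # Copy each message so the caller's objects are not mutated.
        cleaned.append({k: v for k, v in message.items() if k != "name"})
    return cleaned


# Example: the message used in the new test, with `name` stripped out.
print(drop_unsupported_name_field(
    [{"role": "user", "content": "Hello", "name": "test_name"}]
))  # [{'role': 'user', 'content': 'Hello'}]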


@@ -45,6 +45,14 @@ class BaseLLMChatTest(ABC):
        )
        assert response is not None

    def test_message_with_name(self):
        base_completion_call_args = self.get_base_completion_call_args()
        messages = [
            {"role": "user", "content": "Hello", "name": "test_name"},
        ]
        response = litellm.completion(**base_completion_call_args, messages=messages)
        assert response is not None

    @pytest.fixture
    def pdf_messages(self):
        import base64


@@ -0,0 +1,34 @@
import asyncio
import os
import sys
import traceback

from dotenv import load_dotenv

import litellm.types
import litellm.types.utils
from litellm.llms.anthropic.chat import ModelResponseIterator

load_dotenv()
import io
import os

sys.path.insert(
    0, os.path.abspath("../..")
)  # Adds the parent directory to the system path
from typing import Optional
from unittest.mock import MagicMock, patch

import pytest

import litellm
from litellm.llms.anthropic.common_utils import process_anthropic_headers
from httpx import Headers
from base_llm_unit_tests import BaseLLMChatTest


class TestMistralCompletion(BaseLLMChatTest):
    def get_base_completion_call_args(self) -> dict:
        litellm.set_verbose = True
        return {"model": "mistral/mistral-small-latest"}