test: remove integration test for github_copilot + fix missing mock

Son H. Nguyen 2025-03-19 23:15:22 +07:00
parent fa5b36f592
commit 3a827acae5


@@ -74,9 +74,9 @@ def test_github_copilot_config_get_openai_compatible_provider_info():
         assert "Failed to get API key" in str(excinfo.value)
 
 
-@patch("litellm.litellm_core_utils.get_llm_provider_logic.get_llm_provider")
+@patch("litellm.main.get_llm_provider")
 @patch("litellm.llms.openai.openai.OpenAIChatCompletion.completion")
-def test_completion_github_copilot_mock_response(mock_completion, mock_get_provider):
+def test_completion_github_copilot_mock_response(mock_completion, mock_get_llm_provider):
     """Test the completion function with GitHub Copilot provider."""
     # Mock completion response
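
Note on the decorator/argument pairing in this hunk: stacked @patch decorators inject their mocks bottom-up, which is why the innermost patch (OpenAIChatCompletion.completion) arrives as the first parameter, mock_completion, and the topmost patch arrives second. A minimal runnable sketch of that rule, using os.getcwd and os.getpid purely as convenient stdlib patch targets (they have nothing to do with litellm):

import os
from unittest.mock import patch

@patch("os.getpid")   # outer decorator -> injected second
@patch("os.getcwd")   # inner decorator -> injected first
def check(mock_getcwd, mock_getpid):
    # The bottom-most @patch is applied first and passed first.
    mock_getcwd.return_value = "/fake"
    mock_getpid.return_value = 42
    assert os.getcwd() == "/fake"
    assert os.getpid() == 42

check()
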
@@ -99,7 +99,7 @@ def test_completion_github_copilot_mock_response(mock_completion, mock_get_provi
     # Patch the get_llm_provider function instead of the config method
     # Make it return the expected tuple directly
-    mock_get_provider.return_value = (
+    mock_get_llm_provider.return_value = (
         "gpt-4",
         "github_copilot",
         "gh.test-key-123456789",
@@ -114,6 +114,15 @@ def test_completion_github_copilot_mock_response(mock_completion, mock_get_provi
     assert response is not None
 
+    # Verify the get_llm_provider call was made with the expected params
+    mock_get_llm_provider.assert_called_once()
+    args, kwargs = mock_get_llm_provider.call_args
+    print(kwargs.get("model"))
+    assert kwargs.get("model") == "github_copilot/gpt-4"
+    assert kwargs.get("custom_llm_provider") is None
+    assert kwargs.get("api_key") is None
+    assert kwargs.get("api_base") is None
+
     # Verify the completion call was made with the expected params
     mock_completion.assert_called_once()
     args, kwargs = mock_completion.call_args
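
Why the patch target moved from litellm.litellm_core_utils.get_llm_provider_logic.get_llm_provider to litellm.main.get_llm_provider (the "missing mock" in the commit title): mock.patch must replace the name in the namespace where it is looked up, and litellm.main imports get_llm_provider into its own namespace, so patching the defining module never intercepts the call. A minimal, self-contained sketch of that rule follows; the "providers" and "main" modules here are fakes registered in sys.modules for illustration only, not litellm internals.

import sys
from types import ModuleType
from unittest.mock import patch

# Fake "defining" module: providers.get_llm_provider
providers = ModuleType("providers")
def _real_get_llm_provider(model):
    return ("real", model)
providers.get_llm_provider = _real_get_llm_provider
sys.modules["providers"] = providers

# Fake "using" module: main does `from providers import get_llm_provider`,
# binding the function into its own namespace.
main = ModuleType("main")
main.get_llm_provider = providers.get_llm_provider
def _completion(model):
    return main.get_llm_provider(model)  # looked up through main's namespace
main.completion = _completion
sys.modules["main"] = main

# Patching the defining module misses: main still holds the old binding.
with patch("providers.get_llm_provider", return_value=("mocked", "x")):
    assert main.completion("gpt-4") == ("real", "gpt-4")

# Patching where the name is looked up intercepts the call.
with patch("main.get_llm_provider", return_value=("mocked", "x")):
    assert main.completion("gpt-4") == ("mocked", "x")
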
@@ -151,74 +160,74 @@ def test_authenticator_get_api_key(mock_get_api_key):
assert "Failed to get API key" in str(excinfo.value)
-def test_completion_github_copilot(stream=False):
-    try:
-        litellm.set_verbose = True
-        messages = [
-            {"role": "system", "content": "You are an AI programming assistant."},
-            {
-                "role": "user",
-                "content": "Write a Python function to calculate fibonacci numbers",
-            },
-        ]
-        extra_headers = {
-            "editor-version": "Neovim/0.9.0",
-            "Copilot-Integration-Id": "vscode-chat",
-        }
-        response = completion(
-            model="github_copilot/gpt-4",
-            messages=messages,
-            stream=stream,
-            extra_headers=extra_headers,
-        )
-        print(response)
+# def test_completion_github_copilot(stream=False):
+#     try:
+#         litellm.set_verbose = True
+#         messages = [
+#             {"role": "system", "content": "You are an AI programming assistant."},
+#             {
+#                 "role": "user",
+#                 "content": "Write a Python function to calculate fibonacci numbers",
+#             },
+#         ]
+#         extra_headers = {
+#             "editor-version": "Neovim/0.9.0",
+#             "Copilot-Integration-Id": "vscode-chat",
+#         }
+#         response = completion(
+#             model="github_copilot/gpt-4",
+#             messages=messages,
+#             stream=stream,
+#             extra_headers=extra_headers,
+#         )
+#         print(response)
-        if stream is True:
-            for chunk in response:
-                print(chunk)
-                assert chunk is not None
-                assert isinstance(chunk, litellm.ModelResponseStream)
-                assert isinstance(chunk.choices[0], litellm.utils.StreamingChoices)
+#         if stream is True:
+#             for chunk in response:
+#                 print(chunk)
+#                 assert chunk is not None
+#                 assert isinstance(chunk, litellm.ModelResponseStream)
+#                 assert isinstance(chunk.choices[0], litellm.utils.StreamingChoices)
-        else:
-            assert response is not None
-            assert isinstance(response, litellm.ModelResponse)
-            assert response.choices[0].message.content is not None
-    except Exception as e:
-        pytest.fail(f"Error occurred: {e}")
+#         else:
+#             assert response is not None
+#             assert isinstance(response, litellm.ModelResponse)
+#             assert response.choices[0].message.content is not None
+#     except Exception as e:
+#         pytest.fail(f"Error occurred: {e}")
-def test_completion_github_copilot_sonnet_3_7_thought(stream=False):
-    try:
-        litellm.set_verbose = True
-        messages = [
-            {"role": "system", "content": "You are an AI programming assistant."},
-            {
-                "role": "user",
-                "content": "Write a Python function to calculate fibonacci numbers",
-            },
-        ]
-        extra_headers = {
-            "editor-version": "Neovim/0.9.0",
-            "Copilot-Integration-Id": "vscode-chat",
-        }
-        response = completion(
-            model="github_copilot/claude-3.7-sonnet-thought",
-            messages=messages,
-            stream=stream,
-            extra_headers=extra_headers,
-        )
-        print(response)
+# def test_completion_github_copilot_sonnet_3_7_thought(stream=False):
+#     try:
+#         litellm.set_verbose = True
+#         messages = [
+#             {"role": "system", "content": "You are an AI programming assistant."},
+#             {
+#                 "role": "user",
+#                 "content": "Write a Python function to calculate fibonacci numbers",
+#             },
+#         ]
+#         extra_headers = {
+#             "editor-version": "Neovim/0.9.0",
+#             "Copilot-Integration-Id": "vscode-chat",
+#         }
+#         response = completion(
+#             model="github_copilot/claude-3.7-sonnet-thought",
+#             messages=messages,
+#             stream=stream,
+#             extra_headers=extra_headers,
+#         )
+#         print(response)
-        if stream is True:
-            for chunk in response:
-                print(chunk)
-                assert chunk is not None
-                assert isinstance(chunk, litellm.ModelResponseStream)
-                assert isinstance(chunk.choices[0], litellm.utils.StreamingChoices)
+#         if stream is True:
+#             for chunk in response:
+#                 print(chunk)
+#                 assert chunk is not None
+#                 assert isinstance(chunk, litellm.ModelResponseStream)
+#                 assert isinstance(chunk.choices[0], litellm.utils.StreamingChoices)
-        else:
-            assert response is not None
-            assert isinstance(response, litellm.ModelResponse)
-            assert response.choices[0].message.content is not None
-    except Exception as e:
-        pytest.fail(f"Error occurred: {e}")
+#         else:
+#             assert response is not None
+#             assert isinstance(response, litellm.ModelResponse)
+#             assert response.choices[0].message.content is not None
+#     except Exception as e:
+#         pytest.fail(f"Error occurred: {e}")