test(test_completion.py + test_streaming.py): add ollama endpoint to ci/cd pipeline

Krrish Dholakia 2023-12-22 12:21:33 +05:30
parent 57607f111a
commit eb2d13e2fb
3 changed files with 47 additions and 3 deletions

litellm/llms/ollama.py

@@ -137,13 +137,14 @@ def get_ollama_response(
         additional_args={"api_base": url, "complete_input_dict": data, "headers": {}, "acompletion": acompletion,},
     )
     if acompletion is True:
-        if optional_params.get("stream", False):
+        if optional_params.get("stream", False) == True:
             response = ollama_async_streaming(url=url, data=data, model_response=model_response, encoding=encoding, logging_obj=logging_obj)
         else:
             response = ollama_acompletion(url=url, data=data, model_response=model_response, encoding=encoding, logging_obj=logging_obj)
         return response
-    elif optional_params.get("stream", False):
+    elif optional_params.get("stream", False) == True:
         return ollama_completion_stream(url=url, data=data, logging_obj=logging_obj)
     response = requests.post(
         url=f"{url}",
         json=data,
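
For context, the two stream checks above decide which of the handler's three paths a request takes: synchronous streaming (ollama_completion_stream), async streaming (ollama_async_streaming), or plain async completion (ollama_acompletion). A rough sketch of how a caller reaches each branch through litellm's public entry points; the local api_base below is an assumption for illustration, not part of this diff:

    import asyncio
    import litellm

    messages = [{"role": "user", "content": "Hello"}]

    # stream=True on the sync call -> ollama_completion_stream branch
    for chunk in litellm.completion(model="ollama/phi", messages=messages, api_base="http://localhost:11434", stream=True):
        print(chunk)

    # acompletion() without stream -> ollama_acompletion branch
    print(asyncio.run(litellm.acompletion(model="ollama/phi", messages=messages, api_base="http://localhost:11434")))

    # acompletion() with stream=True -> ollama_async_streaming branch
    async def stream_async():
        resp = await litellm.acompletion(model="ollama/phi", messages=messages, api_base="http://localhost:11434", stream=True)
        async for chunk in resp:
            print(chunk)

    asyncio.run(stream_async())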

litellm/tests/test_completion.py

@@ -571,6 +571,22 @@ def test_completion_openai_litellm_key():
 # test_completion_openai_litellm_key()
+def test_completion_ollama_hosted():
+    try:
+        litellm.set_verbose = True
+        response = completion(
+            model="ollama/phi",
+            messages=messages,
+            max_tokens=10,
+            api_base="https://test-ollama-endpoint.onrender.com"
+        )
+        # Add any assertions here to check the response
+        print(response)
+    except Exception as e:
+        pytest.fail(f"Error occurred: {e}")
+# test_completion_ollama_hosted()
 def test_completion_openrouter1():
     try:
         response = completion(
@@ -626,7 +642,7 @@ def test_completion_anyscale_with_functions():
         print(response)
     except Exception as e:
         pytest.fail(f"Error occurred: {e}")
-test_completion_anyscale_with_functions()
+# test_completion_anyscale_with_functions()
 def test_completion_azure_key_completion_arg():
     # this tests if we can pass api_key to completion, when it's not in the env
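
The new test is an ordinary pytest function, so the CI job picks it up through normal collection; the commented-out call at module scope (# test_completion_ollama_hosted()) is only for ad-hoc local runs. A hedged sketch of invoking just the hosted-Ollama test, assuming litellm's usual tests layout for the file path:

    import pytest

    # collect and run only the hosted-Ollama completion test, showing stdout
    pytest.main(["-s", "-k", "test_completion_ollama_hosted", "litellm/tests/test_completion.py"])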

litellm/tests/test_streaming.py

@@ -276,6 +276,33 @@ def test_completion_azure_function_calling_stream():
 # test_completion_azure_function_calling_stream()
+def test_completion_ollama_hosted_stream():
+    try:
+        litellm.set_verbose = True
+        response = completion(
+            model="ollama/phi",
+            messages=messages,
+            max_tokens=10,
+            api_base="https://test-ollama-endpoint.onrender.com",
+            stream=True
+        )
+        # Add any assertions here to check the response
+        complete_response = ""
+        # Add any assertions here to check the response
+        for idx, init_chunk in enumerate(response):
+            chunk, finished = streaming_format_tests(idx, init_chunk)
+            complete_response += chunk
+            if finished:
+                assert isinstance(init_chunk.choices[0], litellm.utils.StreamingChoices)
+                break
+        if complete_response.strip() == "":
+            raise Exception("Empty response received")
+        print(f"complete_response: {complete_response}")
+    except Exception as e:
+        pytest.fail(f"Error occurred: {e}")
+# test_completion_ollama_hosted_stream()
 def test_completion_claude_stream():
     try:
         messages = [
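
The streaming test leans on the streaming_format_tests helper, which checks each chunk's format and returns the extracted text plus a finished flag. For readers unfamiliar with that helper, a minimal hand-rolled version of the same accumulation loop might look like the sketch below; it assumes a reachable Ollama endpoint and uses the OpenAI-style delta fields that litellm's stream chunks expose:

    import litellm

    messages = [{"role": "user", "content": "Hello"}]
    response = litellm.completion(
        model="ollama/phi",
        messages=messages,
        max_tokens=10,
        api_base="http://localhost:11434",  # assumed local endpoint; the CI test targets the hosted URL above
        stream=True,
    )

    complete_response = ""
    for chunk in response:
        choice = chunk.choices[0]
        # each chunk carries an incremental delta; content may be None on the final chunk
        complete_response += getattr(choice.delta, "content", None) or ""
        if choice.finish_reason is not None:
            break

    assert complete_response.strip() != "", "Empty response received"
    print(complete_response)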