test(test_completion.py + test_streaming.py): add ollama endpoint to ci/cd pipeline
parent 57607f111a
commit eb2d13e2fb
3 changed files with 47 additions and 3 deletions
@@ -137,13 +137,14 @@ def get_ollama_response(
         additional_args={"api_base": url, "complete_input_dict": data, "headers": {}, "acompletion": acompletion,},
     )
     if acompletion is True:
-        if optional_params.get("stream", False):
+        if optional_params.get("stream", False) == True:
             response = ollama_async_streaming(url=url, data=data, model_response=model_response, encoding=encoding, logging_obj=logging_obj)
         else:
             response = ollama_acompletion(url=url, data=data, model_response=model_response, encoding=encoding, logging_obj=logging_obj)
         return response
-    elif optional_params.get("stream", False):
+    elif optional_params.get("stream", False) == True:
         return ollama_completion_stream(url=url, data=data, logging_obj=logging_obj)
+
     response = requests.post(
         url=f"{url}",
         json=data,
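
The hunk above routes Ollama requests on two flags: when acompletion is True the call goes to the async handlers (ollama_async_streaming for stream=True, ollama_acompletion otherwise), a synchronous stream=True call goes to ollama_completion_stream, and a plain synchronous call falls through to requests.post. As a rough illustration of the kind of CI check the commit title points at for test_completion.py and test_streaming.py, the sketch below assumes a local Ollama server on its default port and an already-pulled llama2 model; the model name, api_base, and test structure are assumptions, not the tests actually added in this commit.

import litellm

messages = [{"role": "user", "content": "Hello, how are you?"}]

def test_ollama_completion():
    # Plain sync call: exercises the requests.post() branch above.
    response = litellm.completion(
        model="ollama/llama2",              # assumed model pulled in CI
        messages=messages,
        api_base="http://localhost:11434",  # Ollama's default local endpoint
    )
    assert response.choices[0].message.content

def test_ollama_streaming():
    # Sync call with stream=True: exercises ollama_completion_stream().
    response = litellm.completion(
        model="ollama/llama2",
        messages=messages,
        api_base="http://localhost:11434",
        stream=True,
    )
    for chunk in response:
        assert chunk is not None

async def test_ollama_async_streaming():
    # acompletion with stream=True: exercises ollama_async_streaming().
    response = await litellm.acompletion(
        model="ollama/llama2",
        messages=messages,
        api_base="http://localhost:11434",
        stream=True,
    )
    async for chunk in response:
        assert chunk is not None

Running the async case under pytest would additionally need pytest-asyncio or an explicit asyncio.run wrapper.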