bump: version 1.7.0 → 1.7.1

commit 30f47d3169
parent 620633ec28
Author: Krrish Dholakia
Date:   2023-11-25 12:34:28 -08:00

4 changed files with 33 additions and 33 deletions


@@ -199,7 +199,7 @@ class OpenAIChatCompletion(BaseLLM):
                 api_key=api_key,
                 additional_args={"headers": headers, "api_base": api_base, "acompletion": acompletion, "complete_input_dict": data},
             )
             try:
                 if acompletion is True:
                     if optional_params.get("stream", False):
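The context above sits at the top of litellm's completion dispatch, which branches on async vs. sync and streaming vs. non-streaming. A minimal sketch of that four-way pattern (the handler names are hypothetical stand-ins, not litellm's actual functions):

```python
# Sketch of the dispatch pattern only; not the actual litellm implementation.
# sync_fn / sync_stream_fn / async_fn / async_stream_fn are hypothetical.
def dispatch(acompletion: bool, optional_params: dict,
             sync_fn, sync_stream_fn, async_fn, async_stream_fn):
    if acompletion is True:
        if optional_params.get("stream", False):
            return async_stream_fn()   # async iterator of chunks
        return async_fn()              # awaitable full response
    if optional_params.get("stream", False):
        return sync_stream_fn()        # iterator of chunks
    return sync_fn()                   # full response
```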


@@ -229,7 +229,7 @@ def test_completion_azure_stream():
         print(f"completion_response: {complete_response}")
     except Exception as e:
         pytest.fail(f"Error occurred: {e}")
-test_completion_azure_stream()
+# test_completion_azure_stream()

 def test_completion_claude_stream():
     try:
@@ -290,35 +290,35 @@ def test_completion_palm_stream():
         pytest.fail(f"Error occurred: {e}")
 # test_completion_palm_stream()

-# def test_completion_deep_infra_stream():
-#     # deep infra currently includes role in the 2nd chunk
-#     # waiting for them to make a fix on this
-#     try:
-#         messages = [
-#             {"role": "system", "content": "You are a helpful assistant."},
-#             {
-#                 "role": "user",
-#                 "content": "how does a court case get to the Supreme Court?",
-#             },
-#         ]
-#         print("testing deep infra streaming")
-#         response = completion(
-#             model="deepinfra/meta-llama/Llama-2-70b-chat-hf", messages=messages, stream=True, max_tokens=80
-#         )
-#         complete_response = ""
-#         # Add any assertions here to check the response
-#         for idx, chunk in enumerate(response):
-#             chunk, finished = streaming_format_tests(idx, chunk)
-#             if finished:
-#                 break
-#             complete_response += chunk
-#         if complete_response.strip() == "":
-#             raise Exception("Empty response received")
-#         print(f"completion_response: {complete_response}")
-#     except Exception as e:
-#         pytest.fail(f"Error occurred: {e}")
-# test_completion_deep_infra_stream()
+def test_completion_deep_infra_stream():
+    # deep infra currently includes role in the 2nd chunk
+    # waiting for them to make a fix on this
+    try:
+        messages = [
+            {"role": "system", "content": "You are a helpful assistant."},
+            {
+                "role": "user",
+                "content": "how does a court case get to the Supreme Court?",
+            },
+        ]
+        print("testing deep infra streaming")
+        response = completion(
+            model="deepinfra/meta-llama/Llama-2-70b-chat-hf", messages=messages, stream=True, max_tokens=80
+        )
+        complete_response = ""
+        # Add any assertions here to check the response
+        for idx, chunk in enumerate(response):
+            chunk, finished = streaming_format_tests(idx, chunk)
+            if finished:
+                break
+            complete_response += chunk
+        if complete_response.strip() == "":
+            raise Exception("Empty response received")
+        print(f"completion_response: {complete_response}")
+    except Exception as e:
+        pytest.fail(f"Error occurred: {e}")
+test_completion_deep_infra_stream()

 def test_completion_claude_stream_bad_key():
     try:
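The re-enabled test drives litellm's streaming path end to end; `streaming_format_tests` is the suite's own per-chunk validator and is not reproduced here. A minimal sketch of the consumption pattern the test exercises, assuming the OpenAI-style chunk shape (`choices[0].delta`) that litellm streams:

```python
# Minimal consumer sketch, assuming OpenAI-style streaming chunks.
from litellm import completion

response = completion(
    model="deepinfra/meta-llama/Llama-2-70b-chat-hf",
    messages=[{"role": "user", "content": "Say hello."}],
    stream=True,
    max_tokens=20,
)

complete_response = ""
for chunk in response:
    # delta.content can be None on role-only and final chunks
    delta = chunk.choices[0].delta
    complete_response += delta.content or ""

assert complete_response.strip() != "", "Empty response received"
```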


@@ -2301,7 +2301,7 @@ def get_optional_params(  # use the openai defaults
     if n:
         optional_params["n"] = n
     if stream:
-        optional_params["stream"] = str
+        optional_params["stream"] = stream
     if stop:
         optional_params["stop"] = stop
     if max_tokens:
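The one-line fix above corrects a real bug: the old code assigned the Python builtin type `str` to `optional_params["stream"]` instead of the caller's `stream` flag, so the outgoing request carried a class object rather than a boolean. A minimal reproduction of the difference:

```python
# Demonstrates the old bug: `str` is the builtin type, not the flag.
import json

stream = True
optional_params = {}

optional_params["stream"] = str  # old line: stores <class 'str'>
try:
    json.dumps(optional_params)
except TypeError as err:
    print(f"old behavior breaks serialization: {err}")

optional_params["stream"] = stream  # fixed line: stores the boolean
print(json.dumps(optional_params))  # {"stream": true}
```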


@@ -1,6 +1,6 @@
 [tool.poetry]
 name = "litellm"
-version = "1.7.0"
+version = "1.7.1"
 description = "Library to easily interface with LLM API providers"
 authors = ["BerriAI"]
 license = "MIT License"
@@ -27,7 +27,7 @@ requires = ["poetry-core"]
 build-backend = "poetry.core.masonry.api"

 [tool.commitizen]
-version = "1.7.0"
+version = "1.7.1"
 version_files = [
     "pyproject.toml:^version"
 ]
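This `[tool.commitizen]` table is what produced the commit itself: running `cz bump` increments the `version` field in its own table, rewrites every line matching the `^version` pattern in `pyproject.toml` via `version_files`, and commits the result with commitizen's default `bump: version X → Y` message, which is exactly the title of this commit.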