forked from phoenix/litellm-mirror
bump: version 0.13.3.dev1 → 0.13.3.dev2
This commit is contained in:
parent 9d673e3f8c
commit 4dd1913da1
3 changed files with 10 additions and 5 deletions
@@ -14,9 +14,9 @@ import time

 def test_langfuse_logging_async():
     async def _test_langfuse():
-        await litellm.acompletion(
+        return await litellm.acompletion(
             model="gpt-3.5-turbo",
-            prompt="This is a test",
+            messages=[{"role": "user", "content":"This is a test"}],
             max_tokens=1000,
             temperature=0.7,
         )
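For reference, a minimal standalone sketch of the updated test, assuming litellm is installed and an OPENAI_API_KEY is available; the asyncio.run driver shown here is illustrative and not part of the diff:

import asyncio

import litellm


async def _test_langfuse():
    # acompletion expects chat-style messages rather than a raw prompt string
    return await litellm.acompletion(
        model="gpt-3.5-turbo",
        messages=[{"role": "user", "content": "This is a test"}],
        max_tokens=1000,
        temperature=0.7,
    )


def test_langfuse_logging_async():
    # drive the coroutine to completion and surface the response for inspection
    response = asyncio.run(_test_langfuse())
    print(response)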
@@ -828,7 +828,12 @@ def client(original_function):
         model = args[0] if len(args) > 0 else kwargs["model"]
         call_type = original_function.__name__
         if call_type == CallTypes.completion.value:
-            messages = args[1] if len(args) > 1 else kwargs["messages"]
+            if len(args) > 1:
+                messages = args[1]
+            elif kwargs.get("messages", None):
+                messages = kwargs["messages"]
+            elif kwargs.get("prompt", None):
+                messages = kwargs["prompt"]
         elif call_type == CallTypes.embedding.value:
             messages = args[1] if len(args) > 1 else kwargs["input"]
         stream = True if "stream" in kwargs and kwargs["stream"] == True else False
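The decorator change above makes the logging path tolerant of text-completion style calls: messages is now taken from the second positional argument, the messages kwarg, or a prompt kwarg, instead of assuming one of the first two is always present. A minimal sketch of the same branching in isolation (the helper name extract_messages is illustrative, not from the repo):

def extract_messages(args, kwargs):
    # prefer the positional argument, then chat-style messages, then a raw prompt
    if len(args) > 1:
        return args[1]
    elif kwargs.get("messages", None):
        return kwargs["messages"]
    elif kwargs.get("prompt", None):
        return kwargs["prompt"]
    return None

# chat-style call: the messages kwarg is picked up
print(extract_messages((), {"model": "gpt-3.5-turbo",
                            "messages": [{"role": "user", "content": "hi"}]}))
# text-completion-style call: the prompt kwarg is picked up instead
print(extract_messages((), {"model": "text-davinci-003", "prompt": "hi"}))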
@@ -1,6 +1,6 @@
 [tool.poetry]
 name = "litellm"
-version = "0.13.3.dev1"
+version = "0.13.3.dev2"
 description = "Library to easily interface with LLM API providers"
 authors = ["BerriAI"]
 license = "MIT License"
@@ -26,7 +26,7 @@ requires = ["poetry-core"]
 build-backend = "poetry.core.masonry.api"

 [tool.commitizen]
-version = "0.13.3.dev1"
+version = "0.13.3.dev2"
 version_files = [
     "pyproject.toml:^version"
 ]
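The same version string appears in both the [tool.poetry] and [tool.commitizen] tables, and version_files points commitizen back at pyproject.toml so a bump rewrites the lines matching ^version in both places. A quick sanity check that the two fields stay in sync, sketched with the standard-library tomllib (Python 3.11+); this script is illustrative and not part of the repo:

import tomllib

with open("pyproject.toml", "rb") as f:
    config = tomllib.load(f)

poetry_version = config["tool"]["poetry"]["version"]
commitizen_version = config["tool"]["commitizen"]["version"]

# after this commit both should read "0.13.3.dev2"
assert poetry_version == commitizen_version, (poetry_version, commitizen_version)
print("version in sync:", poetry_version)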