build: bump version

Krrish Dholakia 2024-10-08 22:10:06 -07:00
parent 9695c1af10
commit 945267a511
2 changed files with 8 additions and 8 deletions

File 1 of 2: model pricing and context window map (JSON)

@@ -1491,8 +1491,8 @@
 "max_tokens": 8192,
 "max_input_tokens": 8192,
 "max_output_tokens": 8192,
-"input_cost_per_token": 0.00000059,
-"output_cost_per_token": 0.00000079,
+"input_cost_per_token": 0.00000005,
+"output_cost_per_token": 0.00000008,
 "litellm_provider": "groq",
 "mode": "chat",
 "supports_function_calling": true
@@ -5303,7 +5303,7 @@
 "input_cost_per_token": 0.0,
 "output_cost_per_token": 0.0,
 "litellm_provider": "ollama",
-"mode": "completion"
+"mode": "chat"
 },
 "ollama/llama2:7b": {
 "max_tokens": 4096,
@@ -5312,7 +5312,7 @@
 "input_cost_per_token": 0.0,
 "output_cost_per_token": 0.0,
 "litellm_provider": "ollama",
-"mode": "completion"
+"mode": "chat"
 },
 "ollama/llama2:13b": {
 "max_tokens": 4096,
@@ -5321,7 +5321,7 @@
 "input_cost_per_token": 0.0,
 "output_cost_per_token": 0.0,
 "litellm_provider": "ollama",
-"mode": "completion"
+"mode": "chat"
 },
 "ollama/llama2:70b": {
 "max_tokens": 4096,
@@ -5330,7 +5330,7 @@
 "input_cost_per_token": 0.0,
 "output_cost_per_token": 0.0,
 "litellm_provider": "ollama",
-"mode": "completion"
+"mode": "chat"
 },
 "ollama/llama2-uncensored": {
 "max_tokens": 4096,

File 2 of 2: pyproject.toml

@@ -1,6 +1,6 @@
 [tool.poetry]
 name = "litellm"
-version = "1.48.19"
+version = "1.48.20"
 description = "Library to easily interface with LLM API providers"
 authors = ["BerriAI"]
 license = "MIT"
@ -91,7 +91,7 @@ requires = ["poetry-core", "wheel"]
build-backend = "poetry.core.masonry.api"
[tool.commitizen]
version = "1.48.19"
version = "1.48.20"
version_files = [
"pyproject.toml:^version"
]
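
The bump to 1.48.20 can be sanity-checked against an installed package. A small, hedged Python check using only the standard library (not part of this commit):

# Confirm the installed litellm package matches the version set in this commit.
from importlib.metadata import version

installed = version("litellm")
print(f"installed litellm version: {installed}")
assert installed == "1.48.20", f"expected 1.48.20, got {installed}"
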