forked from phoenix/litellm-mirror
build: bump version
commit 945267a511
parent 9695c1af10
2 changed files with 8 additions and 8 deletions
@@ -1491,8 +1491,8 @@
         "max_tokens": 8192,
         "max_input_tokens": 8192,
         "max_output_tokens": 8192,
-        "input_cost_per_token": 0.00000059,
-        "output_cost_per_token": 0.00000079,
+        "input_cost_per_token": 0.00000005,
+        "output_cost_per_token": 0.00000008,
         "litellm_provider": "groq",
         "mode": "chat",
         "supports_function_calling": true
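The groq entry's new per-token prices read more naturally as cost per million tokens. A quick sanity check of the arithmetic, as a minimal Python sketch using the updated values above (plain arithmetic only; no litellm call is involved):

# Convert per-token prices from the updated groq entry into USD per 1M tokens.
input_cost_per_token = 0.00000005   # new value from the hunk above
output_cost_per_token = 0.00000008  # new value from the hunk above

per_million = 1_000_000
print(f"input:  ${input_cost_per_token * per_million:.2f} per 1M tokens")   # $0.05
print(f"output: ${output_cost_per_token * per_million:.2f} per 1M tokens")  # $0.08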
@@ -5303,7 +5303,7 @@
         "input_cost_per_token": 0.0,
         "output_cost_per_token": 0.0,
         "litellm_provider": "ollama",
-        "mode": "completion"
+        "mode": "chat"
     },
     "ollama/llama2:7b": {
         "max_tokens": 4096,
@@ -5312,7 +5312,7 @@
         "input_cost_per_token": 0.0,
         "output_cost_per_token": 0.0,
         "litellm_provider": "ollama",
-        "mode": "completion"
+        "mode": "chat"
     },
     "ollama/llama2:13b": {
         "max_tokens": 4096,
@@ -5321,7 +5321,7 @@
         "input_cost_per_token": 0.0,
         "output_cost_per_token": 0.0,
         "litellm_provider": "ollama",
-        "mode": "completion"
+        "mode": "chat"
     },
     "ollama/llama2:70b": {
         "max_tokens": 4096,
@@ -5330,7 +5330,7 @@
         "input_cost_per_token": 0.0,
         "output_cost_per_token": 0.0,
         "litellm_provider": "ollama",
-        "mode": "completion"
+        "mode": "chat"
     },
     "ollama/llama2-uncensored": {
         "max_tokens": 4096,
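All four llama2 entries flip mode from "completion" to "chat", which matters to anything that filters this cost map by mode. A rough illustration of the effect, assuming the JSON above is litellm's bundled model cost map (the filename and loading code here are assumptions, not part of this diff):

import json

# Assumed filename; this diff does not show it. Entries are shaped like the
# hunks above: {"ollama/llama2": {"litellm_provider": "ollama", "mode": ..., ...}}
with open("model_prices_and_context_window.json") as f:
    cost_map = json.load(f)

# After this commit, the ollama llama2 entries report mode "chat", so a
# consumer selecting chat-capable ollama models now picks them up.
chat_models = [
    name for name, info in cost_map.items()
    if isinstance(info, dict)
    and info.get("litellm_provider") == "ollama"
    and info.get("mode") == "chat"
]
print(chat_models)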
@@ -1,6 +1,6 @@
 [tool.poetry]
 name = "litellm"
-version = "1.48.19"
+version = "1.48.20"
 description = "Library to easily interface with LLM API providers"
 authors = ["BerriAI"]
 license = "MIT"
@@ -91,7 +91,7 @@ requires = ["poetry-core", "wheel"]
 build-backend = "poetry.core.masonry.api"
 
 [tool.commitizen]
-version = "1.48.19"
+version = "1.48.20"
 version_files = [
     "pyproject.toml:^version"
 ]
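Both version lines move from 1.48.19 to 1.48.20 together; the [tool.commitizen] table points version_files at "pyproject.toml:^version", which keeps the [tool.poetry] version in step when the configured version is bumped. A minimal sketch of that substitution under the "file:regex" reading of the entry (an illustration of the idea, not commitizen's actual implementation):

import re
from pathlib import Path

def bump_version_file(path: str, line_pattern: str, old: str, new: str) -> None:
    # Replace the old version with the new one, but only on lines matching the
    # pattern (here "^version"), mirroring the version_files entry above.
    line_re = re.compile(line_pattern)
    lines = Path(path).read_text().splitlines(keepends=True)
    updated = [line.replace(old, new) if line_re.search(line) else line for line in lines]
    Path(path).write_text("".join(updated))

bump_version_file("pyproject.toml", r"^version", "1.48.19", "1.48.20")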