Add source + supports_function_calling tag

Daniel Bichuetti 2024-05-23 20:43:20 -03:00 committed by GitHub
parent a086088dc7
commit 7fdb5aeb24


@@ -3384,9 +3384,10 @@
     "output_cost_per_token": 0.00000015,
     "litellm_provider": "anyscale",
     "mode": "chat",
-    "supports_function_calling": true
+    "supports_function_calling": true,
+    "source": "https://docs.anyscale.com/preview/endpoints/text-generation/supported-models/mistralai-Mistral-7B-Instruct-v0.1"
 },
-"anyscale/Mixtral-8x7B-Instruct-v0.1": {
+"anyscale/mistralai/Mixtral-8x7B-Instruct-v0.1": {
     "max_tokens": 16384,
     "max_input_tokens": 16384,
     "max_output_tokens": 16384,
@@ -3394,7 +3395,8 @@
     "output_cost_per_token": 0.00000015,
     "litellm_provider": "anyscale",
     "mode": "chat",
-    "supports_function_calling": true
+    "supports_function_calling": true,
+    "source": "https://docs.anyscale.com/preview/endpoints/text-generation/supported-models/mistralai-Mixtral-8x7B-Instruct-v0.1"
 },
 "anyscale/mistralai/Mixtral-8x22B-Instruct-v0.1": {
     "max_tokens": 65536,
@@ -3403,7 +3405,9 @@
     "input_cost_per_token": 0.00000090,
     "output_cost_per_token": 0.00000090,
     "litellm_provider": "anyscale",
-    "mode": "chat"
+    "mode": "chat",
+    "supports_function_calling": true,
+    "source": "https://docs.anyscale.com/preview/endpoints/text-generation/supported-models/mistralai-Mixtral-8x22B-Instruct-v0.1"
 },
 "anyscale/HuggingFaceH4/zephyr-7b-beta": {
     "max_tokens": 16384,
@@ -3421,7 +3425,8 @@
     "input_cost_per_token": 0.00000015,
     "output_cost_per_token": 0.00000015,
     "litellm_provider": "anyscale",
-    "mode": "chat"
+    "mode": "chat",
+    "source": "https://docs.anyscale.com/preview/endpoints/text-generation/supported-models/google-gemma-7b-it"
 },
 "anyscale/meta-llama/Llama-2-7b-chat-hf": {
     "max_tokens": 4096,
@@ -3466,7 +3471,8 @@
     "input_cost_per_token": 0.000001,
     "output_cost_per_token": 0.000001,
     "litellm_provider": "anyscale",
-    "mode": "chat"
+    "mode": "chat",
+    "source": "https://docs.anyscale.com/preview/endpoints/text-generation/supported-models/codellama-CodeLlama-70b-Instruct-hf"
 },
 "anyscale/meta-llama/Meta-Llama-3-8B-Instruct": {
     "max_tokens": 8192,
@@ -3475,7 +3481,8 @@
     "input_cost_per_token": 0.00000015,
     "output_cost_per_token": 0.00000015,
     "litellm_provider": "anyscale",
-    "mode": "chat"
+    "mode": "chat",
+    "source": "https://docs.anyscale.com/preview/endpoints/text-generation/supported-models/meta-llama-Meta-Llama-3-8B-Instruct"
 },
 "anyscale/meta-llama/Meta-Llama-3-70B-Instruct": {
     "max_tokens": 8192,
@@ -3484,7 +3491,8 @@
     "input_cost_per_token": 0.00000100,
     "output_cost_per_token": 0.00000100,
     "litellm_provider": "anyscale",
-    "mode": "chat"
+    "mode": "chat",
+    "source": "https://docs.anyscale.com/preview/endpoints/text-generation/supported-models/meta-llama-Meta-Llama-3-70B-Instruct"
 },
 "cloudflare/@cf/meta/llama-2-7b-chat-fp16": {
     "max_tokens": 3072,