Update Mistral docs, copy backup file

parent 9b6bea5c1d
commit 6a899433df

2 changed files with 50 additions and 12 deletions
@@ -42,7 +42,7 @@ for chunk in response:

## Supported Models

-All models listed here https://docs.mistral.ai/platform/endpoints are supported. We actively maintain the list of models, pricing, token window, etc. [here](https://github.com/BerriAI/litellm/blob/c1b25538277206b9f00de5254d80d6a83bb19a29/model_prices_and_context_window.json).
+All models listed here https://docs.mistral.ai/platform/endpoints are supported. We actively maintain the list of models, pricing, token window, etc. [here](https://github.com/BerriAI/litellm/blob/main/model_prices_and_context_window.json).

| Model Name      | Function Call                                                |
|-----------------|--------------------------------------------------------------|

@@ -52,6 +52,7 @@ All models listed here https://docs.mistral.ai/platform/endpoints are supported.

| Mistral 7B      | `completion(model="mistral/open-mistral-7b", messages)`      |
| Mixtral 8x7B    | `completion(model="mistral/open-mixtral-8x7b", messages)`    |
| Mixtral 8x22B   | `completion(model="mistral/open-mixtral-8x22b", messages)`   |
+| Codestral      | `completion(model="mistral/codestral-latest", messages)`     |
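For context, the table's `completion(...)` calls run end to end like the snippet below; a minimal sketch, assuming `MISTRAL_API_KEY` is exported and using the newly listed Codestral model as the example:

```python
import os
from litellm import completion

# MISTRAL_API_KEY is assumed to be set in the environment.
assert os.environ.get("MISTRAL_API_KEY"), "set MISTRAL_API_KEY first"

# Call the newly documented Codestral model through litellm.
response = completion(
    model="mistral/codestral-latest",
    messages=[{"role": "user", "content": "Write a function that reverses a string."}],
)
print(response.choices[0].message.content)
```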
## Function Calling
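The section itself isn't changed in this commit, but several of the pricing entries below carry `supports_function_calling`; a hedged sketch of OpenAI-style tool calling through litellm's `completion()`, with an illustrative tool schema and model choice that are not taken from the docs page:

```python
from litellm import completion

# Illustrative tool schema; not copied from the Mistral docs page.
tools = [{
    "type": "function",
    "function": {
        "name": "get_current_weather",
        "description": "Get the current weather for a city",
        "parameters": {
            "type": "object",
            "properties": {"city": {"type": "string"}},
            "required": ["city"],
        },
    },
}]

response = completion(
    model="mistral/mistral-large-latest",  # any model flagged supports_function_calling
    messages=[{"role": "user", "content": "What's the weather in Paris?"}],
    tools=tools,
)

# litellm normalizes responses to the OpenAI format, so any tool calls
# appear under message.tool_calls.
print(response.choices[0].message.tool_calls)
```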
@@ -692,8 +692,8 @@
        "max_tokens": 8191,
        "max_input_tokens": 32000,
        "max_output_tokens": 8191,
-       "input_cost_per_token": 0.00000015,
-       "output_cost_per_token": 0.00000046,
+       "input_cost_per_token": 0.00000025,
+       "output_cost_per_token": 0.00000025,
        "litellm_provider": "mistral",
        "mode": "chat"
    },
@@ -701,8 +701,8 @@
        "max_tokens": 8191,
        "max_input_tokens": 32000,
        "max_output_tokens": 8191,
-       "input_cost_per_token": 0.000002,
-       "output_cost_per_token": 0.000006,
+       "input_cost_per_token": 0.000001,
+       "output_cost_per_token": 0.000003,
        "litellm_provider": "mistral",
        "supports_function_calling": true,
        "mode": "chat"
@@ -711,8 +711,8 @@
        "max_tokens": 8191,
        "max_input_tokens": 32000,
        "max_output_tokens": 8191,
-       "input_cost_per_token": 0.000002,
-       "output_cost_per_token": 0.000006,
+       "input_cost_per_token": 0.000001,
+       "output_cost_per_token": 0.000003,
        "litellm_provider": "mistral",
        "supports_function_calling": true,
        "mode": "chat"
@@ -748,8 +748,8 @@
        "max_tokens": 8191,
        "max_input_tokens": 32000,
        "max_output_tokens": 8191,
-       "input_cost_per_token": 0.000008,
-       "output_cost_per_token": 0.000024,
+       "input_cost_per_token": 0.000004,
+       "output_cost_per_token": 0.000012,
        "litellm_provider": "mistral",
        "mode": "chat",
        "supports_function_calling": true
@@ -758,26 +758,63 @@
        "max_tokens": 8191,
        "max_input_tokens": 32000,
        "max_output_tokens": 8191,
-       "input_cost_per_token": 0.000008,
-       "output_cost_per_token": 0.000024,
+       "input_cost_per_token": 0.000004,
+       "output_cost_per_token": 0.000012,
        "litellm_provider": "mistral",
        "mode": "chat",
        "supports_function_calling": true
    },
    "mistral/open-mistral-7b": {
        "max_tokens": 8191,
        "max_input_tokens": 32000,
        "max_output_tokens": 8191,
        "input_cost_per_token": 0.00000025,
        "output_cost_per_token": 0.00000025,
        "litellm_provider": "mistral",
        "mode": "chat"
    },
    "mistral/open-mixtral-8x7b": {
        "max_tokens": 8191,
        "max_input_tokens": 32000,
        "max_output_tokens": 8191,
        "input_cost_per_token": 0.0000007,
        "output_cost_per_token": 0.0000007,
        "litellm_provider": "mistral",
        "mode": "chat",
        "supports_function_calling": true
    },
    "mistral/open-mixtral-8x22b": {
        "max_tokens": 8191,
        "max_input_tokens": 64000,
        "max_output_tokens": 8191,
        "input_cost_per_token": 0.000002,
        "output_cost_per_token": 0.000006,
        "litellm_provider": "mistral",
        "mode": "chat",
        "supports_function_calling": true
    },
    "mistral/codestral-latest": {
        "max_tokens": 8191,
        "max_input_tokens": 32000,
        "max_output_tokens": 8191,
        "input_cost_per_token": 0.000001,
        "output_cost_per_token": 0.000003,
        "litellm_provider": "mistral",
        "mode": "chat"
    },
    "mistral/codestral-2405": {
        "max_tokens": 8191,
        "max_input_tokens": 32000,
        "max_output_tokens": 8191,
        "input_cost_per_token": 0.000001,
        "output_cost_per_token": 0.000003,
        "litellm_provider": "mistral",
        "mode": "chat"
    },
    "mistral/mistral-embed": {
        "max_tokens": 8192,
        "max_input_tokens": 8192,
-       "input_cost_per_token": 0.000000111,
+       "input_cost_per_token": 0.0000001,
        "litellm_provider": "mistral",
        "mode": "embedding"
    },
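These per-token prices turn into a request cost by straight multiplication; a small sketch that reads the registry directly, assuming a local copy of `model_prices_and_context_window.json` is on disk:

```python
import json

# Path assumes a local checkout of the litellm pricing file.
with open("model_prices_and_context_window.json") as f:
    prices = json.load(f)

entry = prices["mistral/codestral-latest"]

# Example request: 1,200 prompt tokens and 300 completion tokens.
prompt_tokens, completion_tokens = 1200, 300
cost = (
    prompt_tokens * entry["input_cost_per_token"]
    + completion_tokens * entry["output_cost_per_token"]
)
print(f"${cost:.6f}")  # 1200 * 0.000001 + 300 * 0.000003 = $0.002100
```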