fix supported LLM on docs
parent 37d743fb2b
commit 0cc561bc5d
8 changed files with 11 additions and 11 deletions

cookbook/LiteLLM_AB_TestLLMs.ipynb (vendored, 2 changes)
@@ -50,7 +50,7 @@
 "\n",
 "\n",
 "# define a dict of model id and % of requests for model\n",
-"# see models here: https://docs.litellm.ai/docs/completion/supported\n",
+"# see models here: https://docs.litellm.ai/docs/providers\n",
 "split_per_model = {\n",
 "\t\"gpt-4\": 0.3,\n",
 "\t\"gpt-3.5-turbo\": 0.7\n",
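
The split_per_model weights in this cell lend themselves to a simple weighted router. A minimal sketch of that idea (not part of this commit), assuming litellm's completion() and the standard-library random.choices:

```python
import random

from litellm import completion

# traffic split copied from the notebook cell above
split_per_model = {
    "gpt-4": 0.3,
    "gpt-3.5-turbo": 0.7,
}

def ab_completion(messages):
    # pick a model according to the configured traffic split, then call it via LiteLLM
    model = random.choices(
        population=list(split_per_model.keys()),
        weights=list(split_per_model.values()),
    )[0]
    return completion(model=model, messages=messages)

response = ab_completion([{"role": "user", "content": "Hey, how's it going?"}])
print(response["choices"][0]["message"]["content"])
```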

cookbook/LiteLLM_Comparing_LLMs.ipynb (vendored, 2 changes)
@@ -100,7 +100,7 @@
 "source": [
 "results = [] # for storing results\n",
 "\n",
-"models = ['gpt-3.5-turbo', 'claude-2'] # define what models you're testing, see: https://docs.litellm.ai/docs/completion/supported\n",
+"models = ['gpt-3.5-turbo', 'claude-2'] # define what models you're testing, see: https://docs.litellm.ai/docs/providers\n",
 "for question in questions:\n",
 " row = [question]\n",
 " for model in models:\n",
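
For reference, the loop this cell belongs to collects one answer per model for each question. A rough reconstruction (the questions list here is a placeholder, not the notebook's data), assuming litellm's completion():

```python
from litellm import completion

models = ["gpt-3.5-turbo", "claude-2"]  # models under comparison, per the cell above
questions = ["What is LiteLLM?"]        # placeholder list of prompts (assumption)

results = []  # one row per question: [question, answer from model 1, answer from model 2, ...]
for question in questions:
    row = [question]
    for model in models:
        response = completion(
            model=model,
            messages=[{"role": "user", "content": question}],
        )
        row.append(response["choices"][0]["message"]["content"])
    results.append(row)
```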

@@ -22,7 +22,7 @@
 </h4>
 
 <h4 align="center">
-<a href="https://docs.litellm.ai/docs/completion/supported" target="_blank">100+ Supported Models</a> |
+<a href="https://docs.litellm.ai/docs/providers" target="_blank">100+ Supported Models</a> |
 <a href="https://docs.litellm.ai/docs/" target="_blank">Docs</a> |
 <a href="https://litellm.ai/playground" target="_blank">Demo Website</a>
 </h4>

@@ -101,7 +101,7 @@ python3 main.py
 ### Set your LLM Configs
 Set your LLMs and LLM weights you want to run A/B testing with
 In main.py set your selected LLMs you want to AB test in `llm_dict`
-You can A/B test more than 100+ LLMs using LiteLLM https://docs.litellm.ai/docs/completion/supported
+You can A/B test more than 100+ LLMs using LiteLLM https://docs.litellm.ai/docs/providers
 ```python
 llm_dict = {
 "gpt-4": 0.2,

@@ -114,7 +114,7 @@ llm_dict = {
 #### Setting your API Keys
 Set your LLM API keys in a .env file in the directory or set them as `os.environ` variables.
 
-See https://docs.litellm.ai/docs/completion/supported for the format of API keys
+See https://docs.litellm.ai/docs/providers for the format of API keys
 
 LiteLLM generalizes api keys to follow the following format
 `PROVIDER_API_KEY`
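
Following the `PROVIDER_API_KEY` convention described above, setting keys as os.environ variables could look like the sketch below (OPENAI_API_KEY and ANTHROPIC_API_KEY match the models used in these cookbooks; the values are placeholders):

```python
import os

# PROVIDER_API_KEY naming convention, as described in the README
os.environ["OPENAI_API_KEY"] = "sk-..."         # used for gpt-3.5-turbo / gpt-4
os.environ["ANTHROPIC_API_KEY"] = "sk-ant-..."  # used for claude-2
```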