forked from phoenix/litellm-mirror
add proxy key details to docs
This commit is contained in: parent 8094301424, commit 102fd83ea6
5 changed files with 23 additions and 14 deletions
@@ -11,6 +11,10 @@ We provide a free $10 community-key for testing all providers on LiteLLM. You ca
import os

os.environ["AI21_API_KEY"] = "sk-litellm-7_NPZhMGxY2GoHC59LgbDw" # [OPTIONAL] replace with your ai21 key
```

**Need a dedicated key?**

Email us @ krrish@berri.ai

[**See all supported models by the litellm api key**](../proxy_api.md#supported-models-for-litellm-key)

### Sample Usage
@@ -24,10 +28,6 @@ messages = [{"role": "user", "content": "Write me a poem about the blue sky"}]
completion(model="j2-light", messages=messages)
```

**Need a dedicated key?**

Email us @ krrish@berri.ai

[**See all supported models by the litellm api key**](../proxy_api.md#supported-models-for-litellm-key)

### AI21 Models
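For reference, a minimal runnable sketch of the AI21 usage this diff documents. The `from litellm import completion` import is assumed (it sits outside the hunk context); the key, the `j2-light` model, and the messages come from the lines above.

```python
import os
from litellm import completion

# Free community key from the docs; replace with your own AI21 key if you have one.
os.environ["AI21_API_KEY"] = "sk-litellm-7_NPZhMGxY2GoHC59LgbDw"

messages = [{"role": "user", "content": "Write me a poem about the blue sky"}]

# Route the request to AI21's j2-light model through LiteLLM's unified completion() call.
response = completion(model="j2-light", messages=messages)
print(response)
```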
@@ -9,6 +9,11 @@ import os
os.environ["ANTHROPIC_API_KEY"] = "sk-litellm-7_NPZhMGxY2GoHC59LgbDw" # [OPTIONAL] replace with your anthropic key
```

**Need a dedicated key?**

Email us @ krrish@berri.ai

## Supported Models for LiteLLM Key

These are the models that currently work with the "sk-litellm-.." keys.

## Sample Usage
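The same pattern applies to the Anthropic snippet above. A hedged sketch follows; this hunk only shows the key setup, so the `claude-instant-1` model name is an illustrative assumption, and the `completion` import is again assumed.

```python
import os
from litellm import completion

# Community key from the docs; replace with your own Anthropic key if you have one.
os.environ["ANTHROPIC_API_KEY"] = "sk-litellm-7_NPZhMGxY2GoHC59LgbDw"

messages = [{"role": "user", "content": "Hello, how are you?"}]

# claude-instant-1 is used here only as an example model name; swap in any
# Anthropic model listed under "Supported Models for LiteLLM Key".
response = completion(model="claude-instant-1", messages=messages)
print(response)
```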
@@ -10,6 +10,10 @@ import os
os.environ["NLP_CLOUD_API_KEY"] = "sk-litellm-7_NPZhMGxY2GoHC59LgbDw" # [OPTIONAL] replace with your nlp cloud key
```

**Need a dedicated key?**

Email us @ krrish@berri.ai

[**See all supported models by the litellm api key**](../proxy_api.md#supported-models-for-litellm-key)

## Sample Usage
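Likewise for NLP Cloud: a minimal sketch, assuming the `completion` import and using `dolphin` as an illustrative model name, since the hunk itself only sets the key.

```python
import os
from litellm import completion

# Community key from the docs; replace with your own NLP Cloud key if you have one.
os.environ["NLP_CLOUD_API_KEY"] = "sk-litellm-7_NPZhMGxY2GoHC59LgbDw"

messages = [{"role": "user", "content": "Write me a poem about the blue sky"}]

# "dolphin" is an assumed example model name for NLP Cloud; adjust to the model you use.
response = completion(model="dolphin", messages=messages)
print(response)
```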
@@ -9,6 +9,10 @@ import os
os.environ["OPENAI_API_KEY"] = "sk-litellm-7_NPZhMGxY2GoHC59LgbDw" # [OPTIONAL] replace with your openai key
```

**Need a dedicated key?**

Email us @ krrish@berri.ai

[**See all supported models by the litellm api key**](../proxy_api.md#supported-models-for-litellm-key)

### Usage

```python
@@ -24,11 +28,6 @@ messages = [{ "content": "Hello, how are you?","role": "user"}]
response = completion("gpt-3.5-turbo", messages)
```

**Need a dedicated key?**

Email us @ krrish@berri.ai

[**See all supported models by the litellm api key**](../proxy_api.md#supported-models-for-litellm-key)

### OpenAI Chat Completion Models

| Model Name | Function Call | Required OS Variables |
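Pulling the two OpenAI hunks together, a minimal sketch; the `completion` import is assumed, while the key, model, and messages are taken from the diff.

```python
import os
from litellm import completion

# Community key from the docs; replace with your own OpenAI key if you have one.
os.environ["OPENAI_API_KEY"] = "sk-litellm-7_NPZhMGxY2GoHC59LgbDw"

messages = [{"content": "Hello, how are you?", "role": "user"}]

# completion() accepts the model and messages positionally, as the docs show.
response = completion("gpt-3.5-turbo", messages)
print(response)
```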
@@ -9,6 +9,12 @@ import os
os.environ["TOGETHERAI_API_KEY"] = "sk-litellm-7_NPZhMGxY2GoHC59LgbDw" # [OPTIONAL] replace with your together ai key
```

**Need a dedicated key?**

Email us @ krrish@berri.ai

[**See all supported models by the litellm api key**](../proxy_api.md#supported-models-for-litellm-key)

## Sample Usage

```python
@@ -22,11 +28,6 @@ messages = [{"role": "user", "content": "Write me a poem about the blue sky"}]
completion(model="together_ai/togethercomputer/Llama-2-7B-32K-Instruct", messages=messages)
```

**Need a dedicated key?**

Email us @ krrish@berri.ai

[**See all supported models by the litellm api key**](../proxy_api.md#supported-models-for-litellm-key)

## Together AI Models

liteLLM supports `non-streaming` and `streaming` requests to all models on https://api.together.xyz/
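And for Together AI, a sketch that also exercises the streaming mode the docs mention. The `completion` import and the chunk-by-chunk iteration under `stream=True` are standard LiteLLM behavior but are not shown in this hunk, so treat the streaming half as an assumption.

```python
import os
from litellm import completion

# Community key from the docs; replace with your own Together AI key if you have one.
os.environ["TOGETHERAI_API_KEY"] = "sk-litellm-7_NPZhMGxY2GoHC59LgbDw"

messages = [{"role": "user", "content": "Write me a poem about the blue sky"}]

# Non-streaming call, exactly as in the diff.
response = completion(model="together_ai/togethercomputer/Llama-2-7B-32K-Instruct", messages=messages)
print(response)

# Streaming call: stream=True returns an iterator of chunks.
for chunk in completion(
    model="together_ai/togethercomputer/Llama-2-7B-32K-Instruct",
    messages=messages,
    stream=True,
):
    print(chunk)
```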