(docs) cloudflare

This commit is contained in:
ishaan-jaff 2023-12-29 12:10:32 +05:30
parent 243ad31e90
commit 52a9696303
2 changed files with 59 additions and 0 deletions

View file

@ -0,0 +1,58 @@
# Cloudflare Workers AI
https://developers.cloudflare.com/workers-ai/models/text-generation/
## API Key
```python
# env variables
import os

os.environ['CLOUDFLARE_API_KEY'] = "3dnSGlxxxx"
os.environ['CLOUDFLARE_ACCOUNT_ID'] = "03xxxxx"
```
## Sample Usage
```python
from litellm import completion
import os
os.environ['CLOUDFLARE_API_KEY'] = "3dnSGlxxxx"
os.environ['CLOUDFLARE_ACCOUNT_ID'] = "03xxxxx"
response = completion(
model="cloudflare/@cf/meta/llama-2-7b-chat-int8",
messages=[
{"role": "user", "content": "hello from litellm"}
],
)
print(response)
```
## Sample Usage - Streaming
```python
from litellm import completion
import os
os.environ['CLOUDFLARE_API_KEY'] = "3dnSGlxxxx"
os.environ['CLOUDFLARE_ACCOUNT_ID'] = "03xxxxx"
response = completion(
model="cloudflare/@hf/thebloke/codellama-7b-instruct-awq",
messages=[
{"role": "user", "content": "hello from litellm"}
],
stream=True
)
for chunk in response:
print(chunk)
```
## Supported Models
All models listed at https://developers.cloudflare.com/workers-ai/models/text-generation/ are supported.
| Model Name | Function Call |
|-----------------------------------------|--------------------------------------------------------------------------------|
| @cf/meta/llama-2-7b-chat-fp16 | `completion(model="cloudflare/@cf/meta/llama-2-7b-chat-fp16", messages)` |
| @cf/meta/llama-2-7b-chat-int8 | `completion(model="cloudflare/@cf/meta/llama-2-7b-chat-int8", messages)` |
| @cf/mistral/mistral-7b-instruct-v0.1 | `completion(model="cloudflare/@cf/mistral/mistral-7b-instruct-v0.1", messages)` |
| @hf/thebloke/codellama-7b-instruct-awq | `completion(model="cloudflare/@hf/thebloke/codellama-7b-instruct-awq", messages)` |

View file

@ -75,6 +75,7 @@ const sidebars = {
"providers/anyscale",
"providers/perplexity",
"providers/vllm",
"providers/cloudflare_workers",
"providers/deepinfra",
"providers/ai21",
"providers/nlp_cloud",