diff --git a/docs/my-website/docs/providers/friendliai.md b/docs/my-website/docs/providers/friendliai.md
new file mode 100644
index 000000000..137c3dde3
--- /dev/null
+++ b/docs/my-website/docs/providers/friendliai.md
@@ -0,0 +1,60 @@
+# FriendliAI
+https://suite.friendli.ai/
+
+**We support ALL FriendliAI models, just set `friendliai/` as a prefix when sending completion requests**
+
+## API Key
+```python
+# env variable
+os.environ['FRIENDLI_TOKEN']
+os.environ['FRIENDLI_API_BASE'] # Optional. Set this when using dedicated endpoint.
+```
+
+## Sample Usage
+```python
+from litellm import completion
+import os
+
+os.environ['FRIENDLI_TOKEN'] = ""
+response = completion(
+    model="friendliai/mixtral-8x7b-instruct-v0-1",
+    messages=[
+        {"role": "user", "content": "hello from litellm"}
+    ],
+)
+print(response)
+```
+
+## Sample Usage - Streaming
+```python
+from litellm import completion
+import os
+
+os.environ['FRIENDLI_TOKEN'] = ""
+response = completion(
+    model="friendliai/mixtral-8x7b-instruct-v0-1",
+    messages=[
+        {"role": "user", "content": "hello from litellm"}
+    ],
+    stream=True
+)
+
+for chunk in response:
+    print(chunk)
+```
+
+
+## Supported Models
+### Serverless Endpoints
+We support ALL FriendliAI models, just set `friendliai/` as a prefix when sending completion requests
+
+| Model Name                | Function Call                                                         |
+|---------------------------|-----------------------------------------------------------------------|
+| mixtral-8x7b-instruct     | `completion(model="friendliai/mixtral-8x7b-instruct-v0-1", messages)` |
+| meta-llama-3-8b-instruct  | `completion(model="friendliai/meta-llama-3-8b-instruct", messages)`   |
+| meta-llama-3-70b-instruct | `completion(model="friendliai/meta-llama-3-70b-instruct", messages)`  |
+
+### Dedicated Endpoints
+```
+model="friendliai/$ENDPOINT_ID:$ADAPTER_ROUTE"
+```
diff --git a/docs/my-website/sidebars.js b/docs/my-website/sidebars.js
index c3f7e9249..d228e09d2 100644
--- a/docs/my-website/sidebars.js
+++ b/docs/my-website/sidebars.js
@@ -158,6 +158,7 @@ const sidebars = {
         "providers/triton-inference-server",
         "providers/ollama",
         "providers/perplexity",
+        "providers/friendliai",
         "providers/groq",
         "providers/deepseek",
         "providers/fireworks_ai",
diff --git a/litellm/utils.py b/litellm/utils.py
index a6d3d8603..03bbb0e8c 100644
--- a/litellm/utils.py
+++ b/litellm/utils.py
@@ -4486,7 +4486,10 @@ def get_llm_provider(
                 or get_secret("TOGETHER_AI_TOKEN")
             )
         elif custom_llm_provider == "friendliai":
-            api_base = "https://inference.friendli.ai/v1"
+            api_base = (
+                get_secret("FRIENDLI_API_BASE")
+                or "https://inference.friendli.ai/v1"
+            )
             dynamic_api_key = (
                 api_key or get_secret("FRIENDLIAI_API_KEY")