diff --git a/docs/my-website/docs/providers/friendli_ai.md b/docs/my-website/docs/providers/friendli_ai.md index 9de3fa896..e0498f0ee 100644 --- a/docs/my-website/docs/providers/friendli_ai.md +++ b/docs/my-website/docs/providers/friendli_ai.md @@ -1,7 +1,7 @@ # FriendliAI https://suite.friendli.ai/ -**We support ALL FriendliAI models, just set `friendli_ai/` as a prefix when sending completion requests** +**We support ALL FriendliAI models, just set `friendliai/` as a prefix when sending completion requests** ## API Key ```python @@ -16,7 +16,7 @@ import os os.environ['FRIENDLI_TOKEN'] = "" response = completion( - model="friendli_ai/mixtral-8x7b-instruct-v0-1", + model="friendliai/mixtral-8x7b-instruct-v0-1", messages=[ {"role": "user", "content": "hello from litellm"} ], @@ -31,7 +31,7 @@ import os os.environ['FRIENDLI_TOKEN'] = "" response = completion( - model="friendli_ai/mixtral-8x7b-instruct-v0-1", + model="friendliai/mixtral-8x7b-instruct-v0-1", messages=[ {"role": "user", "content": "hello from litellm"} ], @@ -44,10 +44,10 @@ for chunk in response: ## Supported Models - ALL FriendliAI Models Supported! 
-We support ALL FriendliAI AI models, just set `friendli_ai/` as a prefix when sending completion requests +We support ALL FriendliAI models, just set `friendliai/` as a prefix when sending completion requests | Model Name | Function Call | |--------------------------|------------------------------------------------------------------------------------------------------------------------------------------------------------------| -| mixtral-8x7b-instruct | `completion(model="friendli_ai/mixtral-8x7b-instruct-v0-1", messages)` | -| meta-llama-3-8b-instruct | `completion(model="friendli_ai/meta-llama-3-8b-instruct", messages)` | -| meta-llama-3-70b-instruct | `completion(model="friendli_ai/meta-llama-3-70b-instruct", messages)` | +| mixtral-8x7b-instruct | `completion(model="friendliai/mixtral-8x7b-instruct-v0-1", messages)` | +| meta-llama-3-8b-instruct | `completion(model="friendliai/meta-llama-3-8b-instruct", messages)` | +| meta-llama-3-70b-instruct | `completion(model="friendliai/meta-llama-3-70b-instruct", messages)` | diff --git a/docs/my-website/sidebars.js b/docs/my-website/sidebars.js index 2ed1388d5..9b860260c 100644 --- a/docs/my-website/sidebars.js +++ b/docs/my-website/sidebars.js @@ -150,7 +150,7 @@ const sidebars = { "providers/groq", "providers/deepseek", "providers/fireworks_ai", - "providers/friendli_ai", + "providers/friendliai", "providers/vllm", "providers/xinference", "providers/cloudflare_workers", diff --git a/litellm/__init__.py b/litellm/__init__.py index cbe82f96c..fd0a5fa2b 100644 --- a/litellm/__init__.py +++ b/litellm/__init__.py @@ -406,7 +406,7 @@ openai_compatible_providers: List = [ "xinference", "together_ai", "fireworks_ai", - "friendli_ai", + "friendliai", ] @@ -630,7 +630,7 @@ provider_list: List = [ "cloudflare", "xinference", "fireworks_ai", - "friendli_ai", + "friendliai", "watsonx", "triton", "predibase", diff --git a/litellm/main.py b/litellm/main.py index cd329852b..d95c4f9af 100644 --- a/litellm/main.py +++ 
b/litellm/main.py @@ -1051,7 +1051,7 @@ def completion( # note: if a user sets a custom base - we should ensure this works # allow for the setting of dynamic and stateful api-bases api_base = ( - api_base # for deepinfra/perplexity/anyscale/groq/friendli_ai we check in get_llm_provider and pass in the api base from there + api_base # for deepinfra/perplexity/anyscale/groq/friendliai we check in get_llm_provider and pass in the api base from there or litellm.api_base or get_secret("OPENAI_API_BASE") or "https://api.openai.com/v1" @@ -1065,7 +1065,7 @@ def completion( # set API KEY api_key = ( api_key - or litellm.api_key # for deepinfra/perplexity/anyscale/friendli_ai we check in get_llm_provider and pass in the api key from there + or litellm.api_key # for deepinfra/perplexity/anyscale/friendliai we check in get_llm_provider and pass in the api key from there or litellm.openai_key or get_secret("OPENAI_API_KEY") ) @@ -4288,7 +4288,7 @@ def speech( response: Optional[HttpxBinaryResponseContent] = None if custom_llm_provider == "openai": api_base = ( - api_base # for deepinfra/perplexity/anyscale/groq/friendli_ai we check in get_llm_provider and pass in the api base from there + api_base # for deepinfra/perplexity/anyscale/groq/friendliai we check in get_llm_provider and pass in the api base from there or litellm.api_base or get_secret("OPENAI_API_BASE") or "https://api.openai.com/v1" diff --git a/litellm/utils.py b/litellm/utils.py index f98e904ff..09438bc9a 100644 --- a/litellm/utils.py +++ b/litellm/utils.py @@ -6607,9 +6607,9 @@ def get_llm_provider( or get_secret("TOGETHERAI_API_KEY") or get_secret("TOGETHER_AI_TOKEN") ) - elif custom_llm_provider == "friendli_ai": + elif custom_llm_provider == "friendliai": api_base = "https://inference.friendli.ai/v1" - dynamic_api_key = get_secret("FRIENDLI_AI_API_KEY") or get_secret( + dynamic_api_key = get_secret("FRIENDLIAI_API_KEY") or get_secret( "FRIENDLI_TOKEN" ) if api_base is not None and not isinstance(api_base, 
str): @@ -6660,9 +6660,9 @@ def get_llm_provider( custom_llm_provider = "deepseek" dynamic_api_key = get_secret("DEEPSEEK_API_KEY") elif endpoint == "inference.friendli.ai/v1": - custom_llm_provider = "friendli_ai" + custom_llm_provider = "friendliai" dynamic_api_key = get_secret( - "FRIENDLI_AI_API_KEY" + "FRIENDLIAI_API_KEY" ) or get_secret("FRIENDLI_TOKEN") if api_base is not None and not isinstance(api_base, str):