forked from phoenix/litellm-mirror
change friendli_ai -> friendliai
This commit is contained in:
parent
b9d261ce21
commit
18cc703aa2
5 changed files with 17 additions and 17 deletions
|
@ -1,7 +1,7 @@
|
||||||
# FriendliAI
|
# FriendliAI
|
||||||
https://suite.friendli.ai/
|
https://suite.friendli.ai/
|
||||||
|
|
||||||
**We support ALL FriendliAI models, just set `friendli_ai/` as a prefix when sending completion requests**
|
**We support ALL FriendliAI models, just set `friendliai/` as a prefix when sending completion requests**
|
||||||
|
|
||||||
## API Key
|
## API Key
|
||||||
```python
|
```python
|
||||||
|
@ -16,7 +16,7 @@ import os
|
||||||
|
|
||||||
os.environ['FRIENDLI_TOKEN'] = ""
|
os.environ['FRIENDLI_TOKEN'] = ""
|
||||||
response = completion(
|
response = completion(
|
||||||
model="friendli_ai/mixtral-8x7b-instruct-v0-1",
|
model="friendliai/mixtral-8x7b-instruct-v0-1",
|
||||||
messages=[
|
messages=[
|
||||||
{"role": "user", "content": "hello from litellm"}
|
{"role": "user", "content": "hello from litellm"}
|
||||||
],
|
],
|
||||||
|
@ -31,7 +31,7 @@ import os
|
||||||
|
|
||||||
os.environ['FRIENDLI_TOKEN'] = ""
|
os.environ['FRIENDLI_TOKEN'] = ""
|
||||||
response = completion(
|
response = completion(
|
||||||
model="friendli_ai/mixtral-8x7b-instruct-v0-1",
|
model="friendliai/mixtral-8x7b-instruct-v0-1",
|
||||||
messages=[
|
messages=[
|
||||||
{"role": "user", "content": "hello from litellm"}
|
{"role": "user", "content": "hello from litellm"}
|
||||||
],
|
],
|
||||||
|
@ -44,10 +44,10 @@ for chunk in response:
|
||||||
|
|
||||||
|
|
||||||
## Supported Models - ALL FriendliAI Models Supported!
|
## Supported Models - ALL FriendliAI Models Supported!
|
||||||
We support ALL FriendliAI models, just set `friendli_ai/` as a prefix when sending completion requests
|
We support ALL FriendliAI models, just set `friendliai/` as a prefix when sending completion requests
|
||||||
|
|
||||||
| Model Name | Function Call |
|
| Model Name | Function Call |
|
||||||
|--------------------------|------------------------------------------------------------------------------------------------------------------------------------------------------------------|
|
|--------------------------|------------------------------------------------------------------------------------------------------------------------------------------------------------------|
|
||||||
| mixtral-8x7b-instruct | `completion(model="friendli_ai/mixtral-8x7b-instruct-v0-1", messages)` |
|
| mixtral-8x7b-instruct | `completion(model="friendliai/mixtral-8x7b-instruct-v0-1", messages)` |
|
||||||
| meta-llama-3-8b-instruct | `completion(model="friendli_ai/meta-llama-3-8b-instruct", messages)` |
|
| meta-llama-3-8b-instruct | `completion(model="friendliai/meta-llama-3-8b-instruct", messages)` |
|
||||||
| meta-llama-3-70b-instruct | `completion(model="friendli_ai/meta-llama-3-70b-instruct", messages)` |
|
| meta-llama-3-70b-instruct | `completion(model="friendliai/meta-llama-3-70b-instruct", messages)` |
|
||||||
|
|
|
@ -150,7 +150,7 @@ const sidebars = {
|
||||||
"providers/groq",
|
"providers/groq",
|
||||||
"providers/deepseek",
|
"providers/deepseek",
|
||||||
"providers/fireworks_ai",
|
"providers/fireworks_ai",
|
||||||
"providers/friendli_ai",
|
"providers/friendliai",
|
||||||
"providers/vllm",
|
"providers/vllm",
|
||||||
"providers/xinference",
|
"providers/xinference",
|
||||||
"providers/cloudflare_workers",
|
"providers/cloudflare_workers",
|
||||||
|
|
|
@ -406,7 +406,7 @@ openai_compatible_providers: List = [
|
||||||
"xinference",
|
"xinference",
|
||||||
"together_ai",
|
"together_ai",
|
||||||
"fireworks_ai",
|
"fireworks_ai",
|
||||||
"friendli_ai",
|
"friendliai",
|
||||||
]
|
]
|
||||||
|
|
||||||
|
|
||||||
|
@ -630,7 +630,7 @@ provider_list: List = [
|
||||||
"cloudflare",
|
"cloudflare",
|
||||||
"xinference",
|
"xinference",
|
||||||
"fireworks_ai",
|
"fireworks_ai",
|
||||||
"friendli_ai",
|
"friendliai",
|
||||||
"watsonx",
|
"watsonx",
|
||||||
"triton",
|
"triton",
|
||||||
"predibase",
|
"predibase",
|
||||||
|
|
|
@ -1051,7 +1051,7 @@ def completion(
|
||||||
# note: if a user sets a custom base - we should ensure this works
|
# note: if a user sets a custom base - we should ensure this works
|
||||||
# allow for the setting of dynamic and stateful api-bases
|
# allow for the setting of dynamic and stateful api-bases
|
||||||
api_base = (
|
api_base = (
|
||||||
api_base # for deepinfra/perplexity/anyscale/groq/friendli_ai we check in get_llm_provider and pass in the api base from there
|
api_base # for deepinfra/perplexity/anyscale/groq/friendliai we check in get_llm_provider and pass in the api base from there
|
||||||
or litellm.api_base
|
or litellm.api_base
|
||||||
or get_secret("OPENAI_API_BASE")
|
or get_secret("OPENAI_API_BASE")
|
||||||
or "https://api.openai.com/v1"
|
or "https://api.openai.com/v1"
|
||||||
|
@ -1065,7 +1065,7 @@ def completion(
|
||||||
# set API KEY
|
# set API KEY
|
||||||
api_key = (
|
api_key = (
|
||||||
api_key
|
api_key
|
||||||
or litellm.api_key # for deepinfra/perplexity/anyscale/friendli_ai we check in get_llm_provider and pass in the api key from there
|
or litellm.api_key # for deepinfra/perplexity/anyscale/friendliai we check in get_llm_provider and pass in the api key from there
|
||||||
or litellm.openai_key
|
or litellm.openai_key
|
||||||
or get_secret("OPENAI_API_KEY")
|
or get_secret("OPENAI_API_KEY")
|
||||||
)
|
)
|
||||||
|
@ -4288,7 +4288,7 @@ def speech(
|
||||||
response: Optional[HttpxBinaryResponseContent] = None
|
response: Optional[HttpxBinaryResponseContent] = None
|
||||||
if custom_llm_provider == "openai":
|
if custom_llm_provider == "openai":
|
||||||
api_base = (
|
api_base = (
|
||||||
api_base # for deepinfra/perplexity/anyscale/groq/friendli_ai we check in get_llm_provider and pass in the api base from there
|
api_base # for deepinfra/perplexity/anyscale/groq/friendliai we check in get_llm_provider and pass in the api base from there
|
||||||
or litellm.api_base
|
or litellm.api_base
|
||||||
or get_secret("OPENAI_API_BASE")
|
or get_secret("OPENAI_API_BASE")
|
||||||
or "https://api.openai.com/v1"
|
or "https://api.openai.com/v1"
|
||||||
|
|
|
@ -6607,9 +6607,9 @@ def get_llm_provider(
|
||||||
or get_secret("TOGETHERAI_API_KEY")
|
or get_secret("TOGETHERAI_API_KEY")
|
||||||
or get_secret("TOGETHER_AI_TOKEN")
|
or get_secret("TOGETHER_AI_TOKEN")
|
||||||
)
|
)
|
||||||
elif custom_llm_provider == "friendli_ai":
|
elif custom_llm_provider == "friendliai":
|
||||||
api_base = "https://inference.friendli.ai/v1"
|
api_base = "https://inference.friendli.ai/v1"
|
||||||
dynamic_api_key = get_secret("FRIENDLI_AI_API_KEY") or get_secret(
|
dynamic_api_key = get_secret("FRIENDLIAI_API_KEY") or get_secret(
|
||||||
"FRIENDLI_TOKEN"
|
"FRIENDLI_TOKEN"
|
||||||
)
|
)
|
||||||
if api_base is not None and not isinstance(api_base, str):
|
if api_base is not None and not isinstance(api_base, str):
|
||||||
|
@ -6660,9 +6660,9 @@ def get_llm_provider(
|
||||||
custom_llm_provider = "deepseek"
|
custom_llm_provider = "deepseek"
|
||||||
dynamic_api_key = get_secret("DEEPSEEK_API_KEY")
|
dynamic_api_key = get_secret("DEEPSEEK_API_KEY")
|
||||||
elif endpoint == "inference.friendli.ai/v1":
|
elif endpoint == "inference.friendli.ai/v1":
|
||||||
custom_llm_provider = "friendli_ai"
|
custom_llm_provider = "friendliai"
|
||||||
dynamic_api_key = get_secret(
|
dynamic_api_key = get_secret(
|
||||||
"FRIENDLI_AI_API_KEY"
|
"FRIENDLIAI_API_KEY"
|
||||||
) or get_secret("FRIENDLI_TOKEN")
|
) or get_secret("FRIENDLI_TOKEN")
|
||||||
|
|
||||||
if api_base is not None and not isinstance(api_base, str):
|
if api_base is not None and not isinstance(api_base, str):
|
||||||
|
|
Loading…
Add table
Add a link
Reference in a new issue