Added compatibility guidance and related documentation for the xAI Grok model (#8282)

* Various updates

Signed-off-by: Zhaohan Dong <65422392+zhaohan-dong@users.noreply.github.com>

* Update xAI branding

Signed-off-by: Zhaohan Dong <65422392+zhaohan-dong@users.noreply.github.com>

* Revert changes

Signed-off-by: Zhaohan Dong <65422392+zhaohan-dong@users.noreply.github.com>

---------

Signed-off-by: Zhaohan Dong <65422392+zhaohan-dong@users.noreply.github.com>
Zhaohan Dong 2025-02-06 01:21:47 +00:00 committed by GitHub
parent fbe3c58372
commit 88e7046165
4 changed files with 5 additions and 2 deletions


@@ -8,6 +8,7 @@ Use `litellm.supports_function_calling(model="")` -> returns `True` if model sup
 assert litellm.supports_function_calling(model="gpt-3.5-turbo") == True
 assert litellm.supports_function_calling(model="azure/gpt-4-1106-preview") == True
 assert litellm.supports_function_calling(model="palm/chat-bison") == False
+assert litellm.supports_function_calling(model="xai/grok-2-latest") == True
 assert litellm.supports_function_calling(model="ollama/llama2") == False
 ```
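
For anyone trying the change locally, the new assertion can double as a guard before sending a tool-call request. The snippet below is a minimal sketch, not part of this diff: it assumes `XAI_API_KEY` holds a valid key and uses a hypothetical `get_weather` tool schema purely for illustration.

```python
# Minimal sketch (not part of this commit): gate a tool call on the capability check.
# Assumes XAI_API_KEY holds a valid key; the get_weather tool is hypothetical.
import os
import litellm

os.environ["XAI_API_KEY"] = "your-xai-api-key"  # placeholder

model = "xai/grok-2-latest"
tools = [{
    "type": "function",
    "function": {
        "name": "get_weather",
        "description": "Get the current weather for a city",
        "parameters": {
            "type": "object",
            "properties": {"city": {"type": "string"}},
            "required": ["city"],
        },
    },
}]

if litellm.supports_function_calling(model=model):
    response = litellm.completion(
        model=model,
        messages=[{"role": "user", "content": "What's the weather in Berlin?"}],
        tools=tools,
    )
    # tool_calls may be None if the model chose to answer directly
    print(response.choices[0].message.tool_calls)
else:
    print(f"{model} does not report function-calling support")
```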


@@ -44,6 +44,7 @@ Use `litellm.get_supported_openai_params()` for an updated list of params for ea
 |Anthropic| ✅ | ✅ | ✅ |✅ | ✅ | ✅ | ✅ | | | | | | |✅ | ✅ | | ✅ | ✅ | | | ✅ |
 |OpenAI| ✅ | ✅ | ✅ | ✅ | ✅ | ✅ | ✅ | ✅ | ✅ | ✅ | ✅ | ✅ |✅ | ✅ | ✅ | ✅ |✅ | ✅ | ✅ | ✅ | ✅ |
 |Azure OpenAI| ✅ | ✅ | ✅ | ✅ | ✅ | ✅ | ✅ | ✅ | ✅ | ✅ | ✅ | ✅ |✅ | ✅ | ✅ | ✅ |✅ | ✅ | | | ✅ |
+|xAI| ✅ | | ✅ | ✅ | ✅ | ✅ | ✅ | ✅ | ✅ | ✅ | | | ✅ | ✅ | ✅ | ✅ | ✅ | ✅ | ✅ | ✅ | |
 |Replicate | ✅ | ✅ | ✅ | ✅ | ✅ | ✅ | | | | | |
 |Anyscale | ✅ | ✅ | ✅ | ✅ | ✅ | ✅ |
 |Cohere| ✅ | ✅ | ✅ | ✅ | ✅ | ✅ | ✅ | ✅ | ✅ | | |
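
As a quick cross-check on the new xAI row, `litellm.get_supported_openai_params()` (referenced in the hunk header above) can be queried for the model directly. A small sketch, assuming the return value is a list of OpenAI-style parameter names:

```python
# Sketch: list the OpenAI-compatible params litellm reports for the new xAI entry.
import litellm

params = litellm.get_supported_openai_params(model="xai/grok-2-latest")
print(params)  # expected to include entries such as "temperature" and "tools"
```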


@@ -1,13 +1,13 @@
 import Tabs from '@theme/Tabs';
 import TabItem from '@theme/TabItem';
-# XAI
+# xAI
 https://docs.x.ai/docs
 :::tip
-**We support ALL XAI models, just set `model=xai/<any-model-on-xai>` as a prefix when sending litellm requests**
+**We support ALL xAI models, just set `model=xai/<any-model-on-xai>` as a prefix when sending litellm requests**
 :::
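
The `xai/<any-model-on-xai>` prefix mentioned in the tip is all the routing needs. A minimal sketch of a chat completion through litellm, assuming `XAI_API_KEY` is exported:

```python
# Sketch: the xai/ prefix tells litellm to route this call to x.ai.
import os
import litellm

os.environ["XAI_API_KEY"] = "your-xai-api-key"  # placeholder

response = litellm.completion(
    model="xai/grok-2-latest",
    messages=[{"role": "user", "content": "Hello from litellm"}],
)
print(response.choices[0].message.content)
```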


@@ -30,6 +30,7 @@ import os
 # Set OpenAI API key
 os.environ["OPENAI_API_KEY"] = "Your API Key"
 os.environ["ANTHROPIC_API_KEY"] = "Your API Key"
+os.environ["XAI_API_KEY"] = "Your API Key"
 os.environ["REPLICATE_API_KEY"] = "Your API Key"
 os.environ["TOGETHERAI_API_KEY"] = "Your API Key"
 ```