diff --git a/docs/my-website/docs/completion/prefix.md b/docs/my-website/docs/completion/prefix.md
new file mode 100644
index 0000000000..e3619a2a02
--- /dev/null
+++ b/docs/my-website/docs/completion/prefix.md
@@ -0,0 +1,119 @@
+import Tabs from '@theme/Tabs';
+import TabItem from '@theme/TabItem';
+
+# Pre-fix Assistant Messages
+
+Supported by:
+- Deepseek
+- Mistral
+- Anthropic
+
+```python
+{
+ "role": "assistant",
+ "content": "..",
+ ...
+ "prefix": true # 👈 KEY CHANGE
+}
+```
+
+## Quick Start
+
+<Tabs>
+<TabItem value="sdk" label="SDK">
+
+```python
+from litellm import completion
+import os
+
+os.environ["DEEPSEEK_API_KEY"] = ""
+
+response = completion(
+ model="deepseek/deepseek-chat",
+ messages=[
+ {"role": "user", "content": "Who won the world cup in 2022?"},
+ {"role": "assistant", "content": "Argentina", "prefix": True}
+ ]
+)
+print(response.choices[0].message.content)
+```
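+
+Prefix completion is also handy for constraining output format, e.g. forcing the model to continue inside a JSON code block and stopping at the closing fence. A minimal sketch (the prompt and `stop` value here are illustrative):
+
+```python
+from litellm import completion
+import os
+
+os.environ["DEEPSEEK_API_KEY"] = ""
+
+# Pre-fill the assistant message so the model continues inside a JSON block,
+# and stop generation when it emits the closing fence.
+response = completion(
+    model="deepseek/deepseek-chat",
+    messages=[
+        {"role": "user", "content": "List the 2022 world cup finalists as JSON."},
+        {"role": "assistant", "content": "```json\n", "prefix": True},
+    ],
+    stop=["```"],
+)
+print(response.choices[0].message.content)
+```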
+
+</TabItem>
+<TabItem value="proxy" label="PROXY">
+
+```bash
+curl http://0.0.0.0:4000/v1/chat/completions \
+ -H "Content-Type: application/json" \
+ -H "Authorization: Bearer $LITELLM_KEY" \
+ -d '{
+ "model": "deepseek/deepseek-chat",
+ "messages": [
+ {
+ "role": "user",
+ "content": "Who won the world cup in 2022?"
+ },
+ {
+ "role": "assistant",
+ "content": "Argentina", "prefix": true
+ }
+ ]
+}'
+```
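+
+The same proxy request can be sent from Python with any HTTP client. A minimal sketch using `requests`, assuming the proxy runs at `http://0.0.0.0:4000` and `LITELLM_KEY` is set in the environment:
+
+```python
+import os
+
+import requests
+
+resp = requests.post(
+    "http://0.0.0.0:4000/v1/chat/completions",
+    headers={"Authorization": f"Bearer {os.environ['LITELLM_KEY']}"},
+    json={
+        "model": "deepseek/deepseek-chat",
+        "messages": [
+            {"role": "user", "content": "Who won the world cup in 2022?"},
+            {"role": "assistant", "content": "Argentina", "prefix": True},
+        ],
+    },
+)
+print(resp.json()["choices"][0]["message"]["content"])
+```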
+
+</TabItem>
+</Tabs>
+
+**Expected Response**
+
+```json
+{
+ "id": "3b66124d79a708e10c603496b363574c",
+ "choices": [
+ {
+ "finish_reason": "stop",
+ "index": 0,
+ "message": {
+ "content": " won the FIFA World Cup in 2022.",
+ "role": "assistant",
+ "tool_calls": null,
+ "function_call": null
+ }
+ }
+ ],
+ "created": 1723323084,
+ "model": "deepseek/deepseek-chat",
+ "object": "chat.completion",
+ "system_fingerprint": "fp_7e0991cad4",
+ "usage": {
+ "completion_tokens": 12,
+ "prompt_tokens": 16,
+    "total_tokens": 28
+ },
+ "service_tier": null
+}
+```
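+
+Note that `content` in the response holds only the continuation; the prefix you supplied is not echoed back. A small sketch of reassembling the full answer from the Quick Start example:
+
+```python
+# The prefix is not returned in the completion, so prepend it yourself.
+prefix = "Argentina"
+full_answer = prefix + response.choices[0].message.content
+print(full_answer)  # "Argentina won the FIFA World Cup in 2022."
+```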
+
+## Check Model Support
+
+Call `litellm.get_model_info` to check if a model/provider supports assistant prefill (`prefix` messages).
+
+<Tabs>
+<TabItem value="sdk" label="SDK">
+
+```python
+from litellm import get_model_info
+
+params = get_model_info(model="deepseek/deepseek-chat")
+
+assert params["supports_assistant_prefill"] is True
+```
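+
+To check several candidate models at once, a rough sketch (the model names are illustrative; `get_model_info` raises for models that aren't in the model map):
+
+```python
+from litellm import get_model_info
+
+candidates = [
+    "deepseek/deepseek-chat",
+    "mistral/mistral-large-latest",
+    "anthropic/claude-3-5-sonnet-20240620",
+]
+
+for model in candidates:
+    try:
+        info = get_model_info(model=model)
+        # supports_assistant_prefill is the flag behind `prefix` support
+        print(model, "->", info.get("supports_assistant_prefill", False))
+    except Exception:
+        print(model, "-> not in the litellm model map")
+```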
+
+</TabItem>
+<TabItem value="proxy" label="PROXY">
+
+Call the `/model/info` endpoint to get a list of models + their supported params.
+
+```bash
+curl -X GET 'http://0.0.0.0:4000/v1/model/info' \
+-H "Authorization: Bearer $LITELLM_KEY"
+```
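+
+To filter that list down to models that support prefix messages from Python, a rough sketch (the response shape is assumed here to be a `data` list with a `model_info` dict per entry; it may vary by LiteLLM version):
+
+```python
+import os
+
+import requests
+
+resp = requests.get(
+    "http://0.0.0.0:4000/v1/model/info",
+    headers={"Authorization": f"Bearer {os.environ['LITELLM_KEY']}"},
+)
+
+# Assumed shape: {"data": [{"model_name": ..., "model_info": {...}}, ...]}
+for entry in resp.json().get("data", []):
+    model_info = entry.get("model_info") or {}
+    if model_info.get("supports_assistant_prefill"):
+        print(entry.get("model_name"))
+```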
+
+</TabItem>
+</Tabs>
\ No newline at end of file
diff --git a/docs/my-website/sidebars.js b/docs/my-website/sidebars.js
index be1ef96213..7df5e61578 100644
--- a/docs/my-website/sidebars.js
+++ b/docs/my-website/sidebars.js
@@ -162,6 +162,7 @@ const sidebars = {
"completion/input",
"completion/provider_specific_params",
"completion/json_mode",
+ "completion/prefix",
"completion/drop_params",
"completion/prompt_formatting",
"completion/output",
diff --git a/litellm/proxy/_experimental/out/404.html b/litellm/proxy/_experimental/out/404.html
deleted file mode 100644
index a8e00954eb..0000000000
--- a/litellm/proxy/_experimental/out/404.html
+++ /dev/null
@@ -1 +0,0 @@
-404: This page could not be found.LiteLLM Dashboard404This page could not be found.
\ No newline at end of file
diff --git a/litellm/proxy/_experimental/out/model_hub.html b/litellm/proxy/_experimental/out/model_hub.html
deleted file mode 100644
index 2ddd137b18..0000000000
--- a/litellm/proxy/_experimental/out/model_hub.html
+++ /dev/null
@@ -1 +0,0 @@
-LiteLLM Dashboard
\ No newline at end of file
diff --git a/litellm/proxy/_experimental/out/onboarding.html b/litellm/proxy/_experimental/out/onboarding.html
deleted file mode 100644
index 872d3c9573..0000000000
--- a/litellm/proxy/_experimental/out/onboarding.html
+++ /dev/null
@@ -1 +0,0 @@
-LiteLLM Dashboard
\ No newline at end of file
diff --git a/litellm/utils.py b/litellm/utils.py
index af5d371dcc..30c9347616 100644
--- a/litellm/utils.py
+++ b/litellm/utils.py
@@ -4585,7 +4585,7 @@ def get_llm_provider(
api_base = (
api_base
or get_secret("DEEPSEEK_API_BASE")
- or "https://api.deepseek.com/v1"
+ or "https://api.deepseek.com/beta"
) # type: ignore
dynamic_api_key = api_key or get_secret("DEEPSEEK_API_KEY")
elif custom_llm_provider == "fireworks_ai":