diff --git a/litellm/integrations/email_templates/templates.py b/litellm/integrations/email_templates/templates.py
new file mode 100644
index 0000000000..7029e8ce12
--- /dev/null
+++ b/litellm/integrations/email_templates/templates.py
@@ -0,0 +1,62 @@
+"""
+Email Templates used by the LiteLLM Email Service in slack_alerting.py
+"""
+
+KEY_CREATED_EMAIL_TEMPLATE = """
+
+    Hi {recipient_email},
+
+    I'm happy to provide you with an OpenAI Proxy API Key, loaded with ${key_budget} per month.
+
+    Key: {key_token}
+
+    import openai
+    client = openai.OpenAI(
+        api_key="{key_token}",
+        base_url={{base_url}}
+    )
+
+    response = client.chat.completions.create(
+        model="gpt-3.5-turbo", # model to send to the proxy
+        messages = [
+            {{
+                "role": "user",
+                "content": "this is a test request, write a short poem"
+            }}
+        ]
+    )
+
+    If you have any questions, please send an email to {email_support_contact}
+
+    Best,
+    The LiteLLM team
+"""
+
+
+USER_INVITED_EMAIL_TEMPLATE = """
+
+    Hi {recipient_email},
+
+    You were invited to use OpenAI Proxy API for team {team_name}
+
+    Get Started here
+
+    If you have any questions, please send an email to {email_support_contact}
+
+    Best,
+    The LiteLLM team
+"""
diff --git a/litellm/integrations/slack_alerting.py b/litellm/integrations/slack_alerting.py
index 0b62d6c690..8c271c6844 100644
--- a/litellm/integrations/slack_alerting.py
+++ b/litellm/integrations/slack_alerting.py
@@ -18,6 +18,7 @@ from litellm.proxy._types import WebhookEvent
import random
from typing import TypedDict
from openai import APIError
+from .email_templates.templates import *
import litellm.types
from litellm.types.router import LiteLLM_Params
@@ -68,67 +69,6 @@ class SlackAlertingArgsEnum(Enum):
max_outage_alert_list_size: int = 1 * 10
-KEY_CREATED_EMAIL_TEMPLATE = """
-
-    Hi {recipient_email},
-
-    I'm happy to provide you with an OpenAI Proxy API Key, loaded with ${key_budget} per month.
-
-    Key: {key_token}
-
-    import openai
-    client = openai.OpenAI(
-        api_key="{key_token}",
-        base_url={{base_url}}
-    )
-
-    response = client.chat.completions.create(
-        model="gpt-3.5-turbo", # model to send to the proxy
-        messages = [
-            {{
-                "role": "user",
-                "content": "this is a test request, write a short poem"
-            }}
-        ]
-    )
-
-    If you have any questions, please send an email to {email_support_contact}
-
-    Best,
-    The LiteLLM team
-"""
-
-
-USER_INVITED_EMAIL_TEMPLATE = """
-
-    Hi {recipient_email},
-
-    You were invited to use OpenAI Proxy API for team {team_name}
-
-    Get Started here
-
-    If you have any questions, please send an email to {email_support_contact}
-
-    Best,
-    The LiteLLM team
-"""
-
-
class SlackAlertingArgs(LiteLLMBase):
daily_report_frequency: int = Field(
default=int(