diff --git a/docs/my-website/docs/simple_proxy.md b/docs/my-website/docs/simple_proxy.md
index 7fac284213..a511ae0f77 100644
--- a/docs/my-website/docs/simple_proxy.md
+++ b/docs/my-website/docs/simple_proxy.md
@@ -1,4 +1,7 @@
+import Image from '@theme/IdealImage';
+
# OpenAI Proxy Server
+
A simple, fast, and lightweight **OpenAI-compatible server** to call 100+ LLM APIs.
[](https://deploy.cloud.run?git_repo=https://github.com/BerriAI/litellm)
@@ -62,4 +65,21 @@ Looking for the CLI tool/local proxy? It's [here](./proxy_server.md)
[](https://deploy.cloud.run?git_repo=https://github.com/BerriAI/litellm)
+### Set LLM API Keys
+#### Environment Variables
+More information on configuring environment variables is available [here](https://cloud.google.com/run/docs/configuring/services/environment-variables#console)
+
+1. In the Google Cloud console, go to Cloud Run: [Go to Cloud Run](https://console.cloud.google.com/run)
+
+2. Click on the **litellm** service
+
+
+3. Click **Edit and Deploy New Revision**
+
+
+4. Enter your Environment Variables
+For example: `OPENAI_API_KEY`, `ANTHROPIC_API_KEY`
+
+
+
diff --git a/docs/my-website/img/cloud_run1.png b/docs/my-website/img/cloud_run1.png
new file mode 100644
index 0000000000..93eba46555
Binary files /dev/null and b/docs/my-website/img/cloud_run1.png differ
diff --git a/docs/my-website/img/cloud_run2.png b/docs/my-website/img/cloud_run2.png
new file mode 100644
index 0000000000..44cfd55d33
Binary files /dev/null and b/docs/my-website/img/cloud_run2.png differ
diff --git a/docs/my-website/img/cloud_run3.png b/docs/my-website/img/cloud_run3.png
new file mode 100644
index 0000000000..a6844023f1
Binary files /dev/null and b/docs/my-website/img/cloud_run3.png differ