From 573515ec3d9aded7ee76b59cad793bd5057effe8 Mon Sep 17 00:00:00 2001
From: ishaan-jaff
Date: Wed, 14 Feb 2024 11:34:18 -0800
Subject: [PATCH] (docs) add moderations endpoint to docs

---
 litellm/proxy/proxy_server.py | 11 +++++++++++
 1 file changed, 11 insertions(+)

diff --git a/litellm/proxy/proxy_server.py b/litellm/proxy/proxy_server.py
index 9b99d08c7..23c25b081 100644
--- a/litellm/proxy/proxy_server.py
+++ b/litellm/proxy/proxy_server.py
@@ -2814,6 +2814,17 @@ async def moderations(
     request: Request,
     user_api_key_dict: UserAPIKeyAuth = Depends(user_api_key_auth),
 ):
+    """
+    The moderations endpoint is a tool you can use to check whether content complies with an LLM provider's policies.
+
+    Quick Start
+    ```
+    curl --location 'http://0.0.0.0:4000/moderations' \
+    --header 'Content-Type: application/json' \
+    --header 'Authorization: Bearer sk-1234' \
+    --data '{"input": "Sample text goes here", "model": "text-moderation-stable"}'
+    ```
+    """
     global proxy_logging_obj
     try:
         # Use orjson to parse JSON data, orjson speeds up requests significantly
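
For reference, here is a minimal sketch of hitting the same endpoint through the OpenAI Python SDK pointed at the LiteLLM proxy. The base URL, API key, and model mirror the curl quick start in the docstring above and are placeholders, not values prescribed by this patch.

```
# Minimal sketch: call the proxy's /moderations route via the OpenAI SDK.
# base_url, api_key, and model are assumptions taken from the curl example;
# adjust them to your own proxy deployment.
from openai import OpenAI

client = OpenAI(
    base_url="http://0.0.0.0:4000",  # LiteLLM proxy address (assumed)
    api_key="sk-1234",               # proxy virtual key (assumed)
)

response = client.moderations.create(
    input="Sample text goes here",
    model="text-moderation-stable",
)
print(response)
```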