diff --git a/cookbook/Proxy_Batch_Users.ipynb b/cookbook/Proxy_Batch_Users.ipynb
new file mode 100644
index 000000000..4e3c7ce92
--- /dev/null
+++ b/cookbook/Proxy_Batch_Users.ipynb
@@ -0,0 +1,203 @@
+{
+  "nbformat": 4,
+  "nbformat_minor": 0,
+  "metadata": {
+    "colab": {
+      "provenance": []
+    },
+    "kernelspec": {
+      "name": "python3",
+      "display_name": "Python 3"
+    },
+    "language_info": {
+      "name": "python"
+    }
+  },
+  "cells": [
+    {
+      "cell_type": "markdown",
+      "source": [
+        "# Environment Setup"
+      ],
+      "metadata": {
+        "id": "680oRk1af-xJ"
+      }
+    },
+    {
+      "cell_type": "code",
+      "source": [
+        "import csv\n",
+        "from typing import Optional\n",
+        "import httpx, json\n",
+        "import asyncio\n",
+        "\n",
+        "proxy_base_url = \"http://0.0.0.0:4000\" # 👈 SET TO PROXY URL\n",
+        "master_key = \"sk-1234\" # 👈 SET TO PROXY MASTER KEY"
+      ],
+      "metadata": {
+        "id": "X7TgJFn8f88p"
+      },
+      "execution_count": null,
+      "outputs": []
+    },
+    {
+      "cell_type": "code",
+      "source": [
+        "## GLOBAL HTTP CLIENT ## - faster http calls\n",
+        "class HTTPHandler:\n",
+        "    def __init__(self, concurrent_limit=1000):\n",
+        "        # Create a client with a connection pool\n",
+        "        self.client = httpx.AsyncClient(\n",
+        "            limits=httpx.Limits(\n",
+        "                max_connections=concurrent_limit,\n",
+        "                max_keepalive_connections=concurrent_limit,\n",
+        "            )\n",
+        "        )\n",
+        "\n",
+        "    async def close(self):\n",
+        "        # Close the client when you're done with it\n",
+        "        await self.client.aclose()\n",
+        "\n",
+        "    async def get(\n",
+        "        self, url: str, params: Optional[dict] = None, headers: Optional[dict] = None\n",
+        "    ):\n",
+        "        response = await self.client.get(url, params=params, headers=headers)\n",
+        "        return response\n",
+        "\n",
+        "    async def post(\n",
+        "        self,\n",
+        "        url: str,\n",
+        "        data: Optional[dict] = None,\n",
+        "        params: Optional[dict] = None,\n",
+        "        headers: Optional[dict] = None,\n",
+        "    ):\n",
+        "        try:\n",
+        "            response = await self.client.post(\n",
+        "                url, data=data, params=params, headers=headers\n",
+        "            )\n",
+        "            return response\n",
+        "        except Exception as e:\n",
+        "            raise e\n"
+      ],
+      "metadata": {
+        "id": "rauw8EOhgBz5"
+      },
+      "execution_count": null,
+      "outputs": []
+    },
+    {
+      "cell_type": "markdown",
+      "source": [
+        "# Import Sheet\n",
+        "\n",
+        "\n",
+        "Format: | ID | Name | Max Budget |\n\nNote: run the **Create Users + Keys** cell below first, so that `create_user` is defined before this cell calls it."
+      ],
+      "metadata": {
+        "id": "7LXN8zaLgOie"
+      }
+    },
+    {
+      "cell_type": "code",
+      "source": [
+        "async def import_sheet():\n",
+        "    tasks = []\n",
+        "    http_client = HTTPHandler()\n",
+        "    with open('my-batch-sheet.csv', 'r') as file:\n",
+        "        csv_reader = csv.DictReader(file)\n",
+        "        for row in csv_reader:\n",
+        "            task = create_user(client=http_client, user_id=row['ID'], max_budget=float(row['Max Budget']), user_name=row['Name'])\n",
+        "            tasks.append(task)\n",
+        "            # print(f\"ID: {row['ID']}, Name: {row['Name']}, Max Budget: {row['Max Budget']}\")\n",
+        "\n",
+        "    keys = await asyncio.gather(*tasks)\n",
+        "\n",
+        "    with open('my-batch-sheet_new.csv', 'w', newline='') as new_file:\n",
+        "        fieldnames = ['ID', 'Name', 'Max Budget', 'keys']\n",
+        "        csv_writer = csv.DictWriter(new_file, fieldnames=fieldnames)\n",
+        "        csv_writer.writeheader()\n",
+        "\n",
+        "        with open('my-batch-sheet.csv', 'r') as file:\n",
+        "            csv_reader = csv.DictReader(file)\n",
+        "            for i, row in enumerate(csv_reader):\n",
+        "                row['keys'] = keys[i]  # Add the 'keys' value from the corresponding task result\n",
+        "                csv_writer.writerow(row)\n",
+        "\n",
+        "    await http_client.close()\n",
+        "\n",
+        "await import_sheet()  # notebook kernels already run an event loop, so use top-level await instead of asyncio.run()"
+      ],
+      "metadata": {
+        "id": "oiED0usegPGf"
+      },
+      "execution_count": null,
+      "outputs": []
+    },
+    {
+      "cell_type": "markdown",
+      "source": [
+        "# Create Users + Keys\n",
+        "\n",
+        "- Creates a user\n",
+        "- Creates a key with max budget"
+      ],
+      "metadata": {
+        "id": "E7M0Li_UgJeZ"
+      }
+    },
+    {
+      "cell_type": "code",
+      "execution_count": null,
+      "metadata": {
+        "id": "NZudRFujf7j-"
+      },
+      "outputs": [],
+      "source": [
+        "\n",
+        "async def create_key_with_alias(client: HTTPHandler, user_id: str, max_budget: float):\n",
+        "    global proxy_base_url\n",
+        "    if not proxy_base_url.endswith(\"/\"):\n",
+        "        proxy_base_url += \"/\"\n",
+        "    url = proxy_base_url + \"key/generate\"\n",
+        "\n",
+        "    # call /key/generate\n",
+        "    print(\"CALLING /KEY/GENERATE\")\n",
+        "    response = await client.post(\n",
+        "        url=url,\n",
+        "        headers={\"Authorization\": f\"Bearer {master_key}\"},\n",
+        "        data=json.dumps({\n",
+        "            \"user_id\": user_id,\n",
+        "            \"key_alias\": f\"{user_id}-key\",\n",
+        "            \"max_budget\": max_budget\n",
+        "        })\n",
+        "    )\n",
+        "    print(f\"response: {response.text}\")\n",
+        "    return response.json()[\"key\"]\n",
+        "\n",
+        "async def create_user(client: HTTPHandler, user_id: str, max_budget: float, user_name: str):\n",
+        "    \"\"\"\n",
+        "    - call /user/new\n",
+        "    - create key for user\n",
+        "    \"\"\"\n",
+        "    global proxy_base_url\n",
+        "    if not proxy_base_url.endswith(\"/\"):\n",
+        "        proxy_base_url += \"/\"\n",
+        "    url = proxy_base_url + \"user/new\"\n",
+        "\n",
+        "    # call /user/new\n",
+        "    await client.post(\n",
+        "        url=url,\n",
+        "        headers={\"Authorization\": f\"Bearer {master_key}\"},\n",
+        "        data=json.dumps({\n",
+        "            \"user_id\": user_id,\n",
+        "            \"user_alias\": user_name,\n",
+        "            \"auto_create_key\": False\n",
+        "        })\n",
+        "    )\n",
+        "\n",
+        "    # create key for user\n",
+        "    return await create_key_with_alias(client=client, user_id=user_id, max_budget=max_budget)\n"
+      ]
+    }
+  ]
+}
\ No newline at end of file
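
For anyone trying the notebook above, here is a minimal sketch of the input file that `import_sheet()` expects. The filename `my-batch-sheet.csv` and the `ID` / `Name` / `Max Budget` headers come from the notebook itself; the rows below are made-up placeholder values.

```python
import csv

# Placeholder rows -- any user IDs, display names, and budgets work,
# as long as the header matches what import_sheet() reads.
rows = [
    {"ID": "u-001", "Name": "Alice", "Max Budget": 10.0},
    {"ID": "u-002", "Name": "Bob", "Max Budget": 25.0},
]

with open("my-batch-sheet.csv", "w", newline="") as f:
    writer = csv.DictWriter(f, fieldnames=["ID", "Name", "Max Budget"])
    writer.writeheader()
    writer.writerows(rows)
```

After `import_sheet()` completes, `my-batch-sheet_new.csv` should contain the same rows plus a `keys` column holding the proxy keys generated for each user.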
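As a quick sanity check that a generated key works, something like the sketch below can be used. It assumes the proxy exposes its usual OpenAI-compatible `/chat/completions` route and that a model named `gpt-3.5-turbo` is configured on the proxy; both the key placeholder and the model name need to be adjusted for the actual deployment.

```python
import openai

# "sk-..." stands in for one of the keys written to the `keys` column above;
# the model name must match whatever is configured on the proxy.
client = openai.OpenAI(api_key="sk-...", base_url="http://0.0.0.0:4000")

response = client.chat.completions.create(
    model="gpt-3.5-turbo",
    messages=[{"role": "user", "content": "hello from a batch-created user"}],
)
print(response.choices[0].message.content)
```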