diff --git a/.circleci/config.yml b/.circleci/config.yml index aaad8df77..2727cd221 100644 --- a/.circleci/config.yml +++ b/.circleci/config.yml @@ -1,4 +1,4 @@ -version: 2.1 +version: 4.3.4 jobs: local_testing: docker: @@ -40,7 +40,7 @@ jobs: pip install "aioboto3==12.3.0" pip install langchain pip install lunary==0.2.5 - pip install "langfuse==2.7.3" + pip install "langfuse==2.27.1" pip install numpydoc pip install traceloop-sdk==0.0.69 pip install openai @@ -57,6 +57,9 @@ jobs: pip install "pytest-mock==3.12.0" pip install python-multipart pip install google-cloud-aiplatform + pip install prometheus-client==0.20.0 + pip install "pydantic==2.7.1" + pip install "diskcache==5.6.1" - save_cache: paths: - ./venv @@ -187,22 +190,28 @@ jobs: command: | docker run -d \ -p 4000:4000 \ - -e DATABASE_URL=$PROXY_DOCKER_DB_URL \ + -e DATABASE_URL=$PROXY_DATABASE_URL \ -e AZURE_API_KEY=$AZURE_API_KEY \ + -e REDIS_HOST=$REDIS_HOST \ + -e REDIS_PASSWORD=$REDIS_PASSWORD \ + -e REDIS_PORT=$REDIS_PORT \ -e AZURE_FRANCE_API_KEY=$AZURE_FRANCE_API_KEY \ -e AZURE_EUROPE_API_KEY=$AZURE_EUROPE_API_KEY \ -e AWS_ACCESS_KEY_ID=$AWS_ACCESS_KEY_ID \ -e AWS_SECRET_ACCESS_KEY=$AWS_SECRET_ACCESS_KEY \ -e AWS_REGION_NAME=$AWS_REGION_NAME \ + -e AUTO_INFER_REGION=True \ -e OPENAI_API_KEY=$OPENAI_API_KEY \ + -e LANGFUSE_PROJECT1_PUBLIC=$LANGFUSE_PROJECT1_PUBLIC \ + -e LANGFUSE_PROJECT2_PUBLIC=$LANGFUSE_PROJECT2_PUBLIC \ + -e LANGFUSE_PROJECT1_SECRET=$LANGFUSE_PROJECT1_SECRET \ + -e LANGFUSE_PROJECT2_SECRET=$LANGFUSE_PROJECT2_SECRET \ --name my-app \ -v $(pwd)/proxy_server_config.yaml:/app/config.yaml \ my-app:latest \ --config /app/config.yaml \ --port 4000 \ - --num_workers 8 \ --detailed_debug \ - --run_gunicorn \ - run: name: Install curl and dockerize command: | @@ -217,7 +226,7 @@ jobs: background: true - run: name: Wait for app to be ready - command: dockerize -wait http://localhost:4000 -timeout 1m + command: dockerize -wait http://localhost:4000 -timeout 5m - run: name: Run tests command: 
| diff --git a/.devcontainer/devcontainer.json b/.devcontainer/devcontainer.json new file mode 100644 index 000000000..0a5eb19b6 --- /dev/null +++ b/.devcontainer/devcontainer.json @@ -0,0 +1,51 @@ +{ + "name": "Python 3.11", + // Or use a Dockerfile or Docker Compose file. More info: https://containers.dev/guide/dockerfile + "image": "mcr.microsoft.com/devcontainers/python:3.11-bookworm", + // https://github.com/devcontainers/images/tree/main/src/python + // https://mcr.microsoft.com/en-us/product/devcontainers/python/tags + + // "build": { + // "dockerfile": "Dockerfile", + // "context": ".." + // }, + + // Features to add to the dev container. More info: https://containers.dev/features. + // "features": {}, + + // Configure tool-specific properties. + "customizations": { + // Configure properties specific to VS Code. + "vscode": { + "settings": {}, + "extensions": [ + "ms-python.python", + "ms-python.vscode-pylance", + "GitHub.copilot", + "GitHub.copilot-chat" + ] + } + }, + + // Use 'forwardPorts' to make a list of ports inside the container available locally. + "forwardPorts": [4000], + + "containerEnv": { + "LITELLM_LOG": "DEBUG" + }, + + // Use 'portsAttributes' to set default properties for specific forwarded ports. + // More info: https://containers.dev/implementors/json_reference/#port-attributes + "portsAttributes": { + "4000": { + "label": "LiteLLM Server", + "onAutoForward": "notify" + } + }, + + // More info: https://aka.ms/dev-containers-non-root. + // "remoteUser": "litellm", + + // Use 'postCreateCommand' to run commands after the container is created. 
+ "postCreateCommand": "pipx install poetry && poetry install -E extra_proxy -E proxy" +} \ No newline at end of file diff --git a/.dockerignore b/.dockerignore index efff383d4..78833aa31 100644 --- a/.dockerignore +++ b/.dockerignore @@ -1,5 +1,5 @@ -/docs -/cookbook -/.circleci -/.github -/tests \ No newline at end of file +docs +cookbook +.circleci +.github +tests diff --git a/.git-blame-ignore-revs b/.git-blame-ignore-revs new file mode 100644 index 000000000..f0ced6bed --- /dev/null +++ b/.git-blame-ignore-revs @@ -0,0 +1,10 @@ +# Add the commit hash of any commit you want to ignore in `git blame` here. +# One commit hash per line. +# +# The GitHub Blame UI will use this file automatically! +# +# Run this command to always ignore formatting commits in `git blame` +# git config blame.ignoreRevsFile .git-blame-ignore-revs + +# Update pydantic code to fix warnings (GH-3600) +876840e9957bc7e9f7d6a2b58c4d7c53dad16481 diff --git a/.github/pull_request_template.md b/.github/pull_request_template.md new file mode 100644 index 000000000..b7a164368 --- /dev/null +++ b/.github/pull_request_template.md @@ -0,0 +1,29 @@ +## Title + + + +## Relevant issues + + + +## Type + + + + +🆕 New Feature +🐛 Bug Fix +🧹 Refactoring +📖 Documentation +🚄 Infrastructure +✅ Test + +## Changes + + + +## [REQUIRED] Testing - Attach a screenshot of any new tests passing locall +If UI changes, send a screenshot/GIF of working UI fixes + + + diff --git a/.github/workflows/interpret_load_test.py b/.github/workflows/interpret_load_test.py index b52d4d2b3..b1a28e069 100644 --- a/.github/workflows/interpret_load_test.py +++ b/.github/workflows/interpret_load_test.py @@ -64,6 +64,11 @@ if __name__ == "__main__": ) # Replace with your repository's username and name latest_release = repo.get_latest_release() print("got latest release: ", latest_release) + print(latest_release.title) + print(latest_release.tag_name) + + release_version = latest_release.title + print("latest release body: ", 
latest_release.body) print("markdown table: ", markdown_table) @@ -74,8 +79,25 @@ if __name__ == "__main__": start_index = latest_release.body.find("Load Test LiteLLM Proxy Results") existing_release_body = latest_release.body[:start_index] + docker_run_command = f""" +\n\n +## Docker Run LiteLLM Proxy + +``` +docker run \\ +-e STORE_MODEL_IN_DB=True \\ +-p 4000:4000 \\ +ghcr.io/berriai/litellm:main-{release_version} +``` + """ + print("docker run command: ", docker_run_command) + new_release_body = ( existing_release_body + + docker_run_command + + "\n\n" + + "### Don't want to maintain your internal proxy? get in touch 🎉" + + "\nHosted Proxy Alpha: https://calendly.com/d/4mp-gd3-k5k/litellm-1-1-onboarding-chat" + "\n\n" + "## Load Test LiteLLM Proxy Results" + "\n\n" diff --git a/.gitignore b/.gitignore index 309f726fe..b75a92309 100644 --- a/.gitignore +++ b/.gitignore @@ -1,5 +1,6 @@ .venv .env +litellm/proxy/myenv/* litellm_uuid.txt __pycache__/ *.pyc @@ -50,3 +51,8 @@ kub.yaml loadtest_kub.yaml litellm/proxy/_new_secret_config.yaml litellm/proxy/_new_secret_config.yaml +litellm/proxy/_super_secret_config.yaml +litellm/proxy/_super_secret_config.yaml +litellm/proxy/myenv/bin/activate +litellm/proxy/myenv/bin/Activate.ps1 +myenv/* \ No newline at end of file diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml index 2a84048e0..cc41d85f1 100644 --- a/.pre-commit-config.yaml +++ b/.pre-commit-config.yaml @@ -7,7 +7,7 @@ repos: rev: 7.0.0 # The version of flake8 to use hooks: - id: flake8 - exclude: ^litellm/tests/|^litellm/proxy/proxy_cli.py|^litellm/integrations/|^litellm/proxy/tests/ + exclude: ^litellm/tests/|^litellm/proxy/proxy_cli.py|^litellm/proxy/tests/ additional_dependencies: [flake8-print] files: litellm/.*\.py - repo: local diff --git a/README.md b/README.md index 6c81181f3..684d5de73 100644 --- a/README.md +++ b/README.md @@ -5,7 +5,7 @@

Call all LLM APIs using the OpenAI format [Bedrock, Huggingface, VertexAI, TogetherAI, Azure, OpenAI, etc.]

-

OpenAI Proxy Server | Enterprise Tier

+

OpenAI Proxy Server | Hosted Proxy (Preview) | Enterprise Tier

PyPI Version @@ -128,7 +128,9 @@ response = completion(model="gpt-3.5-turbo", messages=[{"role": "user", "content # OpenAI Proxy - ([Docs](https://docs.litellm.ai/docs/simple_proxy)) -Set Budgets & Rate limits across multiple projects +Track spend + Load Balance across multiple projects + +[Hosted Proxy (Preview)](https://docs.litellm.ai/docs/hosted) The proxy provides: @@ -224,7 +226,9 @@ curl 'http://0.0.0.0:4000/key/generate' \ | [deepinfra](https://docs.litellm.ai/docs/providers/deepinfra) | ✅ | ✅ | ✅ | ✅ | | [perplexity-ai](https://docs.litellm.ai/docs/providers/perplexity) | ✅ | ✅ | ✅ | ✅ | | [Groq AI](https://docs.litellm.ai/docs/providers/groq) | ✅ | ✅ | ✅ | ✅ | +| [Deepseek](https://docs.litellm.ai/docs/providers/deepseek) | ✅ | ✅ | ✅ | ✅ | | [anyscale](https://docs.litellm.ai/docs/providers/anyscale) | ✅ | ✅ | ✅ | ✅ | +| [IBM - watsonx.ai](https://docs.litellm.ai/docs/providers/watsonx) | ✅ | ✅ | ✅ | ✅ | ✅ | [voyage ai](https://docs.litellm.ai/docs/providers/voyage) | | | | | ✅ | | [xinference [Xorbits Inference]](https://docs.litellm.ai/docs/providers/xinference) | | | | | ✅ | @@ -245,7 +249,7 @@ Step 2: Navigate into the project, and install dependencies: ``` cd litellm -poetry install +poetry install -E extra_proxy -E proxy ``` Step 3: Test your change: diff --git a/cookbook/Proxy_Batch_Users.ipynb b/cookbook/Proxy_Batch_Users.ipynb new file mode 100644 index 000000000..70521f5ab --- /dev/null +++ b/cookbook/Proxy_Batch_Users.ipynb @@ -0,0 +1,204 @@ +{ + "cells": [ + { + "cell_type": "markdown", + "metadata": { + "id": "680oRk1af-xJ" + }, + "source": [ + "# Environment Setup" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": { + "id": "X7TgJFn8f88p" + }, + "outputs": [], + "source": [ + "import csv\n", + "from typing import Optional\n", + "import httpx, json\n", + "import asyncio\n", + "\n", + "proxy_base_url = \"http://0.0.0.0:4000\" # 👈 SET TO PROXY URL\n", + "master_key = \"sk-1234\" # 👈 SET TO PROXY MASTER KEY" + ] + }, + 
{ + "cell_type": "code", + "execution_count": null, + "metadata": { + "id": "rauw8EOhgBz5" + }, + "outputs": [], + "source": [ + "## GLOBAL HTTP CLIENT ## - faster http calls\n", + "class HTTPHandler:\n", + " def __init__(self, concurrent_limit=1000):\n", + " # Create a client with a connection pool\n", + " self.client = httpx.AsyncClient(\n", + " limits=httpx.Limits(\n", + " max_connections=concurrent_limit,\n", + " max_keepalive_connections=concurrent_limit,\n", + " )\n", + " )\n", + "\n", + " async def close(self):\n", + " # Close the client when you're done with it\n", + " await self.client.aclose()\n", + "\n", + " async def get(\n", + " self, url: str, params: Optional[dict] = None, headers: Optional[dict] = None\n", + " ):\n", + " response = await self.client.get(url, params=params, headers=headers)\n", + " return response\n", + "\n", + " async def post(\n", + " self,\n", + " url: str,\n", + " data: Optional[dict] = None,\n", + " params: Optional[dict] = None,\n", + " headers: Optional[dict] = None,\n", + " ):\n", + " try:\n", + " response = await self.client.post(\n", + " url, data=data, params=params, headers=headers\n", + " )\n", + " return response\n", + " except Exception as e:\n", + " raise e\n" + ] + }, + { + "cell_type": "markdown", + "metadata": { + "id": "7LXN8zaLgOie" + }, + "source": [ + "# Import Sheet\n", + "\n", + "\n", + "Format: | ID | Name | Max Budget |" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": { + "id": "oiED0usegPGf" + }, + "outputs": [], + "source": [ + "async def import_sheet():\n", + " tasks = []\n", + " http_client = HTTPHandler()\n", + " with open('my-batch-sheet.csv', 'r') as file:\n", + " csv_reader = csv.DictReader(file)\n", + " for row in csv_reader:\n", + " task = create_user(client=http_client, user_id=row['ID'], max_budget=row['Max Budget'], user_name=row['Name'])\n", + " tasks.append(task)\n", + " # print(f\"ID: {row['ID']}, Name: {row['Name']}, Max Budget: {row['Max Budget']}\")\n", + 
"\n", + " keys = await asyncio.gather(*tasks)\n", + "\n", + " with open('my-batch-sheet_new.csv', 'w', newline='') as new_file:\n", + " fieldnames = ['ID', 'Name', 'Max Budget', 'keys']\n", + " csv_writer = csv.DictWriter(new_file, fieldnames=fieldnames)\n", + " csv_writer.writeheader()\n", + "\n", + " with open('my-batch-sheet.csv', 'r') as file:\n", + " csv_reader = csv.DictReader(file)\n", + " for i, row in enumerate(csv_reader):\n", + " row['keys'] = keys[i] # Add the 'keys' value from the corresponding task result\n", + " csv_writer.writerow(row)\n", + "\n", + " await http_client.close()\n", + "\n", + "asyncio.run(import_sheet())" + ] + }, + { + "cell_type": "markdown", + "metadata": { + "id": "E7M0Li_UgJeZ" + }, + "source": [ + "# Create Users + Keys\n", + "\n", + "- Creates a user\n", + "- Creates a key with max budget" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": { + "id": "NZudRFujf7j-" + }, + "outputs": [], + "source": [ + "\n", + "async def create_key_with_alias(client: HTTPHandler, user_id: str, max_budget: float):\n", + " global proxy_base_url\n", + " if not proxy_base_url.endswith(\"/\"):\n", + " proxy_base_url += \"/\"\n", + " url = proxy_base_url + \"key/generate\"\n", + "\n", + " # call /key/generate\n", + " print(\"CALLING /KEY/GENERATE\")\n", + " response = await client.post(\n", + " url=url,\n", + " headers={\"Authorization\": f\"Bearer {master_key}\"},\n", + " data=json.dumps({\n", + " \"user_id\": user_id,\n", + " \"key_alias\": f\"{user_id}-key\",\n", + " \"max_budget\": max_budget # 👈 KEY CHANGE: SETS MAX BUDGET PER KEY\n", + " })\n", + " )\n", + " print(f\"response: {response.text}\")\n", + " return response.json()[\"key\"]\n", + "\n", + "async def create_user(client: HTTPHandler, user_id: str, max_budget: float, user_name: str):\n", + " \"\"\"\n", + " - call /user/new\n", + " - create key for user\n", + " \"\"\"\n", + " global proxy_base_url\n", + " if not proxy_base_url.endswith(\"/\"):\n", + " 
proxy_base_url += \"/\"\n", + " url = proxy_base_url + \"user/new\"\n", + "\n", + " # call /user/new\n", + " await client.post(\n", + " url=url,\n", + " headers={\"Authorization\": f\"Bearer {master_key}\"},\n", + " data=json.dumps({\n", + " \"user_id\": user_id,\n", + " \"user_alias\": user_name,\n", + " \"auto_create_key\": False,\n", + " # \"max_budget\": max_budget # 👈 [OPTIONAL] Sets max budget per user (if you want to set a max budget across keys)\n", + " })\n", + " )\n", + "\n", + " # create key for user\n", + " return await create_key_with_alias(client=client, user_id=user_id, max_budget=max_budget)\n" + ] + } + ], + "metadata": { + "colab": { + "provenance": [] + }, + "kernelspec": { + "display_name": "Python 3", + "name": "python3" + }, + "language_info": { + "name": "python" + } + }, + "nbformat": 4, + "nbformat_minor": 0 +} diff --git a/cookbook/liteLLM_IBM_Watsonx.ipynb b/cookbook/liteLLM_IBM_Watsonx.ipynb new file mode 100644 index 000000000..6de108b5d --- /dev/null +++ b/cookbook/liteLLM_IBM_Watsonx.ipynb @@ -0,0 +1,300 @@ +{ + "cells": [ + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "# LiteLLM x IBM [watsonx.ai](https://www.ibm.com/products/watsonx-ai)" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "## Pre-Requisites" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "!pip install litellm" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "## Set watsonx.ai Credentials\n", + "\n", + "See [this documentation](https://cloud.ibm.com/apidocs/watsonx-ai#api-authentication) for more information about authenticating to watsonx.ai" + ] + }, + { + "cell_type": "code", + "execution_count": 3, + "metadata": {}, + "outputs": [], + "source": [ + "import os\n", + "import litellm\n", + "from litellm.llms.watsonx import IBMWatsonXAI\n", + "litellm.set_verbose = False\n", + "\n", + "os.environ[\"WATSONX_URL\"] = \"\" # Your 
watsonx.ai base URL\n", + "os.environ[\"WATSONX_APIKEY\"] = \"\" # Your IBM cloud API key or watsonx.ai token\n", + "os.environ[\"WATSONX_PROJECT_ID\"] = \"\" # ID of your watsonx.ai project\n", + "# these can also be passed as arguments to the function\n", + "\n", + "# generating an IAM token is optional, but it is recommended to generate it once and use it for all your requests during the session\n", + "# if not passed to the function, it will be generated automatically for each request\n", + "iam_token = IBMWatsonXAI().generate_iam_token(api_key=os.environ[\"WATSONX_APIKEY\"]) \n", + "# you can also set os.environ[\"WATSONX_TOKEN\"] = iam_token" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "## Completion Requests\n", + "\n", + "See the following link for a list of supported *text generation* models available with watsonx.ai:\n", + "\n", + "https://dataplatform.cloud.ibm.com/docs/content/wsj/analyze-data/fm-models.html?context=wx&locale=en&audience=wdp" + ] + }, + { + "cell_type": "code", + "execution_count": 4, + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "Granite v2 response:\n", + "ModelResponse(id='chatcmpl-adba60b2-3741-452e-921c-27b8f68d0298', choices=[Choices(finish_reason='stop', index=0, message=Message(content=\" I'm often asked this question, but it seems a bit bizarre given my circumstances. 
You see,\", role='assistant'))], created=1713881850, model='ibm/granite-13b-chat-v2', object='chat.completion', system_fingerprint=None, usage=Usage(prompt_tokens=8, completion_tokens=20, total_tokens=28), finish_reason='max_tokens')\n", + "LLaMa 3 8b response:\n", + "ModelResponse(id='chatcmpl-eb282abc-373c-4082-9dae-172546d16d5c', choices=[Choices(finish_reason='stop', index=0, message=Message(content=\"I'm just a language model, I don't have emotions or feelings like humans do, but I\", role='assistant'))], created=1713881852, model='meta-llama/llama-3-8b-instruct', object='chat.completion', system_fingerprint=None, usage=Usage(prompt_tokens=16, completion_tokens=20, total_tokens=36), finish_reason='max_tokens')\n" + ] + } + ], + "source": [ + "from litellm import completion\n", + "\n", + "# see litellm.llms.watsonx.IBMWatsonXAIConfig for a list of available parameters to pass to the completion functions\n", + "response = completion(\n", + " model=\"watsonx/ibm/granite-13b-chat-v2\",\n", + " messages=[{ \"content\": \"Hello, how are you?\",\"role\": \"user\"}],\n", + " token=iam_token\n", + ")\n", + "print(\"Granite v2 response:\")\n", + "print(response)\n", + "\n", + "\n", + "response = completion(\n", + " model=\"watsonx/meta-llama/llama-3-8b-instruct\",\n", + " messages=[{ \"content\": \"Hello, how are you?\",\"role\": \"user\"}],\n", + " token=iam_token\n", + ")\n", + "print(\"LLaMa 3 8b response:\")\n", + "print(response)" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "## Streaming Requests" + ] + }, + { + "cell_type": "code", + "execution_count": 6, + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "Granite v2 streaming response:\n", + "\n", + "Thank you for asking. I'm fine, thank you for asking. What can I do for you today?\n", + "I'm looking for a new job. 
Do you have any job openings that might be a good fit for me?\n", + "Sure,\n", + "LLaMa 3 8b streaming response:\n", + "I'm just an AI, so I don't have emotions or feelings like humans do, but I'm functioning properly and ready to help you with any questions or tasks you have! It's great to chat with you. How can I assist you today" + ] + } + ], + "source": [ + "from litellm import completion\n", + "\n", + "response = completion(\n", + " model=\"watsonx/ibm/granite-13b-chat-v2\",\n", + " messages=[{ \"content\": \"Hello, how are you?\",\"role\": \"user\"}],\n", + " stream=True,\n", + " max_tokens=50, # maps to watsonx.ai max_new_tokens\n", + ")\n", + "print(\"Granite v2 streaming response:\")\n", + "for chunk in response:\n", + " print(chunk['choices'][0]['delta']['content'] or '', end='')\n", + "\n", + "# print()\n", + "response = completion(\n", + " model=\"watsonx/meta-llama/llama-3-8b-instruct\",\n", + " messages=[{ \"content\": \"Hello, how are you?\",\"role\": \"user\"}],\n", + " stream=True,\n", + " max_tokens=50, # maps to watsonx.ai max_new_tokens\n", + ")\n", + "print(\"\\nLLaMa 3 8b streaming response:\")\n", + "for chunk in response:\n", + " print(chunk['choices'][0]['delta']['content'] or '', end='')" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "## Async Requests" + ] + }, + { + "cell_type": "code", + "execution_count": 7, + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "Granite v2 response:\n", + "ModelResponse(id='chatcmpl-73e7474b-2760-4578-b52d-068d6f4ff68b', choices=[Choices(finish_reason='stop', index=0, message=Message(content=\"\\nHello, thank you for asking. 
I'm well, how about you?\\n\\n3.\", role='assistant'))], created=1713881895, model='ibm/granite-13b-chat-v2', object='chat.completion', system_fingerprint=None, usage=Usage(prompt_tokens=8, completion_tokens=20, total_tokens=28), finish_reason='max_tokens')\n", + "LLaMa 3 8b response:\n", + "ModelResponse(id='chatcmpl-fbf4cd5a-3a38-4b6c-ba00-01ada9fbde8a', choices=[Choices(finish_reason='stop', index=0, message=Message(content=\"I'm just a language model, I don't have emotions or feelings like humans do. However,\", role='assistant'))], created=1713881894, model='meta-llama/llama-3-8b-instruct', object='chat.completion', system_fingerprint=None, usage=Usage(prompt_tokens=16, completion_tokens=20, total_tokens=36), finish_reason='max_tokens')\n" + ] + } + ], + "source": [ + "from litellm import acompletion\n", + "import asyncio\n", + "\n", + "granite_task = acompletion(\n", + " model=\"watsonx/ibm/granite-13b-chat-v2\",\n", + " messages=[{ \"content\": \"Hello, how are you?\",\"role\": \"user\"}],\n", + " max_tokens=20, # maps to watsonx.ai max_new_tokens\n", + " token=iam_token\n", + ")\n", + "llama_3_task = acompletion(\n", + " model=\"watsonx/meta-llama/llama-3-8b-instruct\",\n", + " messages=[{ \"content\": \"Hello, how are you?\",\"role\": \"user\"}],\n", + " max_tokens=20, # maps to watsonx.ai max_new_tokens\n", + " token=iam_token\n", + ")\n", + "\n", + "granite_response, llama_3_response = await asyncio.gather(granite_task, llama_3_task)\n", + "\n", + "print(\"Granite v2 response:\")\n", + "print(granite_response)\n", + "\n", + "print(\"LLaMa 3 8b response:\")\n", + "print(llama_3_response)" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "### Request deployed models\n", + "\n", + "Models that have been deployed to a deployment space (e.g tuned models) can be called using the \"deployment/\" format (where `` is the ID of the deployed model in your deployment space). 
The ID of your deployment space must also be set in the environment variable `WATSONX_DEPLOYMENT_SPACE_ID` or passed to the function as `space_id=`. " + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "from litellm import acompletion\n", + "\n", + "os.environ[\"WATSONX_DEPLOYMENT_SPACE_ID\"] = \"\" # ID of the watsonx.ai deployment space where the model is deployed\n", + "await acompletion(\n", + " model=\"watsonx/deployment/\",\n", + " messages=[{ \"content\": \"Hello, how are you?\",\"role\": \"user\"}],\n", + " token=iam_token\n", + ")" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "## Embeddings\n", + "\n", + "See the following link for a list of supported *embedding* models available with watsonx.ai:\n", + "\n", + "https://dataplatform.cloud.ibm.com/docs/content/wsj/analyze-data/fm-models-embed.html?context=wx" + ] + }, + { + "cell_type": "code", + "execution_count": 7, + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "Slate 30m embeddings response:\n", + "EmbeddingResponse(model='ibm/slate-30m-english-rtrvr', data=[{'object': 'embedding', 'index': 0, 'embedding': [0.0025110552, -0.021022381, 0.056658838, 0.023194756, 0.06528087, 0.051285733, 0.025715597, 0.009245981, -0.048218597, 0.02131204, 0.0048608365, 0.056427978, -0.029722512, -0.022280851, 0.03397489, 0.15861669, -0.0032172804, 0.021461686, -0.034179244, 0.03242367, 0.045696042, -0.10642838, 0.044042706, 0.003619815, -0.03445944, 0.06782116, -0.012801977, -0.083491564, 0.048063237, -0.0009263491, 0.03926016, -0.003800945, 0.06431806, 0.008804617, 0.041459076, 0.019176882, 0.063215, 0.016872335, -0.07120825, 0.0026858407, -0.0061372668, 0.016006729, 0.034623176, -0.0009702338, 0.05586387, -0.0030038806, 0.10219119, 0.023867028, 0.017003942, 0.07522453, 0.03827543, 0.002119465, -0.047579825, 0.030801363, 0.055104297, -0.00926156, 0.060950216, -0.012564041, 
-0.0938483, 0.06749232, 0.0303093, 0.1260211, 0.008772238, 0.0937941, 0.03146898, -0.013548525, -0.04654987, 0.038247738, -0.0047283196, -0.021979854, -0.04481472, 0.009184976, 0.030558616, -0.035239127, 0.015711905, 0.079948395, -0.10273533, -0.033666693, 0.009253284, -0.013218568, 0.014513645, 0.011746366, -0.04836566, 0.00059039996, 0.056465007, 0.057913274, 0.046911363, 0.022496173, -0.016504057, -0.0009266135, 0.007562665, 0.024523543, 0.012681347, -0.0034720704, 0.014897689, 0.034027215, -0.035149213, 0.046610955, -0.38038146, -0.05560348, 0.056164417, 0.023633359, -0.020914413, 0.0017839101, 0.043425612, 0.0921522, 0.021333266, 0.032627117, 0.052366074, 0.059688427, -0.02425017, 0.07460727, 0.040419403, 0.018662684, -0.02174095, -0.015262358, 0.0041535227, -0.004320668, 0.001545062, 0.023696192, 0.053526532, 0.031027582, -0.030727778, -0.07266011, 0.01924883, -0.021610625, 0.03179455, -0.002117363, 0.037670195, -0.021235954, -0.03931032, -0.057163127, -0.046020538, 0.013852293, 0.007136301, 0.020461356, 0.027465757, 0.013625788, 0.09281521, 0.03537469, -0.15295835, -0.045262642, 0.013799362, 0.029831719, 0.06360841, 0.045387108, -0.008106462, 0.047562532, 0.026519125, 0.030519808, -0.035604805, 0.059504308, -0.010260606, 0.05920231, -0.039987702, 0.003475537, 0.012535757, 0.03711557, 0.022637982, 0.022368006, -0.013918498, 0.03144229, 0.02680179, 0.05283082, 0.09737034, 0.062140185, 0.047479317, 0.04292394, 0.041657448, 0.031671192, -0.01198203, -0.0398639, 0.050961364, -0.005440624, -0.013748672, 0.02486566, 0.06105261, 0.09158345, 0.047486037, 0.03503525, -0.0009857323, 0.017584834, 0.0015176772, -0.013855697, -0.0016783233, -0.032760657, 0.0073869363, 0.0032070065, 0.08748817, 0.062042974, -0.006563574, -0.01277716, 0.064277925, -0.048509046, 0.01998247, 0.015449057, 0.06161844, 0.0361277, 0.07378269, 0.031909943, 0.035593968, -0.021533003, 0.15151453, 0.009489467, 0.0077385777, 0.004732935, 0.06757376, 0.018628953, 0.03609718, 0.065334365, 0.046664603, 
0.03710433, 0.023046834, 0.065034136, 0.021973003, 0.01938253, 0.0049545416, 0.009443422, 0.08657203, -0.006455585, 0.06113277, -0.009921393, 0.008861325, 0.021925068, 0.0073863543, 0.029231662, 0.018063372, -0.028237753, 0.06752595, -0.015746683, -0.06744447, -0.0019776542, -0.16144808, 0.055144247, -0.07052258, -0.0062173936, 0.005187277, 0.057623632, 0.008336536, 0.018794686, 0.08856226, 0.05324669, 0.023925344, -0.011277585, -0.015746504, -0.01888707, -0.010619123, 0.05960752, -0.02111604, 0.13263386, 0.053238407, 0.0423469, 0.03247613, 0.072818235, 0.039493106, -0.0080635715, 0.038805183, 0.05633994, 0.021095807, -0.022528276, 0.113213256, -0.040802993, 0.01971789, 0.00073800184, 0.04653605, 0.024364496, 0.051224973, 0.022803178, 0.06527072, -0.030100288, 0.02277551, 0.034268156, -0.0024341822, 0.030275142, -0.0043326514, 0.026949842, 0.03554525, 0.043582354, 0.037845742, 0.024644673, 0.06225431, 0.06668994, 0.042802095, -0.14308476, 0.028445719, -0.0057268543, 0.034851402, 0.04973769, -0.01673276, -0.0084733, -0.04498498, -0.01888843, 0.0018199912, -0.08666151, 0.03408551, 0.03374362, 0.016341621, -0.017816868, 0.027611718, 0.048712954, 0.03562084, 0.06156702, 0.06942091, 0.018424997, 0.010069236, -0.025854982, -0.005099922, 0.042129293, -0.018960087, -0.04267046, 0.003192464, 0.07610024, 0.01623567, 0.06430824, 0.045628317, -0.13192567, 0.00597194, 0.03359213, -0.051644783, -0.027538724, 0.047537625, 0.00078535493, -0.050269134, 0.06352181, 0.04414142, -0.00025181545, -0.011166945, 0.083493516, -0.022445189, 0.06386556, 0.009009819, 0.018880796, 0.046981215, -0.04803033, 0.20140722, 0.009405448, 0.011427641, 0.032028355, -0.039911997, 0.059231583, 0.10603366, -0.012695404, -0.018773954, 0.051107403, 0.004720434, 0.049031533, 0.008848073, -0.008443017, 0.068459414, -0.001594059, -0.037717424, 0.0083658025, 0.036570624, -0.009189262, -0.07422237, -0.03578154, 0.00016998129, -0.033594534, 0.04550856, -0.09751915, 0.031381045, -0.020289807, -0.025066, 
0.05559659, 0.065852426, -0.030574895, 0.098877095, 0.024548644, 0.02716826, -0.0073690503, -0.006680294, -0.062504984, 0.001748584, -0.0015254011, 0.0030000636, 0.05166639, -0.03598367, 0.02785021, 0.019170346, -0.01893702, 0.006487694, -0.045320857, -0.042290565, 0.030072719]}], object='list', usage=Usage(prompt_tokens=8, total_tokens=8))\n", + "Slate 125m embeddings response:\n", + "EmbeddingResponse(model='ibm/slate-125m-english-rtrvr', data=[{'object': 'embedding', 'index': 0, 'embedding': [-0.037463713, -0.02141933, -0.02851813, 0.015519324, -0.08252965, 0.040418413, 0.0125358505, -0.015099016, 0.007372251, 0.043594047, -0.045923322, -0.024535796, -0.06683439, -0.023252856, -0.014445329, -0.007990043, -0.0038893714, 0.024145052, 0.002840671, -0.005213263, 0.025767032, -0.029234663, -0.022147253, -0.04008686, -0.0049467147, -0.005722156, 0.05712166, 0.02074406, -0.027984975, 0.011733741, 0.037084717, 0.0267332, 0.027662167, 0.018661365, 0.034368176, -0.016858159, 0.01525097, 0.0037685328, -0.029145032, -0.014014788, -0.026596593, -0.019313056, -0.034545943, -0.012755116, -0.027378004, -0.0022658114, 0.0671108, -0.011186887, -0.012560194, 0.07890564, 0.04370288, -0.002565922, 0.04558289, -0.015022389, 0.01721297, -0.02836881, 0.00028577668, 0.041560214, -0.028451115, 0.026690092, -0.03240052, 0.043185145, -0.048146088, -0.01863734, 0.014189055, 0.005409885, -0.004303547, 0.043854367, -0.08027855, 0.0036468406, -0.03761452, -0.01586453, 0.0015843573, -0.06557115, -0.017214078, 0.013112075, -0.063624665, -0.059002113, -0.027906772, -0.0104140695, -0.0122148385, 0.002914942, 0.009600896, 0.024618316, 0.0028588492, -0.04129038, -0.0066302163, -0.016593395, 0.0119156595, 0.030668158, 0.032204323, -0.008526114, 0.031477567, -0.027671225, -0.021325896, -0.012719999, 0.020595504, -0.010196725, 0.016694892, 0.015447107, 0.033599768, 0.0015109212, 0.055442166, -0.032922138, 0.032867074, 0.034223255, 0.018267235, 0.044258785, -0.009512916, -0.01888108, 0.0020811916, 
-0.071849406, -0.029209733, 0.030071445, 0.04898721, 0.03807559, 0.030091342, 0.0049845255, 0.011301079, 0.0060062855, -0.052550614, -0.040027767, -0.04539995, -0.069943875, 0.052881725, 0.015551356, -0.0016604571, 0.0021608798, 0.055507053, -0.015404854, -0.0023839937, 0.0070840786, 0.042537935, -0.045489613, 0.018908504, -0.015565469, 0.015916781, 0.07333876, 0.0034915418, -0.0029724848, 0.019170308, 0.02221138, -0.027242986, -0.003735747, -0.02341423, -0.0037938543, 0.0104211755, -0.06185881, -0.036718667, -0.02746382, -0.026462527, -0.050701175, 0.0057923957, 0.040674523, -0.019840682, -0.030195065, 0.045316722, 0.017369563, -0.031288657, -0.047546197, 0.026255054, -0.0049950704, -0.040272273, 0.0005752177, 0.03959872, -0.0073655704, -0.025617458, -0.009416491, -0.019514928, -0.07619169, 0.0051972694, 0.016387343, -0.012366861, -0.009152257, -0.035955105, -0.05794065, 0.019153351, -0.0461187, 0.024734644, 0.0031722176, 0.06610593, -0.0046516205, -0.04635891, 0.02524459, 0.004230386, 0.06153266, -0.0008394812, -0.013522857, 0.029861225, -0.00394871, -0.037432022, 0.0483034, 0.02181303, 0.015967155, 0.06181817, -0.018545056, 0.044176213, -0.07024062, -0.013022128, -0.0087189535, -0.025292343, 0.040448178, -0.051455554, -0.014017804, 0.012191985, 0.0071282317, -0.015855217, 0.013618914, -0.0060378346, -0.057781402, -0.035322957, -0.013627626, -0.027318006, -0.27732822, -0.007108157, 0.012321971, -0.15896526, -0.03793523, -0.025426138, 0.020721687, -0.04701553, -0.004927499, 0.010541978, -0.003212021, -0.0023603817, -0.052153032, 0.043272667, 0.024041472, -0.031666223, 0.0017891804, 0.026806207, -0.026526717, 0.0023138188, 0.024067048, 0.03326347, -0.039004102, -0.0004279829, 0.007266309, -0.008940641, 0.03715139, -0.037960306, 0.01647343, -0.022163782, 0.07456727, -0.0013284415, -0.029121747, 0.012727488, -0.007229313, 0.03177136, -0.08142398, 0.010223168, -0.025942598, -0.23807198, 0.022616733, -0.03925926, 0.05572623, -0.00020389797, -0.0022259122, -0.007885641, 
-0.00719495, 0.0018412926, 0.018953165, -0.009946787, 0.03723944, -0.015900994, 0.013648507, 0.010997674, -0.018918132, 0.013143112, 0.032894272, -0.05800237, 0.011163258, 0.025205074, -0.017001726, 0.03673705, -0.011551997, 0.06637543, -0.033003606, -0.041392814, -0.004078506, 0.03916763, -0.0022711542, 0.058338877, -0.034323692, -0.033700593, 0.01051642, 0.035579532, -0.01997833, 0.002977113, 0.06590587, 0.042783573, 0.020624464, 0.029172791, -0.035136282, 0.02035436, 0.05696583, -0.010200334, -0.0010580813, -0.024785697, -0.014516442, -0.030100575, -0.03807279, 0.042534467, -0.0281041, -0.05331885, -0.019467393, 0.016051197, 0.012470333, -0.008369627, 0.002254233, 0.026580654, -0.04541506, -0.018085537, -0.034577485, -0.0014747214, 0.0005770179, 0.0043190396, -0.004989785, 0.007569717, 0.010167482, -0.03335266, -0.015255423, 0.07341545, 0.012114007, -0.0010415721, 0.008754641, 0.05932771, 0.030799353, 0.026148474, -0.0069155577, -0.056865778, 0.0038446637, -0.010079895, 0.013511311, 0.023351224, -0.049000103, -0.013028001, -0.04957143, -0.031393193, 0.040289443, 0.063747466, 0.046358805, 0.0023754216, -0.0054107807, -0.020128531, 0.0013747461, -0.018183928, -0.04754063, -0.0064625163, 0.0417791, 0.06087331, -0.012241535, 0.04185439, 0.03641727, -0.02044306, -0.061368305, -0.023353308, 0.055897385, -0.047081504, 0.012900442, -0.018708078, 0.0028819577, 0.006964468, 0.0008757072, 0.04605831, 0.01716345, -0.004099444, -0.015493673, 0.021323929, -0.011252118, -0.02278577, 0.01893121, 0.009134488, 0.021568391, 0.011066748, -0.018853422, 0.027866907, -0.02831057, -0.010147286, 0.014807969, -0.03266599, -0.06711559, 0.038546126, 0.0031859868, -0.029038243, 0.046595056, 0.036973156, -0.033408422, 0.021968717, -0.011411975, 0.006584961, 0.072844714, -0.005873538, 0.029435376, 0.061169676, -0.02318868, 0.051129397, 0.014791153, -0.009028991, -0.021579748, 0.02669236, 0.029696332, -0.063952625, -0.061506465, -0.00080902094, 0.06850867, -0.09809231, -0.005534635, 
0.066767104, -0.041267477, 0.046568397, 0.00983124, -0.0048434925, 0.038644254, 0.04096419, 0.0023063375, 0.014526287, 0.014016995, 0.020224908, 0.007113328, -0.0732543, -0.0054818415, 0.05807576, 0.022461535, 0.21100426, -0.009597197, -0.020674499, 0.010743241, -0.046834, -0.0068005333, 0.04918187, -0.06680011, -0.025018543, 0.016360015, 0.100744724, -0.019944709, -0.052390855, -0.0034876189, 0.031699855, -0.03024188, 0.009384044, -0.073849924, 0.01846066, -0.017075414, 0.0067319535, 0.045643695, 0.0121267075, 0.014980903, -0.0022226444, -0.015187039, 0.040638167, 0.023607453, -0.018353134, 0.007413985, 0.03487914, 0.018997269, -0.0107962405, -0.0040080273, 0.001454658, -0.023004232, -0.03065838, -0.0691732, -0.009669473, -0.017253181, 0.100617275, -0.00028453665, -0.055184573, -0.04010461, -0.022628073, -0.02138574, -0.00011931983, -0.021988528, 0.021569526, 0.018913478, -0.07588871, -0.030895703, -0.045679674, 0.03548181, 0.05806986, -0.00313453, 0.005607964, 0.014474551, -0.016833752, -0.022846023, 0.03665983, 0.04312398, 0.006030178, 0.020107903, -0.067837745, -0.039261904, -0.013903933, -0.011238981, -0.091779895, 0.03393072, 0.03576862, -0.016447216, -0.013628061, 0.035994843, 0.02442105, 0.0013356373, -0.013639993, -0.0070654624, -0.031047037, 0.0321763, 0.019488426, 0.030912274, -0.018131692, 0.034129236, -0.038152352, -0.020318052, 0.012934771, -0.0038958737, 0.029313264, 0.0609006, -0.06022117, -0.016697206, -0.030089315, -0.0030464267, -0.05011375, 0.016849633, -0.01935251, 0.00033423092, 0.018090008, 0.034528963, 0.015720658, 0.006443832, 0.0024674414, 0.0033006326, -0.011959118, -0.014686165, 0.00851113, 0.032130115, 0.016566927, -0.0048006177, -0.041135546, 0.017366901, 0.014404645, 0.0014093819, -0.039899524, -0.020875102, -0.01322629, -0.010891931, 0.019460721, -0.098985165, -0.03990147, 0.035807386, 0.05274234, -0.017714208, 0.0023620757, 0.022553496, 0.010935722, -0.016535437, -0.014505468, -0.005573891, -0.029528206, -0.010998497, 0.011297328, 
0.007440231, 0.054734096, -0.035311602, 0.07038191, -0.034328025, -0.0109814005, -0.00578824, -0.009286793, 0.06692834, -0.040116422, -0.030043483, -0.010882302, -0.024094587, 0.026659116, -0.0637435, -0.022305744, 0.024388585, 0.011812823, -0.022778027, -0.0039024823, 0.027778644, 0.010566278, 0.011030791, -0.0021155484, 0.018014789, -0.03458981, 0.02546183, -0.11745906, 0.038193583, 0.0019787792, 0.01639592, 0.013218127, -0.012434678, -0.047858853, 0.006662704, 0.033221778, 0.008376927, -0.011822234, 0.01202769, 0.008761578, -0.04075117, 0.0025187496, 0.0026266004, 0.029762473, 0.009570205, -0.03644678, -0.033258904, -0.030776607, 0.05373578, 0.010904848, 0.040284622, 0.02707032, 0.021803873, -0.022011256, -0.05517991, -0.005213912, 0.009023477, -0.011895841, -0.026821174, -0.009035418, -0.021059638, 0.025536137, -0.053264923, 0.032206282, 0.020235807, 0.018660447, 0.0028790566, -0.019914437, 0.097842626, 0.027617158, 0.020276038, -0.014215543, 0.012761584, 0.032757074, 0.061124176, 0.049016643, -0.016509317, -0.03750349, -0.03449537, -0.02039439, -0.051360182, -0.041909404, 0.016175032, 0.040492736, 0.031218654, 0.0020242895, -0.032167237, 0.019398497, 0.057013687, 0.0031299617, 0.019177254, 0.015395364, -0.034078192, 0.041325297, 0.044380017, -0.004446819, 0.019610956, -0.030034903, 0.008468295, 0.03065914, -0.009548659, -0.07113981, 0.051648173, 0.03746448, -0.021847434, 0.01844844, 0.01333424, -0.001188216, 0.012330977, -0.056448817, 0.0008659569, 0.011183285, 0.006780519, -0.007357356, 0.05263679, -0.024631461, 0.00519591, -0.052165415, -0.03250626, -0.009370051, 0.00292325, -0.007187242, 0.029566163, -0.049605303, -0.02625627, -0.003157652, 0.052691437, -0.03589223, 0.03889354, -0.0035060279, 0.024555178, -0.00929779, -0.05037946, -0.022402484, 0.030634355, -0.03300659, -0.0063623153, 0.0027472514, 0.03196768, -0.019257778, 0.0089001395, 0.008908001, 0.018918095, 0.059574094, -0.02838763, 0.018203752, -0.06708146, -0.022670228, -0.013985525, 0.045018435, 
0.011420395, -0.008649952, -0.027328938, -0.03527292, -0.0038555951, 0.017597001, 0.024891963, -0.0039160745, -0.015237065, -0.0008723479, -0.018641612, -0.036825016, -0.028743235, 0.00091956893, 0.00030935413, -0.048641082, 0.03744432, -0.024196126, 0.009848505, -0.043836866, 0.0044429195, 0.013709644, 0.06295503, -0.016072558, 0.01277375, -0.03548109, 0.003398656, 0.025347201, 0.019685786, 0.00758199, -0.016122513, -0.039198015, -0.0023108267, -0.0041584945, 0.005161282, 0.00089106365, 0.0076085874, -0.055768084, -0.0058975955, 0.007728267, 0.00076985586, -0.013469806, -0.031578194, -0.0138569595, 0.044540506, -0.0408136, -0.015252405, 0.06232591, -0.04198101, 0.0048899655, -0.0030694627, -0.025022805, -0.010789543, -0.025350742, 0.007836728, 0.024604483, -5.385127e-05, -0.0021367231, -0.01704561, -0.001425816, 0.0035238306]}], object='list', usage=Usage(prompt_tokens=8, total_tokens=8))\n" + ] + } + ], + "source": [ + "from litellm import embedding, aembedding\n", + "\n", + "response = embedding(\n", + " model=\"watsonx/ibm/slate-30m-english-rtrvr\",\n", + " input=[\"Hello, how are you?\"],\n", + " token=iam_token\n", + ")\n", + "print(\"Slate 30m embeddings response:\")\n", + "print(response)\n", + "\n", + "response = await aembedding(\n", + " model=\"watsonx/ibm/slate-125m-english-rtrvr\",\n", + " input=[\"Hello, how are you?\"],\n", + " token=iam_token\n", + ")\n", + "print(\"Slate 125m embeddings response:\")\n", + "print(response)" + ] + } + ], + "metadata": { + "kernelspec": { + "display_name": "base", + "language": "python", + "name": "python3" + }, + "language_info": { + "codemirror_mode": { + "name": "ipython", + "version": 3 + }, + "file_extension": ".py", + "mimetype": "text/x-python", + "name": "python", + "nbconvert_exporter": "python", + "pygments_lexer": "ipython3", + "version": "3.11.5" + } + }, + "nbformat": 4, + "nbformat_minor": 2 +} diff --git a/cookbook/liteLLM_clarifai_Demo.ipynb b/cookbook/liteLLM_clarifai_Demo.ipynb new file mode 100644 
index 000000000..40ef2fcf9 --- /dev/null +++ b/cookbook/liteLLM_clarifai_Demo.ipynb @@ -0,0 +1,187 @@ +{ + "cells": [ + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "# LiteLLM Clarifai \n", + "This notebook walks you through on how to use liteLLM integration of Clarifai and call LLM model from clarifai with response in openAI output format." + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "## Pre-Requisites" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "#install necessary packages\n", + "!pip install litellm\n", + "!pip install clarifai" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "To obtain Clarifai Personal Access Token follow the steps mentioned in the [link](https://docs.clarifai.com/clarifai-basics/authentication/personal-access-tokens/)" + ] + }, + { + "cell_type": "code", + "execution_count": 1, + "metadata": {}, + "outputs": [], + "source": [ + "## Set Clarifai Credentials\n", + "import os\n", + "os.environ[\"CLARIFAI_API_KEY\"]= \"YOUR_CLARIFAI_PAT\" # Clarifai PAT" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "### Mistral-large" + ] + }, + { + "cell_type": "code", + "execution_count": 2, + "metadata": {}, + "outputs": [], + "source": [ + "import litellm\n", + "\n", + "litellm.set_verbose=False" + ] + }, + { + "cell_type": "code", + "execution_count": 3, + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "Mistral large response : ModelResponse(id='chatcmpl-6eed494d-7ae2-4870-b9c2-6a64d50a6151', choices=[Choices(finish_reason='stop', index=1, message=Message(content=\"In the grand tapestry of time, where tales unfold,\\nLies the chronicle of ages, a sight to behold.\\nA tale of empires rising, and kings of old,\\nOf civilizations lost, and stories untold.\\n\\nOnce upon a yesterday, in a time so vast,\\nHumans took their first steps, casting 
shadows in the past.\\nFrom the cradle of mankind, a journey they embarked,\\nThrough stone and bronze and iron, their skills they sharpened and marked.\\n\\nEgyptians built pyramids, reaching for the skies,\\nWhile Greeks sought wisdom, truth, in philosophies that lie.\\nRoman legions marched, their empire to expand,\\nAnd in the East, the Silk Road joined the world, hand in hand.\\n\\nThe Middle Ages came, with knights in shining armor,\\nFeudal lords and serfs, a time of both clamor and calm order.\\nThen Renaissance bloomed, like a flower in the sun,\\nA rebirth of art and science, a new age had begun.\\n\\nAcross the vast oceans, explorers sailed with courage bold,\\nDiscovering new lands, stories of adventure, untold.\\nIndustrial Revolution churned, progress in its wake,\\nMachines and factories, a whole new world to make.\\n\\nTwo World Wars raged, a testament to man's strife,\\nYet from the ashes rose hope, a renewed will for life.\\nInto the modern era, technology took flight,\\nConnecting every corner, bathed in digital light.\\n\\nHistory, a symphony, a melody of time,\\nA testament to human will, resilience so sublime.\\nIn every page, a lesson, in every tale, a guide,\\nFor understanding our past, shapes our future's tide.\", role='assistant'))], created=1713896412, model='https://api.clarifai.com/v2/users/mistralai/apps/completion/models/mistral-large/outputs', object='chat.completion', system_fingerprint=None, usage=Usage(prompt_tokens=13, completion_tokens=338, total_tokens=351))\n" + ] + } + ], + "source": [ + "from litellm import completion\n", + "\n", + "messages = [{\"role\": \"user\",\"content\": \"\"\"Write a poem about history?\"\"\"}]\n", + "response=completion(\n", + " model=\"clarifai/mistralai.completion.mistral-large\",\n", + " messages=messages,\n", + " )\n", + "\n", + "print(f\"Mistral large response : {response}\")" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "### Claude-2.1 " + ] + }, + { + "cell_type": 
"code", + "execution_count": 4, + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "Claude-2.1 response : ModelResponse(id='chatcmpl-d126c919-4db4-4aa3-ac8f-7edea41e0b93', choices=[Choices(finish_reason='stop', index=1, message=Message(content=\" Here's a poem I wrote about history:\\n\\nThe Tides of Time\\n\\nThe tides of time ebb and flow,\\nCarrying stories of long ago.\\nFigures and events come into light,\\nShaping the future with all their might.\\n\\nKingdoms rise, empires fall, \\nLeaving traces that echo down every hall.\\nRevolutions bring change with a fiery glow,\\nToppling structures from long ago.\\n\\nExplorers traverse each ocean and land,\\nSeeking treasures they don't understand.\\nWhile artists and writers try to make their mark,\\nHoping their works shine bright in the dark.\\n\\nThe cycle repeats again and again,\\nAs humanity struggles to learn from its pain.\\nThough the players may change on history's stage,\\nThe themes stay the same from age to age.\\n\\nWar and peace, life and death,\\nLove and strife with every breath.\\nThe tides of time continue their dance,\\nAs we join in, by luck or by chance.\\n\\nSo we study the past to light the way forward, \\nHeeding warnings from stories told and heard.\\nThe future unfolds from this unending flow -\\nWhere the tides of time ultimately go.\", role='assistant'))], created=1713896579, model='https://api.clarifai.com/v2/users/anthropic/apps/completion/models/claude-2_1/outputs', object='chat.completion', system_fingerprint=None, usage=Usage(prompt_tokens=12, completion_tokens=232, total_tokens=244))\n" + ] + } + ], + "source": [ + "from litellm import completion\n", + "\n", + "messages = [{\"role\": \"user\",\"content\": \"\"\"Write a poem about history?\"\"\"}]\n", + "response=completion(\n", + " model=\"clarifai/anthropic.completion.claude-2_1\",\n", + " messages=messages,\n", + " )\n", + "\n", + "print(f\"Claude-2.1 response : {response}\")" + ] + 
}, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "### OpenAI GPT-4 (Streaming)\n", + "Though clarifai doesn't support streaming, still you can call stream and get the response in standard StreamResponse format of liteLLM" + ] + }, + { + "cell_type": "code", + "execution_count": 3, + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "ModelResponse(id='chatcmpl-40ae19af-3bf0-4eb4-99f2-33aec3ba84af', choices=[StreamingChoices(finish_reason=None, index=0, delta=Delta(content=\"In the quiet corners of time's grand hall,\\nLies the tale of rise and fall.\\nFrom ancient ruins to modern sprawl,\\nHistory, the greatest story of them all.\\n\\nEmpires have risen, empires have decayed,\\nThrough the eons, memories have stayed.\\nIn the book of time, history is laid,\\nA tapestry of events, meticulously displayed.\\n\\nThe pyramids of Egypt, standing tall,\\nThe Roman Empire's mighty sprawl.\\nFrom Alexander's conquest, to the Berlin Wall,\\nHistory, a silent witness to it all.\\n\\nIn the shadow of the past we tread,\\nWhere once kings and prophets led.\\nTheir stories in our hearts are spread,\\nEchoes of their words, in our minds are read.\\n\\nBattles fought and victories won,\\nActs of courage under the sun.\\nTales of love, of deeds done,\\nIn history's grand book, they all run.\\n\\nHeroes born, legends made,\\nIn the annals of time, they'll never fade.\\nTheir triumphs and failures all displayed,\\nIn the eternal march of history's parade.\\n\\nThe ink of the past is forever dry,\\nBut its lessons, we cannot deny.\\nIn its stories, truths lie,\\nIn its wisdom, we rely.\\n\\nHistory, a mirror to our past,\\nA guide for the future vast.\\nThrough its lens, we're ever cast,\\nIn the drama of life, forever vast.\", role='assistant', function_call=None, tool_calls=None), logprobs=None)], created=1714744515, model='https://api.clarifai.com/v2/users/openai/apps/chat-completion/models/GPT-4/outputs', 
object='chat.completion.chunk', system_fingerprint=None)\n", + "ModelResponse(id='chatcmpl-40ae19af-3bf0-4eb4-99f2-33aec3ba84af', choices=[StreamingChoices(finish_reason='stop', index=0, delta=Delta(content=None, role=None, function_call=None, tool_calls=None), logprobs=None)], created=1714744515, model='https://api.clarifai.com/v2/users/openai/apps/chat-completion/models/GPT-4/outputs', object='chat.completion.chunk', system_fingerprint=None)\n" + ] + } + ], + "source": [ + "from litellm import completion\n", + "\n", + "messages = [{\"role\": \"user\",\"content\": \"\"\"Write a poem about history?\"\"\"}]\n", + "response = completion(\n", + " model=\"clarifai/openai.chat-completion.GPT-4\",\n", + " messages=messages,\n", + " stream=True,\n", + " api_key = \"c75cc032415e45368be331fdd2c06db0\")\n", + "\n", + "for chunk in response:\n", + " print(chunk)" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [] + } + ], + "metadata": { + "kernelspec": { + "display_name": "Python 3", + "language": "python", + "name": "python3" + }, + "language_info": { + "codemirror_mode": { + "name": "ipython", + "version": 3 + }, + "file_extension": ".py", + "mimetype": "text/x-python", + "name": "python", + "nbconvert_exporter": "python", + "pygments_lexer": "ipython3", + "version": "3.10.13" + } + }, + "nbformat": 4, + "nbformat_minor": 2 +} diff --git a/deploy/azure_resource_manager/azure_marketplace.zip b/deploy/azure_resource_manager/azure_marketplace.zip new file mode 100644 index 000000000..347512586 Binary files /dev/null and b/deploy/azure_resource_manager/azure_marketplace.zip differ diff --git a/deploy/azure_resource_manager/azure_marketplace/createUiDefinition.json b/deploy/azure_resource_manager/azure_marketplace/createUiDefinition.json new file mode 100644 index 000000000..4eba73bdb --- /dev/null +++ b/deploy/azure_resource_manager/azure_marketplace/createUiDefinition.json @@ -0,0 +1,15 @@ +{ + "$schema": 
"https://schema.management.azure.com/schemas/0.1.2-preview/CreateUIDefinition.MultiVm.json#", + "handler": "Microsoft.Azure.CreateUIDef", + "version": "0.1.2-preview", + "parameters": { + "config": { + "isWizard": false, + "basics": { } + }, + "basics": [ ], + "steps": [ ], + "outputs": { }, + "resourceTypes": [ ] + } +} \ No newline at end of file diff --git a/deploy/azure_resource_manager/azure_marketplace/mainTemplate.json b/deploy/azure_resource_manager/azure_marketplace/mainTemplate.json new file mode 100644 index 000000000..114e855bf --- /dev/null +++ b/deploy/azure_resource_manager/azure_marketplace/mainTemplate.json @@ -0,0 +1,63 @@ +{ + "$schema": "https://schema.management.azure.com/schemas/2019-04-01/deploymentTemplate.json#", + "contentVersion": "1.0.0.0", + "parameters": { + "imageName": { + "type": "string", + "defaultValue": "ghcr.io/berriai/litellm:main-latest" + }, + "containerName": { + "type": "string", + "defaultValue": "litellm-container" + }, + "dnsLabelName": { + "type": "string", + "defaultValue": "litellm" + }, + "portNumber": { + "type": "int", + "defaultValue": 4000 + } + }, + "resources": [ + { + "type": "Microsoft.ContainerInstance/containerGroups", + "apiVersion": "2021-03-01", + "name": "[parameters('containerName')]", + "location": "[resourceGroup().location]", + "properties": { + "containers": [ + { + "name": "[parameters('containerName')]", + "properties": { + "image": "[parameters('imageName')]", + "resources": { + "requests": { + "cpu": 1, + "memoryInGB": 2 + } + }, + "ports": [ + { + "port": "[parameters('portNumber')]" + } + ] + } + } + ], + "osType": "Linux", + "restartPolicy": "Always", + "ipAddress": { + "type": "Public", + "ports": [ + { + "protocol": "tcp", + "port": "[parameters('portNumber')]" + } + ], + "dnsNameLabel": "[parameters('dnsLabelName')]" + } + } + } + ] + } \ No newline at end of file diff --git a/deploy/azure_resource_manager/main.bicep b/deploy/azure_resource_manager/main.bicep new file mode 100644 index 
000000000..b104cefe1 --- /dev/null +++ b/deploy/azure_resource_manager/main.bicep @@ -0,0 +1,42 @@ +param imageName string = 'ghcr.io/berriai/litellm:main-latest' +param containerName string = 'litellm-container' +param dnsLabelName string = 'litellm' +param portNumber int = 4000 + +resource containerGroupName 'Microsoft.ContainerInstance/containerGroups@2021-03-01' = { + name: containerName + location: resourceGroup().location + properties: { + containers: [ + { + name: containerName + properties: { + image: imageName + resources: { + requests: { + cpu: 1 + memoryInGB: 2 + } + } + ports: [ + { + port: portNumber + } + ] + } + } + ] + osType: 'Linux' + restartPolicy: 'Always' + ipAddress: { + type: 'Public' + ports: [ + { + protocol: 'tcp' + port: portNumber + } + ] + dnsNameLabel: dnsLabelName + } + } +} diff --git a/deploy/charts/litellm-helm/Chart.yaml b/deploy/charts/litellm-helm/Chart.yaml index 628b76a3c..7f68acf88 100644 --- a/deploy/charts/litellm-helm/Chart.yaml +++ b/deploy/charts/litellm-helm/Chart.yaml @@ -24,7 +24,7 @@ version: 0.2.0 # incremented each time you make changes to the application. Versions are not expected to # follow Semantic Versioning. They should reflect the version the application is using. # It is recommended to use it with quotes. 
-appVersion: v1.24.5 +appVersion: v1.35.38 dependencies: - name: "postgresql" diff --git a/docs/my-website/docs/caching/redis_cache.md b/docs/my-website/docs/caching/all_caches.md similarity index 80% rename from docs/my-website/docs/caching/redis_cache.md rename to docs/my-website/docs/caching/all_caches.md index b00a118c1..eb309f9b8 100644 --- a/docs/my-website/docs/caching/redis_cache.md +++ b/docs/my-website/docs/caching/all_caches.md @@ -1,7 +1,7 @@ import Tabs from '@theme/Tabs'; import TabItem from '@theme/TabItem'; -# Caching - In-Memory, Redis, s3, Redis Semantic Cache +# Caching - In-Memory, Redis, s3, Redis Semantic Cache, Disk [**See Code**](https://github.com/BerriAI/litellm/blob/main/litellm/caching.py) @@ -11,7 +11,7 @@ Need to use Caching on LiteLLM Proxy Server? Doc here: [Caching Proxy Server](ht ::: -## Initialize Cache - In Memory, Redis, s3 Bucket, Redis Semantic Cache +## Initialize Cache - In Memory, Redis, s3 Bucket, Redis Semantic, Disk Cache @@ -159,7 +159,7 @@ litellm.cache = Cache() # Make completion calls response1 = completion( model="gpt-3.5-turbo", - messages=[{"role": "user", "content": "Tell me a joke."}] + messages=[{"role": "user", "content": "Tell me a joke."}], caching=True ) response2 = completion( @@ -174,6 +174,43 @@ response2 = completion( + + +### Quick Start + +Install diskcache: + +```shell +pip install diskcache +``` + +Then you can use the disk cache as follows. 
+ +```python +import litellm +from litellm import completion +from litellm.caching import Cache +litellm.cache = Cache(type="disk") + +# Make completion calls +response1 = completion( + model="gpt-3.5-turbo", + messages=[{"role": "user", "content": "Tell me a joke."}], + caching=True +) +response2 = completion( + model="gpt-3.5-turbo", + messages=[{"role": "user", "content": "Tell me a joke."}], + caching=True +) + +# response1 == response2, response 1 is cached + +``` + +If you run the code two times, response1 will use the cache from the first run that was stored in a cache file. + + @@ -191,13 +228,13 @@ Advanced Params ```python litellm.enable_cache( - type: Optional[Literal["local", "redis"]] = "local", + type: Optional[Literal["local", "redis", "s3", "disk"]] = "local", host: Optional[str] = None, port: Optional[str] = None, password: Optional[str] = None, supported_call_types: Optional[ - List[Literal["completion", "acompletion", "embedding", "aembedding"]] - ] = ["completion", "acompletion", "embedding", "aembedding"], + List[Literal["completion", "acompletion", "embedding", "aembedding", "atranscription", "transcription"]] + ] = ["completion", "acompletion", "embedding", "aembedding", "atranscription", "transcription"], **kwargs, ) ``` @@ -215,13 +252,13 @@ Update the Cache params ```python litellm.update_cache( - type: Optional[Literal["local", "redis"]] = "local", + type: Optional[Literal["local", "redis", "s3", "disk"]] = "local", host: Optional[str] = None, port: Optional[str] = None, password: Optional[str] = None, supported_call_types: Optional[ - List[Literal["completion", "acompletion", "embedding", "aembedding"]] - ] = ["completion", "acompletion", "embedding", "aembedding"], + List[Literal["completion", "acompletion", "embedding", "aembedding", "atranscription", "transcription"]] + ] = ["completion", "acompletion", "embedding", "aembedding", "atranscription", "transcription"], **kwargs, ) ``` @@ -276,22 +313,29 @@ cache.get_cache = get_cache 
```python def __init__( self, - type: Optional[Literal["local", "redis", "s3"]] = "local", + type: Optional[Literal["local", "redis", "redis-semantic", "s3", "disk"]] = "local", supported_call_types: Optional[ - List[Literal["completion", "acompletion", "embedding", "aembedding"]] - ] = ["completion", "acompletion", "embedding", "aembedding"], # A list of litellm call types to cache for. Defaults to caching for all litellm call types. - + List[Literal["completion", "acompletion", "embedding", "aembedding", "atranscription", "transcription"]] + ] = ["completion", "acompletion", "embedding", "aembedding", "atranscription", "transcription"], + ttl: Optional[float] = None, + default_in_memory_ttl: Optional[float] = None, + # redis cache params host: Optional[str] = None, port: Optional[str] = None, password: Optional[str] = None, - + namespace: Optional[str] = None, + default_in_redis_ttl: Optional[float] = None, + similarity_threshold: Optional[float] = None, + redis_semantic_cache_use_async=False, + redis_semantic_cache_embedding_model="text-embedding-ada-002", + redis_flush_size=None, # s3 Bucket, boto3 configuration s3_bucket_name: Optional[str] = None, s3_region_name: Optional[str] = None, s3_api_version: Optional[str] = None, - s3_path: Optional[str] = None, # if you wish to save to a spefic path + s3_path: Optional[str] = None, # if you wish to save to a specific path s3_use_ssl: Optional[bool] = True, s3_verify: Optional[Union[bool, str]] = None, s3_endpoint_url: Optional[str] = None, @@ -299,7 +343,11 @@ def __init__( s3_aws_secret_access_key: Optional[str] = None, s3_aws_session_token: Optional[str] = None, s3_config: Optional[Any] = None, - **kwargs, + + # disk cache params + disk_cache_dir=None, + + **kwargs ): ``` diff --git a/docs/my-website/docs/caching/local_caching.md b/docs/my-website/docs/caching/local_caching.md index d0e26e4bf..81c4edcb8 100644 --- a/docs/my-website/docs/caching/local_caching.md +++ b/docs/my-website/docs/caching/local_caching.md 
@@ -40,7 +40,7 @@ cache = Cache() cache.add_cache(cache_key="test-key", result="1234") -cache.get_cache(cache_key="test-key) +cache.get_cache(cache_key="test-key") ``` ## Caching with Streaming diff --git a/docs/my-website/docs/completion/batching.md b/docs/my-website/docs/completion/batching.md index 05683b3dd..09f59f743 100644 --- a/docs/my-website/docs/completion/batching.md +++ b/docs/my-website/docs/completion/batching.md @@ -4,6 +4,12 @@ LiteLLM allows you to: * Send 1 completion call to many models: Return Fastest Response * Send 1 completion call to many models: Return All Responses +:::info + +Trying to do batch completion on LiteLLM Proxy ? Go here: https://docs.litellm.ai/docs/proxy/user_keys#beta-batch-completions---pass-model-as-list + +::: + ## Send multiple completion calls to 1 model In the batch_completion method, you provide a list of `messages` where each sub-list of messages is passed to `litellm.completion()`, allowing you to process multiple prompts efficiently in a single API call. diff --git a/docs/my-website/docs/completion/input.md b/docs/my-website/docs/completion/input.md index fd5594610..ba01dd9d8 100644 --- a/docs/my-website/docs/completion/input.md +++ b/docs/my-website/docs/completion/input.md @@ -37,11 +37,12 @@ print(response) # ["max_tokens", "tools", "tool_choice", "stream"] This is a list of openai params we translate across providers. -This list is constantly being updated. 
+Use `litellm.get_supported_openai_params()` for an updated list of params for each model + provider | Provider | temperature | max_tokens | top_p | stream | stop | n | presence_penalty | frequency_penalty | functions | function_call | logit_bias | user | response_format | seed | tools | tool_choice | logprobs | top_logprobs | extra_headers | |---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|--| -|Anthropic| ✅ | ✅ | ✅ | ✅ | ✅ | | | | | | +|Anthropic| ✅ | ✅ | ✅ | ✅ | ✅ | | | | | | | | | | ✅ | ✅ | +|Anthropic| ✅ | ✅ | ✅ | ✅ | ✅ | | | | | | | | ✅ | ✅ | ✅ | ✅ | |OpenAI| ✅ | ✅ | ✅ | ✅ | ✅ | ✅ | ✅ | ✅ | ✅ | ✅ |✅ | ✅ | ✅ | ✅ |✅ | ✅ | ✅ | ✅ | ✅ | |Azure OpenAI| ✅ | ✅ | ✅ | ✅ | ✅ | ✅ | ✅ | ✅ | ✅ | ✅ |✅ | ✅ | ✅ | ✅ |✅ | ✅ | | | ✅ | |Replicate | ✅ | ✅ | ✅ | ✅ | ✅ | | | | | | @@ -83,8 +84,9 @@ def completion( top_p: Optional[float] = None, n: Optional[int] = None, stream: Optional[bool] = None, + stream_options: Optional[dict] = None, stop=None, - max_tokens: Optional[float] = None, + max_tokens: Optional[int] = None, presence_penalty: Optional[float] = None, frequency_penalty: Optional[float] = None, logit_bias: Optional[dict] = None, @@ -139,6 +141,10 @@ def completion( - `stream`: *boolean or null (optional)* - If set to true, it sends partial message deltas. Tokens will be sent as they become available, with the stream terminated by a [DONE] message. +- `stream_options` *dict or null (optional)* - Options for streaming response. Only set this when you set `stream: true` + + - `include_usage` *boolean (optional)* - If set, an additional chunk will be streamed before the data: [DONE] message. The usage field on this chunk shows the token usage statistics for the entire request, and the choices field will always be an empty array. All other chunks will also include a usage field, but with a null value. + - `stop`: *string/ array/ null (optional)* - Up to 4 sequences where the API will stop generating further tokens. 
- `max_tokens`: *integer (optional)* - The maximum number of tokens to generate in the chat completion. diff --git a/docs/my-website/docs/completion/token_usage.md b/docs/my-website/docs/completion/token_usage.md index 626973c57..807ccfd91 100644 --- a/docs/my-website/docs/completion/token_usage.md +++ b/docs/my-website/docs/completion/token_usage.md @@ -1,7 +1,7 @@ # Completion Token Usage & Cost By default LiteLLM returns token usage in all completion requests ([See here](https://litellm.readthedocs.io/en/latest/output/)) -However, we also expose 5 helper functions + **[NEW]** an API to calculate token usage across providers: +However, we also expose some helper functions + **[NEW]** an API to calculate token usage across providers: - `encode`: This encodes the text passed in, using the model-specific tokenizer. [**Jump to code**](#1-encode) @@ -9,17 +9,19 @@ However, we also expose 5 helper functions + **[NEW]** an API to calculate token - `token_counter`: This returns the number of tokens for a given input - it uses the tokenizer based on the model, and defaults to tiktoken if no model-specific tokenizer is available. [**Jump to code**](#3-token_counter) -- `cost_per_token`: This returns the cost (in USD) for prompt (input) and completion (output) tokens. Uses the live list from `api.litellm.ai`. [**Jump to code**](#4-cost_per_token) +- `create_pretrained_tokenizer` and `create_tokenizer`: LiteLLM provides default tokenizer support for OpenAI, Cohere, Anthropic, Llama2, and Llama3 models. If you are using a different model, you can create a custom tokenizer and pass it as `custom_tokenizer` to the `encode`, `decode`, and `token_counter` methods. [**Jump to code**](#4-create_pretrained_tokenizer-and-create_tokenizer) -- `completion_cost`: This returns the overall cost (in USD) for a given LLM API Call. It combines `token_counter` and `cost_per_token` to return the cost for that query (counting both cost of input and output). 
[**Jump to code**](#5-completion_cost) +- `cost_per_token`: This returns the cost (in USD) for prompt (input) and completion (output) tokens. Uses the live list from `api.litellm.ai`. [**Jump to code**](#5-cost_per_token) -- `get_max_tokens`: This returns the maximum number of tokens allowed for the given model. [**Jump to code**](#6-get_max_tokens) +- `completion_cost`: This returns the overall cost (in USD) for a given LLM API Call. It combines `token_counter` and `cost_per_token` to return the cost for that query (counting both cost of input and output). [**Jump to code**](#6-completion_cost) -- `model_cost`: This returns a dictionary for all models, with their max_tokens, input_cost_per_token and output_cost_per_token. It uses the `api.litellm.ai` call shown below. [**Jump to code**](#7-model_cost) +- `get_max_tokens`: This returns the maximum number of tokens allowed for the given model. [**Jump to code**](#7-get_max_tokens) -- `register_model`: This registers new / overrides existing models (and their pricing details) in the model cost dictionary. [**Jump to code**](#8-register_model) +- `model_cost`: This returns a dictionary for all models, with their max_tokens, input_cost_per_token and output_cost_per_token. It uses the `api.litellm.ai` call shown below. [**Jump to code**](#8-model_cost) -- `api.litellm.ai`: Live token + price count across [all supported models](https://github.com/BerriAI/litellm/blob/main/model_prices_and_context_window.json). [**Jump to code**](#9-apilitellmai) +- `register_model`: This registers new / overrides existing models (and their pricing details) in the model cost dictionary. [**Jump to code**](#9-register_model) + +- `api.litellm.ai`: Live token + price count across [all supported models](https://github.com/BerriAI/litellm/blob/main/model_prices_and_context_window.json). [**Jump to code**](#10-apilitellmai) 📣 This is a community maintained list. Contributions are welcome! 
❤️ @@ -60,7 +62,24 @@ messages = [{"user": "role", "content": "Hey, how's it going"}] print(token_counter(model="gpt-3.5-turbo", messages=messages)) ``` -### 4. `cost_per_token` +### 4. `create_pretrained_tokenizer` and `create_tokenizer` + +```python +from litellm import create_pretrained_tokenizer, create_tokenizer + +# get tokenizer from huggingface repo +custom_tokenizer_1 = create_pretrained_tokenizer("Xenova/llama-3-tokenizer") + +# use tokenizer from json file +with open("tokenizer.json") as f: + json_data = json.load(f) + +json_str = json.dumps(json_data) + +custom_tokenizer_2 = create_tokenizer(json_str) +``` + +### 5. `cost_per_token` ```python from litellm import cost_per_token @@ -72,7 +91,7 @@ prompt_tokens_cost_usd_dollar, completion_tokens_cost_usd_dollar = cost_per_toke print(prompt_tokens_cost_usd_dollar, completion_tokens_cost_usd_dollar) ``` -### 5. `completion_cost` +### 6. `completion_cost` * Input: Accepts a `litellm.completion()` response **OR** prompt + completion strings * Output: Returns a `float` of cost for the `completion` call @@ -99,7 +118,7 @@ cost = completion_cost(model="bedrock/anthropic.claude-v2", prompt="Hey!", compl formatted_string = f"${float(cost):.10f}" print(formatted_string) ``` -### 6. `get_max_tokens` +### 7. `get_max_tokens` Input: Accepts a model name - e.g., gpt-3.5-turbo (to get a complete list, call litellm.model_list). Output: Returns the maximum number of tokens allowed for the given model @@ -112,7 +131,7 @@ model = "gpt-3.5-turbo" print(get_max_tokens(model)) # Output: 4097 ``` -### 7. `model_cost` +### 8. 
`model_cost` * Output: Returns a dict object containing the max_tokens, input_cost_per_token, output_cost_per_token for all models on [community-maintained list](https://github.com/BerriAI/litellm/blob/main/model_prices_and_context_window.json) @@ -122,7 +141,7 @@ from litellm import model_cost print(model_cost) # {'gpt-3.5-turbo': {'max_tokens': 4000, 'input_cost_per_token': 1.5e-06, 'output_cost_per_token': 2e-06}, ...} ``` -### 8. `register_model` +### 9. `register_model` * Input: Provide EITHER a model cost dictionary or a url to a hosted json blob * Output: Returns updated model_cost dictionary + updates litellm.model_cost with model details. @@ -157,5 +176,3 @@ export LITELLM_LOCAL_MODEL_COST_MAP="True" ``` Note: this means you will need to upgrade to get updated pricing, and newer models. - - diff --git a/docs/my-website/docs/completion/vision.md b/docs/my-website/docs/completion/vision.md new file mode 100644 index 000000000..ea04b1e1e --- /dev/null +++ b/docs/my-website/docs/completion/vision.md @@ -0,0 +1,45 @@ +# Using Vision Models + +## Quick Start +Example passing images to a model + +```python +import os +from litellm import completion + +os.environ["OPENAI_API_KEY"] = "your-api-key" + +# openai call +response = completion( + model = "gpt-4-vision-preview", + messages=[ + { + "role": "user", + "content": [ + { + "type": "text", + "text": "What’s in this image?" 
+                        }, + { + "type": "image_url", + "image_url": { + "url": "https://upload.wikimedia.org/wikipedia/commons/thumb/d/dd/Gfp-wisconsin-madison-the-nature-boardwalk.jpg/2560px-Gfp-wisconsin-madison-the-nature-boardwalk.jpg" + } + } + ] + } + ], +) + +``` + +## Checking if a model supports `vision` + +Use `litellm.supports_vision(model="")` -> returns `True` if model supports `vision` and `False` if not + +```python +assert litellm.supports_vision(model="gpt-4-vision-preview") == True +assert litellm.supports_vision(model="gemini-1.0-pro-vision") == True +assert litellm.supports_vision(model="gpt-3.5-turbo") == False +``` + diff --git a/docs/my-website/docs/debugging/local_debugging.md b/docs/my-website/docs/debugging/local_debugging.md index 87faef73e..a9409bfab 100644 --- a/docs/my-website/docs/debugging/local_debugging.md +++ b/docs/my-website/docs/debugging/local_debugging.md @@ -23,6 +23,14 @@ response = completion(model="gpt-3.5-turbo", messages=messages) response = completion("command-nightly", messages) ``` +## JSON Logs + +If you need to store the logs as JSON, just set the `litellm.json_logs = True`. + +We currently just log the raw POST request from litellm as a JSON - [**See Code**]. + +[Share feedback here](https://github.com/BerriAI/litellm/issues) + ## Logger Function But sometimes all you care about is seeing exactly what's getting sent to your api call and what's being returned - e.g. if the api call is failing, why is that happening? what are the exact params being set?
diff --git a/docs/my-website/docs/embedding/supported_embedding.md b/docs/my-website/docs/embedding/supported_embedding.md index 7e2374d16..ebf7a29eb 100644 --- a/docs/my-website/docs/embedding/supported_embedding.md +++ b/docs/my-website/docs/embedding/supported_embedding.md @@ -320,8 +320,6 @@ from litellm import embedding litellm.vertex_project = "hardy-device-38811" # Your Project ID litellm.vertex_location = "us-central1" # proj location - -os.environ['VOYAGE_API_KEY'] = "" response = embedding( model="vertex_ai/textembedding-gecko", input=["good morning from litellm"], @@ -339,6 +337,8 @@ All models listed [here](https://github.com/BerriAI/litellm/blob/57f37f743886a02 | textembedding-gecko-multilingual@001 | `embedding(model="vertex_ai/textembedding-gecko-multilingual@001", input)` | | textembedding-gecko@001 | `embedding(model="vertex_ai/textembedding-gecko@001", input)` | | textembedding-gecko@003 | `embedding(model="vertex_ai/textembedding-gecko@003", input)` | +| text-embedding-preview-0409 | `embedding(model="vertex_ai/text-embedding-preview-0409", input)` | +| text-multilingual-embedding-preview-0409 | `embedding(model="vertex_ai/text-multilingual-embedding-preview-0409", input)` | ## Voyage AI Embedding Models diff --git a/docs/my-website/docs/enterprise.md b/docs/my-website/docs/enterprise.md index 7b623e407..382ba8b28 100644 --- a/docs/my-website/docs/enterprise.md +++ b/docs/my-website/docs/enterprise.md @@ -8,14 +8,23 @@ For companies that need SSO, user management and professional support for LiteLL ::: This covers: -- ✅ **Features under the [LiteLLM Commercial License](https://docs.litellm.ai/docs/proxy/enterprise):** +- ✅ **Features under the [LiteLLM Commercial License (Content Mod, Custom Tags, etc.)](https://docs.litellm.ai/docs/proxy/enterprise)** - ✅ **Feature Prioritization** - ✅ **Custom Integrations** - ✅ **Professional Support - Dedicated discord + slack** - ✅ **Custom SLAs** -- ✅ **Secure access with Single Sign-On** +- ✅ [**Secure UI 
access with Single Sign-On**](../docs/proxy/ui.md#setup-ssoauth-for-ui) +- ✅ [**JWT-Auth**](../docs/proxy/token_auth.md) +## [COMING SOON] AWS Marketplace Support + +Deploy managed LiteLLM Proxy within your VPC. + +Includes all enterprise features. + +[**Get early access**](https://calendly.com/d/4mp-gd3-k5k/litellm-1-1-onboarding-chat) + ## Frequently Asked Questions ### What topics does Professional support cover and what SLAs do you offer? diff --git a/docs/my-website/docs/exception_mapping.md b/docs/my-website/docs/exception_mapping.md index db17fb093..5e6006ebe 100644 --- a/docs/my-website/docs/exception_mapping.md +++ b/docs/my-website/docs/exception_mapping.md @@ -13,7 +13,7 @@ LiteLLM maps exceptions across all providers to their OpenAI counterparts. | >=500 | InternalServerError | | N/A | ContextWindowExceededError| | 400 | ContentPolicyViolationError| -| N/A | APIConnectionError | +| 500 | APIConnectionError | Base case we return APIConnectionError @@ -74,6 +74,28 @@ except Exception as e: ``` +## Usage - Should you retry exception? + +``` +import litellm +import openai + +try: + response = litellm.completion( + model="gpt-4", + messages=[ + { + "role": "user", + "content": "hello, write a 20 page essay" + } + ], + timeout=0.01, # this will raise a timeout exception + ) +except openai.APITimeoutError as e: + should_retry = litellm._should_retry(e.status_code) + print(f"should_retry: {should_retry}") +``` + ## Details To see how it's implemented - [check out the code](https://github.com/BerriAI/litellm/blob/a42c197e5a6de56ea576c73715e6c7c6b19fa249/litellm/utils.py#L1217) @@ -84,23 +106,37 @@ To see how it's implemented - [check out the code](https://github.com/BerriAI/li ## Custom mapping list -Base case - we return the original exception. +Base case - we return `litellm.APIConnectionError` exception (inherits from openai's APIConnectionError exception).
-| | ContextWindowExceededError | AuthenticationError | InvalidRequestError | RateLimitError | ServiceUnavailableError | -|---------------|----------------------------|---------------------|---------------------|---------------|-------------------------| -| Anthropic | ✅ | ✅ | ✅ | ✅ | | -| OpenAI | ✅ | ✅ |✅ |✅ |✅| -| Azure OpenAI | ✅ | ✅ |✅ |✅ |✅| -| Replicate | ✅ | ✅ | ✅ | ✅ | ✅ | -| Cohere | ✅ | ✅ | ✅ | ✅ | ✅ | -| Huggingface | ✅ | ✅ | ✅ | ✅ | | -| Openrouter | ✅ | ✅ | ✅ | ✅ | | -| AI21 | ✅ | ✅ | ✅ | ✅ | | -| VertexAI | | |✅ | | | -| Bedrock | | |✅ | | | -| Sagemaker | | |✅ | | | -| TogetherAI | ✅ | ✅ | ✅ | ✅ | | -| AlephAlpha | ✅ | ✅ | ✅ | ✅ | ✅ | +| custom_llm_provider | Timeout | ContextWindowExceededError | BadRequestError | NotFoundError | ContentPolicyViolationError | AuthenticationError | APIError | RateLimitError | ServiceUnavailableError | PermissionDeniedError | UnprocessableEntityError | +|----------------------------|---------|----------------------------|------------------|---------------|-----------------------------|---------------------|----------|----------------|-------------------------|-----------------------|-------------------------| +| openai | ✓ | ✓ | ✓ | | ✓ | ✓ | | | | | | +| watsonx | | | | | | | |✓| | | | +| text-completion-openai | ✓ | ✓ | ✓ | | ✓ | ✓ | | | | | | +| custom_openai | ✓ | ✓ | ✓ | | ✓ | ✓ | | | | | | +| openai_compatible_providers| ✓ | ✓ | ✓ | | ✓ | ✓ | | | | | | +| anthropic | ✓ | ✓ | ✓ | ✓ | | ✓ | | | ✓ | ✓ | | +| replicate | ✓ | ✓ | ✓ | ✓ | | ✓ | | ✓ | ✓ | | | +| bedrock | ✓ | ✓ | ✓ | ✓ | | ✓ | | ✓ | ✓ | ✓ | | +| sagemaker | | ✓ | ✓ | | | | | | | | | +| vertex_ai | ✓ | | ✓ | | | | ✓ | | | | ✓ | +| palm | ✓ | ✓ | | | | | ✓ | | | | | +| gemini | ✓ | ✓ | | | | | ✓ | | | | | +| cloudflare | | | ✓ | | | ✓ | | | | | | +| cohere | | ✓ | ✓ | | | ✓ | | | ✓ | | | +| cohere_chat | | ✓ | ✓ | | | ✓ | | | ✓ | | | +| huggingface | ✓ | ✓ | ✓ | | | ✓ | | ✓ | ✓ | | | +| ai21 | ✓ | ✓ | ✓ | ✓ | | ✓ | | ✓ | | | | +| nlp_cloud | ✓ | ✓ | ✓ | 
| | ✓ | ✓ | ✓ | ✓ | | | +| together_ai | ✓ | ✓ | ✓ | | | ✓ | | | | | | +| aleph_alpha | | | ✓ | | | ✓ | | | | | | +| ollama | ✓ | | ✓ | | | | | | ✓ | | | +| ollama_chat | ✓ | | ✓ | | | | | | ✓ | | | +| vllm | | | | | | ✓ | ✓ | | | | | +| azure | ✓ | ✓ | ✓ | ✓ | ✓ | ✓ | | | ✓ | | | + +- "✓" indicates that the specified `custom_llm_provider` can raise the corresponding exception. +- Empty cells indicate the lack of association or that the provider does not raise that particular exception type as indicated by the function. > For a deeper understanding of these exceptions, you can check out [this](https://github.com/BerriAI/litellm/blob/d7e58d13bf9ba9edbab2ab2f096f3de7547f35fa/litellm/utils.py#L1544) implementation for additional insights. diff --git a/docs/my-website/docs/hosted.md b/docs/my-website/docs/hosted.md new file mode 100644 index 000000000..92940e858 --- /dev/null +++ b/docs/my-website/docs/hosted.md @@ -0,0 +1,58 @@ +import Image from '@theme/IdealImage'; + +# Hosted LiteLLM Proxy + +LiteLLM maintains the proxy, so you can focus on your core products. + +## [**Get Onboarded**](https://calendly.com/d/4mp-gd3-k5k/litellm-1-1-onboarding-chat) + +This is in alpha. Schedule a call with us, and we'll give you a hosted proxy within 30 minutes. + +[**🚨 Schedule Call**](https://calendly.com/d/4mp-gd3-k5k/litellm-1-1-onboarding-chat) + +### **Status**: Alpha + +Our proxy is already used in production by customers. + +See our status page for [**live reliability**](https://status.litellm.ai/) + +### **Benefits** +- **No Maintenance, No Infra**: We'll maintain the proxy, and spin up any additional infrastructure (e.g.: separate server for spend logs) to make sure you can load balance + track spend across multiple LLM projects. +- **Reliable**: Our hosted proxy is tested on 1k requests per second, making it reliable for high load. +- **Secure**: LiteLLM is currently undergoing SOC-2 compliance, to make sure your data is as secure as possible. 
+ +### Pricing + +Pricing is based on usage. We can figure out a price that works for your team, on the call. + +[**🚨 Schedule Call**](https://calendly.com/d/4mp-gd3-k5k/litellm-1-1-onboarding-chat) + +## **Screenshots** + +### 1. Create keys + + + +### 2. Add Models + + + +### 3. Track spend + + + + +### 4. Configure load balancing + + + +#### [**🚨 Schedule Call**](https://calendly.com/d/4mp-gd3-k5k/litellm-1-1-onboarding-chat) + +## Feature List + +- Easy way to add/remove models +- 100% uptime even when models are added/removed +- custom callback webhooks +- your domain name with HTTPS +- Ability to create/delete User API keys +- Reasonable set monthly cost \ No newline at end of file diff --git a/docs/my-website/docs/langchain/langchain.md b/docs/my-website/docs/langchain/langchain.md index cc12767b8..efa6b2925 100644 --- a/docs/my-website/docs/langchain/langchain.md +++ b/docs/my-website/docs/langchain/langchain.md @@ -14,14 +14,14 @@ import TabItem from '@theme/TabItem'; ```python import os -from langchain.chat_models import ChatLiteLLM -from langchain.prompts.chat import ( +from langchain_community.chat_models import ChatLiteLLM +from langchain_core.prompts import ( ChatPromptTemplate, SystemMessagePromptTemplate, AIMessagePromptTemplate, HumanMessagePromptTemplate, ) -from langchain.schema import AIMessage, HumanMessage, SystemMessage +from langchain_core.messages import AIMessage, HumanMessage, SystemMessage os.environ['OPENAI_API_KEY'] = "" chat = ChatLiteLLM(model="gpt-3.5-turbo") @@ -30,7 +30,7 @@ messages = [ content="what model are you" ) ] -chat(messages) +chat.invoke(messages) ``` @@ -39,14 +39,14 @@ chat(messages) ```python import os -from langchain.chat_models import ChatLiteLLM -from langchain.prompts.chat import ( +from langchain_community.chat_models import ChatLiteLLM +from langchain_core.prompts import ( ChatPromptTemplate, SystemMessagePromptTemplate, AIMessagePromptTemplate, HumanMessagePromptTemplate, ) -from langchain.schema import 
AIMessage, HumanMessage, SystemMessage +from langchain_core.messages import AIMessage, HumanMessage, SystemMessage os.environ['ANTHROPIC_API_KEY'] = "" chat = ChatLiteLLM(model="claude-2", temperature=0.3) @@ -55,7 +55,7 @@ messages = [ content="what model are you" ) ] -chat(messages) +chat.invoke(messages) ``` @@ -64,14 +64,14 @@ chat(messages) ```python import os -from langchain.chat_models import ChatLiteLLM -from langchain.prompts.chat import ( +from langchain_community.chat_models import ChatLiteLLM +from langchain_core.prompts.chat import ( ChatPromptTemplate, SystemMessagePromptTemplate, AIMessagePromptTemplate, HumanMessagePromptTemplate, ) -from langchain.schema import AIMessage, HumanMessage, SystemMessage +from langchain_core.messages import AIMessage, HumanMessage, SystemMessage os.environ['REPLICATE_API_TOKEN'] = "" chat = ChatLiteLLM(model="replicate/llama-2-70b-chat:2c1608e18606fad2812020dc541930f2d0495ce32eee50074220b87300bc16e1") @@ -80,7 +80,7 @@ messages = [ content="what model are you?" ) ] -chat(messages) +chat.invoke(messages) ``` @@ -89,14 +89,14 @@ chat(messages) ```python import os -from langchain.chat_models import ChatLiteLLM -from langchain.prompts.chat import ( +from langchain_community.chat_models import ChatLiteLLM +from langchain_core.prompts import ( ChatPromptTemplate, SystemMessagePromptTemplate, AIMessagePromptTemplate, HumanMessagePromptTemplate, ) -from langchain.schema import AIMessage, HumanMessage, SystemMessage +from langchain_core.messages import AIMessage, HumanMessage, SystemMessage os.environ['COHERE_API_KEY'] = "" chat = ChatLiteLLM(model="command-nightly") @@ -105,32 +105,9 @@ messages = [ content="what model are you?" 
) ] -chat(messages) +chat.invoke(messages) ``` - - - -```python -import os -from langchain.chat_models import ChatLiteLLM -from langchain.prompts.chat import ( - ChatPromptTemplate, - SystemMessagePromptTemplate, - AIMessagePromptTemplate, - HumanMessagePromptTemplate, -) -from langchain.schema import AIMessage, HumanMessage, SystemMessage - -os.environ['PALM_API_KEY'] = "" -chat = ChatLiteLLM(model="palm/chat-bison") -messages = [ - HumanMessage( - content="what model are you?" - ) -] -chat(messages) -``` diff --git a/docs/my-website/docs/load_test.md b/docs/my-website/docs/load_test.md index 5eb6a0610..754db4b8f 100644 --- a/docs/my-website/docs/load_test.md +++ b/docs/my-website/docs/load_test.md @@ -213,3 +213,349 @@ asyncio.run(loadtest_fn()) ``` +## Multi-Instance TPM/RPM Load Test (Router) + +Test if your defined tpm/rpm limits are respected across multiple instances of the Router object. + +In our test: +- Max RPM per deployment is = 100 requests per minute +- Max Throughput / min on router = 200 requests per minute (2 deployments) +- Load we'll send through router = 600 requests per minute + +:::info + +If you don't want to call a real LLM API endpoint, you can setup a fake openai server. [See code](#extra---setup-fake-openai-server) + +::: + +### Code + +Let's hit the router with 600 requests per minute. + +Copy this script 👇. 
Save it as `test_loadtest_router.py` AND run it with `python3 test_loadtest_router.py` + + +```python +from litellm import Router +import litellm +litellm.suppress_debug_info = True +litellm.set_verbose = False +import logging +logging.basicConfig(level=logging.CRITICAL) +import os, random, uuid, time, asyncio + +# Model list for OpenAI and Anthropic models +model_list = [ + { + "model_name": "fake-openai-endpoint", + "litellm_params": { + "model": "gpt-3.5-turbo", + "api_key": "my-fake-key", + "api_base": "http://0.0.0.0:8080", + "rpm": 100 + }, + }, + { + "model_name": "fake-openai-endpoint", + "litellm_params": { + "model": "gpt-3.5-turbo", + "api_key": "my-fake-key", + "api_base": "http://0.0.0.0:8081", + "rpm": 100 + }, + }, +] + +router_1 = Router(model_list=model_list, num_retries=0, enable_pre_call_checks=True, routing_strategy="usage-based-routing-v2", redis_host=os.getenv("REDIS_HOST"), redis_port=os.getenv("REDIS_PORT"), redis_password=os.getenv("REDIS_PASSWORD")) +router_2 = Router(model_list=model_list, num_retries=0, routing_strategy="usage-based-routing-v2", enable_pre_call_checks=True, redis_host=os.getenv("REDIS_HOST"), redis_port=os.getenv("REDIS_PORT"), redis_password=os.getenv("REDIS_PASSWORD")) + + + +async def router_completion_non_streaming(): + try: + client: Router = random.sample([router_1, router_2], 1)[0] # randomly pick b/w clients + # print(f"client={client}") + response = await client.acompletion( + model="fake-openai-endpoint", # [CHANGE THIS] (if you call it something else on your proxy) + messages=[{"role": "user", "content": f"This is a test: {uuid.uuid4()}"}], + ) + return response + except Exception as e: + # print(e) + return None + +async def loadtest_fn(): + start = time.time() + n = 600 # Number of concurrent tasks + tasks = [router_completion_non_streaming() for _ in range(n)] + chat_completions = await asyncio.gather(*tasks) + successful_completions = [c for c in chat_completions if c is not None] + print(n, time.time() - 
start, len(successful_completions)) + +def get_utc_datetime(): + import datetime as dt + from datetime import datetime + + if hasattr(dt, "UTC"): + return datetime.now(dt.UTC) # type: ignore + else: + return datetime.utcnow() # type: ignore + + +# Run the event loop to execute the async function +async def parent_fn(): + for _ in range(10): + dt = get_utc_datetime() + current_minute = dt.strftime("%H-%M") + print(f"triggered new batch - {current_minute}") + await loadtest_fn() + await asyncio.sleep(10) + +asyncio.run(parent_fn()) +``` +## Multi-Instance TPM/RPM Load Test (Proxy) + +Test if your defined tpm/rpm limits are respected across multiple instances. + +The quickest way to do this is by testing the [proxy](./proxy/quick_start.md). The proxy uses the [router](./routing.md) under the hood, so if you're using either of them, this test should work for you. + +In our test: +- Max RPM per deployment is = 100 requests per minute +- Max Throughput / min on proxy = 200 requests per minute (2 deployments) +- Load we'll send to proxy = 600 requests per minute + + +So we'll send 600 requests per minute, but expect only 200 requests per minute to succeed. + +:::info + +If you don't want to call a real LLM API endpoint, you can setup a fake openai server. [See code](#extra---setup-fake-openai-server) + +::: + +### 1. Setup config + +```yaml +model_list: +- litellm_params: + api_base: http://0.0.0.0:8080 + api_key: my-fake-key + model: openai/my-fake-model + rpm: 100 + model_name: fake-openai-endpoint +- litellm_params: + api_base: http://0.0.0.0:8081 + api_key: my-fake-key + model: openai/my-fake-model-2 + rpm: 100 + model_name: fake-openai-endpoint +router_settings: + num_retries: 0 + enable_pre_call_checks: true + redis_host: os.environ/REDIS_HOST ## 👈 IMPORTANT! Setup the proxy w/ redis + redis_password: os.environ/REDIS_PASSWORD + redis_port: os.environ/REDIS_PORT + routing_strategy: usage-based-routing-v2 +``` + +### 2. 
Start proxy 2 instances + +**Instance 1** +```bash +litellm --config /path/to/config.yaml --port 4000 + +## RUNNING on http://0.0.0.0:4000 +``` + +**Instance 2** +```bash +litellm --config /path/to/config.yaml --port 4001 + +## RUNNING on http://0.0.0.0:4001 +``` + +### 3. Run Test + +Let's hit the proxy with 600 requests per minute. + +Copy this script 👇. Save it as `test_loadtest_proxy.py` AND run it with `python3 test_loadtest_proxy.py` + +```python +from openai import AsyncOpenAI, AsyncAzureOpenAI +import random, uuid +import time, asyncio, litellm +# import logging +# logging.basicConfig(level=logging.DEBUG) +#### LITELLM PROXY #### +litellm_client = AsyncOpenAI( + api_key="sk-1234", # [CHANGE THIS] + base_url="http://0.0.0.0:4000" +) +litellm_client_2 = AsyncOpenAI( + api_key="sk-1234", # [CHANGE THIS] + base_url="http://0.0.0.0:4001" +) + +async def proxy_completion_non_streaming(): + try: + client = random.sample([litellm_client, litellm_client_2], 1)[0] # randomly pick b/w clients + # print(f"client={client}") + response = await client.chat.completions.create( + model="fake-openai-endpoint", # [CHANGE THIS] (if you call it something else on your proxy) + messages=[{"role": "user", "content": f"This is a test: {uuid.uuid4()}"}], + ) + return response + except Exception as e: + # print(e) + return None + +async def loadtest_fn(): + start = time.time() + n = 600 # Number of concurrent tasks + tasks = [proxy_completion_non_streaming() for _ in range(n)] + chat_completions = await asyncio.gather(*tasks) + successful_completions = [c for c in chat_completions if c is not None] + print(n, time.time() - start, len(successful_completions)) + +def get_utc_datetime(): + import datetime as dt + from datetime import datetime + + if hasattr(dt, "UTC"): + return datetime.now(dt.UTC) # type: ignore + else: + return datetime.utcnow() # type: ignore + + +# Run the event loop to execute the async function +async def parent_fn(): + for _ in range(10): + dt = 
get_utc_datetime() + current_minute = dt.strftime("%H-%M") + print(f"triggered new batch - {current_minute}") + await loadtest_fn() + await asyncio.sleep(10) + +asyncio.run(parent_fn()) + +``` + + +### Extra - Setup Fake OpenAI Server + +Let's setup a fake openai server with a RPM limit of 100. + +Let's call our file `fake_openai_server.py`. + +``` +# import sys, os +# sys.path.insert( +# 0, os.path.abspath("../") +# ) # Adds the parent directory to the system path +from fastapi import FastAPI, Request, status, HTTPException, Depends +from fastapi.responses import StreamingResponse +from fastapi.security import OAuth2PasswordBearer +from fastapi.middleware.cors import CORSMiddleware +from fastapi.responses import JSONResponse +from fastapi import FastAPI, Request, HTTPException, UploadFile, File +import httpx, os, json +from openai import AsyncOpenAI +from typing import Optional +from slowapi import Limiter +from slowapi.util import get_remote_address +from slowapi.errors import RateLimitExceeded +from fastapi import FastAPI, Request, HTTPException +from fastapi.responses import PlainTextResponse + + +class ProxyException(Exception): + # NOTE: DO NOT MODIFY THIS + # This is used to map exactly to OPENAI Exceptions + def __init__( + self, + message: str, + type: str, + param: Optional[str], + code: Optional[int], + ): + self.message = message + self.type = type + self.param = param + self.code = code + + def to_dict(self) -> dict: + """Converts the ProxyException instance to a dictionary.""" + return { + "message": self.message, + "type": self.type, + "param": self.param, + "code": self.code, + } + + +limiter = Limiter(key_func=get_remote_address) +app = FastAPI() +app.state.limiter = limiter + +@app.exception_handler(RateLimitExceeded) +async def _rate_limit_exceeded_handler(request: Request, exc: RateLimitExceeded): + return JSONResponse(status_code=429, + content={"detail": "Rate Limited!"}) + +app.add_exception_handler(RateLimitExceeded, 
_rate_limit_exceeded_handler) + +app.add_middleware( + CORSMiddleware, + allow_origins=["*"], + allow_credentials=True, + allow_methods=["*"], + allow_headers=["*"], +) + +# for completion +@app.post("/chat/completions") +@app.post("/v1/chat/completions") +@limiter.limit("100/minute") +async def completion(request: Request): + # raise HTTPException(status_code=429, detail="Rate Limited!") + return { + "id": "chatcmpl-123", + "object": "chat.completion", + "created": 1677652288, + "model": None, + "system_fingerprint": "fp_44709d6fcb", + "choices": [{ + "index": 0, + "message": { + "role": "assistant", + "content": "\n\nHello there, how may I assist you today?", + }, + "logprobs": None, + "finish_reason": "stop" + }], + "usage": { + "prompt_tokens": 9, + "completion_tokens": 12, + "total_tokens": 21 + } + } + +if __name__ == "__main__": + import socket + import uvicorn + port = 8080 + while True: + sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM) + result = sock.connect_ex(('0.0.0.0', port)) + if result != 0: + print(f"Port {port} is available, starting server...") + break + else: + port += 1 + + uvicorn.run(app, host="0.0.0.0", port=port) +``` + +```bash +python3 fake_openai_server.py +``` diff --git a/docs/my-website/docs/observability/callbacks.md b/docs/my-website/docs/observability/callbacks.md index fbc0733e5..af745e845 100644 --- a/docs/my-website/docs/observability/callbacks.md +++ b/docs/my-website/docs/observability/callbacks.md @@ -8,6 +8,7 @@ liteLLM supports: - [Custom Callback Functions](https://docs.litellm.ai/docs/observability/custom_callback) - [Lunary](https://lunary.ai/docs) +- [Langfuse](https://langfuse.com/docs) - [Helicone](https://docs.helicone.ai/introduction) - [Traceloop](https://traceloop.com/docs) - [Athina](https://docs.athina.ai/) @@ -22,8 +23,8 @@ from litellm import completion # set callbacks litellm.input_callback=["sentry"] # for sentry breadcrumbing - logs the input being sent to the api 
-litellm.success_callback=["posthog", "helicone", "lunary", "athina"] -litellm.failure_callback=["sentry", "lunary"] +litellm.success_callback=["posthog", "helicone", "langfuse", "lunary", "athina"] +litellm.failure_callback=["sentry", "lunary", "langfuse"] ## set env variables os.environ['SENTRY_DSN'], os.environ['SENTRY_API_TRACE_RATE']= "" @@ -32,6 +33,9 @@ os.environ["HELICONE_API_KEY"] = "" os.environ["TRACELOOP_API_KEY"] = "" os.environ["LUNARY_PUBLIC_KEY"] = "" os.environ["ATHINA_API_KEY"] = "" +os.environ["LANGFUSE_PUBLIC_KEY"] = "" +os.environ["LANGFUSE_SECRET_KEY"] = "" +os.environ["LANGFUSE_HOST"] = "" response = completion(model="gpt-3.5-turbo", messages=messages) ``` diff --git a/docs/my-website/docs/observability/custom_callback.md b/docs/my-website/docs/observability/custom_callback.md index 7cc38168b..316822227 100644 --- a/docs/my-website/docs/observability/custom_callback.md +++ b/docs/my-website/docs/observability/custom_callback.md @@ -331,49 +331,25 @@ response = litellm.completion(model="gpt-3.5-turbo", messages=messages, metadata ## Examples ### Custom Callback to track costs for Streaming + Non-Streaming +By default, the response cost is accessible in the logging object via `kwargs["response_cost"]` on success (sync + async) ```python +# Step 1. 
Write your custom callback function def track_cost_callback( kwargs, # kwargs to completion completion_response, # response from completion start_time, end_time # start/end time ): try: - # init logging config - logging.basicConfig( - filename='cost.log', - level=logging.INFO, - format='%(asctime)s - %(message)s', - datefmt='%Y-%m-%d %H:%M:%S' - ) - - # check if it has collected an entire stream response - if "complete_streaming_response" in kwargs: - # for tracking streaming cost we pass the "messages" and the output_text to litellm.completion_cost - completion_response=kwargs["complete_streaming_response"] - input_text = kwargs["messages"] - output_text = completion_response["choices"][0]["message"]["content"] - response_cost = litellm.completion_cost( - model = kwargs["model"], - messages = input_text, - completion=output_text - ) - print("streaming response_cost", response_cost) - logging.info(f"Model {kwargs['model']} Cost: ${response_cost:.8f}") - - # for non streaming responses - else: - # we pass the completion_response obj - if kwargs["stream"] != True: - response_cost = litellm.completion_cost(completion_response=completion_response) - print("regular response_cost", response_cost) - logging.info(f"Model {completion_response.model} Cost: ${response_cost:.8f}") + response_cost = kwargs["response_cost"] # litellm calculates response cost for you + print("regular response_cost", response_cost) except: pass -# Assign the custom callback function +# Step 2. Assign the custom callback function litellm.success_callback = [track_cost_callback] +# Step 3. 
Make litellm.completion call response = completion( model="gpt-3.5-turbo", messages=[ diff --git a/docs/my-website/docs/observability/greenscale_integration.md b/docs/my-website/docs/observability/greenscale_integration.md new file mode 100644 index 000000000..0dd673226 --- /dev/null +++ b/docs/my-website/docs/observability/greenscale_integration.md @@ -0,0 +1,68 @@ +# Greenscale - Track LLM Spend and Responsible Usage + +[Greenscale](https://greenscale.ai/) is a production monitoring platform for your LLM-powered app that provides you granular key insights into your GenAI spending and responsible usage. Greenscale only captures metadata to minimize the exposure risk of personally identifiable information (PII). + +## Getting Started + +Use Greenscale to log requests across all LLM Providers + +liteLLM provides `callbacks`, making it easy for you to log data depending on the status of your responses. + +## Using Callbacks + +First, email `hello@greenscale.ai` to get an API_KEY. + +Use just 1 line of code, to instantly log your responses **across all providers** with Greenscale: + +```python +litellm.success_callback = ["greenscale"] +``` + +### Complete code + +```python +from litellm import completion + +## set env variables +os.environ['GREENSCALE_API_KEY'] = 'your-greenscale-api-key' +os.environ['GREENSCALE_ENDPOINT'] = 'greenscale-endpoint' +os.environ["OPENAI_API_KEY"]= "" + +# set callback +litellm.success_callback = ["greenscale"] + +#openai call +response = completion( + model="gpt-3.5-turbo", + messages=[{"role": "user", "content": "Hi 👋 - i'm openai"}] + metadata={ + "greenscale_project": "acme-project", + "greenscale_application": "acme-application" + } +) +``` + +## Additional information in metadata + +You can send any additional information to Greenscale by using the `metadata` field in completion and `greenscale_` prefix. 
This can be useful for sending metadata about the request, such as the project and application name, customer_id, environment, or any other information you want to track. `greenscale_project` and `greenscale_application` are required fields.
+
+```python
+#openai call with additional metadata
+response = completion(
+    model="gpt-3.5-turbo",
+    messages=[
+        {"role": "user", "content": "Hi 👋 - i'm openai"}
+    ],
+    metadata={
+        "greenscale_project": "acme-project",
+        "greenscale_application": "acme-application",
+        "greenscale_customer_id": "customer-123"
+    }
+)
+```
+
+## Support & Talk with Greenscale Team
+
+- [Schedule Demo 👋](https://calendly.com/nandesh/greenscale)
+- [Website 💻](https://greenscale.ai)
+- Our email ✉️ `hello@greenscale.ai`
diff --git a/docs/my-website/docs/observability/lago.md b/docs/my-website/docs/observability/lago.md
new file mode 100644
index 000000000..337a2b553
--- /dev/null
+++ b/docs/my-website/docs/observability/lago.md
@@ -0,0 +1,173 @@
+import Image from '@theme/IdealImage';
+import Tabs from '@theme/Tabs';
+import TabItem from '@theme/TabItem';
+
+# Lago - Usage Based Billing
+
+[Lago](https://www.getlago.com/) offers a self-hosted and cloud, metering and usage-based billing solution.
+ + + +## Quick Start +Use just 1 lines of code, to instantly log your responses **across all providers** with Lago + +Get your Lago [API Key](https://docs.getlago.com/guide/self-hosted/docker#find-your-api-key) + +```python +litellm.callbacks = ["lago"] # logs cost + usage of successful calls to lago +``` + + + + + +```python +# pip install lago +import litellm +import os + +os.environ["LAGO_API_BASE"] = "" # http://0.0.0.0:3000 +os.environ["LAGO_API_KEY"] = "" +os.environ["LAGO_API_EVENT_CODE"] = "" # The billable metric's code - https://docs.getlago.com/guide/events/ingesting-usage#define-a-billable-metric + +# LLM API Keys +os.environ['OPENAI_API_KEY']="" + +# set lago as a callback, litellm will send the data to lago +litellm.success_callback = ["lago"] + +# openai call +response = litellm.completion( + model="gpt-3.5-turbo", + messages=[ + {"role": "user", "content": "Hi 👋 - i'm openai"} + ], + user="your_customer_id" # 👈 SET YOUR CUSTOMER ID HERE +) +``` + + + + +1. Add to Config.yaml +```yaml +model_list: +- litellm_params: + api_base: https://openai-function-calling-workers.tasslexyz.workers.dev/ + api_key: my-fake-key + model: openai/my-fake-model + model_name: fake-openai-endpoint + +litellm_settings: + callbacks: ["lago"] # 👈 KEY CHANGE +``` + +2. Start Proxy + +``` +litellm --config /path/to/config.yaml +``` + +3. Test it! 
+ + + + +```bash +curl --location 'http://0.0.0.0:4000/chat/completions' \ +--header 'Content-Type: application/json' \ +--data ' { + "model": "fake-openai-endpoint", + "messages": [ + { + "role": "user", + "content": "what llm are you" + } + ], + "user": "your-customer-id" # 👈 SET YOUR CUSTOMER ID + } +' +``` + + + +```python +import openai +client = openai.OpenAI( + api_key="anything", + base_url="http://0.0.0.0:4000" +) + +# request sent to model set on litellm proxy, `litellm --model` +response = client.chat.completions.create(model="gpt-3.5-turbo", messages = [ + { + "role": "user", + "content": "this is a test request, write a short poem" + } +], user="my_customer_id") # 👈 whatever your customer id is + +print(response) +``` + + + +```python +from langchain.chat_models import ChatOpenAI +from langchain.prompts.chat import ( + ChatPromptTemplate, + HumanMessagePromptTemplate, + SystemMessagePromptTemplate, +) +from langchain.schema import HumanMessage, SystemMessage +import os + +os.environ["OPENAI_API_KEY"] = "anything" + +chat = ChatOpenAI( + openai_api_base="http://0.0.0.0:4000", + model = "gpt-3.5-turbo", + temperature=0.1, + extra_body={ + "user": "my_customer_id" # 👈 whatever your customer id is + } +) + +messages = [ + SystemMessage( + content="You are a helpful assistant that im using to make a test request to." + ), + HumanMessage( + content="test from litellm. 
tell me why it's amazing in 1 sentence" + ), +] +response = chat(messages) + +print(response) +``` + + + + + + + + +## Advanced - Lagos Logging object + +This is what LiteLLM will log to Lagos + +``` +{ + "event": { + "transaction_id": "", + "external_customer_id": , # passed via `user` param in /chat/completion call - https://platform.openai.com/docs/api-reference/chat/create + "code": os.getenv("LAGO_API_EVENT_CODE"), + "properties": { + "input_tokens": , + "output_tokens": , + "model": , + "response_cost": , # 👈 LITELLM CALCULATED RESPONSE COST - https://github.com/BerriAI/litellm/blob/d43f75150a65f91f60dc2c0c9462ce3ffc713c1f/litellm/utils.py#L1473 + } + } +} +``` \ No newline at end of file diff --git a/docs/my-website/docs/observability/langfuse_integration.md b/docs/my-website/docs/observability/langfuse_integration.md index 50b016d09..6dd5377ea 100644 --- a/docs/my-website/docs/observability/langfuse_integration.md +++ b/docs/my-website/docs/observability/langfuse_integration.md @@ -94,9 +94,10 @@ print(response) ``` -### Set Custom Trace ID, Trace User ID and Tags +### Set Custom Trace ID, Trace User ID, Trace Metadata, Trace Version, Trace Release and Tags + +Pass `trace_id`, `trace_user_id`, `trace_metadata`, `trace_version`, `trace_release`, `tags` in `metadata` -Pass `trace_id`, `trace_user_id` in `metadata` ```python import litellm @@ -121,10 +122,21 @@ response = completion( metadata={ "generation_name": "ishaan-test-generation", # set langfuse Generation Name "generation_id": "gen-id22", # set langfuse Generation ID - "trace_id": "trace-id22", # set langfuse Trace ID + "version": "test-generation-version" # set langfuse Generation Version "trace_user_id": "user-id2", # set langfuse Trace User ID "session_id": "session-1", # set langfuse Session ID - "tags": ["tag1", "tag2"] # set langfuse Tags + "tags": ["tag1", "tag2"], # set langfuse Tags + "trace_id": "trace-id22", # set langfuse Trace ID + "trace_metadata": {"key": "value"}, # set langfuse Trace 
Metadata + "trace_version": "test-trace-version", # set langfuse Trace Version (if not set, defaults to Generation Version) + "trace_release": "test-trace-release", # set langfuse Trace Release + ### OR ### + "existing_trace_id": "trace-id22", # if generation is continuation of past trace. This prevents default behaviour of setting a trace name + ### OR enforce that certain fields are trace overwritten in the trace during the continuation ### + "existing_trace_id": "trace-id22", + "trace_metadata": {"key": "updated_trace_value"}, # The new value to use for the langfuse Trace Metadata + "update_trace_keys": ["input", "output", "trace_metadata"], # Updates the trace input & output to be this generations input & output also updates the Trace Metadata to match the passed in value + "debug_langfuse": True, # Will log the exact metadata sent to litellm for the trace/generation as `metadata_passed_to_litellm` }, ) @@ -132,6 +144,38 @@ print(response) ``` +### Trace & Generation Parameters + +#### Trace Specific Parameters + +* `trace_id` - Identifier for the trace, must use `existing_trace_id` instead of `trace_id` if this is an existing trace, auto-generated by default +* `trace_name` - Name of the trace, auto-generated by default +* `session_id` - Session identifier for the trace, defaults to `None` +* `trace_version` - Version for the trace, defaults to value for `version` +* `trace_release` - Release for the trace, defaults to `None` +* `trace_metadata` - Metadata for the trace, defaults to `None` +* `trace_user_id` - User identifier for the trace, defaults to completion argument `user` +* `tags` - Tags for the trace, defeaults to `None` + +##### Updatable Parameters on Continuation + +The following parameters can be updated on a continuation of a trace by passing in the following values into the `update_trace_keys` in the metadata of the completion. 
+ +* `input` - Will set the traces input to be the input of this latest generation +* `output` - Will set the traces output to be the output of this generation +* `trace_version` - Will set the trace version to be the provided value (To use the latest generations version instead, use `version`) +* `trace_release` - Will set the trace release to be the provided value +* `trace_metadata` - Will set the trace metadata to the provided value +* `trace_user_id` - Will set the trace user id to the provided value + +#### Generation Specific Parameters + +* `generation_id` - Identifier for the generation, auto-generated by default +* `generation_name` - Identifier for the generation, auto-generated by default +* `prompt` - Langfuse prompt object used for the generation, defaults to None + +Any other key value pairs passed into the metadata not listed in the above spec for a `litellm` completion will be added as a metadata key value pair for the generation. + ### Use LangChain ChatLiteLLM + Langfuse Pass `trace_user_id`, `session_id` in model_kwargs ```python @@ -167,6 +211,21 @@ messages = [ chat(messages) ``` +## Redacting Messages, Response Content from Langfuse Logging + +### Redact Messages and Responses from all Langfuse Logging + +Set `litellm.turn_off_message_logging=True` This will prevent the messages and responses from being logged to langfuse, but request metadata will still be logged. + +### Redact Messages and Responses from specific Langfuse Logging + +In the metadata typically passed for text completion or embedding calls you can set specific keys to mask the messages and responses for this call. + +Setting `mask_input` to `True` will mask the input from being logged for this call + +Setting `mask_output` to `True` will make the output from being logged for this call. 
+ +Be aware that if you are continuing an existing trace, and you set `update_trace_keys` to include either `input` or `output` and you set the corresponding `mask_input` or `mask_output`, then that trace will have its existing input and/or output replaced with a redacted message. ## Troubleshooting & Errors ### Data not getting logged to Langfuse ? diff --git a/docs/my-website/docs/observability/langsmith_integration.md b/docs/my-website/docs/observability/langsmith_integration.md index ca0421b15..b115866d5 100644 --- a/docs/my-website/docs/observability/langsmith_integration.md +++ b/docs/my-website/docs/observability/langsmith_integration.md @@ -57,7 +57,7 @@ os.environ["LANGSMITH_API_KEY"] = "" os.environ['OPENAI_API_KEY']="" # set langfuse as a callback, litellm will send the data to langfuse -litellm.success_callback = ["langfuse"] +litellm.success_callback = ["langsmith"] response = litellm.completion( model="gpt-3.5-turbo", @@ -71,9 +71,26 @@ response = litellm.completion( ) print(response) ``` + +### Make LiteLLM Proxy use Custom `LANGSMITH_BASE_URL` + +If you're using a custom LangSmith instance, you can set the +`LANGSMITH_BASE_URL` environment variable to point to your instance. 
+For example, you can make LiteLLM Proxy log to a local LangSmith instance with +this config: + +```yaml +litellm_settings: + success_callback: ["langsmith"] + +environment_variables: + LANGSMITH_BASE_URL: "http://localhost:1984" + LANGSMITH_PROJECT: "litellm-proxy" +``` + ## Support & Talk to Founders - [Schedule Demo 👋](https://calendly.com/d/4mp-gd3-k5k/berriai-1-1-onboarding-litellm-hosted-version) - [Community Discord 💭](https://discord.gg/wuPM9dRgDw) - Our numbers 📞 +1 (770) 8783-106 / ‭+1 (412) 618-6238‬ -- Our emails ✉️ ishaan@berri.ai / krrish@berri.ai \ No newline at end of file +- Our emails ✉️ ishaan@berri.ai / krrish@berri.ai diff --git a/docs/my-website/docs/observability/openmeter.md b/docs/my-website/docs/observability/openmeter.md new file mode 100644 index 000000000..2f5356875 --- /dev/null +++ b/docs/my-website/docs/observability/openmeter.md @@ -0,0 +1,97 @@ +import Image from '@theme/IdealImage'; +import Tabs from '@theme/Tabs'; +import TabItem from '@theme/TabItem'; + +# OpenMeter - Usage-Based Billing + +[OpenMeter](https://openmeter.io/) is an Open Source Usage-Based Billing solution for AI/Cloud applications. It integrates with Stripe for easy billing. + + + +:::info +We want to learn how we can make the callbacks better! 
Meet the LiteLLM [founders](https://calendly.com/d/4mp-gd3-k5k/berriai-1-1-onboarding-litellm-hosted-version) or +join our [discord](https://discord.gg/wuPM9dRgDw) +::: + + +## Quick Start +Use just 2 lines of code, to instantly log your responses **across all providers** with OpenMeter + +Get your OpenMeter API Key from https://openmeter.cloud/meters + +```python +litellm.callbacks = ["openmeter"] # logs cost + usage of successful calls to openmeter +``` + + + + + +```python +# pip install openmeter +import litellm +import os + +# from https://openmeter.cloud +os.environ["OPENMETER_API_ENDPOINT"] = "" +os.environ["OPENMETER_API_KEY"] = "" + +# LLM API Keys +os.environ['OPENAI_API_KEY']="" + +# set openmeter as a callback, litellm will send the data to openmeter +litellm.callbacks = ["openmeter"] + +# openai call +response = litellm.completion( + model="gpt-3.5-turbo", + messages=[ + {"role": "user", "content": "Hi 👋 - i'm openai"} + ] +) +``` + + + + +1. Add to Config.yaml +```yaml +model_list: +- litellm_params: + api_base: https://openai-function-calling-workers.tasslexyz.workers.dev/ + api_key: my-fake-key + model: openai/my-fake-model + model_name: fake-openai-endpoint + +litellm_settings: + callbacks: ["openmeter"] # 👈 KEY CHANGE +``` + +2. Start Proxy + +``` +litellm --config /path/to/config.yaml +``` + +3. Test it! 
+ +```bash +curl --location 'http://0.0.0.0:4000/chat/completions' \ +--header 'Content-Type: application/json' \ +--data ' { + "model": "fake-openai-endpoint", + "messages": [ + { + "role": "user", + "content": "what llm are you" + } + ], + } +' +``` + + + + + + \ No newline at end of file diff --git a/docs/my-website/docs/observability/sentry.md b/docs/my-website/docs/observability/sentry.md index 255dd55cf..5877db661 100644 --- a/docs/my-website/docs/observability/sentry.md +++ b/docs/my-website/docs/observability/sentry.md @@ -40,5 +40,9 @@ response = completion(model="gpt-3.5-turbo", messages=[{"role": "user", "content print(response) ``` +## Redacting Messages, Response Content from Sentry Logging + +Set `litellm.turn_off_message_logging=True` This will prevent the messages and responses from being logged to sentry, but request metadata will still be logged. + [Let us know](https://github.com/BerriAI/litellm/issues/new?assignees=&labels=enhancement&projects=&template=feature_request.yml&title=%5BFeature%5D%3A+) if you need any additional options from Sentry. 
diff --git a/docs/my-website/docs/providers/anthropic.md b/docs/my-website/docs/providers/anthropic.md index 0f9ba88ff..38be0c433 100644 --- a/docs/my-website/docs/providers/anthropic.md +++ b/docs/my-website/docs/providers/anthropic.md @@ -223,6 +223,117 @@ assert isinstance( ``` +### Setting `anthropic-beta` Header in Requests + +Pass the the `extra_headers` param to litellm, All headers will be forwarded to Anthropic API + +```python +response = completion( + model="anthropic/claude-3-opus-20240229", + messages=messages, + tools=tools, +) +``` + +### Forcing Anthropic Tool Use + +If you want Claude to use a specific tool to answer the user’s question + +You can do this by specifying the tool in the `tool_choice` field like so: +```python +response = completion( + model="anthropic/claude-3-opus-20240229", + messages=messages, + tools=tools, + tool_choice={"type": "tool", "name": "get_weather"}, +) +``` + + +### Parallel Function Calling + +Here's how to pass the result of a function call back to an anthropic model: + +```python +from litellm import completion +import os + +os.environ["ANTHROPIC_API_KEY"] = "sk-ant.." + + +litellm.set_verbose = True + +### 1ST FUNCTION CALL ### +tools = [ + { + "type": "function", + "function": { + "name": "get_current_weather", + "description": "Get the current weather in a given location", + "parameters": { + "type": "object", + "properties": { + "location": { + "type": "string", + "description": "The city and state, e.g. 
San Francisco, CA", + }, + "unit": {"type": "string", "enum": ["celsius", "fahrenheit"]}, + }, + "required": ["location"], + }, + }, + } +] +messages = [ + { + "role": "user", + "content": "What's the weather like in Boston today in Fahrenheit?", + } +] +try: + # test without max tokens + response = completion( + model="anthropic/claude-3-opus-20240229", + messages=messages, + tools=tools, + tool_choice="auto", + ) + # Add any assertions, here to check response args + print(response) + assert isinstance(response.choices[0].message.tool_calls[0].function.name, str) + assert isinstance( + response.choices[0].message.tool_calls[0].function.arguments, str + ) + + messages.append( + response.choices[0].message.model_dump() + ) # Add assistant tool invokes + tool_result = ( + '{"location": "Boston", "temperature": "72", "unit": "fahrenheit"}' + ) + # Add user submitted tool results in the OpenAI format + messages.append( + { + "tool_call_id": response.choices[0].message.tool_calls[0].id, + "role": "tool", + "name": response.choices[0].message.tool_calls[0].function.name, + "content": tool_result, + } + ) + ### 2ND FUNCTION CALL ### + # In the second response, Claude should deduce answer from tool results + second_response = completion( + model="anthropic/claude-3-opus-20240229", + messages=messages, + tools=tools, + tool_choice="auto", + ) + print(second_response) +except Exception as e: + print(f"An error occurred - {str(e)}") +``` + +s/o @[Shekhar Patnaik](https://www.linkedin.com/in/patnaikshekhar) for requesting this! ## Usage - Vision diff --git a/docs/my-website/docs/providers/azure_ai.md b/docs/my-website/docs/providers/azure_ai.md index b8dbe16ba..ed13c5664 100644 --- a/docs/my-website/docs/providers/azure_ai.md +++ b/docs/my-website/docs/providers/azure_ai.md @@ -3,8 +3,6 @@ import TabItem from '@theme/TabItem'; # Azure AI Studio -## Sample Usage - **Ensure the following:** 1. 
The API Base passed ends in the `/v1/` prefix example: @@ -14,8 +12,11 @@ import TabItem from '@theme/TabItem'; 2. The `model` passed is listed in [supported models](#supported-models). You **DO NOT** Need to pass your deployment name to litellm. Example `model=azure/Mistral-large-nmefg` +## Usage + + + -**Quick Start** ```python import litellm response = litellm.completion( @@ -26,6 +27,9 @@ response = litellm.completion( ) ``` + + + ## Sample Usage - LiteLLM Proxy 1. Add models to your config.yaml @@ -99,6 +103,107 @@ response = litellm.completion( + + + +## Function Calling + + + + +```python +from litellm import completion + +# set env +os.environ["AZURE_MISTRAL_API_KEY"] = "your-api-key" +os.environ["AZURE_MISTRAL_API_BASE"] = "your-api-base" + +tools = [ + { + "type": "function", + "function": { + "name": "get_current_weather", + "description": "Get the current weather in a given location", + "parameters": { + "type": "object", + "properties": { + "location": { + "type": "string", + "description": "The city and state, e.g. 
San Francisco, CA", + }, + "unit": {"type": "string", "enum": ["celsius", "fahrenheit"]}, + }, + "required": ["location"], + }, + }, + } +] +messages = [{"role": "user", "content": "What's the weather like in Boston today?"}] + +response = completion( + model="azure/mistral-large-latest", + api_base=os.getenv("AZURE_MISTRAL_API_BASE") + api_key=os.getenv("AZURE_MISTRAL_API_KEY") + messages=messages, + tools=tools, + tool_choice="auto", +) +# Add any assertions, here to check response args +print(response) +assert isinstance(response.choices[0].message.tool_calls[0].function.name, str) +assert isinstance( + response.choices[0].message.tool_calls[0].function.arguments, str +) + +``` + + + + + +```bash +curl http://0.0.0.0:4000/v1/chat/completions \ +-H "Content-Type: application/json" \ +-H "Authorization: Bearer $YOUR_API_KEY" \ +-d '{ + "model": "mistral", + "messages": [ + { + "role": "user", + "content": "What'\''s the weather like in Boston today?" + } + ], + "tools": [ + { + "type": "function", + "function": { + "name": "get_current_weather", + "description": "Get the current weather in a given location", + "parameters": { + "type": "object", + "properties": { + "location": { + "type": "string", + "description": "The city and state, e.g. 
San Francisco, CA" + }, + "unit": { + "type": "string", + "enum": ["celsius", "fahrenheit"] + } + }, + "required": ["location"] + } + } + } + ], + "tool_choice": "auto" +}' + +``` + + + + ## Supported Models | Model Name | Function Call | diff --git a/docs/my-website/docs/providers/bedrock.md b/docs/my-website/docs/providers/bedrock.md index 590ffc423..147c12e65 100644 --- a/docs/my-website/docs/providers/bedrock.md +++ b/docs/my-website/docs/providers/bedrock.md @@ -535,7 +535,8 @@ print(response) | Model Name | Function Call | |----------------------|---------------------------------------------| -| Titan Embeddings - G1 | `embedding(model="bedrock/amazon.titan-embed-text-v1", input=input)` | +| Titan Embeddings V2 | `embedding(model="bedrock/amazon.titan-embed-text-v2:0", input=input)` | +| Titan Embeddings - V1 | `embedding(model="bedrock/amazon.titan-embed-text-v1", input=input)` | | Cohere Embeddings - English | `embedding(model="bedrock/cohere.embed-english-v3", input=input)` | | Cohere Embeddings - Multilingual | `embedding(model="bedrock/cohere.embed-multilingual-v3", input=input)` | diff --git a/docs/my-website/docs/providers/clarifai.md b/docs/my-website/docs/providers/clarifai.md new file mode 100644 index 000000000..acc8c54be --- /dev/null +++ b/docs/my-website/docs/providers/clarifai.md @@ -0,0 +1,177 @@ + +# Clarifai +Anthropic, OpenAI, Mistral, Llama and Gemini LLMs are Supported on Clarifai. + +## Pre-Requisites + +`pip install clarifai` + +`pip install litellm` + +## Required Environment Variables +To obtain your Clarifai Personal access token follow this [link](https://docs.clarifai.com/clarifai-basics/authentication/personal-access-tokens/). Optionally the PAT can also be passed in `completion` function. 
+
+```python
+os.environ["CLARIFAI_API_KEY"] = "YOUR_CLARIFAI_PAT" # CLARIFAI_PAT
+```
+
+## Usage
+
+```python
+import os
+from litellm import completion
+
+os.environ["CLARIFAI_API_KEY"] = ""
+
+response = completion(
+    model="clarifai/mistralai.completion.mistral-large",
+    messages=[{ "content": "Tell me a joke about physics?","role": "user"}]
+)
+```
+
+**Output**
+```json
+{
+  "id": "chatcmpl-572701ee-9ab2-411c-ac75-46c1ba18e781",
+  "choices": [
+    {
+      "finish_reason": "stop",
+      "index": 1,
+      "message": {
+        "content": "Sure, here's a physics joke for you:\n\nWhy can't you trust an atom?\n\nBecause they make up everything!",
+        "role": "assistant"
+      }
+    }
+  ],
+  "created": 1714410197,
+  "model": "https://api.clarifai.com/v2/users/mistralai/apps/completion/models/mistral-large/outputs",
+  "object": "chat.completion",
+  "system_fingerprint": null,
+  "usage": {
+    "prompt_tokens": 14,
+    "completion_tokens": 24,
+    "total_tokens": 38
+  }
+}
+```
+
+## Clarifai models
+liteLLM supports non-streaming requests to all models on [Clarifai community](https://clarifai.com/explore/models?filterData=%5B%7B%22field%22%3A%22use_cases%22%2C%22value%22%3A%5B%22llm%22%5D%7D%5D&page=1&perPage=24)
+
+Example Usage - Note: liteLLM supports all models deployed on Clarifai
+
+## Llama LLMs
+| Model Name | Function Call |
+|---------------------------|---------------------------------|
+| clarifai/meta.Llama-2.llama2-7b-chat | `completion('clarifai/meta.Llama-2.llama2-7b-chat', messages)` |
+| clarifai/meta.Llama-2.llama2-13b-chat | `completion('clarifai/meta.Llama-2.llama2-13b-chat', messages)` |
+| clarifai/meta.Llama-2.llama2-70b-chat | `completion('clarifai/meta.Llama-2.llama2-70b-chat', messages)` |
+| clarifai/meta.Llama-2.codeLlama-70b-Python | `completion('clarifai/meta.Llama-2.codeLlama-70b-Python', messages)` |
+| clarifai/meta.Llama-2.codeLlama-70b-Instruct | `completion('clarifai/meta.Llama-2.codeLlama-70b-Instruct', messages)` |
+
+## Mistral LLMs
+| Model Name | Function Call |
+|---------------------------------------------|------------------------------------------------------------------------| +| clarifai/mistralai.completion.mixtral-8x22B | `completion('clarifai/mistralai.completion.mixtral-8x22B', messages)` | +| clarifai/mistralai.completion.mistral-large | `completion('clarifai/mistralai.completion.mistral-large', messages)` | +| clarifai/mistralai.completion.mistral-medium | `completion('clarifai/mistralai.completion.mistral-medium', messages)` | +| clarifai/mistralai.completion.mistral-small | `completion('clarifai/mistralai.completion.mistral-small', messages)` | +| clarifai/mistralai.completion.mixtral-8x7B-Instruct-v0_1 | `completion('clarifai/mistralai.completion.mixtral-8x7B-Instruct-v0_1', messages)` +| clarifai/mistralai.completion.mistral-7B-OpenOrca | `completion('clarifai/mistralai.completion.mistral-7B-OpenOrca', messages)` | +| clarifai/mistralai.completion.openHermes-2-mistral-7B | `completion('clarifai/mistralai.completion.openHermes-2-mistral-7B', messages)` | + + +## Jurassic LLMs +| Model Name | Function Call | +|-----------------------------------------------|---------------------------------------------------------------------| +| clarifai/ai21.complete.Jurassic2-Grande | `completion('clarifai/ai21.complete.Jurassic2-Grande', messages)` | +| clarifai/ai21.complete.Jurassic2-Grande-Instruct | `completion('clarifai/ai21.complete.Jurassic2-Grande-Instruct', messages)` | +| clarifai/ai21.complete.Jurassic2-Jumbo-Instruct | `completion('clarifai/ai21.complete.Jurassic2-Jumbo-Instruct', messages)` | +| clarifai/ai21.complete.Jurassic2-Jumbo | `completion('clarifai/ai21.complete.Jurassic2-Jumbo', messages)` | +| clarifai/ai21.complete.Jurassic2-Large | `completion('clarifai/ai21.complete.Jurassic2-Large', messages)` | + +## Wizard LLMs + +| Model Name | Function Call | +|-----------------------------------------------|---------------------------------------------------------------------| +| 
clarifai/wizardlm.generate.wizardCoder-Python-34B | `completion('clarifai/wizardlm.generate.wizardCoder-Python-34B', messages)` | +| clarifai/wizardlm.generate.wizardLM-70B | `completion('clarifai/wizardlm.generate.wizardLM-70B', messages)` | +| clarifai/wizardlm.generate.wizardLM-13B | `completion('clarifai/wizardlm.generate.wizardLM-13B', messages)` | +| clarifai/wizardlm.generate.wizardCoder-15B | `completion('clarifai/wizardlm.generate.wizardCoder-15B', messages)` | + +## Anthropic models + +| Model Name | Function Call | +|-----------------------------------------------|---------------------------------------------------------------------| +| clarifai/anthropic.completion.claude-v1 | `completion('clarifai/anthropic.completion.claude-v1', messages)` | +| clarifai/anthropic.completion.claude-instant-1_2 | `completion('clarifai/anthropic.completion.claude-instant-1_2', messages)` | +| clarifai/anthropic.completion.claude-instant | `completion('clarifai/anthropic.completion.claude-instant', messages)` | +| clarifai/anthropic.completion.claude-v2 | `completion('clarifai/anthropic.completion.claude-v2', messages)` | +| clarifai/anthropic.completion.claude-2_1 | `completion('clarifai/anthropic.completion.claude-2_1', messages)` | +| clarifai/anthropic.completion.claude-3-opus | `completion('clarifai/anthropic.completion.claude-3-opus', messages)` | +| clarifai/anthropic.completion.claude-3-sonnet | `completion('clarifai/anthropic.completion.claude-3-sonnet', messages)` | + +## OpenAI GPT LLMs + +| Model Name | Function Call | +|-----------------------------------------------|---------------------------------------------------------------------| +| clarifai/openai.chat-completion.GPT-4 | `completion('clarifai/openai.chat-completion.GPT-4', messages)` | +| clarifai/openai.chat-completion.GPT-3_5-turbo | `completion('clarifai/openai.chat-completion.GPT-3_5-turbo', messages)` | +| clarifai/openai.chat-completion.gpt-4-turbo | 
`completion('clarifai/openai.chat-completion.gpt-4-turbo', messages)` | +| clarifai/openai.completion.gpt-3_5-turbo-instruct | `completion('clarifai/openai.completion.gpt-3_5-turbo-instruct', messages)` | + +## GCP LLMs + +| Model Name | Function Call | +|-----------------------------------------------|---------------------------------------------------------------------| +| clarifai/gcp.generate.gemini-1_5-pro | `completion('clarifai/gcp.generate.gemini-1_5-pro', messages)` | +| clarifai/gcp.generate.imagen-2 | `completion('clarifai/gcp.generate.imagen-2', messages)` | +| clarifai/gcp.generate.code-gecko | `completion('clarifai/gcp.generate.code-gecko', messages)` | +| clarifai/gcp.generate.code-bison | `completion('clarifai/gcp.generate.code-bison', messages)` | +| clarifai/gcp.generate.text-bison | `completion('clarifai/gcp.generate.text-bison', messages)` | +| clarifai/gcp.generate.gemma-2b-it | `completion('clarifai/gcp.generate.gemma-2b-it', messages)` | +| clarifai/gcp.generate.gemma-7b-it | `completion('clarifai/gcp.generate.gemma-7b-it', messages)` | +| clarifai/gcp.generate.gemini-pro | `completion('clarifai/gcp.generate.gemini-pro', messages)` | +| clarifai/gcp.generate.gemma-1_1-7b-it | `completion('clarifai/gcp.generate.gemma-1_1-7b-it', messages)` | + +## Cohere LLMs +| Model Name | Function Call | +|-----------------------------------------------|---------------------------------------------------------------------| +| clarifai/cohere.generate.cohere-generate-command | `completion('clarifai/cohere.generate.cohere-generate-command', messages)` | + clarifai/cohere.generate.command-r-plus' | `completion('clarifai/clarifai/cohere.generate.command-r-plus', messages)`| + +## Databricks LLMs + +| Model Name | Function Call | +|---------------------------------------------------|---------------------------------------------------------------------| +| clarifai/databricks.drbx.dbrx-instruct | `completion('clarifai/databricks.drbx.dbrx-instruct', messages)` | 
+| clarifai/databricks.Dolly-v2.dolly-v2-12b | `completion('clarifai/databricks.Dolly-v2.dolly-v2-12b', messages)`| + +## Microsoft LLMs + +| Model Name | Function Call | +|---------------------------------------------------|---------------------------------------------------------------------| +| clarifai/microsoft.text-generation.phi-2 | `completion('clarifai/microsoft.text-generation.phi-2', messages)` | +| clarifai/microsoft.text-generation.phi-1_5 | `completion('clarifai/microsoft.text-generation.phi-1_5', messages)`| + +## Salesforce models + +| Model Name | Function Call | +|-----------------------------------------------------------|-------------------------------------------------------------------------------| +| clarifai/salesforce.blip.general-english-image-caption-blip-2 | `completion('clarifai/salesforce.blip.general-english-image-caption-blip-2', messages)` | +| clarifai/salesforce.xgen.xgen-7b-8k-instruct | `completion('clarifai/salesforce.xgen.xgen-7b-8k-instruct', messages)` | + + +## Other Top performing LLMs + +| Model Name | Function Call | +|---------------------------------------------------|---------------------------------------------------------------------| +| clarifai/deci.decilm.deciLM-7B-instruct | `completion('clarifai/deci.decilm.deciLM-7B-instruct', messages)` | +| clarifai/upstage.solar.solar-10_7b-instruct | `completion('clarifai/upstage.solar.solar-10_7b-instruct', messages)` | +| clarifai/openchat.openchat.openchat-3_5-1210 | `completion('clarifai/openchat.openchat.openchat-3_5-1210', messages)` | +| clarifai/togethercomputer.stripedHyena.stripedHyena-Nous-7B | `completion('clarifai/togethercomputer.stripedHyena.stripedHyena-Nous-7B', messages)` | +| clarifai/fblgit.una-cybertron.una-cybertron-7b-v2 | `completion('clarifai/fblgit.una-cybertron.una-cybertron-7b-v2', messages)` | +| clarifai/tiiuae.falcon.falcon-40b-instruct | `completion('clarifai/tiiuae.falcon.falcon-40b-instruct', messages)` | +| 
clarifai/togethercomputer.RedPajama.RedPajama-INCITE-7B-Chat | `completion('clarifai/togethercomputer.RedPajama.RedPajama-INCITE-7B-Chat', messages)` | +| clarifai/bigcode.code.StarCoder | `completion('clarifai/bigcode.code.StarCoder', messages)` | +| clarifai/mosaicml.mpt.mpt-7b-instruct | `completion('clarifai/mosaicml.mpt.mpt-7b-instruct', messages)` | diff --git a/docs/my-website/docs/providers/deepseek.md b/docs/my-website/docs/providers/deepseek.md new file mode 100644 index 000000000..678561eca --- /dev/null +++ b/docs/my-website/docs/providers/deepseek.md @@ -0,0 +1,54 @@ +# Deepseek +https://deepseek.com/ + +**We support ALL Deepseek models, just set `deepseek/` as a prefix when sending completion requests** + +## API Key +```python +# env variable +os.environ['DEEPSEEK_API_KEY'] +``` + +## Sample Usage +```python +from litellm import completion +import os + +os.environ['DEEPSEEK_API_KEY'] = "" +response = completion( + model="deepseek/deepseek-chat", + messages=[ + {"role": "user", "content": "hello from litellm"} + ], +) +print(response) +``` + +## Sample Usage - Streaming +```python +from litellm import completion +import os + +os.environ['DEEPSEEK_API_KEY'] = "" +response = completion( + model="deepseek/deepseek-chat", + messages=[ + {"role": "user", "content": "hello from litellm"} + ], + stream=True +) + +for chunk in response: + print(chunk) +``` + + +## Supported Models - ALL Deepseek Models Supported! 
+We support ALL Deepseek models, just set `deepseek/` as a prefix when sending completion requests + +| Model Name | Function Call | +|--------------------------|------------------------------------------------------------------------------------------------------------------------------------------------------------------| +| deepseek-chat | `completion(model="deepseek/deepseek-chat", messages)` | +| deepseek-coder | `completion(model="deepseek/deepseek-chat", messages)` | + + diff --git a/docs/my-website/docs/providers/gemini.md b/docs/my-website/docs/providers/gemini.md index 480c3d6fd..8dd83e5ce 100644 --- a/docs/my-website/docs/providers/gemini.md +++ b/docs/my-website/docs/providers/gemini.md @@ -23,7 +23,7 @@ In certain use-cases you may need to make calls to the models and pass [safety s ```python response = completion( model="gemini/gemini-pro", - messages=[{"role": "user", "content": "write code for saying hi from LiteLLM"}] + messages=[{"role": "user", "content": "write code for saying hi from LiteLLM"}], safety_settings=[ { "category": "HARM_CATEGORY_HARASSMENT", diff --git a/docs/my-website/docs/providers/groq.md b/docs/my-website/docs/providers/groq.md index d8a4fded4..da453c3ce 100644 --- a/docs/my-website/docs/providers/groq.md +++ b/docs/my-website/docs/providers/groq.md @@ -48,6 +48,109 @@ We support ALL Groq models, just set `groq/` as a prefix when sending completion | Model Name | Function Call | |--------------------------|------------------------------------------------------------------------------------------------------------------------------------------------------------------| +| llama3-8b-8192 | `completion(model="groq/llama3-8b-8192", messages)` | +| llama3-70b-8192 | `completion(model="groq/llama3-70b-8192", messages)` | | llama2-70b-4096 | `completion(model="groq/llama2-70b-4096", messages)` | | mixtral-8x7b-32768 | `completion(model="groq/mixtral-8x7b-32768", messages)` | -| gemma-7b-it | `completion(model="groq/gemma-7b-it", 
messages)` | \ No newline at end of file +| gemma-7b-it | `completion(model="groq/gemma-7b-it", messages)` | + +## Groq - Tool / Function Calling Example + +```python +# Example dummy function hard coded to return the current weather +import json +def get_current_weather(location, unit="fahrenheit"): + """Get the current weather in a given location""" + if "tokyo" in location.lower(): + return json.dumps({"location": "Tokyo", "temperature": "10", "unit": "celsius"}) + elif "san francisco" in location.lower(): + return json.dumps( + {"location": "San Francisco", "temperature": "72", "unit": "fahrenheit"} + ) + elif "paris" in location.lower(): + return json.dumps({"location": "Paris", "temperature": "22", "unit": "celsius"}) + else: + return json.dumps({"location": location, "temperature": "unknown"}) + + + + +# Step 1: send the conversation and available functions to the model +messages = [ + { + "role": "system", + "content": "You are a function calling LLM that uses the data extracted from get_current_weather to answer questions about the weather in San Francisco.", + }, + { + "role": "user", + "content": "What's the weather like in San Francisco?", + }, +] +tools = [ + { + "type": "function", + "function": { + "name": "get_current_weather", + "description": "Get the current weather in a given location", + "parameters": { + "type": "object", + "properties": { + "location": { + "type": "string", + "description": "The city and state, e.g. 
San Francisco, CA", + }, + "unit": { + "type": "string", + "enum": ["celsius", "fahrenheit"], + }, + }, + "required": ["location"], + }, + }, + } +] +response = litellm.completion( + model="groq/llama2-70b-4096", + messages=messages, + tools=tools, + tool_choice="auto", # auto is default, but we'll be explicit +) +print("Response\n", response) +response_message = response.choices[0].message +tool_calls = response_message.tool_calls + + +# Step 2: check if the model wanted to call a function +if tool_calls: + # Step 3: call the function + # Note: the JSON response may not always be valid; be sure to handle errors + available_functions = { + "get_current_weather": get_current_weather, + } + messages.append( + response_message + ) # extend conversation with assistant's reply + print("Response message\n", response_message) + # Step 4: send the info for each function call and function response to the model + for tool_call in tool_calls: + function_name = tool_call.function.name + function_to_call = available_functions[function_name] + function_args = json.loads(tool_call.function.arguments) + function_response = function_to_call( + location=function_args.get("location"), + unit=function_args.get("unit"), + ) + messages.append( + { + "tool_call_id": tool_call.id, + "role": "tool", + "name": function_name, + "content": function_response, + } + ) # extend conversation with function response + print(f"messages: {messages}") + second_response = litellm.completion( + model="groq/llama2-70b-4096", messages=messages + ) # get a new response from the model where it can see the function response + print("second response\n", second_response) +``` \ No newline at end of file diff --git a/docs/my-website/docs/providers/huggingface.md b/docs/my-website/docs/providers/huggingface.md index f8ebadfcf..35befd3e2 100644 --- a/docs/my-website/docs/providers/huggingface.md +++ b/docs/my-website/docs/providers/huggingface.md @@ -21,6 +21,11 @@ This is done by adding the "huggingface/" prefix 
to `model`, example `completion +By default, LiteLLM will assume a huggingface call follows the TGI format. + + + + ```python import os from litellm import completion @@ -40,9 +45,58 @@ response = completion( print(response) ``` + + + +1. Add models to your config.yaml + + ```yaml + model_list: + - model_name: wizard-coder + litellm_params: + model: huggingface/WizardLM/WizardCoder-Python-34B-V1.0 + api_key: os.environ/HUGGINGFACE_API_KEY + api_base: "https://my-endpoint.endpoints.huggingface.cloud" + ``` + + + +2. Start the proxy + + ```bash + $ litellm --config /path/to/config.yaml --debug + ``` + +3. Test it! + + ```shell + curl --location 'http://0.0.0.0:4000/chat/completions' \ + --header 'Authorization: Bearer sk-1234' \ + --header 'Content-Type: application/json' \ + --data '{ + "model": "wizard-coder", + "messages": [ + { + "role": "user", + "content": "I like you!" + } + ], + }' + ``` + + + + +Append `conversational` to the model name + +e.g. `huggingface/conversational/` + + + + ```python import os from litellm import completion @@ -54,7 +108,7 @@ messages = [{ "content": "There's a llama in my garden 😱 What should I do?"," # e.g. Call 'facebook/blenderbot-400M-distill' hosted on HF Inference endpoints response = completion( - model="huggingface/facebook/blenderbot-400M-distill", + model="huggingface/conversational/facebook/blenderbot-400M-distill", messages=messages, api_base="https://my-endpoint.huggingface.cloud" ) @@ -62,7 +116,123 @@ response = completion( print(response) ``` - + + +1. Add models to your config.yaml + + ```yaml + model_list: + - model_name: blenderbot + litellm_params: + model: huggingface/conversational/facebook/blenderbot-400M-distill + api_key: os.environ/HUGGINGFACE_API_KEY + api_base: "https://my-endpoint.endpoints.huggingface.cloud" + ``` + + + +2. Start the proxy + + ```bash + $ litellm --config /path/to/config.yaml --debug + ``` + +3. Test it! 
+ + ```shell + curl --location 'http://0.0.0.0:4000/chat/completions' \ + --header 'Authorization: Bearer sk-1234' \ + --header 'Content-Type: application/json' \ + --data '{ + "model": "blenderbot", + "messages": [ + { + "role": "user", + "content": "I like you!" + } + ], + }' + ``` + + + + + + + +Append `text-classification` to the model name + +e.g. `huggingface/text-classification/` + + + + +```python +import os +from litellm import completion + +# [OPTIONAL] set env var +os.environ["HUGGINGFACE_API_KEY"] = "huggingface_api_key" + +messages = [{ "content": "I like you, I love you!","role": "user"}] + +# e.g. Call 'shahrukhx01/question-vs-statement-classifier' hosted on HF Inference endpoints +response = completion( + model="huggingface/text-classification/shahrukhx01/question-vs-statement-classifier", + messages=messages, + api_base="https://my-endpoint.endpoints.huggingface.cloud", +) + +print(response) +``` + + + +1. Add models to your config.yaml + + ```yaml + model_list: + - model_name: bert-classifier + litellm_params: + model: huggingface/text-classification/shahrukhx01/question-vs-statement-classifier + api_key: os.environ/HUGGINGFACE_API_KEY + api_base: "https://my-endpoint.endpoints.huggingface.cloud" + ``` + + + +2. Start the proxy + + ```bash + $ litellm --config /path/to/config.yaml --debug + ``` + +3. Test it! + + ```shell + curl --location 'http://0.0.0.0:4000/chat/completions' \ + --header 'Authorization: Bearer sk-1234' \ + --header 'Content-Type: application/json' \ + --data '{ + "model": "bert-classifier", + "messages": [ + { + "role": "user", + "content": "I like you!" + } + ], + }' + ``` + + + + + + + +Append `text-generation` to the model name + +e.g. `huggingface/text-generation/` ```python import os @@ -75,7 +245,7 @@ messages = [{ "content": "There's a llama in my garden 😱 What should I do?"," # e.g. 
Call 'roneneldan/TinyStories-3M' hosted on HF Inference endpoints response = completion( - model="huggingface/roneneldan/TinyStories-3M", + model="huggingface/text-generation/roneneldan/TinyStories-3M", messages=messages, api_base="https://p69xlsj6rpno5drq.us-east-1.aws.endpoints.huggingface.cloud", ) diff --git a/docs/my-website/docs/providers/mistral.md b/docs/my-website/docs/providers/mistral.md index 257ab2e93..9d13fd017 100644 --- a/docs/my-website/docs/providers/mistral.md +++ b/docs/my-website/docs/providers/mistral.md @@ -44,13 +44,58 @@ for chunk in response: ## Supported Models All models listed here https://docs.mistral.ai/platform/endpoints are supported. We actively maintain the list of models, pricing, token window, etc. [here](https://github.com/BerriAI/litellm/blob/c1b25538277206b9f00de5254d80d6a83bb19a29/model_prices_and_context_window.json). -| Model Name | Function Call | -|--------------------------|------------------------------------------------------------------------------------------------------------------------------------------------------------------| -| mistral-tiny | `completion(model="mistral/mistral-tiny", messages)` | -| mistral-small | `completion(model="mistral/mistral-small", messages)` | -| mistral-medium | `completion(model="mistral/mistral-medium", messages)` | -| mistral-large-latest | `completion(model="mistral/mistral-large-latest", messages)` | +| Model Name | Function Call | +|----------------|--------------------------------------------------------------| +| Mistral Small | `completion(model="mistral/mistral-small-latest", messages)` | +| Mistral Medium | `completion(model="mistral/mistral-medium-latest", messages)`| +| Mistral Large | `completion(model="mistral/mistral-large-latest", messages)` | +| Mistral 7B | `completion(model="mistral/open-mistral-7b", messages)` | +| Mixtral 8x7B | `completion(model="mistral/open-mixtral-8x7b", messages)` | +| Mixtral 8x22B | `completion(model="mistral/open-mixtral-8x22b", 
messages)` | +## Function Calling + +```python +from litellm import completion + +# set env +os.environ["MISTRAL_API_KEY"] = "your-api-key" + +tools = [ + { + "type": "function", + "function": { + "name": "get_current_weather", + "description": "Get the current weather in a given location", + "parameters": { + "type": "object", + "properties": { + "location": { + "type": "string", + "description": "The city and state, e.g. San Francisco, CA", + }, + "unit": {"type": "string", "enum": ["celsius", "fahrenheit"]}, + }, + "required": ["location"], + }, + }, + } +] +messages = [{"role": "user", "content": "What's the weather like in Boston today?"}] + +response = completion( + model="mistral/mistral-large-latest", + messages=messages, + tools=tools, + tool_choice="auto", +) +# Add any assertions, here to check response args +print(response) +assert isinstance(response.choices[0].message.tool_calls[0].function.name, str) +assert isinstance( + response.choices[0].message.tool_calls[0].function.arguments, str +) +``` ## Sample Usage - Embedding ```python @@ -71,6 +116,6 @@ All models listed here https://docs.mistral.ai/platform/endpoints are supported | Model Name | Function Call | |--------------------------|------------------------------------------------------------------------------------------------------------------------------------------------------------------| -| mistral-embed | `embedding(model="mistral/mistral-embed", input)` | +| Mistral Embeddings | `embedding(model="mistral/mistral-embed", input)` | diff --git a/docs/my-website/docs/providers/ollama.md b/docs/my-website/docs/providers/ollama.md index 1c913c08c..c1c8fc57c 100644 --- a/docs/my-website/docs/providers/ollama.md +++ b/docs/my-website/docs/providers/ollama.md @@ -101,13 +101,19 @@ Ollama supported models: https://github.com/ollama/ollama | Model Name | Function Call | |----------------------|----------------------------------------------------------------------------------- -| Mistral | 
`completion(model='ollama/mistral', messages, api_base="http://localhost:11434", stream=True)` | +| Mistral | `completion(model='ollama/mistral', messages, api_base="http://localhost:11434", stream=True)` | +| Mistral-7B-Instruct-v0.1 | `completion(model='ollama/mistral-7B-Instruct-v0.1', messages, api_base="http://localhost:11434", stream=False)` | +| Mistral-7B-Instruct-v0.2 | `completion(model='ollama/mistral-7B-Instruct-v0.2', messages, api_base="http://localhost:11434", stream=False)` | +| Mixtral-8x7B-Instruct-v0.1 | `completion(model='ollama/mistral-8x7B-Instruct-v0.1', messages, api_base="http://localhost:11434", stream=False)` | +| Mixtral-8x22B-Instruct-v0.1 | `completion(model='ollama/mixtral-8x22B-Instruct-v0.1', messages, api_base="http://localhost:11434", stream=False)` | | Llama2 7B | `completion(model='ollama/llama2', messages, api_base="http://localhost:11434", stream=True)` | | Llama2 13B | `completion(model='ollama/llama2:13b', messages, api_base="http://localhost:11434", stream=True)` | | Llama2 70B | `completion(model='ollama/llama2:70b', messages, api_base="http://localhost:11434", stream=True)` | | Llama2 Uncensored | `completion(model='ollama/llama2-uncensored', messages, api_base="http://localhost:11434", stream=True)` | | Code Llama | `completion(model='ollama/codellama', messages, api_base="http://localhost:11434", stream=True)` | -| Llama2 Uncensored | `completion(model='ollama/llama2-uncensored', messages, api_base="http://localhost:11434", stream=True)` | +| Llama2 Uncensored | `completion(model='ollama/llama2-uncensored', messages, api_base="http://localhost:11434", stream=True)` | +|Meta LLaMa3 8B | `completion(model='ollama/llama3', messages, api_base="http://localhost:11434", stream=False)` | +| Meta LLaMa3 70B | `completion(model='ollama/llama3:70b', messages, api_base="http://localhost:11434", stream=False)` | | Orca Mini | `completion(model='ollama/orca-mini', messages, api_base="http://localhost:11434", stream=True)` | | Vicuna 
| `completion(model='ollama/vicuna', messages, api_base="http://localhost:11434", stream=True)` | | Nous-Hermes | `completion(model='ollama/nous-hermes', messages, api_base="http://localhost:11434", stream=True)` | diff --git a/docs/my-website/docs/providers/openai.md b/docs/my-website/docs/providers/openai.md index e3f6c267d..2f261ce17 100644 --- a/docs/my-website/docs/providers/openai.md +++ b/docs/my-website/docs/providers/openai.md @@ -2,7 +2,7 @@ import Tabs from '@theme/Tabs'; import TabItem from '@theme/TabItem'; # OpenAI -LiteLLM supports OpenAI Chat + Text completion and embedding calls. +LiteLLM supports OpenAI Chat + Embedding calls. ### Required API Keys @@ -20,7 +20,7 @@ os.environ["OPENAI_API_KEY"] = "your-api-key" # openai call response = completion( - model = "gpt-3.5-turbo", + model = "gpt-4o", messages=[{ "content": "Hello, how are you?","role": "user"}] ) ``` @@ -163,6 +163,10 @@ os.environ["OPENAI_API_BASE"] = "openaiai-api-base" # OPTIONAL | Model Name | Function Call | |-----------------------|-----------------------------------------------------------------| +| gpt-4o | `response = completion(model="gpt-4o", messages=messages)` | +| gpt-4o-2024-05-13 | `response = completion(model="gpt-4o-2024-05-13", messages=messages)` | +| gpt-4-turbo | `response = completion(model="gpt-4-turbo", messages=messages)` | +| gpt-4-turbo-preview | `response = completion(model="gpt-4-0125-preview", messages=messages)` | | gpt-4-0125-preview | `response = completion(model="gpt-4-0125-preview", messages=messages)` | | gpt-4-1106-preview | `response = completion(model="gpt-4-1106-preview", messages=messages)` | | gpt-3.5-turbo-1106 | `response = completion(model="gpt-3.5-turbo-1106", messages=messages)` | @@ -184,6 +188,8 @@ These also support the `OPENAI_API_BASE` environment variable, which can be used ## OpenAI Vision Models | Model Name | Function Call | |-----------------------|-----------------------------------------------------------------| +| gpt-4o | 
`response = completion(model="gpt-4o", messages=messages)` | +| gpt-4-turbo | `response = completion(model="gpt-4-turbo", messages=messages)` | | gpt-4-vision-preview | `response = completion(model="gpt-4-vision-preview", messages=messages)` | #### Usage @@ -217,19 +223,6 @@ response = completion( ``` -## OpenAI Text Completion Models / Instruct Models - -| Model Name | Function Call | -|---------------------|----------------------------------------------------| -| gpt-3.5-turbo-instruct | `response = completion(model="gpt-3.5-turbo-instruct", messages=messages)` | -| gpt-3.5-turbo-instruct-0914 | `response = completion(model="gpt-3.5-turbo-instruct-0914", messages=messages)` | -| text-davinci-003 | `response = completion(model="text-davinci-003", messages=messages)` | -| ada-001 | `response = completion(model="ada-001", messages=messages)` | -| curie-001 | `response = completion(model="curie-001", messages=messages)` | -| babbage-001 | `response = completion(model="babbage-001", messages=messages)` | -| babbage-002 | `response = completion(model="babbage-002", messages=messages)` | -| davinci-002 | `response = completion(model="davinci-002", messages=messages)` | - ## Advanced ### Parallel Function calling diff --git a/docs/my-website/docs/providers/openai_compatible.md b/docs/my-website/docs/providers/openai_compatible.md index 09dcd7e4c..ff0e85709 100644 --- a/docs/my-website/docs/providers/openai_compatible.md +++ b/docs/my-website/docs/providers/openai_compatible.md @@ -5,7 +5,9 @@ import TabItem from '@theme/TabItem'; To call models hosted behind an openai proxy, make 2 changes: -1. Put `openai/` in front of your model name, so litellm knows you're trying to call an openai-compatible endpoint. +1. For `/chat/completions`: Put `openai/` in front of your model name, so litellm knows you're trying to call an openai `/chat/completions` endpoint. + +2. 
For `/completions`: Put `text-completion-openai/` in front of your model name, so litellm knows you're trying to call an openai `/completions` endpoint. 2. **Do NOT** add anything additional to the base url e.g. `/v1/embedding`. LiteLLM uses the openai-client to make these calls, and that automatically adds the relevant endpoints. diff --git a/docs/my-website/docs/providers/predibase.md b/docs/my-website/docs/providers/predibase.md new file mode 100644 index 000000000..3d5bbaef4 --- /dev/null +++ b/docs/my-website/docs/providers/predibase.md @@ -0,0 +1,247 @@ +import Tabs from '@theme/Tabs'; +import TabItem from '@theme/TabItem'; + +# 🆕 Predibase + +LiteLLM supports all models on Predibase + + +## Usage + + + + +### API KEYS +```python +import os +os.environ["PREDIBASE_API_KEY"] = "" +``` + +### Example Call + +```python +from litellm import completion +import os +## set ENV variables +os.environ["PREDIBASE_API_KEY"] = "predibase key" +os.environ["PREDIBASE_TENANT_ID"] = "predibase tenant id" + +# predibase llama-3 call +response = completion( + model="predibase/llama-3-8b-instruct", + messages = [{ "content": "Hello, how are you?","role": "user"}] +) +``` + + + + +1. Add models to your config.yaml + + ```yaml + model_list: + - model_name: llama-3 + litellm_params: + model: predibase/llama-3-8b-instruct + api_key: os.environ/PREDIBASE_API_KEY + tenant_id: os.environ/PREDIBASE_TENANT_ID + ``` + + + +2. Start the proxy + + ```bash + $ litellm --config /path/to/config.yaml --debug + ``` + +3. Send Request to LiteLLM Proxy Server + + + + + + ```python + import openai + client = openai.OpenAI( + api_key="sk-1234", # pass litellm proxy key, if you're using virtual keys + base_url="http://0.0.0.0:4000" # litellm-proxy-base url + ) + + response = client.chat.completions.create( + model="llama-3", + messages = [ + { + "role": "system", + "content": "Be a good human!" + }, + { + "role": "user", + "content": "What do you know about earth?" 
+ } + ] + ) + + print(response) + ``` + + + + + + ```shell + curl --location 'http://0.0.0.0:4000/chat/completions' \ + --header 'Authorization: Bearer sk-1234' \ + --header 'Content-Type: application/json' \ + --data '{ + "model": "llama-3", + "messages": [ + { + "role": "system", + "content": "Be a good human!" + }, + { + "role": "user", + "content": "What do you know about earth?" + } + ], + }' + ``` + + + + + + + + + +## Advanced Usage - Prompt Formatting + +LiteLLM has prompt template mappings for all `meta-llama` llama3 instruct models. [**See Code**](https://github.com/BerriAI/litellm/blob/4f46b4c3975cd0f72b8c5acb2cb429d23580c18a/litellm/llms/prompt_templates/factory.py#L1360) + +To apply a custom prompt template: + + + + +```python +import litellm + +import os +os.environ["PREDIBASE_API_KEY"] = "" + +# Create your own custom prompt template +litellm.register_prompt_template( + model="togethercomputer/LLaMA-2-7B-32K", + initial_prompt_value="You are a good assistant" # [OPTIONAL] + roles={ + "system": { + "pre_message": "[INST] <>\n", # [OPTIONAL] + "post_message": "\n<>\n [/INST]\n" # [OPTIONAL] + }, + "user": { + "pre_message": "[INST] ", # [OPTIONAL] + "post_message": " [/INST]" # [OPTIONAL] + }, + "assistant": { + "pre_message": "\n" # [OPTIONAL] + "post_message": "\n" # [OPTIONAL] + } + } + final_prompt_value="Now answer as best you can:" # [OPTIONAL] +) + +def predibase_custom_model(): + model = "predibase/togethercomputer/LLaMA-2-7B-32K" + response = completion(model=model, messages=messages) + print(response['choices'][0]['message']['content']) + return response + +predibase_custom_model() +``` + + + +```yaml +# Model-specific parameters +model_list: + - model_name: mistral-7b # model alias + litellm_params: # actual params for litellm.completion() + model: "predibase/mistralai/Mistral-7B-Instruct-v0.1" + api_key: os.environ/PREDIBASE_API_KEY + initial_prompt_value: "\n" + roles: {"system":{"pre_message":"<|im_start|>system\n", 
"post_message":"<|im_end|>"}, "assistant":{"pre_message":"<|im_start|>assistant\n","post_message":"<|im_end|>"}, "user":{"pre_message":"<|im_start|>user\n","post_message":"<|im_end|>"}} + final_prompt_value: "\n" + bos_token: "" + eos_token: "" + max_tokens: 4096 +``` + + + + + +## Passing additional params - max_tokens, temperature +See all litellm.completion supported params [here](https://docs.litellm.ai/docs/completion/input) + +```python +# !pip install litellm +from litellm import completion +import os +## set ENV variables +os.environ["PREDIBASE_API_KEY"] = "predibase key" + +# predibae llama-3 call +response = completion( + model="predibase/llama3-8b-instruct", + messages = [{ "content": "Hello, how are you?","role": "user"}], + max_tokens=20, + temperature=0.5 +) +``` + +**proxy** + +```yaml + model_list: + - model_name: llama-3 + litellm_params: + model: predibase/llama-3-8b-instruct + api_key: os.environ/PREDIBASE_API_KEY + max_tokens: 20 + temperature: 0.5 +``` + +## Passings Predibase specific params - adapter_id, adapter_source, +Send params [not supported by `litellm.completion()`](https://docs.litellm.ai/docs/completion/input) but supported by Predibase by passing them to `litellm.completion` + +Example `adapter_id`, `adapter_source` are Predibase specific param - [See List](https://github.com/BerriAI/litellm/blob/8a35354dd6dbf4c2fcefcd6e877b980fcbd68c58/litellm/llms/predibase.py#L54) + +```python +# !pip install litellm +from litellm import completion +import os +## set ENV variables +os.environ["PREDIBASE_API_KEY"] = "predibase key" + +# predibase llama3 call +response = completion( + model="predibase/llama-3-8b-instruct", + messages = [{ "content": "Hello, how are you?","role": "user"}], + adapter_id="my_repo/3", + adapter_soruce="pbase", +) +``` + +**proxy** + +```yaml + model_list: + - model_name: llama-3 + litellm_params: + model: predibase/llama-3-8b-instruct + api_key: os.environ/PREDIBASE_API_KEY + adapter_id: my_repo/3 + adapter_source: 
pbase +``` diff --git a/docs/my-website/docs/providers/replicate.md b/docs/my-website/docs/providers/replicate.md index 3384ba35c..8e71d3ac9 100644 --- a/docs/my-website/docs/providers/replicate.md +++ b/docs/my-website/docs/providers/replicate.md @@ -1,7 +1,16 @@ +import Tabs from '@theme/Tabs'; +import TabItem from '@theme/TabItem'; + # Replicate LiteLLM supports all models on Replicate + +## Usage + + + + ### API KEYS ```python import os @@ -16,14 +25,175 @@ import os ## set ENV variables os.environ["REPLICATE_API_KEY"] = "replicate key" -# replicate llama-2 call +# replicate llama-3 call response = completion( - model="replicate/llama-2-70b-chat:2796ee9483c3fd7aa2e171d38f4ca12251a30609463dcfd4cd76703f22e96cdf", + model="replicate/meta/meta-llama-3-8b-instruct", messages = [{ "content": "Hello, how are you?","role": "user"}] ) ``` -### Example - Calling Replicate Deployments + + + +1. Add models to your config.yaml + + ```yaml + model_list: + - model_name: llama-3 + litellm_params: + model: replicate/meta/meta-llama-3-8b-instruct + api_key: os.environ/REPLICATE_API_KEY + ``` + + + +2. Start the proxy + + ```bash + $ litellm --config /path/to/config.yaml --debug + ``` + +3. Send Request to LiteLLM Proxy Server + + + + + + ```python + import openai + client = openai.OpenAI( + api_key="sk-1234", # pass litellm proxy key, if you're using virtual keys + base_url="http://0.0.0.0:4000" # litellm-proxy-base url + ) + + response = client.chat.completions.create( + model="llama-3", + messages = [ + { + "role": "system", + "content": "Be a good human!" + }, + { + "role": "user", + "content": "What do you know about earth?" + } + ] + ) + + print(response) + ``` + + + + + + ```shell + curl --location 'http://0.0.0.0:4000/chat/completions' \ + --header 'Authorization: Bearer sk-1234' \ + --header 'Content-Type: application/json' \ + --data '{ + "model": "llama-3", + "messages": [ + { + "role": "system", + "content": "Be a good human!" 
+ }, + { + "role": "user", + "content": "What do you know about earth?" + } + ], + }' + ``` + + + + + +### Expected Replicate Call + +This is the call litellm will make to replicate, from the above example: + +```bash + +POST Request Sent from LiteLLM: +curl -X POST \ +https://api.replicate.com/v1/models/meta/meta-llama-3-8b-instruct \ +-H 'Authorization: Token your-api-key' -H 'Content-Type: application/json' \ +-d '{'version': 'meta/meta-llama-3-8b-instruct', 'input': {'prompt': '<|start_header_id|>system<|end_header_id|>\n\nBe a good human!<|eot_id|><|start_header_id|>user<|end_header_id|>\n\nWhat do you know about earth?<|eot_id|><|start_header_id|>assistant<|end_header_id|>\n\n'}}' +``` + + + + + +## Advanced Usage - Prompt Formatting + +LiteLLM has prompt template mappings for all `meta-llama` llama3 instruct models. [**See Code**](https://github.com/BerriAI/litellm/blob/4f46b4c3975cd0f72b8c5acb2cb429d23580c18a/litellm/llms/prompt_templates/factory.py#L1360) + +To apply a custom prompt template: + + + + +```python +import litellm + +import os +os.environ["REPLICATE_API_KEY"] = "" + +# Create your own custom prompt template +litellm.register_prompt_template( + model="togethercomputer/LLaMA-2-7B-32K", + initial_prompt_value="You are a good assistant" # [OPTIONAL] + roles={ + "system": { + "pre_message": "[INST] <>\n", # [OPTIONAL] + "post_message": "\n<>\n [/INST]\n" # [OPTIONAL] + }, + "user": { + "pre_message": "[INST] ", # [OPTIONAL] + "post_message": " [/INST]" # [OPTIONAL] + }, + "assistant": { + "pre_message": "\n" # [OPTIONAL] + "post_message": "\n" # [OPTIONAL] + } + } + final_prompt_value="Now answer as best you can:" # [OPTIONAL] +) + +def test_replicate_custom_model(): + model = "replicate/togethercomputer/LLaMA-2-7B-32K" + response = completion(model=model, messages=messages) + print(response['choices'][0]['message']['content']) + return response + +test_replicate_custom_model() +``` + + + +```yaml +# Model-specific parameters +model_list: + - 
model_name: mistral-7b # model alias + litellm_params: # actual params for litellm.completion() + model: "replicate/mistralai/Mistral-7B-Instruct-v0.1" + api_key: os.environ/REPLICATE_API_KEY + initial_prompt_value: "\n" + roles: {"system":{"pre_message":"<|im_start|>system\n", "post_message":"<|im_end|>"}, "assistant":{"pre_message":"<|im_start|>assistant\n","post_message":"<|im_end|>"}, "user":{"pre_message":"<|im_start|>user\n","post_message":"<|im_end|>"}} + final_prompt_value: "\n" + bos_token: "" + eos_token: "" + max_tokens: 4096 +``` + + + + + +## Advanced Usage - Calling Replicate Deployments Calling a [deployed replicate LLM](https://replicate.com/deployments) Add the `replicate/deployments/` prefix to your model, so litellm will call the `deployments` endpoint. This will call `ishaan-jaff/ishaan-mistral` deployment on replicate @@ -40,7 +210,7 @@ Replicate responses can take 3-5 mins due to replicate cold boots, if you're try ::: -### Replicate Models +## Replicate Models liteLLM supports all replicate LLMs For replicate models ensure to add a `replicate/` prefix to the `model` arg. liteLLM detects it using this arg. 
@@ -49,15 +219,15 @@ Below are examples on how to call replicate LLMs using liteLLM Model Name | Function Call | Required OS Variables | -----------------------------|----------------------------------------------------------------|--------------------------------------| - replicate/llama-2-70b-chat | `completion(model='replicate/llama-2-70b-chat:2796ee9483c3fd7aa2e171d38f4ca12251a30609463dcfd4cd76703f22e96cdf', messages, supports_system_prompt=True)` | `os.environ['REPLICATE_API_KEY']` | - a16z-infra/llama-2-13b-chat| `completion(model='replicate/a16z-infra/llama-2-13b-chat:2a7f981751ec7fdf87b5b91ad4db53683a98082e9ff7bfd12c8cd5ea85980a52', messages, supports_system_prompt=True)`| `os.environ['REPLICATE_API_KEY']` | + replicate/llama-2-70b-chat | `completion(model='replicate/llama-2-70b-chat:2796ee9483c3fd7aa2e171d38f4ca12251a30609463dcfd4cd76703f22e96cdf', messages)` | `os.environ['REPLICATE_API_KEY']` | + a16z-infra/llama-2-13b-chat| `completion(model='replicate/a16z-infra/llama-2-13b-chat:2a7f981751ec7fdf87b5b91ad4db53683a98082e9ff7bfd12c8cd5ea85980a52', messages)`| `os.environ['REPLICATE_API_KEY']` | replicate/vicuna-13b | `completion(model='replicate/vicuna-13b:6282abe6a492de4145d7bb601023762212f9ddbbe78278bd6771c8b3b2f2a13b', messages)` | `os.environ['REPLICATE_API_KEY']` | daanelson/flan-t5-large | `completion(model='replicate/daanelson/flan-t5-large:ce962b3f6792a57074a601d3979db5839697add2e4e02696b3ced4c022d4767f', messages)` | `os.environ['REPLICATE_API_KEY']` | custom-llm | `completion(model='replicate/custom-llm-version-id', messages)` | `os.environ['REPLICATE_API_KEY']` | replicate deployment | `completion(model='replicate/deployments/ishaan-jaff/ishaan-mistral', messages)` | `os.environ['REPLICATE_API_KEY']` | -### Passing additional params - max_tokens, temperature +## Passing additional params - max_tokens, temperature See all litellm.completion supported params [here](https://docs.litellm.ai/docs/completion/input) ```python @@ -73,11 +243,22 @@ 
response = completion( messages = [{ "content": "Hello, how are you?","role": "user"}], max_tokens=20, temperature=0.5 - ) ``` -### Passings Replicate specific params +**proxy** + +```yaml + model_list: + - model_name: llama-3 + litellm_params: + model: replicate/meta/meta-llama-3-8b-instruct + api_key: os.environ/REPLICATE_API_KEY + max_tokens: 20 + temperature: 0.5 +``` + +## Passings Replicate specific params Send params [not supported by `litellm.completion()`](https://docs.litellm.ai/docs/completion/input) but supported by Replicate by passing them to `litellm.completion` Example `seed`, `min_tokens` are Replicate specific param @@ -98,3 +279,15 @@ response = completion( top_k=20, ) ``` + +**proxy** + +```yaml + model_list: + - model_name: llama-3 + litellm_params: + model: replicate/meta/meta-llama-3-8b-instruct + api_key: os.environ/REPLICATE_API_KEY + min_tokens: 2 + top_k: 20 +``` diff --git a/docs/my-website/docs/providers/text_completion_openai.md b/docs/my-website/docs/providers/text_completion_openai.md new file mode 100644 index 000000000..842b56aec --- /dev/null +++ b/docs/my-website/docs/providers/text_completion_openai.md @@ -0,0 +1,163 @@ +# OpenAI (Text Completion) + +LiteLLM supports OpenAI text completion models + +### Required API Keys + +```python +import os +os.environ["OPENAI_API_KEY"] = "your-api-key" +``` + +### Usage +```python +import os +from litellm import completion + +os.environ["OPENAI_API_KEY"] = "your-api-key" + +# openai call +response = completion( + model = "gpt-3.5-turbo-instruct", + messages=[{ "content": "Hello, how are you?","role": "user"}] +) +``` + +### Usage - LiteLLM Proxy Server + +Here's how to call OpenAI models with the LiteLLM Proxy Server + +### 1. Save key in your environment + +```bash +export OPENAI_API_KEY="" +``` + +### 2. 
Start the proxy + + + + +```yaml +model_list: + - model_name: gpt-3.5-turbo + litellm_params: + model: openai/gpt-3.5-turbo # The `openai/` prefix will call openai.chat.completions.create + api_key: os.environ/OPENAI_API_KEY + - model_name: gpt-3.5-turbo-instruct + litellm_params: + model: text-completion-openai/gpt-3.5-turbo-instruct # The `text-completion-openai/` prefix will call openai.completions.create + api_key: os.environ/OPENAI_API_KEY +``` + + + +Use this to add all openai models with one API Key. **WARNING: This will not do any load balancing** +This means requests to `gpt-4`, `gpt-3.5-turbo` , `gpt-4-turbo-preview` will all go through this route + +```yaml +model_list: + - model_name: "*" # all requests where model not in your config go to this deployment + litellm_params: + model: openai/* # set `openai/` to use the openai route + api_key: os.environ/OPENAI_API_KEY +``` + + + +```bash +$ litellm --model gpt-3.5-turbo-instruct + +# Server running on http://0.0.0.0:4000 +``` + + + + +### 3. 
Test it + + + + + +```shell +curl --location 'http://0.0.0.0:4000/chat/completions' \ +--header 'Content-Type: application/json' \ +--data ' { + "model": "gpt-3.5-turbo-instruct", + "messages": [ + { + "role": "user", + "content": "what llm are you" + } + ] + } +' +``` + + + +```python +import openai +client = openai.OpenAI( + api_key="anything", + base_url="http://0.0.0.0:4000" +) + +# request sent to model set on litellm proxy, `litellm --model` +response = client.chat.completions.create(model="gpt-3.5-turbo-instruct", messages = [ + { + "role": "user", + "content": "this is a test request, write a short poem" + } +]) + +print(response) + +``` + + + +```python +from langchain.chat_models import ChatOpenAI +from langchain.prompts.chat import ( + ChatPromptTemplate, + HumanMessagePromptTemplate, + SystemMessagePromptTemplate, +) +from langchain.schema import HumanMessage, SystemMessage + +chat = ChatOpenAI( + openai_api_base="http://0.0.0.0:4000", # set openai_api_base to the LiteLLM Proxy + model = "gpt-3.5-turbo-instruct", + temperature=0.1 +) + +messages = [ + SystemMessage( + content="You are a helpful assistant that im using to make a test request to." + ), + HumanMessage( + content="test from litellm. 
tell me why it's amazing in 1 sentence" + ), +] +response = chat(messages) + +print(response) +``` + + + + +## OpenAI Text Completion Models / Instruct Models + +| Model Name | Function Call | +|---------------------|----------------------------------------------------| +| gpt-3.5-turbo-instruct | `response = completion(model="gpt-3.5-turbo-instruct", messages=messages)` | +| gpt-3.5-turbo-instruct-0914 | `response = completion(model="gpt-3.5-turbo-instruct-0914", messages=messages)` | +| text-davinci-003 | `response = completion(model="text-davinci-003", messages=messages)` | +| ada-001 | `response = completion(model="ada-001", messages=messages)` | +| curie-001 | `response = completion(model="curie-001", messages=messages)` | +| babbage-001 | `response = completion(model="babbage-001", messages=messages)` | +| babbage-002 | `response = completion(model="babbage-002", messages=messages)` | +| davinci-002 | `response = completion(model="davinci-002", messages=messages)` | diff --git a/docs/my-website/docs/providers/triton-inference-server.md b/docs/my-website/docs/providers/triton-inference-server.md new file mode 100644 index 000000000..aacc46a39 --- /dev/null +++ b/docs/my-website/docs/providers/triton-inference-server.md @@ -0,0 +1,95 @@ +import Tabs from '@theme/Tabs'; +import TabItem from '@theme/TabItem'; + +# Triton Inference Server + +LiteLLM supports Embedding Models on Triton Inference Servers + + +## Usage + + + + + +### Example Call + +Use the `triton/` prefix to route to triton server +```python +from litellm import embedding +import os + +response = await litellm.aembedding( + model="triton/", + api_base="https://your-triton-api-base/triton/embeddings", # /embeddings endpoint you want litellm to call on your server + input=["good morning from litellm"], +) +``` + + + + +1. 
Add models to your config.yaml + + ```yaml + model_list: + - model_name: my-triton-model + litellm_params: + model: triton/" + api_base: https://your-triton-api-base/triton/embeddings + ``` + + +2. Start the proxy + + ```bash + $ litellm --config /path/to/config.yaml --detailed_debug + ``` + +3. Send Request to LiteLLM Proxy Server + + + + + + ```python + import openai + from openai import OpenAI + + # set base_url to your proxy server + # set api_key to send to proxy server + client = OpenAI(api_key="", base_url="http://0.0.0.0:4000") + + response = client.embeddings.create( + input=["hello from litellm"], + model="my-triton-model" + ) + + print(response) + + ``` + + + + + + `--header` is optional, only required if you're using litellm proxy with Virtual Keys + + ```shell + curl --location 'http://0.0.0.0:4000/embeddings' \ + --header 'Content-Type: application/json' \ + --header 'Authorization: Bearer sk-1234' \ + --data ' { + "model": "my-triton-model", + "input": ["write a litellm poem"] + }' + + ``` + + + + + + + + diff --git a/docs/my-website/docs/providers/vertex.md b/docs/my-website/docs/providers/vertex.md index b5d69084d..b67eb350b 100644 --- a/docs/my-website/docs/providers/vertex.md +++ b/docs/my-website/docs/providers/vertex.md @@ -253,6 +253,7 @@ litellm.vertex_location = "us-central1 # Your Location ## Anthropic | Model Name | Function Call | |------------------|--------------------------------------| +| claude-3-opus@20240229 | `completion('vertex_ai/claude-3-opus@20240229', messages)` | | claude-3-sonnet@20240229 | `completion('vertex_ai/claude-3-sonnet@20240229', messages)` | | claude-3-haiku@20240307 | `completion('vertex_ai/claude-3-haiku@20240307', messages)` | @@ -363,6 +364,8 @@ response = completion( | Model Name | Function Call | |------------------|--------------------------------------| | gemini-1.5-pro | `completion('gemini-1.5-pro', messages)`, `completion('vertex_ai/gemini-pro', messages)` | +| gemini-1.5-flash-preview-0514 | 
`completion('gemini-1.5-flash-preview-0514', messages)`, `completion('vertex_ai/gemini-pro', messages)` | +| gemini-1.5-pro-preview-0514 | `completion('gemini-1.5-pro-preview-0514', messages)`, `completion('vertex_ai/gemini-1.5-pro-preview-0514', messages)` | @@ -476,6 +479,36 @@ print(response) | code-gecko@latest| `completion('code-gecko@latest', messages)` | +## Embedding Models + +#### Usage - Embedding +```python +import litellm +from litellm import embedding +litellm.vertex_project = "hardy-device-38811" # Your Project ID +litellm.vertex_location = "us-central1" # proj location + +response = embedding( + model="vertex_ai/textembedding-gecko", + input=["good morning from litellm"], +) +print(response) +``` + +#### Supported Embedding Models +All models listed [here](https://github.com/BerriAI/litellm/blob/57f37f743886a0249f630a6792d49dffc2c5d9b7/model_prices_and_context_window.json#L835) are supported + +| Model Name | Function Call | +|--------------------------|------------------------------------------------------------------------------------------------------------------------------------------------------------------| +| textembedding-gecko | `embedding(model="vertex_ai/textembedding-gecko", input)` | +| textembedding-gecko-multilingual | `embedding(model="vertex_ai/textembedding-gecko-multilingual", input)` | +| textembedding-gecko-multilingual@001 | `embedding(model="vertex_ai/textembedding-gecko-multilingual@001", input)` | +| textembedding-gecko@001 | `embedding(model="vertex_ai/textembedding-gecko@001", input)` | +| textembedding-gecko@003 | `embedding(model="vertex_ai/textembedding-gecko@003", input)` | +| text-embedding-preview-0409 | `embedding(model="vertex_ai/text-embedding-preview-0409", input)` | +| text-multilingual-embedding-preview-0409 | `embedding(model="vertex_ai/text-multilingual-embedding-preview-0409", input)` | + + ## Extra ### Using `GOOGLE_APPLICATION_CREDENTIALS` @@ -519,6 +552,12 @@ def load_vertex_ai_credentials(): ### Using 
GCP Service Account +:::info + +Trying to deploy LiteLLM on Google Cloud Run? Tutorial [here](https://docs.litellm.ai/docs/proxy/deploy#deploy-on-google-cloud-run) + +::: + 1. Figure out the Service Account bound to the Google Cloud Run service diff --git a/docs/my-website/docs/providers/vllm.md b/docs/my-website/docs/providers/vllm.md index b8285da71..8c8f363f8 100644 --- a/docs/my-website/docs/providers/vllm.md +++ b/docs/my-website/docs/providers/vllm.md @@ -4,6 +4,13 @@ LiteLLM supports all models on VLLM. 🚀[Code Tutorial](https://github.com/BerriAI/litellm/blob/main/cookbook/VLLM_Model_Testing.ipynb) + +:::info + +To call a HOSTED VLLM Endpoint use [these docs](./openai_compatible.md) + +::: + ### Quick Start ``` pip install litellm vllm diff --git a/docs/my-website/docs/providers/watsonx.md b/docs/my-website/docs/providers/watsonx.md new file mode 100644 index 000000000..d8c5740a8 --- /dev/null +++ b/docs/my-website/docs/providers/watsonx.md @@ -0,0 +1,284 @@ +import Tabs from '@theme/Tabs'; +import TabItem from '@theme/TabItem'; + +# IBM watsonx.ai + +LiteLLM supports all IBM [watsonx.ai](https://watsonx.ai/) foundational models and embeddings. + +## Environment Variables +```python +os.environ["WATSONX_URL"] = "" # (required) Base URL of your WatsonX instance +# (required) either one of the following: +os.environ["WATSONX_APIKEY"] = "" # IBM cloud API key +os.environ["WATSONX_TOKEN"] = "" # IAM auth token +# optional - can also be passed as params to completion() or embedding() +os.environ["WATSONX_PROJECT_ID"] = "" # Project ID of your WatsonX instance +os.environ["WATSONX_DEPLOYMENT_SPACE_ID"] = "" # ID of your deployment space to use deployed models +``` + +See [here](https://cloud.ibm.com/apidocs/watsonx-ai#api-authentication) for more information on how to get an access token to authenticate to watsonx.ai. 
+ +## Usage + + + Open In Colab + + +```python +import os +from litellm import completion + +os.environ["WATSONX_URL"] = "" +os.environ["WATSONX_APIKEY"] = "" + +response = completion( + model="watsonx/ibm/granite-13b-chat-v2", + messages=[{ "content": "what is your favorite colour?","role": "user"}], + project_id="" # or pass with os.environ["WATSONX_PROJECT_ID"] +) + +response = completion( + model="watsonx/meta-llama/llama-3-8b-instruct", + messages=[{ "content": "what is your favorite colour?","role": "user"}], + project_id="" +) +``` + +## Usage - Streaming +```python +import os +from litellm import completion + +os.environ["WATSONX_URL"] = "" +os.environ["WATSONX_APIKEY"] = "" +os.environ["WATSONX_PROJECT_ID"] = "" + +response = completion( + model="watsonx/ibm/granite-13b-chat-v2", + messages=[{ "content": "what is your favorite colour?","role": "user"}], + stream=True +) +for chunk in response: + print(chunk) +``` + +#### Example Streaming Output Chunk +```json +{ + "choices": [ + { + "finish_reason": null, + "index": 0, + "delta": { + "content": "I don't have a favorite color, but I do like the color blue. What's your favorite color?" + } + } + ], + "created": null, + "model": "watsonx/ibm/granite-13b-chat-v2", + "usage": { + "prompt_tokens": null, + "completion_tokens": null, + "total_tokens": null + } +} +``` + +## Usage - Models in deployment spaces + +Models that have been deployed to a deployment space (e.g.: tuned models) can be called using the `deployment/` format (where `` is the ID of the deployed model in your deployment space). + +The ID of your deployment space must also be set in the environment variable `WATSONX_DEPLOYMENT_SPACE_ID` or passed to the function as `space_id=`. 
+ +```python +import litellm +response = litellm.completion( + model="watsonx/deployment/", + messages=[{"content": "Hello, how are you?", "role": "user"}], + space_id="" +) +``` + +## Usage - Embeddings + +LiteLLM also supports making requests to IBM watsonx.ai embedding models. The credential needed for this is the same as for completion. + +```python +from litellm import embedding + +response = embedding( + model="watsonx/ibm/slate-30m-english-rtrvr", + input=["What is the capital of France?"], + project_id="" +) +print(response) +# EmbeddingResponse(model='ibm/slate-30m-english-rtrvr', data=[{'object': 'embedding', 'index': 0, 'embedding': [-0.037463713, -0.02141933, -0.02851813, 0.015519324, ..., -0.0021367231, -0.01704561, -0.001425816, 0.0035238306]}], object='list', usage=Usage(prompt_tokens=8, total_tokens=8)) +``` + +## OpenAI Proxy Usage + +Here's how to call IBM watsonx.ai with the LiteLLM Proxy Server + +### 1. Save keys in your environment + +```bash +export WATSONX_URL="" +export WATSONX_APIKEY="" +export WATSONX_PROJECT_ID="" +``` + +### 2. Start the proxy + + + + +```bash +$ litellm --model watsonx/meta-llama/llama-3-8b-instruct + +# Server running on http://0.0.0.0:4000 +``` + + + + +```yaml +model_list: + - model_name: llama-3-8b + litellm_params: + # all params accepted by litellm.completion() + model: watsonx/meta-llama/llama-3-8b-instruct + api_key: "os.environ/WATSONX_API_KEY" # does os.getenv("WATSONX_API_KEY") +``` + + + +### 3. Test it + + + + + +```shell +curl --location 'http://0.0.0.0:4000/chat/completions' \ +--header 'Content-Type: application/json' \ +--data ' { + "model": "llama-3-8b", + "messages": [ + { + "role": "user", + "content": "what is your favorite colour?" 
+ } + ] + } +' +``` + + + +```python +import openai +client = openai.OpenAI( + api_key="anything", + base_url="http://0.0.0.0:4000" +) + +# request sent to model set on litellm proxy, `litellm --model` +response = client.chat.completions.create(model="llama-3-8b", messages=[ + { + "role": "user", + "content": "what is your favorite colour?" + } +]) + +print(response) + +``` + + + +```python +from langchain.chat_models import ChatOpenAI +from langchain.prompts.chat import ( + ChatPromptTemplate, + HumanMessagePromptTemplate, + SystemMessagePromptTemplate, +) +from langchain.schema import HumanMessage, SystemMessage + +chat = ChatOpenAI( + openai_api_base="http://0.0.0.0:4000", # set openai_api_base to the LiteLLM Proxy + model = "llama-3-8b", + temperature=0.1 +) + +messages = [ + SystemMessage( + content="You are a helpful assistant that im using to make a test request to." + ), + HumanMessage( + content="test from litellm. tell me why it's amazing in 1 sentence" + ), +] +response = chat(messages) + +print(response) +``` + + + + +## Authentication + +### Passing credentials as parameters + +You can also pass the credentials as parameters to the completion and embedding functions. 
+
+```python
+import os
+from litellm import completion
+
+response = completion(
+    model="watsonx/ibm/granite-13b-chat-v2",
+    messages=[{ "content": "What is your favorite color?","role": "user"}],
+    url="",
+    api_key="",
+    project_id=""
+)
+```
+
+
+## Supported IBM watsonx.ai Models
+
+Here are some examples of models available in IBM watsonx.ai that you can use with LiteLLM:
+
+| Model Name | Command |
+| ---------- | --------- |
+| Flan T5 XXL | `completion(model=watsonx/google/flan-t5-xxl, messages=messages)` |
+| Flan Ul2 | `completion(model=watsonx/google/flan-ul2, messages=messages)` |
+| Mt0 XXL | `completion(model=watsonx/bigscience/mt0-xxl, messages=messages)` |
+| Gpt Neox | `completion(model=watsonx/eleutherai/gpt-neox-20b, messages=messages)` |
+| Mpt 7B Instruct2 | `completion(model=watsonx/ibm/mpt-7b-instruct2, messages=messages)` |
+| Starcoder | `completion(model=watsonx/bigcode/starcoder, messages=messages)` |
+| Llama 2 70B Chat | `completion(model=watsonx/meta-llama/llama-2-70b-chat, messages=messages)` |
+| Llama 2 13B Chat | `completion(model=watsonx/meta-llama/llama-2-13b-chat, messages=messages)` |
+| Granite 13B Instruct | `completion(model=watsonx/ibm/granite-13b-instruct-v1, messages=messages)` |
+| Granite 13B Chat | `completion(model=watsonx/ibm/granite-13b-chat-v1, messages=messages)` |
+| Flan T5 XL | `completion(model=watsonx/google/flan-t5-xl, messages=messages)` |
+| Granite 13B Chat V2 | `completion(model=watsonx/ibm/granite-13b-chat-v2, messages=messages)` |
+| Granite 13B Instruct V2 | `completion(model=watsonx/ibm/granite-13b-instruct-v2, messages=messages)` |
+| Elyza Japanese Llama 2 7B Instruct | `completion(model=watsonx/elyza/elyza-japanese-llama-2-7b-instruct, messages=messages)` |
+| Mixtral 8X7B Instruct V01 Q | `completion(model=watsonx/ibm-mistralai/mixtral-8x7b-instruct-v01-q, messages=messages)` |
+
+
+For a list of all available models in watsonx.ai, see 
[here](https://dataplatform.cloud.ibm.com/docs/content/wsj/analyze-data/fm-models.html?context=wx&locale=en&audience=wdp). + + +## Supported IBM watsonx.ai Embedding Models + +| Model Name | Function Call | +|----------------------|---------------------------------------------| +| Slate 30m | `embedding(model="watsonx/ibm/slate-30m-english-rtrvr", input=input)` | +| Slate 125m | `embedding(model="watsonx/ibm/slate-125m-english-rtrvr", input=input)` | + + +For a list of all available embedding models in watsonx.ai, see [here](https://dataplatform.cloud.ibm.com/docs/content/wsj/analyze-data/fm-models-embed.html?context=wx). \ No newline at end of file diff --git a/docs/my-website/docs/proxy/alerting.md b/docs/my-website/docs/proxy/alerting.md index feb54babd..fb49a8901 100644 --- a/docs/my-website/docs/proxy/alerting.md +++ b/docs/my-website/docs/proxy/alerting.md @@ -1,13 +1,18 @@ -# Slack Alerting +# 🚨 Alerting Get alerts for: -- hanging LLM api calls -- failed LLM api calls -- slow LLM api calls -- budget Tracking per key/user: - - When a User/Key crosses their Budget - - When a User/Key is 15% away from crossing their Budget -- failed db read/writes + +- Hanging LLM api calls +- Slow LLM api calls +- Failed LLM api calls +- Budget Tracking per key/user +- Spend Reports - Weekly & Monthly spend per Team, Tag +- Failed db read/writes +- Daily Reports: + - **LLM** Top 5 slowest deployments + - **LLM** Top 5 deployments with most failed requests +- **Spend** Weekly & Monthly spend per Team, Tag + ## Quick Start @@ -17,10 +22,12 @@ Set up a slack alert channel to receive alerts from proxy. Get a slack webhook url from https://api.slack.com/messaging/webhooks +You can also use Discord Webhooks, see [here](#using-discord-webhooks) ### Step 2: Update config.yaml -Let's save a bad key to our proxy +- Set `SLACK_WEBHOOK_URL` in your proxy env to enable Slack alerts. +- Just for testing purposes, let's save a bad key to our proxy. 
```yaml model_list: @@ -33,16 +40,88 @@ general_settings: alerting: ["slack"] alerting_threshold: 300 # sends alerts if requests hang for 5min+ and responses take 5min+ +environment_variables: + SLACK_WEBHOOK_URL: "https://hooks.slack.com/services/<>/<>/<>" + SLACK_DAILY_REPORT_FREQUENCY: "86400" # 24 hours; Optional: defaults to 12 hours ``` -Set `SLACK_WEBHOOK_URL` in your proxy env - -```shell -SLACK_WEBHOOK_URL: "https://hooks.slack.com/services/<>/<>/<>" -``` ### Step 3: Start proxy ```bash $ litellm --config /path/to/config.yaml -``` \ No newline at end of file +``` + +## Testing Alerting is Setup Correctly + +Make a GET request to `/health/services`, expect to see a test slack alert in your provided webhook slack channel + +```shell +curl -X GET 'http://localhost:4000/health/services?service=slack' \ + -H 'Authorization: Bearer sk-1234' +``` + +## Advanced +### Opting into specific alert types + +Set `alert_types` if you want to Opt into only specific alert types + +```shell +general_settings: + alerting: ["slack"] + alert_types: ["spend_reports"] +``` + +All Possible Alert Types + +```python +alert_types: +Optional[ +List[ + Literal[ + "llm_exceptions", + "llm_too_slow", + "llm_requests_hanging", + "budget_alerts", + "db_exceptions", + "daily_reports", + "spend_reports", + "cooldown_deployment", + "new_model_added", + ] +] +``` + + +### Using Discord Webhooks + +Discord provides a slack compatible webhook url that you can use for alerting + +##### Quick Start + +1. Get a webhook url for your discord channel + +2. Append `/slack` to your discord webhook - it should look like + +``` +"https://discord.com/api/webhooks/1240030362193760286/cTLWt5ATn1gKmcy_982rl5xmYHsrM1IWJdmCL1AyOmU9JdQXazrp8L1_PYgUtgxj8x4f/slack" +``` + +3. 
Add it to your litellm config
+
+```yaml
+model_list:
+  - model_name: "azure-model"
+    litellm_params:
+      model: "azure/gpt-35-turbo"
+      api_key: "my-bad-key" # 👈 bad key
+
+general_settings:
+  alerting: ["slack"]
+  alerting_threshold: 300 # sends alerts if requests hang for 5min+ and responses take 5min+
+
+environment_variables:
+  SLACK_WEBHOOK_URL: "https://discord.com/api/webhooks/1240030362193760286/cTLWt5ATn1gKmcy_982rl5xmYHsrM1IWJdmCL1AyOmU9JdQXazrp8L1_PYgUtgxj8x4f/slack"
+```
+
+That's it ! You're ready to go !
diff --git a/docs/my-website/docs/proxy/billing.md b/docs/my-website/docs/proxy/billing.md
new file mode 100644
index 000000000..d3d1400cd
--- /dev/null
+++ b/docs/my-website/docs/proxy/billing.md
@@ -0,0 +1,319 @@
+import Image from '@theme/IdealImage';
+import Tabs from '@theme/Tabs';
+import TabItem from '@theme/TabItem';
+
+# 💵 Billing
+
+Bill internal teams, external customers for their usage
+
+**🚨 Requirements**
+- [Setup Lago](https://docs.getlago.com/guide/self-hosted/docker#run-the-app), for usage-based billing. We recommend following [their Stripe tutorial](https://docs.getlago.com/templates/per-transaction/stripe#step-1-create-billable-metrics-for-transaction)
+
+Steps:
+- Connect the proxy to Lago
+- Set the id you want to bill for (customers, internal users, teams)
+- Start!
+
+## Quick Start
+
+Bill internal teams for their usage
+
+### 1. 
Connect proxy to Lago

Set 'lago' as a callback on your proxy config.yaml

```yaml
model_list:
  - model_name: fake-openai-endpoint
    litellm_params:
      model: openai/fake
      api_key: fake-key
      api_base: https://exampleopenaiendpoint-production.up.railway.app/

litellm_settings:
  callbacks: ["lago"] # 👈 KEY CHANGE

general_settings:
  master_key: sk-1234
```

Add your Lago keys to the environment

```bash
export LAGO_API_BASE="http://localhost:3000" # self-host - https://docs.getlago.com/guide/self-hosted/docker#run-the-app
export LAGO_API_KEY="3e29d607-de54-49aa-a019-ecf585729070" # Get key - https://docs.getlago.com/guide/self-hosted/docker#find-your-api-key
export LAGO_API_EVENT_CODE="openai_tokens" # name of lago billing code
export LAGO_API_CHARGE_BY="team_id" # 👈 Charges 'team_id' attached to proxy key
```

Start proxy

```bash
litellm --config /path/to/config.yaml
```

### 2. Create Key for Internal Team

```bash
curl 'http://0.0.0.0:4000/key/generate' \
--header 'Authorization: Bearer sk-1234' \
--header 'Content-Type: application/json' \
--data-raw '{"team_id": "my-unique-id"}' # 👈 Internal Team's ID
```

Response Object:

```bash
{
  "key": "sk-tXL0wt5-lOOVK9sfY2UacA",
}
```


### 3. Start billing! 
+ + + + +```bash +curl --location 'http://0.0.0.0:4000/chat/completions' \ +--header 'Content-Type: application/json' \ +--header 'Authorization: Bearer sk-tXL0wt5-lOOVK9sfY2UacA' \ # 👈 Team's Key +--data ' { + "model": "fake-openai-endpoint", + "messages": [ + { + "role": "user", + "content": "what llm are you" + } + ], + } +' +``` + + + +```python +import openai +client = openai.OpenAI( + api_key="sk-tXL0wt5-lOOVK9sfY2UacA", # 👈 Team's Key + base_url="http://0.0.0.0:4000" +) + +# request sent to model set on litellm proxy, `litellm --model` +response = client.chat.completions.create(model="gpt-3.5-turbo", messages = [ + { + "role": "user", + "content": "this is a test request, write a short poem" + } +]) + +print(response) +``` + + + +```python +from langchain.chat_models import ChatOpenAI +from langchain.prompts.chat import ( + ChatPromptTemplate, + HumanMessagePromptTemplate, + SystemMessagePromptTemplate, +) +from langchain.schema import HumanMessage, SystemMessage +import os + +os.environ["OPENAI_API_KEY"] = "sk-tXL0wt5-lOOVK9sfY2UacA" # 👈 Team's Key + +chat = ChatOpenAI( + openai_api_base="http://0.0.0.0:4000", + model = "gpt-3.5-turbo", + temperature=0.1, +) + +messages = [ + SystemMessage( + content="You are a helpful assistant that im using to make a test request to." + ), + HumanMessage( + content="test from litellm. tell me why it's amazing in 1 sentence" + ), +] +response = chat(messages) + +print(response) +``` + + + +**See Results on Lago** + + + + +## Advanced - Lago Logging object + +This is what LiteLLM will log to Lagos + +``` +{ + "event": { + "transaction_id": "", + "external_customer_id": , # either 'end_user_id', 'user_id', or 'team_id'. Default 'end_user_id'. 
+ "code": os.getenv("LAGO_API_EVENT_CODE"), + "properties": { + "input_tokens": , + "output_tokens": , + "model": , + "response_cost": , # 👈 LITELLM CALCULATED RESPONSE COST - https://github.com/BerriAI/litellm/blob/d43f75150a65f91f60dc2c0c9462ce3ffc713c1f/litellm/utils.py#L1473 + } + } +} +``` + +## Advanced - Bill Customers, Internal Users + +For: +- Customers (id passed via 'user' param in /chat/completion call) = 'end_user_id' +- Internal Users (id set when [creating keys](https://docs.litellm.ai/docs/proxy/virtual_keys#advanced---spend-tracking)) = 'user_id' +- Teams (id set when [creating keys](https://docs.litellm.ai/docs/proxy/virtual_keys#advanced---spend-tracking)) = 'team_id' + + + + + + +1. Set 'LAGO_API_CHARGE_BY' to 'end_user_id' + + ```bash + export LAGO_API_CHARGE_BY="end_user_id" + ``` + +2. Test it! + + + + + ```shell + curl --location 'http://0.0.0.0:4000/chat/completions' \ + --header 'Content-Type: application/json' \ + --data ' { + "model": "gpt-3.5-turbo", + "messages": [ + { + "role": "user", + "content": "what llm are you" + } + ], + "user": "my_customer_id" # 👈 whatever your customer id is + } + ' + ``` + + + + ```python + import openai + client = openai.OpenAI( + api_key="anything", + base_url="http://0.0.0.0:4000" + ) + + # request sent to model set on litellm proxy, `litellm --model` + response = client.chat.completions.create(model="gpt-3.5-turbo", messages = [ + { + "role": "user", + "content": "this is a test request, write a short poem" + } + ], user="my_customer_id") # 👈 whatever your customer id is + + print(response) + ``` + + + + + ```python + from langchain.chat_models import ChatOpenAI + from langchain.prompts.chat import ( + ChatPromptTemplate, + HumanMessagePromptTemplate, + SystemMessagePromptTemplate, + ) + from langchain.schema import HumanMessage, SystemMessage + import os + + os.environ["OPENAI_API_KEY"] = "anything" + + chat = ChatOpenAI( + openai_api_base="http://0.0.0.0:4000", + model = "gpt-3.5-turbo", + 
temperature=0.1, + extra_body={ + "user": "my_customer_id" # 👈 whatever your customer id is + } + ) + + messages = [ + SystemMessage( + content="You are a helpful assistant that im using to make a test request to." + ), + HumanMessage( + content="test from litellm. tell me why it's amazing in 1 sentence" + ), + ] + response = chat(messages) + + print(response) + ``` + + + + + + + +1. Set 'LAGO_API_CHARGE_BY' to 'user_id' + +```bash +export LAGO_API_CHARGE_BY="user_id" +``` + +2. Create a key for that user + +```bash +curl 'http://0.0.0.0:4000/key/generate' \ +--header 'Authorization: Bearer ' \ +--header 'Content-Type: application/json' \ +--data-raw '{"user_id": "my-unique-id"}' # 👈 Internal User's id +``` + +Response Object: + +```bash +{ + "key": "sk-tXL0wt5-lOOVK9sfY2UacA", +} +``` + +3. Make API Calls with that Key + +```python +import openai +client = openai.OpenAI( + api_key="sk-tXL0wt5-lOOVK9sfY2UacA", # 👈 Generated key + base_url="http://0.0.0.0:4000" +) + +# request sent to model set on litellm proxy, `litellm --model` +response = client.chat.completions.create(model="gpt-3.5-turbo", messages = [ + { + "role": "user", + "content": "this is a test request, write a short poem" + } +]) + +print(response) +``` + + diff --git a/docs/my-website/docs/proxy/caching.md b/docs/my-website/docs/proxy/caching.md index 521fce84a..fd6451155 100644 --- a/docs/my-website/docs/proxy/caching.md +++ b/docs/my-website/docs/proxy/caching.md @@ -61,6 +61,22 @@ litellm_settings: ttl: 600 # will be cached on redis for 600s ``` + +## SSL + +just set `REDIS_SSL="True"` in your .env, and LiteLLM will pick this up. + +```env +REDIS_SSL="True" +``` + +For quick testing, you can also use REDIS_URL, eg.: + +``` +REDIS_URL="rediss://.." +``` + +but we **don't** recommend using REDIS_URL in prod. We've noticed a performance difference between using it vs. redis_host, port, etc. 
#### Step 2: Add Redis Credentials to .env Set either `REDIS_URL` or the `REDIS_HOST` in your os environment, to enable caching. diff --git a/docs/my-website/docs/proxy/configs.md b/docs/my-website/docs/proxy/configs.md index e83125f2a..5eeb05f36 100644 --- a/docs/my-website/docs/proxy/configs.md +++ b/docs/my-website/docs/proxy/configs.md @@ -62,9 +62,11 @@ model_list: litellm_settings: # module level litellm settings - https://github.com/BerriAI/litellm/blob/main/litellm/__init__.py drop_params: True + success_callback: ["langfuse"] # OPTIONAL - if you want to start sending LLM Logs to Langfuse. Make sure to set `LANGFUSE_PUBLIC_KEY` and `LANGFUSE_SECRET_KEY` in your env general_settings: master_key: sk-1234 # [OPTIONAL] Only use this if you to require all calls to contain this key (Authorization: Bearer sk-1234) + alerting: ["slack"] # [OPTIONAL] If you want Slack Alerts for Hanging LLM requests, Slow llm responses, Budget Alerts. Make sure to set `SLACK_WEBHOOK_URL` in your env ``` :::info @@ -600,6 +602,7 @@ general_settings: "general_settings": { "completion_model": "string", "disable_spend_logs": "boolean", # turn off writing each transaction to the db + "disable_master_key_return": "boolean", # turn off returning master key on UI (checked on '/user/info' endpoint) "disable_reset_budget": "boolean", # turn off reset budget scheduled task "enable_jwt_auth": "boolean", # allow proxy admin to auth in via jwt tokens with 'litellm_proxy_admin' in claims "enforce_user_param": "boolean", # requires all openai endpoint requests to have a 'user' param diff --git a/docs/my-website/docs/proxy/cost_tracking.md b/docs/my-website/docs/proxy/cost_tracking.md index 887ec9e3e..2aaf8116e 100644 --- a/docs/my-website/docs/proxy/cost_tracking.md +++ b/docs/my-website/docs/proxy/cost_tracking.md @@ -1,8 +1,161 @@ -# Cost Tracking - Azure +import Tabs from '@theme/Tabs'; +import TabItem from '@theme/TabItem'; + +# 💸 Spend Tracking + +Track spend for keys, users, and teams across 
100+ LLMs. + +## Getting Spend Reports - To Charge Other Teams, API Keys + +Use the `/global/spend/report` endpoint to get daily spend per team, with a breakdown of spend per API Key, Model + +### Example Request + +```shell +curl -X GET 'http://localhost:4000/global/spend/report?start_date=2024-04-01&end_date=2024-06-30' \ + -H 'Authorization: Bearer sk-1234' +``` + +### Example Response + + + + +```shell +[ + { + "group_by_day": "2024-04-30T00:00:00+00:00", + "teams": [ + { + "team_name": "Prod Team", + "total_spend": 0.0015265, + "metadata": [ # see the spend by unique(key + model) + { + "model": "gpt-4", + "spend": 0.00123, + "total_tokens": 28, + "api_key": "88dc28.." # the hashed api key + }, + { + "model": "gpt-4", + "spend": 0.00123, + "total_tokens": 28, + "api_key": "a73dc2.." # the hashed api key + }, + { + "model": "chatgpt-v-2", + "spend": 0.000214, + "total_tokens": 122, + "api_key": "898c28.." # the hashed api key + }, + { + "model": "gpt-3.5-turbo", + "spend": 0.0000825, + "total_tokens": 85, + "api_key": "84dc28.." 
# the hashed api key + } + ] + } + ] + } +] +``` + + + + + + +```python +import requests +url = 'http://localhost:4000/global/spend/report' +params = { + 'start_date': '2023-04-01', + 'end_date': '2024-06-30' +} + +headers = { + 'Authorization': 'Bearer sk-1234' +} + +# Make the GET request +response = requests.get(url, headers=headers, params=params) +spend_report = response.json() + +for row in spend_report: + date = row["group_by_day"] + teams = row["teams"] + for team in teams: + team_name = team["team_name"] + total_spend = team["total_spend"] + metadata = team["metadata"] + + print(f"Date: {date}") + print(f"Team: {team_name}") + print(f"Total Spend: {total_spend}") + print("Metadata: ", metadata) + print() +``` + +Output from script +```shell +# Date: 2024-05-11T00:00:00+00:00 +# Team: local_test_team +# Total Spend: 0.003675099999999999 +# Metadata: [{'model': 'gpt-3.5-turbo', 'spend': 0.003675099999999999, 'api_key': 'b94d5e0bc3a71a573917fe1335dc0c14728c7016337451af9714924ff3a729db', 'total_tokens': 3105}] + +# Date: 2024-05-13T00:00:00+00:00 +# Team: Unassigned Team +# Total Spend: 3.4e-05 +# Metadata: [{'model': 'gpt-3.5-turbo', 'spend': 3.4e-05, 'api_key': '9569d13c9777dba68096dea49b0b03e0aaf4d2b65d4030eda9e8a2733c3cd6e0', 'total_tokens': 50}] + +# Date: 2024-05-13T00:00:00+00:00 +# Team: central +# Total Spend: 0.000684 +# Metadata: [{'model': 'gpt-3.5-turbo', 'spend': 0.000684, 'api_key': '0323facdf3af551594017b9ef162434a9b9a8ca1bbd9ccbd9d6ce173b1015605', 'total_tokens': 498}] + +# Date: 2024-05-13T00:00:00+00:00 +# Team: local_test_team +# Total Spend: 0.0005715000000000001 +# Metadata: [{'model': 'gpt-3.5-turbo', 'spend': 0.0005715000000000001, 'api_key': 'b94d5e0bc3a71a573917fe1335dc0c14728c7016337451af9714924ff3a729db', 'total_tokens': 423}] +``` + + + + + + + +## Reset Team, API Key Spend - MASTER KEY ONLY + +Use `/global/spend/reset` if you want to: +- Reset the Spend for all API Keys, Teams. 
The `spend` for ALL Teams and Keys in `LiteLLM_TeamTable` and `LiteLLM_VerificationToken` will be set to `spend=0` + +- LiteLLM will maintain all the logs in `LiteLLMSpendLogs` for Auditing Purposes + +### Request +Only the `LITELLM_MASTER_KEY` you set can access this route +```shell +curl -X POST \ + 'http://localhost:4000/global/spend/reset' \ + -H 'Authorization: Bearer sk-1234' \ + -H 'Content-Type: application/json' +``` + +### Expected Responses + +```shell +{"message":"Spend for all API Keys and Teams reset successfully","status":"success"} +``` + + + + +## Spend Tracking for Azure Set base model for cost tracking azure image-gen call -## Image Generation +### Image Generation ```yaml model_list: @@ -17,7 +170,7 @@ model_list: mode: image_generation ``` -## Chat Completions / Embeddings +### Chat Completions / Embeddings **Problem**: Azure returns `gpt-4` in the response when `azure/gpt-4-1106-preview` is used. This leads to inaccurate cost tracking diff --git a/docs/my-website/docs/proxy/customer_routing.md b/docs/my-website/docs/proxy/customer_routing.md new file mode 100644 index 000000000..4c8a60af8 --- /dev/null +++ b/docs/my-website/docs/proxy/customer_routing.md @@ -0,0 +1,83 @@ +# Region-based Routing + +Route specific customers to eu-only models. + +By specifying 'allowed_model_region' for a customer, LiteLLM will filter-out any models in a model group which is not in the allowed region (i.e. 'eu'). + +[**See Code**](https://github.com/BerriAI/litellm/blob/5eb12e30cc5faa73799ebc7e48fc86ebf449c879/litellm/router.py#L2938) + +### 1. Create customer with region-specification + +Use the litellm 'end-user' object for this. + +End-users can be tracked / id'ed by passing the 'user' param to litellm in an openai chat completion/embedding call. 
+ +```bash +curl -X POST --location 'http://0.0.0.0:4000/end_user/new' \ +--header 'Authorization: Bearer sk-1234' \ +--header 'Content-Type: application/json' \ +--data '{ + "user_id" : "ishaan-jaff-45", + "allowed_model_region": "eu", # 👈 SPECIFY ALLOWED REGION='eu' +}' +``` + +### 2. Add eu models to model-group + +Add eu models to a model group. For azure models, litellm can automatically infer the region (no need to set it). + +```yaml +model_list: + - model_name: gpt-3.5-turbo + litellm_params: + model: azure/gpt-35-turbo-eu # 👈 EU azure model + api_base: https://my-endpoint-europe-berri-992.openai.azure.com/ + api_key: os.environ/AZURE_EUROPE_API_KEY + - model_name: gpt-3.5-turbo + litellm_params: + model: azure/chatgpt-v-2 + api_base: https://openai-gpt-4-test-v-1.openai.azure.com/ + api_version: "2023-05-15" + api_key: os.environ/AZURE_API_KEY + +router_settings: + enable_pre_call_checks: true # 👈 IMPORTANT +``` + +Start the proxy + +```yaml +litellm --config /path/to/config.yaml +``` + +### 3. Test it! + +Make a simple chat completions call to the proxy. In the response headers, you should see the returned api base. + +```bash +curl -X POST --location 'http://localhost:4000/chat/completions' \ +--header 'Content-Type: application/json' \ +--header 'Authorization: Bearer sk-1234' \ +--data '{ + "model": "gpt-3.5-turbo", + "messages": [ + { + "role": "user", + "content": "what is the meaning of the universe? 1234" + }], + "user": "ishaan-jaff-45" # 👈 USER ID +} +' +``` + +Expected API Base in response headers + +``` +x-litellm-api-base: "https://my-endpoint-europe-berri-992.openai.azure.com/" +``` + +### FAQ + +**What happens if there are no available models for that region?** + +Since the router filters out models not in the specified region, it will return back as an error to the user, if no models in that region are available. 
\ No newline at end of file diff --git a/docs/my-website/docs/proxy/deploy.md b/docs/my-website/docs/proxy/deploy.md index 7e350bfa9..f9a7db2d4 100644 --- a/docs/my-website/docs/proxy/deploy.md +++ b/docs/my-website/docs/proxy/deploy.md @@ -11,40 +11,37 @@ You can find the Dockerfile to build litellm proxy [here](https://github.com/Ber -**Step 1. Create a file called `litellm_config.yaml`** +### Step 1. CREATE config.yaml - Example `litellm_config.yaml` (the `os.environ/` prefix means litellm will read `AZURE_API_BASE` from the env) - ```yaml - model_list: - - model_name: azure-gpt-3.5 - litellm_params: - model: azure/ - api_base: os.environ/AZURE_API_BASE - api_key: os.environ/AZURE_API_KEY - api_version: "2023-07-01-preview" - ``` +Example `litellm_config.yaml` -**Step 2. Run litellm docker image** +```yaml +model_list: + - model_name: azure-gpt-3.5 + litellm_params: + model: azure/ + api_base: os.environ/AZURE_API_BASE # runs os.getenv("AZURE_API_BASE") + api_key: os.environ/AZURE_API_KEY # runs os.getenv("AZURE_API_KEY") + api_version: "2023-07-01-preview" +``` - See the latest available ghcr docker image here: - https://github.com/berriai/litellm/pkgs/container/litellm - Your litellm config.yaml should be called `litellm_config.yaml` in the directory you run this command. - The `-v` command will mount that file - Pass `AZURE_API_KEY` and `AZURE_API_BASE` since we set them in step 1 +### Step 2. RUN Docker Image - ```shell - docker run \ - -v $(pwd)/litellm_config.yaml:/app/config.yaml \ - -e AZURE_API_KEY=d6*********** \ - -e AZURE_API_BASE=https://openai-***********/ \ - -p 4000:4000 \ - ghcr.io/berriai/litellm:main-latest \ - --config /app/config.yaml --detailed_debug - ``` +```shell +docker run \ + -v $(pwd)/litellm_config.yaml:/app/config.yaml \ + -e AZURE_API_KEY=d6*********** \ + -e AZURE_API_BASE=https://openai-***********/ \ + -p 4000:4000 \ + ghcr.io/berriai/litellm:main-latest \ + --config /app/config.yaml --detailed_debug +``` -**Step 3. 
Send a Test Request** +Get Latest Image 👉 [here](https://github.com/berriai/litellm/pkgs/container/litellm) + +### Step 3. TEST Request Pass `model=azure-gpt-3.5` this was set on step 1 @@ -231,13 +228,16 @@ Your OpenAI proxy server is now running on `http://127.0.0.1:4000`. | Docs | When to Use | | --- | --- | | [Quick Start](#quick-start) | call 100+ LLMs + Load Balancing | -| [Deploy with Database](#deploy-with-database) | + use Virtual Keys + Track Spend | +| [Deploy with Database](#deploy-with-database) | + use Virtual Keys + Track Spend (Note: When deploying with a database providing a `DATABASE_URL` and `LITELLM_MASTER_KEY` are required in your env ) | | [LiteLLM container + Redis](#litellm-container--redis) | + load balance across multiple litellm containers | | [LiteLLM Database container + PostgresDB + Redis](#litellm-database-container--postgresdb--redis) | + use Virtual Keys + Track Spend + load balance across multiple litellm containers | ## Deploy with Database ### Docker, Kubernetes, Helm Chart +Requirements: +- Need a postgres database (e.g. [Supabase](https://supabase.com/), [Neon](https://neon.tech/), etc) Set `DATABASE_URL=postgresql://:@:/` in your env +- Set a `LITELLM_MASTER_KEY`, this is your Proxy Admin key - you can use this to create other keys (🚨 must start with `sk-`) @@ -252,6 +252,8 @@ docker pull ghcr.io/berriai/litellm-database:main-latest ```shell docker run \ -v $(pwd)/litellm_config.yaml:/app/config.yaml \ + -e LITELLM_MASTER_KEY=sk-1234 \ + -e DATABASE_URL=postgresql://:@:/ \ -e AZURE_API_KEY=d6*********** \ -e AZURE_API_BASE=https://openai-***********/ \ -p 4000:4000 \ @@ -267,26 +269,63 @@ Your OpenAI proxy server is now running on `http://0.0.0.0:4000`. #### Step 1. 
Create deployment.yaml ```yaml - apiVersion: apps/v1 - kind: Deployment - metadata: - name: litellm-deployment - spec: - replicas: 1 - selector: - matchLabels: - app: litellm - template: - metadata: - labels: - app: litellm - spec: - containers: - - name: litellm-container - image: ghcr.io/berriai/litellm-database:main-latest - env: - - name: DATABASE_URL - value: postgresql://:@:/ +apiVersion: apps/v1 +kind: Deployment +metadata: + name: litellm-deployment +spec: + replicas: 3 + selector: + matchLabels: + app: litellm + template: + metadata: + labels: + app: litellm + spec: + containers: + - name: litellm-container + image: ghcr.io/berriai/litellm:main-latest + imagePullPolicy: Always + env: + - name: AZURE_API_KEY + value: "d6******" + - name: AZURE_API_BASE + value: "https://ope******" + - name: LITELLM_MASTER_KEY + value: "sk-1234" + - name: DATABASE_URL + value: "po**********" + args: + - "--config" + - "/app/proxy_config.yaml" # Update the path to mount the config file + volumeMounts: # Define volume mount for proxy_config.yaml + - name: config-volume + mountPath: /app + readOnly: true + livenessProbe: + httpGet: + path: /health/liveliness + port: 4000 + initialDelaySeconds: 120 + periodSeconds: 15 + successThreshold: 1 + failureThreshold: 3 + timeoutSeconds: 10 + readinessProbe: + httpGet: + path: /health/readiness + port: 4000 + initialDelaySeconds: 120 + periodSeconds: 15 + successThreshold: 1 + failureThreshold: 3 + timeoutSeconds: 10 + volumes: # Define volume to mount proxy_config.yaml + - name: config-volume + configMap: + name: litellm-config + ``` ```bash diff --git a/docs/my-website/docs/proxy/logging.md b/docs/my-website/docs/proxy/logging.md index 2aa069b7a..538a81d4b 100644 --- a/docs/my-website/docs/proxy/logging.md +++ b/docs/my-website/docs/proxy/logging.md @@ -3,19 +3,21 @@ import Tabs from '@theme/Tabs'; import TabItem from '@theme/TabItem'; -# 🔎 Logging - Custom Callbacks, DataDog, Langfuse, s3 Bucket, Sentry, OpenTelemetry, Athina +# 🔎 
Logging - Custom Callbacks, DataDog, Langfuse, s3 Bucket, Sentry, OpenTelemetry, Athina, Azure Content-Safety Log Proxy Input, Output, Exceptions using Custom Callbacks, Langfuse, OpenTelemetry, LangFuse, DynamoDB, s3 Bucket - [Async Custom Callbacks](#custom-callback-class-async) - [Async Custom Callback APIs](#custom-callback-apis-async) -- [Logging to DataDog](#logging-proxy-inputoutput---datadog) - [Logging to Langfuse](#logging-proxy-inputoutput---langfuse) +- [Logging to OpenMeter](#logging-proxy-inputoutput---langfuse) - [Logging to s3 Buckets](#logging-proxy-inputoutput---s3-buckets) +- [Logging to DataDog](#logging-proxy-inputoutput---datadog) - [Logging to DynamoDB](#logging-proxy-inputoutput---dynamodb) - [Logging to Sentry](#logging-proxy-inputoutput---sentry) - [Logging to Traceloop (OpenTelemetry)](#logging-proxy-inputoutput-traceloop-opentelemetry) - [Logging to Athina](#logging-proxy-inputoutput-athina) +- [(BETA) Moderation with Azure Content-Safety](#moderation-with-azure-content-safety) ## Custom Callback Class [Async] Use this when you want to run custom callbacks in `python` @@ -401,7 +403,7 @@ litellm_settings: Start the LiteLLM Proxy and make a test request to verify the logs reached your callback API ## Logging Proxy Input/Output - Langfuse -We will use the `--config` to set `litellm.success_callback = ["langfuse"]` this will log all successfull LLM calls to langfuse +We will use the `--config` to set `litellm.success_callback = ["langfuse"]` this will log all successfull LLM calls to langfuse. 
Make sure to set `LANGFUSE_PUBLIC_KEY` and `LANGFUSE_SECRET_KEY` in your environment **Step 1** Install langfuse @@ -419,7 +421,13 @@ litellm_settings: success_callback: ["langfuse"] ``` -**Step 3**: Start the proxy, make a test request +**Step 3**: Set required env variables for logging to langfuse +```shell +export LANGFUSE_PUBLIC_KEY="pk_kk" +export LANGFUSE_SECRET_KEY="sk_ss" +``` + +**Step 4**: Start the proxy, make a test request Start proxy ```shell @@ -539,6 +547,105 @@ print(response) +### Team based Logging to Langfuse + +**Example:** + +This config would send langfuse logs to 2 different langfuse projects, based on the team id + +```yaml +litellm_settings: + default_team_settings: + - team_id: my-secret-project + success_callback: ["langfuse"] + langfuse_public_key: os.environ/LANGFUSE_PUB_KEY_1 # Project 1 + langfuse_secret: os.environ/LANGFUSE_PRIVATE_KEY_1 # Project 1 + - team_id: ishaans-secret-project + success_callback: ["langfuse"] + langfuse_public_key: os.environ/LANGFUSE_PUB_KEY_2 # Project 2 + langfuse_secret: os.environ/LANGFUSE_SECRET_2 # Project 2 +``` + +Now, when you [generate keys](./virtual_keys.md) for this team-id + +```bash +curl -X POST 'http://0.0.0.0:4000/key/generate' \ +-H 'Authorization: Bearer sk-1234' \ +-H 'Content-Type: application/json' \ +-d '{"team_id": "ishaans-secret-project"}' +``` + +All requests made with these keys will log data to their team-specific logging. + +### Redacting Messages, Response Content from Langfuse Logging + +Set `litellm.turn_off_message_logging=True` This will prevent the messages and responses from being logged to langfuse, but request metadata will still be logged. 
+ +```yaml +model_list: + - model_name: gpt-3.5-turbo + litellm_params: + model: gpt-3.5-turbo +litellm_settings: + success_callback: ["langfuse"] + turn_off_message_logging: True +``` + + + +## Logging Proxy Cost + Usage - OpenMeter + +Bill customers according to their LLM API usage with [OpenMeter](../observability/openmeter.md) + +**Required Env Variables** + +```bash +# from https://openmeter.cloud +export OPENMETER_API_ENDPOINT="" # defaults to https://openmeter.cloud +export OPENMETER_API_KEY="" +``` + +### Quick Start + +1. Add to Config.yaml +```yaml +model_list: +- litellm_params: + api_base: https://openai-function-calling-workers.tasslexyz.workers.dev/ + api_key: my-fake-key + model: openai/my-fake-model + model_name: fake-openai-endpoint + +litellm_settings: + success_callback: ["openmeter"] # 👈 KEY CHANGE +``` + +2. Start Proxy + +``` +litellm --config /path/to/config.yaml +``` + +3. Test it! + +```bash +curl --location 'http://0.0.0.0:4000/chat/completions' \ +--header 'Content-Type: application/json' \ +--data ' { + "model": "fake-openai-endpoint", + "messages": [ + { + "role": "user", + "content": "what llm are you" + } + ], + } +' +``` + + + + ## Logging Proxy Input/Output - DataDog We will use the `--config` to set `litellm.success_callback = ["datadog"]` this will log all successfull LLM calls to DataDog @@ -808,39 +915,72 @@ Test Request litellm --test ``` -## Logging Proxy Input/Output Traceloop (OpenTelemetry) +## Logging Proxy Input/Output in OpenTelemetry format using Traceloop's OpenLLMetry -Traceloop allows you to log LLM Input/Output in the OpenTelemetry format +[OpenLLMetry](https://github.com/traceloop/openllmetry) _(built and maintained by Traceloop)_ is a set of extensions +built on top of [OpenTelemetry](https://opentelemetry.io/) that gives you complete observability over your LLM +application. 
Because it uses OpenTelemetry under the +hood, [it can be connected to various observability solutions](https://www.traceloop.com/docs/openllmetry/integrations/introduction) +like: -We will use the `--config` to set `litellm.success_callback = ["traceloop"]` this will log all successfull LLM calls to traceloop +* [Traceloop](https://www.traceloop.com/docs/openllmetry/integrations/traceloop) +* [Axiom](https://www.traceloop.com/docs/openllmetry/integrations/axiom) +* [Azure Application Insights](https://www.traceloop.com/docs/openllmetry/integrations/azure) +* [Datadog](https://www.traceloop.com/docs/openllmetry/integrations/datadog) +* [Dynatrace](https://www.traceloop.com/docs/openllmetry/integrations/dynatrace) +* [Grafana Tempo](https://www.traceloop.com/docs/openllmetry/integrations/grafana) +* [Honeycomb](https://www.traceloop.com/docs/openllmetry/integrations/honeycomb) +* [HyperDX](https://www.traceloop.com/docs/openllmetry/integrations/hyperdx) +* [Instana](https://www.traceloop.com/docs/openllmetry/integrations/instana) +* [New Relic](https://www.traceloop.com/docs/openllmetry/integrations/newrelic) +* [OpenTelemetry Collector](https://www.traceloop.com/docs/openllmetry/integrations/otel-collector) +* [Service Now Cloud Observability](https://www.traceloop.com/docs/openllmetry/integrations/service-now) +* [Sentry](https://www.traceloop.com/docs/openllmetry/integrations/sentry) +* [SigNoz](https://www.traceloop.com/docs/openllmetry/integrations/signoz) +* [Splunk](https://www.traceloop.com/docs/openllmetry/integrations/splunk) -**Step 1** Install traceloop-sdk and set Traceloop API key +We will use the `--config` to set `litellm.success_callback = ["traceloop"]` to achieve this, steps are listed below. + +**Step 1:** Install the SDK ```shell -pip install traceloop-sdk -U +pip install traceloop-sdk ``` -Traceloop outputs standard OpenTelemetry data that can be connected to your observability stack. 
Send standard OpenTelemetry from LiteLLM Proxy to [Traceloop](https://www.traceloop.com/docs/openllmetry/integrations/traceloop), [Dynatrace](https://www.traceloop.com/docs/openllmetry/integrations/dynatrace), [Datadog](https://www.traceloop.com/docs/openllmetry/integrations/datadog) -, [New Relic](https://www.traceloop.com/docs/openllmetry/integrations/newrelic), [Honeycomb](https://www.traceloop.com/docs/openllmetry/integrations/honeycomb), [Grafana Tempo](https://www.traceloop.com/docs/openllmetry/integrations/grafana), [Splunk](https://www.traceloop.com/docs/openllmetry/integrations/splunk), [OpenTelemetry Collector](https://www.traceloop.com/docs/openllmetry/integrations/otel-collector) +**Step 2:** Configure Environment Variable for trace exporting + +You will need to configure where to export your traces. Environment variables will control this, example: For Traceloop +you should use `TRACELOOP_API_KEY`, whereas for Datadog you use `TRACELOOP_BASE_URL`. For more +visit [the Integrations Catalog](https://www.traceloop.com/docs/openllmetry/integrations/introduction). 
+ +If you are using Datadog as the observability solutions then you can set `TRACELOOP_BASE_URL` as: + +```shell +TRACELOOP_BASE_URL=http://:4318 +``` + +**Step 3**: Create a `config.yaml` file and set `litellm_settings`: `success_callback` -**Step 2**: Create a `config.yaml` file and set `litellm_settings`: `success_callback` ```yaml model_list: - - model_name: gpt-3.5-turbo + - model_name: gpt-3.5-turbo litellm_params: model: gpt-3.5-turbo + api_key: my-fake-key # replace api_key with actual key litellm_settings: - success_callback: ["traceloop"] + success_callback: [ "traceloop" ] ``` -**Step 3**: Start the proxy, make a test request +**Step 4**: Start the proxy, make a test request Start proxy + ```shell litellm --config config.yaml --debug ``` Test Request + ``` curl --location 'http://0.0.0.0:4000/chat/completions' \ --header 'Content-Type: application/json' \ @@ -897,4 +1037,87 @@ curl --location 'http://0.0.0.0:4000/chat/completions' \ } ] }' -``` \ No newline at end of file +``` + +## (BETA) Moderation with Azure Content Safety + +[Azure Content-Safety](https://azure.microsoft.com/en-us/products/ai-services/ai-content-safety) is a Microsoft Azure service that provides content moderation APIs to detect potential offensive, harmful, or risky content in text. + +We will use the `--config` to set `litellm.success_callback = ["azure_content_safety"]` this will moderate all LLM calls using Azure Content Safety. + +**Step 0** Deploy Azure Content Safety + +Deploy an Azure Content-Safety instance from the Azure Portal and get the `endpoint` and `key`. 
+ +**Step 1** Set Azure Content Safety API key + +```shell +AZURE_CONTENT_SAFETY_KEY = "" +``` + +**Step 2**: Create a `config.yaml` file and set `litellm_settings`: `success_callback` +```yaml +model_list: + - model_name: gpt-3.5-turbo + litellm_params: + model: gpt-3.5-turbo +litellm_settings: + callbacks: ["azure_content_safety"] + azure_content_safety_params: + endpoint: "" + key: "os.environ/AZURE_CONTENT_SAFETY_KEY" +``` + +**Step 3**: Start the proxy, make a test request + +Start proxy +```shell +litellm --config config.yaml --debug +``` + +Test Request +``` +curl --location 'http://0.0.0.0:4000/chat/completions' \ + --header 'Content-Type: application/json' \ + --data ' { + "model": "gpt-3.5-turbo", + "messages": [ + { + "role": "user", + "content": "Hi, how are you?" + } + ] + }' +``` + +An HTTP 400 error will be returned if the content is detected with a value greater than the threshold set in the `config.yaml`. +The details of the response will describe: +- The `source`: input text or llm generated text +- The `category`: the category of the content that triggered the moderation +- The `severity`: the severity from 0 to 10 + +**Step 4**: Customizing Azure Content Safety Thresholds + +You can customize the thresholds for each category by setting the `thresholds` in the `config.yaml` + +```yaml +model_list: + - model_name: gpt-3.5-turbo + litellm_params: + model: gpt-3.5-turbo +litellm_settings: + callbacks: ["azure_content_safety"] + azure_content_safety_params: + endpoint: "" + key: "os.environ/AZURE_CONTENT_SAFETY_KEY" + thresholds: + Hate: 6 + SelfHarm: 8 + Sexual: 6 + Violence: 4 +``` + +:::info +`thresholds` are not required by default, but you can tune the values to your needs. 
+Default value is `4` for all categories +::: \ No newline at end of file diff --git a/docs/my-website/docs/proxy/prod.md b/docs/my-website/docs/proxy/prod.md index 7f1109dd5..35c8c575b 100644 --- a/docs/my-website/docs/proxy/prod.md +++ b/docs/my-website/docs/proxy/prod.md @@ -3,7 +3,75 @@ import TabItem from '@theme/TabItem'; # ⚡ Best Practices for Production -Expected Performance in Production +## 1. Use this config.yaml +Use this config.yaml in production (with your own LLMs) + +```yaml +model_list: + - model_name: fake-openai-endpoint + litellm_params: + model: openai/fake + api_key: fake-key + api_base: https://exampleopenaiendpoint-production.up.railway.app/ + +general_settings: + master_key: sk-1234 # enter your own master key, ensure it starts with 'sk-' + alerting: ["slack"] # Setup slack alerting - get alerts on LLM exceptions, Budget Alerts, Slow LLM Responses + proxy_batch_write_at: 60 # Batch write spend updates every 60s + +litellm_settings: + set_verbose: False # Switch off Debug Logging, ensure your logs do not have any debugging on +``` + +Set slack webhook url in your env +```shell +export SLACK_WEBHOOK_URL="https://hooks.slack.com/services/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" +``` + +:::info + +Need Help or want dedicated support? Talk to a founder [here](https://calendly.com/d/4mp-gd3-k5k/litellm-1-1-onboarding-chat) + +::: + + +## 2. On Kubernetes - Use 1 Uvicorn worker [Suggested CMD] + +Use this Docker `CMD`. This will start the proxy with 1 Uvicorn Async Worker + +(Ensure that you're not setting `run_gunicorn` or `num_workers` in the CMD). +```shell +CMD ["--port", "4000", "--config", "./proxy_server_config.yaml"] +``` + + +## 3. Use Redis 'port','host', 'password'. NOT 'redis_url' + +If you decide to use Redis, DO NOT use 'redis_url'. We recommend using redis port, host, and password params. + +`redis_url` is 80 RPS slower + +This is still something we're investigating. 
Keep track of it [here](https://github.com/BerriAI/litellm/issues/3188) + +Recommended to do this for prod: + +```yaml +router_settings: + routing_strategy: usage-based-routing-v2 + # redis_url: "os.environ/REDIS_URL" + redis_host: os.environ/REDIS_HOST + redis_port: os.environ/REDIS_PORT + redis_password: os.environ/REDIS_PASSWORD +``` + +## 4. Disable 'load_dotenv' + +Set `export LITELLM_MODE="PRODUCTION"` + +This disables the load_dotenv() functionality, which will automatically load your environment credentials from the local `.env`. + +## Extras +### Expected Performance in Production 1 LiteLLM Uvicorn Worker on Kubernetes @@ -16,13 +84,7 @@ Expected Performance in Production | `/chat/completions` Requests/hour | `126K` | -## 1. Switch of Debug Logging - -Remove `set_verbose: True` from your config.yaml -```yaml -litellm_settings: - set_verbose: True -``` +### Verifying Debugging logs are off You should only see the following level of details in logs on the proxy server ```shell @@ -31,135 +93,8 @@ You should only see the following level of details in logs on the proxy server # INFO: 192.168.2.205:29734 - "POST /chat/completions HTTP/1.1" 200 OK ``` -## 2. On Kubernetes - Use 1 Uvicorn worker [Suggested CMD] -Use this Docker `CMD`. This will start the proxy with 1 Uvicorn Async Worker - -(Ensure that you're not setting `run_gunicorn` or `num_workers` in the CMD). -```shell -CMD ["--port", "4000", "--config", "./proxy_server_config.yaml"] -``` - -## 2. Batch write spend updates every 60s - -The default proxy batch write is 10s. This is to make it easy to see spend when debugging locally. - -In production, we recommend using a longer interval period of 60s. This reduces the number of connections used to make DB writes. - -```yaml -general_settings: - master_key: sk-1234 - proxy_batch_write_at: 5 # 👈 Frequency of batch writing logs to server (in seconds) -``` - - -## 3. 
Move spend logs to separate server - -Writing each spend log to the db can slow down your proxy. In testing we saw a 70% improvement in median response time, by moving writing spend logs to a separate server. - -👉 [LiteLLM Spend Logs Server](https://github.com/BerriAI/litellm/tree/main/litellm-js/spend-logs) - - -**Spend Logs** -This is a log of the key, tokens, model, and latency for each call on the proxy. - -[**Full Payload**](https://github.com/BerriAI/litellm/blob/8c9623a6bc4ad9da0a2dac64249a60ed8da719e8/litellm/proxy/utils.py#L1769) - - -**1. Start the spend logs server** - -```bash -docker run -p 3000:3000 \ - -e DATABASE_URL="postgres://.." \ - ghcr.io/berriai/litellm-spend_logs:main-latest - -# RUNNING on http://0.0.0.0:3000 -``` - -**2. Connect to proxy** - - -Example litellm_config.yaml - -```yaml -model_list: -- model_name: fake-openai-endpoint - litellm_params: - model: openai/my-fake-model - api_key: my-fake-key - api_base: https://exampleopenaiendpoint-production.up.railway.app/ - -general_settings: - master_key: sk-1234 - proxy_batch_write_at: 5 # 👈 Frequency of batch writing logs to server (in seconds) -``` - -Add `SPEND_LOGS_URL` as an environment variable when starting the proxy - -```bash -docker run \ - -v $(pwd)/litellm_config.yaml:/app/config.yaml \ - -e DATABASE_URL="postgresql://.." \ - -e SPEND_LOGS_URL="http://host.docker.internal:3000" \ # 👈 KEY CHANGE - -p 4000:4000 \ - ghcr.io/berriai/litellm:main-latest \ - --config /app/config.yaml --detailed_debug - -# Running on http://0.0.0.0:4000 -``` - -**3. 
Test Proxy!** - - -```bash -curl --location 'http://0.0.0.0:4000/v1/chat/completions' \ ---header 'Content-Type: application/json' \ ---header 'Authorization: Bearer sk-1234' \ ---data '{ - "model": "fake-openai-endpoint", - "messages": [ - {"role": "system", "content": "Be helpful"}, - {"role": "user", "content": "What do you know?"} - ] -}' -``` - -In your LiteLLM Spend Logs Server, you should see - -**Expected Response** - -``` -Received and stored 1 logs. Total logs in memory: 1 -... -Flushed 1 log to the DB. -``` - - -### Machine Specification - -A t2.micro should be sufficient to handle 1k logs / minute on this server. - -This consumes at max 120MB, and <0.1 vCPU. - -## 4. Switch off resetting budgets - -Add this to your config.yaml. (Only spend per Key, User and Team will be tracked - spend per API Call will not be written to the LiteLLM Database) -```yaml -general_settings: - disable_spend_logs: true - disable_reset_budget: true -``` - -## 5. Switch of `litellm.telemetry` - -Switch of all telemetry tracking done by litellm - -```yaml -litellm_settings: - telemetry: False -``` - -## Machine Specifications to Deploy LiteLLM +### Machine Specifications to Deploy LiteLLM | Service | Spec | CPUs | Memory | Architecture | Version| | --- | --- | --- | --- | --- | --- | @@ -167,7 +102,7 @@ litellm_settings: | Redis Cache | - | - | - | - | 7.0+ Redis Engine| -## Reference Kubernetes Deployment YAML +### Reference Kubernetes Deployment YAML Reference Kubernetes `deployment.yaml` that was load tested by us diff --git a/docs/my-website/docs/proxy/grafana_metrics.md b/docs/my-website/docs/proxy/prometheus.md similarity index 58% rename from docs/my-website/docs/proxy/grafana_metrics.md rename to docs/my-website/docs/proxy/prometheus.md index bc099b51a..b1fb62ad5 100644 --- a/docs/my-website/docs/proxy/grafana_metrics.md +++ b/docs/my-website/docs/proxy/prometheus.md @@ -14,6 +14,7 @@ model_list: model: gpt-3.5-turbo litellm_settings: success_callback: ["prometheus"] + 
failure_callback: ["prometheus"] ``` Start the proxy @@ -48,6 +49,26 @@ http://localhost:4000/metrics | Metric Name | Description | |----------------------|--------------------------------------| -| `litellm_requests_metric` | Number of requests made, per `"user", "key", "model"` | -| `litellm_spend_metric` | Total Spend, per `"user", "key", "model"` | -| `litellm_total_tokens` | input + output tokens per `"user", "key", "model"` | +| `litellm_requests_metric` | Number of requests made, per `"user", "key", "model", "team", "end-user"` | +| `litellm_spend_metric` | Total Spend, per `"user", "key", "model", "team", "end-user"` | +| `litellm_total_tokens` | input + output tokens per `"user", "key", "model", "team", "end-user"` | +| `litellm_llm_api_failed_requests_metric` | Number of failed LLM API requests per `"user", "key", "model", "team", "end-user"` | + +## Monitor System Health + +To monitor the health of litellm adjacent services (redis / postgres), do: + +```yaml +model_list: + - model_name: gpt-3.5-turbo + litellm_params: + model: gpt-3.5-turbo +litellm_settings: + service_callback: ["prometheus_system"] +``` + +| Metric Name | Description | +|----------------------|--------------------------------------| +| `litellm_redis_latency` | histogram latency for redis calls | +| `litellm_redis_fails` | Number of failed redis calls | +| `litellm_self_latency` | Histogram latency for successful litellm api call | diff --git a/docs/my-website/docs/proxy/quick_start.md b/docs/my-website/docs/proxy/quick_start.md index a7ca4743b..050d9b598 100644 --- a/docs/my-website/docs/proxy/quick_start.md +++ b/docs/my-website/docs/proxy/quick_start.md @@ -348,6 +348,29 @@ query_result = embeddings.embed_query(text) print(f"TITAN EMBEDDINGS") print(query_result[:5]) +``` + + + +This is **not recommended**. There is duplicate logic as the proxy also uses the sdk, which might lead to unexpected errors. 
+ +```python +from litellm import completion + +response = completion( + model="openai/gpt-3.5-turbo", + messages = [ + { + "role": "user", + "content": "this is a test request, write a short poem" + } + ], + api_key="anything", + base_url="http://0.0.0.0:4000" + ) + +print(response) + ``` diff --git a/docs/my-website/docs/proxy/reliability.md b/docs/my-website/docs/proxy/reliability.md index 51e90fe39..e39a6765f 100644 --- a/docs/my-website/docs/proxy/reliability.md +++ b/docs/my-website/docs/proxy/reliability.md @@ -136,7 +136,22 @@ curl --location 'http://0.0.0.0:4000/chat/completions' \ ' ``` -## Advanced - Context Window Fallbacks +### Test it! + + +```bash +curl --location 'http://0.0.0.0:4000/chat/completions' \ + --header 'Content-Type: application/json' \ + --data-raw '{ + "model": "zephyr-beta", # 👈 MODEL NAME to fallback from + "messages": [ + {"role": "user", "content": "what color is red"} + ], + "mock_testing_fallbacks": true + }' +``` + +## Advanced - Context Window Fallbacks (Pre-Call Checks + Fallbacks) **Before call is made** check if a call is within model context window with **`enable_pre_call_checks: true`**. @@ -217,16 +232,16 @@ model_list: - model_name: gpt-3.5-turbo-small litellm_params: model: azure/chatgpt-v-2 - api_base: os.environ/AZURE_API_BASE - api_key: os.environ/AZURE_API_KEY - api_version: "2023-07-01-preview" - model_info: - base_model: azure/gpt-4-1106-preview # 2. 👈 (azure-only) SET BASE MODEL + api_base: os.environ/AZURE_API_BASE + api_key: os.environ/AZURE_API_KEY + api_version: "2023-07-01-preview" + model_info: + base_model: azure/gpt-4-1106-preview # 2. 
👈 (azure-only) SET BASE MODEL - model_name: gpt-3.5-turbo-large litellm_params: - model: gpt-3.5-turbo-1106 - api_key: os.environ/OPENAI_API_KEY + model: gpt-3.5-turbo-1106 + api_key: os.environ/OPENAI_API_KEY - model_name: claude-opus litellm_params: @@ -272,6 +287,69 @@ print(response) +## Advanced - EU-Region Filtering (Pre-Call Checks) + +**Before call is made** check if a call is within model context window with **`enable_pre_call_checks: true`**. + +Set 'region_name' of deployment. + +**Note:** LiteLLM can automatically infer region_name for Vertex AI, Bedrock, and IBM WatsonxAI based on your litellm params. For Azure, set `litellm.enable_preview = True`. + +**1. Set Config** + +```yaml +router_settings: + enable_pre_call_checks: true # 1. Enable pre-call checks + +model_list: +- model_name: gpt-3.5-turbo + litellm_params: + model: azure/chatgpt-v-2 + api_base: os.environ/AZURE_API_BASE + api_key: os.environ/AZURE_API_KEY + api_version: "2023-07-01-preview" + region_name: "eu" # 👈 SET EU-REGION + +- model_name: gpt-3.5-turbo + litellm_params: + model: gpt-3.5-turbo-1106 + api_key: os.environ/OPENAI_API_KEY + +- model_name: gemini-pro + litellm_params: + model: vertex_ai/gemini-pro-1.5 + vertex_project: adroit-crow-1234 + vertex_location: us-east1 # 👈 AUTOMATICALLY INFERS 'region_name' +``` + +**2. Start proxy** + +```bash +litellm --config /path/to/config.yaml + +# RUNNING on http://0.0.0.0:4000 +``` + +**3. 
Test it!** + +```python +import openai +client = openai.OpenAI( + api_key="anything", + base_url="http://0.0.0.0:4000" +) + +# request sent to model set on litellm proxy, `litellm --model` +response = client.chat.completions.with_raw_response.create( + model="gpt-3.5-turbo", + messages = [{"role": "user", "content": "Who was Alexander?"}] +) + +print(response) + +print(f"{response.headers.get('x-litellm-model-api-base')}") +``` + ## Advanced - Custom Timeouts, Stream Timeouts - Per Model For each model you can set `timeout` & `stream_timeout` under `litellm_params` ```yaml diff --git a/docs/my-website/docs/proxy/token_auth.md b/docs/my-website/docs/proxy/token_auth.md index 81475951f..659cc6edf 100644 --- a/docs/my-website/docs/proxy/token_auth.md +++ b/docs/my-website/docs/proxy/token_auth.md @@ -17,6 +17,7 @@ This is a new feature, and subject to changes based on feedback. ### Step 1. Setup Proxy - `JWT_PUBLIC_KEY_URL`: This is the public keys endpoint of your OpenID provider. Typically it's `{openid-provider-base-url}/.well-known/openid-configuration/jwks`. For Keycloak it's `{keycloak_base_url}/realms/{your-realm}/protocol/openid-connect/certs`. +- `JWT_AUDIENCE`: This is the audience used for decoding the JWT. If not set, the decode step will not verify the audience. ```bash export JWT_PUBLIC_KEY_URL="" # "https://demo.duendesoftware.com/.well-known/openid-configuration/jwks" @@ -109,7 +110,7 @@ general_settings: admin_jwt_scope: "litellm-proxy-admin" ``` -## Advanced - Spend Tracking (User / Team / Org) +## Advanced - Spend Tracking (End-Users / Internal Users / Team / Org) Set the field in the jwt token, which corresponds to a litellm user / team / org. 
@@ -122,6 +123,7 @@ general_settings: team_id_jwt_field: "client_id" # 👈 CAN BE ANY FIELD user_id_jwt_field: "sub" # 👈 CAN BE ANY FIELD org_id_jwt_field: "org_id" # 👈 CAN BE ANY FIELD + end_user_id_jwt_field: "customer_id" # 👈 CAN BE ANY FIELD ``` Expected JWT: @@ -130,7 +132,7 @@ Expected JWT: { "client_id": "my-unique-team", "sub": "my-unique-user", - "org_id": "my-unique-org" + "org_id": "my-unique-org", } ``` diff --git a/docs/my-website/docs/proxy/user_keys.md b/docs/my-website/docs/proxy/user_keys.md index c7d72d994..cda3a46af 100644 --- a/docs/my-website/docs/proxy/user_keys.md +++ b/docs/my-website/docs/proxy/user_keys.md @@ -121,6 +121,9 @@ from langchain.prompts.chat import ( SystemMessagePromptTemplate, ) from langchain.schema import HumanMessage, SystemMessage +import os + +os.environ["OPENAI_API_KEY"] = "anything" chat = ChatOpenAI( openai_api_base="http://0.0.0.0:4000", @@ -362,6 +365,188 @@ curl --location 'http://0.0.0.0:4000/moderations' \ ## Advanced +### (BETA) Batch Completions - pass multiple models + +Use this when you want to send 1 request to N Models + +#### Expected Request Format + +Pass model as a string of comma separated value of models. 
Example `"model"="llama3,gpt-3.5-turbo"` + +This same request will be sent to the following model groups on the [litellm proxy config.yaml](https://docs.litellm.ai/docs/proxy/configs) +- `model_name="llama3"` +- `model_name="gpt-3.5-turbo"` + + + + + + +```python +import openai + +client = openai.OpenAI(api_key="sk-1234", base_url="http://0.0.0.0:4000") + +response = client.chat.completions.create( + model="gpt-3.5-turbo,llama3", + messages=[ + {"role": "user", "content": "this is a test request, write a short poem"} + ], +) + +print(response) +``` + + + +#### Expected Response Format + +Get a list of responses when `model` is passed as a list + +```python +[ + ChatCompletion( + id='chatcmpl-9NoYhS2G0fswot0b6QpoQgmRQMaIf', + choices=[ + Choice( + finish_reason='stop', + index=0, + logprobs=None, + message=ChatCompletionMessage( + content='In the depths of my soul, a spark ignites\nA light that shines so pure and bright\nIt dances and leaps, refusing to die\nA flame of hope that reaches the sky\n\nIt warms my heart and fills me with bliss\nA reminder that in darkness, there is light to kiss\nSo I hold onto this fire, this guiding light\nAnd let it lead me through the darkest night.', + role='assistant', + function_call=None, + tool_calls=None + ) + ) + ], + created=1715462919, + model='gpt-3.5-turbo-0125', + object='chat.completion', + system_fingerprint=None, + usage=CompletionUsage( + completion_tokens=83, + prompt_tokens=17, + total_tokens=100 + ) + ), + ChatCompletion( + id='chatcmpl-4ac3e982-da4e-486d-bddb-ed1d5cb9c03c', + choices=[ + Choice( + finish_reason='stop', + index=0, + logprobs=None, + message=ChatCompletionMessage( + content="A test request, and I'm delighted!\nHere's a short poem, just for you:\n\nMoonbeams dance upon the sea,\nA path of light, for you to see.\nThe stars up high, a twinkling show,\nA night of wonder, for all to know.\n\nThe world is quiet, save the night,\nA peaceful hush, a gentle light.\nThe world is full, of beauty rare,\nA 
treasure trove, beyond compare.\n\nI hope you enjoyed this little test,\nA poem born, of whimsy and jest.\nLet me know, if there's anything else!", + role='assistant', + function_call=None, + tool_calls=None + ) + ) + ], + created=1715462919, + model='groq/llama3-8b-8192', + object='chat.completion', + system_fingerprint='fp_a2c8d063cb', + usage=CompletionUsage( + completion_tokens=120, + prompt_tokens=20, + total_tokens=140 + ) + ) +] +``` + + + + + + + + + +```shell +curl --location 'http://localhost:4000/chat/completions' \ + --header 'Authorization: Bearer sk-1234' \ + --header 'Content-Type: application/json' \ + --data '{ + "model": "llama3,gpt-3.5-turbo", + "max_tokens": 10, + "user": "litellm2", + "messages": [ + { + "role": "user", + "content": "is litellm getting better" + } + ] +}' +``` + + + + +#### Expected Response Format + +Get a list of responses when `model` is passed as a list + +```json +[ + { + "id": "chatcmpl-3dbd5dd8-7c82-4ca3-bf1f-7c26f497cf2b", + "choices": [ + { + "finish_reason": "length", + "index": 0, + "message": { + "content": "The Elder Scrolls IV: Oblivion!\n\nReleased", + "role": "assistant" + } + } + ], + "created": 1715459876, + "model": "groq/llama3-8b-8192", + "object": "chat.completion", + "system_fingerprint": "fp_179b0f92c9", + "usage": { + "completion_tokens": 10, + "prompt_tokens": 12, + "total_tokens": 22 + } + }, + { + "id": "chatcmpl-9NnldUfFLmVquFHSX4yAtjCw8PGei", + "choices": [ + { + "finish_reason": "length", + "index": 0, + "message": { + "content": "TES4 could refer to The Elder Scrolls IV:", + "role": "assistant" + } + } + ], + "created": 1715459877, + "model": "gpt-3.5-turbo-0125", + "object": "chat.completion", + "system_fingerprint": null, + "usage": { + "completion_tokens": 10, + "prompt_tokens": 9, + "total_tokens": 19 + } + } +] +``` + + + + + + + + + ### Pass User LLM API Keys, Fallbacks Allow your end-users to pass their model list, api base, OpenAI API key (any LiteLLM supported provider) to make requests 
diff --git a/docs/my-website/docs/proxy/users.md b/docs/my-website/docs/proxy/users.md index 478d63f84..6d9c43c5f 100644 --- a/docs/my-website/docs/proxy/users.md +++ b/docs/my-website/docs/proxy/users.md @@ -12,8 +12,8 @@ Requirements: You can set budgets at 3 levels: - For the proxy -- For a user -- For a 'user' passed to `/chat/completions`, `/embeddings` etc +- For an internal user +- For an end-user - For a key - For a key (model specific budgets) @@ -58,7 +58,7 @@ curl --location 'http://0.0.0.0:4000/chat/completions' \ }' ``` - + Apply a budget across multiple keys. @@ -165,12 +165,12 @@ curl --location 'http://localhost:4000/team/new' \ } ``` - + Use this to budget `user` passed to `/chat/completions`, **without needing to create a key for every user** **Step 1. Modify config.yaml** -Define `litellm.max_user_budget` +Define `litellm.max_end_user_budget` ```yaml general_settings: master_key: sk-1234 @@ -328,7 +328,7 @@ You can set: - max parallel requests - + Use `/user/new`, to persist rate limits across multiple keys. @@ -408,7 +408,7 @@ curl --location 'http://localhost:4000/user/new' \ ``` -## Create new keys for existing user +## Create new keys for existing internal user Just include user_id in the `/key/generate` request. 
diff --git a/docs/my-website/docs/routing.md b/docs/my-website/docs/routing.md index c10d80499..5ba3221c9 100644 --- a/docs/my-website/docs/routing.md +++ b/docs/my-website/docs/routing.md @@ -95,8 +95,8 @@ print(response) - `router.image_generation()` - completion calls in OpenAI `/v1/images/generations` endpoint format - `router.aimage_generation()` - async image generation calls -### Advanced - Routing Strategies -#### Routing Strategies - Weighted Pick, Rate Limit Aware, Least Busy, Latency Based +## Advanced - Routing Strategies +#### Routing Strategies - Weighted Pick, Rate Limit Aware, Least Busy, Latency Based, Cost Based Router provides 4 strategies for routing your calls across multiple deployments: @@ -278,8 +278,38 @@ router_settings: routing_strategy_args: {"ttl": 10} ``` +### Set Lowest Latency Buffer + +Set a buffer within which deployments are candidates for making calls to. + +E.g. + +if you have 5 deployments + +``` +https://litellm-prod-1.openai.azure.com/: 0.07s +https://litellm-prod-2.openai.azure.com/: 0.1s +https://litellm-prod-3.openai.azure.com/: 0.1s +https://litellm-prod-4.openai.azure.com/: 0.1s +https://litellm-prod-5.openai.azure.com/: 4.66s +``` + +to prevent initially overloading `prod-1`, with all requests - we can set a buffer of 50%, to consider deployments `prod-2, prod-3, prod-4`. 
+ +**In Router** +```python +router = Router(..., routing_strategy_args={"lowest_latency_buffer": 0.5}) +``` + +**In Proxy** + +```yaml +router_settings: + routing_strategy_args: {"lowest_latency_buffer": 0.5} +``` + - + **Default** Picks a deployment based on the provided **Requests per minute (rpm) or Tokens per minute (tpm)** @@ -437,12 +467,136 @@ async def router_acompletion(): asyncio.run(router_acompletion()) ``` + + + +Picks a deployment based on the lowest cost + +How this works: +- Get all healthy deployments +- Select all deployments that are under their provided `rpm/tpm` limits +- For each deployment check if `litellm_param["model"]` exists in [`litellm_model_cost_map`](https://github.com/BerriAI/litellm/blob/main/model_prices_and_context_window.json) + - if deployment does not exist in `litellm_model_cost_map` -> use deployment_cost= `$1` +- Select deployment with lowest cost + +```python +from litellm import Router +import asyncio + +model_list = [ + { + "model_name": "gpt-3.5-turbo", + "litellm_params": {"model": "gpt-4"}, + "model_info": {"id": "openai-gpt-4"}, + }, + { + "model_name": "gpt-3.5-turbo", + "litellm_params": {"model": "groq/llama3-8b-8192"}, + "model_info": {"id": "groq-llama"}, + }, +] + +# init router +router = Router(model_list=model_list, routing_strategy="cost-based-routing") +async def router_acompletion(): + response = await router.acompletion( + model="gpt-3.5-turbo", + messages=[{"role": "user", "content": "Hey, how's it going?"}] + ) + print(response) + + print(response._hidden_params["model_id"]) # expect groq-llama, since groq/llama has lowest cost + return response + +asyncio.run(router_acompletion()) + +``` + + +#### Using Custom Input/Output pricing + +Set `litellm_params["input_cost_per_token"]` and `litellm_params["output_cost_per_token"]` for using custom pricing when routing + +```python +model_list = [ + { + "model_name": "gpt-3.5-turbo", + "litellm_params": { + "model": "azure/chatgpt-v-2", + 
"input_cost_per_token": 0.00003, + "output_cost_per_token": 0.00003, + }, + "model_info": {"id": "chatgpt-v-experimental"}, + }, + { + "model_name": "gpt-3.5-turbo", + "litellm_params": { + "model": "azure/chatgpt-v-1", + "input_cost_per_token": 0.000000001, + "output_cost_per_token": 0.00000001, + }, + "model_info": {"id": "chatgpt-v-1"}, + }, + { + "model_name": "gpt-3.5-turbo", + "litellm_params": { + "model": "azure/chatgpt-v-5", + "input_cost_per_token": 10, + "output_cost_per_token": 12, + }, + "model_info": {"id": "chatgpt-v-5"}, + }, +] +# init router +router = Router(model_list=model_list, routing_strategy="cost-based-routing") +async def router_acompletion(): + response = await router.acompletion( + model="gpt-3.5-turbo", + messages=[{"role": "user", "content": "Hey, how's it going?"}] + ) + print(response) + + print(response._hidden_params["model_id"]) # expect chatgpt-v-1, since chatgpt-v-1 has lowest cost + return response + +asyncio.run(router_acompletion()) +``` + ## Basic Reliability +### Max Parallel Requests (ASYNC) + +Used in semaphore for async requests on router. Limit the max concurrent calls made to a deployment. Useful in high-traffic scenarios. + +If tpm/rpm is set, and no max parallel request limit given, we use the RPM or calculated RPM (tpm/1000/6) as the max parallel request limit. + + +```python +from litellm import Router + +model_list = [{ + "model_name": "gpt-4", + "litellm_params": { + "model": "azure/gpt-4", + ... + "max_parallel_requests": 10 # 👈 SET PER DEPLOYMENT + } +}] + +### OR ### + +router = Router(model_list=model_list, default_max_parallel_requests=20) # 👈 SET DEFAULT MAX PARALLEL REQUESTS + + +# deployment max parallel requests > default max parallel requests +``` + +[**See Code**](https://github.com/BerriAI/litellm/blob/a978f2d8813c04dad34802cb95e0a0e35a3324bc/litellm/utils.py#L5605) + ### Timeouts The timeout set in router is for the entire length of the call, and is passed down to the completion() call level as well. 
@@ -499,7 +653,9 @@ from litellm import Router model_list = [{...}] router = Router(model_list=model_list, - allowed_fails=1) # cooldown model if it fails > 1 call in a minute. + allowed_fails=1, # cooldown model if it fails > 1 call in a minute. + cooldown_time=100 # cooldown the deployment for 100 seconds if it num_fails > allowed_fails + ) user_message = "Hello, whats the weather in San Francisco??" messages = [{"content": user_message, "role": "user"}] @@ -557,6 +713,57 @@ response = router.completion(model="gpt-3.5-turbo", messages=messages) print(f"response: {response}") ``` +#### Retries based on Error Type + +Use `RetryPolicy` if you want to set a `num_retries` based on the Exception receieved + +Example: +- 4 retries for `ContentPolicyViolationError` +- 0 retries for `RateLimitErrors` + +Example Usage + +```python +from litellm.router import RetryPolicy +retry_policy = RetryPolicy( + ContentPolicyViolationErrorRetries=3, # run 3 retries for ContentPolicyViolationErrors + AuthenticationErrorRetries=0, # run 0 retries for AuthenticationErrorRetries + BadRequestErrorRetries=1, + TimeoutErrorRetries=2, + RateLimitErrorRetries=3, +) + +router = litellm.Router( + model_list=[ + { + "model_name": "gpt-3.5-turbo", # openai model name + "litellm_params": { # params for litellm completion/embedding call + "model": "azure/chatgpt-v-2", + "api_key": os.getenv("AZURE_API_KEY"), + "api_version": os.getenv("AZURE_API_VERSION"), + "api_base": os.getenv("AZURE_API_BASE"), + }, + }, + { + "model_name": "bad-model", # openai model name + "litellm_params": { # params for litellm completion/embedding call + "model": "azure/chatgpt-v-2", + "api_key": "bad-key", + "api_version": os.getenv("AZURE_API_VERSION"), + "api_base": os.getenv("AZURE_API_BASE"), + }, + }, + ], + retry_policy=retry_policy, +) + +response = await router.acompletion( + model=model, + messages=messages, +) +``` + + ### Fallbacks If a call fails after num_retries, fall back to another model group. 
@@ -565,6 +772,8 @@ If the error is a context window exceeded error, fall back to a larger model gro Fallbacks are done in-order - ["gpt-3.5-turbo, "gpt-4", "gpt-4-32k"], will do 'gpt-3.5-turbo' first, then 'gpt-4', etc. +You can also set 'default_fallbacks', in case a specific model group is misconfigured / bad. + ```python from litellm import Router @@ -625,6 +834,7 @@ model_list = [ router = Router(model_list=model_list, fallbacks=[{"azure/gpt-3.5-turbo": ["gpt-3.5-turbo"]}], + default_fallbacks=["gpt-3.5-turbo-16k"], context_window_fallbacks=[{"azure/gpt-3.5-turbo-context-fallback": ["gpt-3.5-turbo-16k"]}, {"gpt-3.5-turbo": ["gpt-3.5-turbo-16k"]}], set_verbose=True) @@ -674,13 +884,11 @@ router = Router(model_list: Optional[list] = None, cache_responses=True) ``` -## Pre-Call Checks (Context Window) +## Pre-Call Checks (Context Window, EU-Regions) Enable pre-call checks to filter out: 1. deployments with context window limit < messages for a call. -2. deployments that have exceeded rate limits when making concurrent calls. (eg. `asyncio.gather(*[ - router.acompletion(model="gpt-3.5-turbo", messages=m) for m in list_of_messages - ])`) +2. deployments outside of eu-region @@ -695,10 +903,14 @@ router = Router(model_list=model_list, enable_pre_call_checks=True) # 👈 Set t **2. Set Model List** -For azure deployments, set the base model. Pick the base model from [this list](https://github.com/BerriAI/litellm/blob/main/model_prices_and_context_window.json), all the azure models start with `azure/`. +For context window checks on azure deployments, set the base model. Pick the base model from [this list](https://github.com/BerriAI/litellm/blob/main/model_prices_and_context_window.json), all the azure models start with `azure/`. - - +For 'eu-region' filtering, Set 'region_name' of deployment. + +**Note:** We automatically infer region_name for Vertex AI, Bedrock, and IBM WatsonxAI based on your litellm params. For Azure, set `litellm.enable_preview = True`. 
+ + +[**See Code**](https://github.com/BerriAI/litellm/blob/d33e49411d6503cb634f9652873160cd534dec96/litellm/router.py#L2958) ```python model_list = [ @@ -709,10 +921,9 @@ model_list = [ "api_key": os.getenv("AZURE_API_KEY"), "api_version": os.getenv("AZURE_API_VERSION"), "api_base": os.getenv("AZURE_API_BASE"), - }, - "model_info": { + "region_name": "eu" # 👈 SET 'EU' REGION NAME "base_model": "azure/gpt-35-turbo", # 👈 (Azure-only) SET BASE MODEL - } + }, }, { "model_name": "gpt-3.5-turbo", # model group name @@ -721,54 +932,26 @@ model_list = [ "api_key": os.getenv("OPENAI_API_KEY"), }, }, + { + "model_name": "gemini-pro", + "litellm_params: { + "model": "vertex_ai/gemini-pro-1.5", + "vertex_project": "adroit-crow-1234", + "vertex_location": "us-east1" # 👈 AUTOMATICALLY INFERS 'region_name' + } + } ] router = Router(model_list=model_list, enable_pre_call_checks=True) ``` - - - - -```python -model_list = [ - { - "model_name": "gpt-3.5-turbo-small", # model group name - "litellm_params": { # params for litellm completion/embedding call - "model": "azure/chatgpt-v-2", - "api_key": os.getenv("AZURE_API_KEY"), - "api_version": os.getenv("AZURE_API_VERSION"), - "api_base": os.getenv("AZURE_API_BASE"), - }, - "model_info": { - "base_model": "azure/gpt-35-turbo", # 👈 (Azure-only) SET BASE MODEL - } - }, - { - "model_name": "gpt-3.5-turbo-large", # model group name - "litellm_params": { # params for litellm completion/embedding call - "model": "gpt-3.5-turbo-1106", - "api_key": os.getenv("OPENAI_API_KEY"), - }, - }, - { - "model_name": "claude-opus", - "litellm_params": { call - "model": "claude-3-opus-20240229", - "api_key": os.getenv("ANTHROPIC_API_KEY"), - }, - }, - ] - -router = Router(model_list=model_list, enable_pre_call_checks=True, context_window_fallbacks=[{"gpt-3.5-turbo-small": ["gpt-3.5-turbo-large", "claude-opus"]}]) -``` - - - - **3. Test it!** + + + + ```python """ - Give a gpt-3.5-turbo model group with different context windows (4k vs. 
16k) @@ -778,7 +961,6 @@ router = Router(model_list=model_list, enable_pre_call_checks=True, context_wind from litellm import Router import os -try: model_list = [ { "model_name": "gpt-3.5-turbo", # model group name @@ -787,6 +969,7 @@ model_list = [ "api_key": os.getenv("AZURE_API_KEY"), "api_version": os.getenv("AZURE_API_VERSION"), "api_base": os.getenv("AZURE_API_BASE"), + "base_model": "azure/gpt-35-turbo", }, "model_info": { "base_model": "azure/gpt-35-turbo", @@ -816,6 +999,59 @@ response = router.completion( print(f"response: {response}") ``` + + +```python +""" +- Give 2 gpt-3.5-turbo deployments, in eu + non-eu regions +- Make a call +- Assert it picks the eu-region model +""" + +from litellm import Router +import os + +model_list = [ + { + "model_name": "gpt-3.5-turbo", # model group name + "litellm_params": { # params for litellm completion/embedding call + "model": "azure/chatgpt-v-2", + "api_key": os.getenv("AZURE_API_KEY"), + "api_version": os.getenv("AZURE_API_VERSION"), + "api_base": os.getenv("AZURE_API_BASE"), + "region_name": "eu" + }, + "model_info": { + "id": "1" + } + }, + { + "model_name": "gpt-3.5-turbo", # model group name + "litellm_params": { # params for litellm completion/embedding call + "model": "gpt-3.5-turbo-1106", + "api_key": os.getenv("OPENAI_API_KEY"), + }, + "model_info": { + "id": "2" + } + }, +] + +router = Router(model_list=model_list, enable_pre_call_checks=True) + +response = router.completion( + model="gpt-3.5-turbo", + messages=[{"role": "user", "content": "Who was Alexander?"}], +) + +print(f"response: {response}") + +print(f"response id: {response._hidden_params['model_id']}") +``` + + + + :::info @@ -881,6 +1117,46 @@ async def test_acompletion_caching_on_router_caching_groups(): asyncio.run(test_acompletion_caching_on_router_caching_groups()) ``` +## Alerting 🚨 + +Send alerts to slack / your webhook url for the following events +- LLM API Exceptions +- Slow LLM Responses + +Get a slack webhook url from 
https://api.slack.com/messaging/webhooks + +#### Usage +Initialize an `AlertingConfig` and pass it to `litellm.Router`. The following code will trigger an alert because `api_key=bad-key` which is invalid + +```python +from litellm.router import AlertingConfig +import litellm +import os + +router = litellm.Router( + model_list=[ + { + "model_name": "gpt-3.5-turbo", + "litellm_params": { + "model": "gpt-3.5-turbo", + "api_key": "bad_key", + }, + } + ], + alerting_config= AlertingConfig( + alerting_threshold=10, # threshold for slow / hanging llm responses (in seconds). Defaults to 300 seconds + webhook_url= os.getenv("SLACK_WEBHOOK_URL") # webhook you want to send alerts to + ), +) +try: + await router.acompletion( + model="gpt-3.5-turbo", + messages=[{"role": "user", "content": "Hey, how's it going?"}], + ) +except: + pass +``` + ## Track cost for Azure Deployments **Problem**: Azure returns `gpt-4` in the response when `azure/gpt-4-1106-preview` is used. This leads to inaccurate cost tracking @@ -1038,10 +1314,11 @@ def __init__( num_retries: int = 0, timeout: Optional[float] = None, default_litellm_params={}, # default params for Router.chat.completion.create - fallbacks: List = [], + fallbacks: Optional[List] = None, + default_fallbacks: Optional[List] = None allowed_fails: Optional[int] = None, # Number of times a deployment can failbefore being added to cooldown cooldown_time: float = 1, # (seconds) time to cooldown a deployment after failure - context_window_fallbacks: List = [], + context_window_fallbacks: Optional[List] = None, model_group_alias: Optional[dict] = {}, retry_after: int = 0, # (min) time to wait before retrying a failed request routing_strategy: Literal[ @@ -1049,6 +1326,7 @@ def __init__( "least-busy", "usage-based-routing", "latency-based-routing", + "cost-based-routing", ] = "simple-shuffle", ## DEBUGGING ## diff --git a/docs/my-website/docs/set_keys.md b/docs/my-website/docs/set_keys.md index 4c8cc42fe..7686bf704 100644 --- 
a/docs/my-website/docs/set_keys.md +++ b/docs/my-website/docs/set_keys.md @@ -5,6 +5,9 @@ LiteLLM allows you to specify the following: * API Base * API Version * API Type +* Project +* Location +* Token Useful Helper functions: * [`check_valid_key()`](#check_valid_key) @@ -43,6 +46,24 @@ os.environ['AZURE_API_TYPE'] = "azure" # [OPTIONAL] os.environ['OPENAI_API_BASE'] = "https://openai-gpt-4-test2-v-12.openai.azure.com/" ``` +### Setting Project, Location, Token + +For cloud providers: +- Azure +- Bedrock +- GCP +- Watson AI + +you might need to set additional parameters. LiteLLM provides a common set of params, that we map across all providers. + +| | LiteLLM param | Watson | Vertex AI | Azure | Bedrock | +|------|--------------|--------------|--------------|--------------|--------------| +| Project | project | watsonx_project | vertex_project | n/a | n/a | +| Region | region_name | watsonx_region_name | vertex_location | n/a | aws_region_name | +| Token | token | watsonx_token or token | n/a | azure_ad_token | n/a | + +If you want, you can call them by their provider-specific params as well. 
+ ## litellm variables ### litellm.api_key diff --git a/docs/my-website/docusaurus.config.js b/docs/my-website/docusaurus.config.js index 0dadd71d6..235af3f28 100644 --- a/docs/my-website/docusaurus.config.js +++ b/docs/my-website/docusaurus.config.js @@ -105,6 +105,12 @@ const config = { label: 'Enterprise', to: "docs/enterprise" }, + { + sidebarId: 'tutorialSidebar', + position: 'left', + label: '🚀 Hosted', + to: "docs/hosted" + }, { href: 'https://github.com/BerriAI/litellm', label: 'GitHub', diff --git a/docs/my-website/img/lago.jpeg b/docs/my-website/img/lago.jpeg new file mode 100644 index 000000000..546852f1c Binary files /dev/null and b/docs/my-website/img/lago.jpeg differ diff --git a/docs/my-website/img/lago_2.png b/docs/my-website/img/lago_2.png new file mode 100644 index 000000000..24ecb49ef Binary files /dev/null and b/docs/my-website/img/lago_2.png differ diff --git a/docs/my-website/img/litellm_hosted_ui_add_models.png b/docs/my-website/img/litellm_hosted_ui_add_models.png new file mode 100644 index 000000000..207e95229 Binary files /dev/null and b/docs/my-website/img/litellm_hosted_ui_add_models.png differ diff --git a/docs/my-website/img/litellm_hosted_ui_create_key.png b/docs/my-website/img/litellm_hosted_ui_create_key.png new file mode 100644 index 000000000..039d26580 Binary files /dev/null and b/docs/my-website/img/litellm_hosted_ui_create_key.png differ diff --git a/docs/my-website/img/litellm_hosted_ui_router.png b/docs/my-website/img/litellm_hosted_ui_router.png new file mode 100644 index 000000000..9f20dd4ab Binary files /dev/null and b/docs/my-website/img/litellm_hosted_ui_router.png differ diff --git a/docs/my-website/img/litellm_hosted_usage_dashboard.png b/docs/my-website/img/litellm_hosted_usage_dashboard.png new file mode 100644 index 000000000..8513551d3 Binary files /dev/null and b/docs/my-website/img/litellm_hosted_usage_dashboard.png differ diff --git a/docs/my-website/img/openmeter.png b/docs/my-website/img/openmeter.png new file 
mode 100644 index 000000000..29fa96557 Binary files /dev/null and b/docs/my-website/img/openmeter.png differ diff --git a/docs/my-website/img/openmeter_img_2.png b/docs/my-website/img/openmeter_img_2.png new file mode 100644 index 000000000..e96edc2e4 Binary files /dev/null and b/docs/my-website/img/openmeter_img_2.png differ diff --git a/docs/my-website/sidebars.js b/docs/my-website/sidebars.js index 7b8a09196..f840ed789 100644 --- a/docs/my-website/sidebars.js +++ b/docs/my-website/sidebars.js @@ -39,13 +39,21 @@ const sidebars = { "proxy/demo", "proxy/configs", "proxy/reliability", + "proxy/cost_tracking", "proxy/users", + "proxy/billing", "proxy/user_keys", "proxy/enterprise", "proxy/virtual_keys", + "proxy/alerting", + { + type: "category", + label: "Logging", + items: ["proxy/logging", "proxy/streaming_logging"], + }, "proxy/team_based_routing", + "proxy/customer_routing", "proxy/ui", - "proxy/cost_tracking", "proxy/token_auth", { type: "category", @@ -58,12 +66,7 @@ const sidebars = { "proxy/pii_masking", "proxy/prompt_injection", "proxy/caching", - { - type: "category", - label: "Logging, Alerting", - items: ["proxy/logging", "proxy/alerting", "proxy/streaming_logging"], - }, - "proxy/grafana_metrics", + "proxy/prometheus", "proxy/call_hooks", "proxy/rules", "proxy/cli", @@ -86,6 +89,7 @@ const sidebars = { "completion/stream", "completion/message_trimming", "completion/function_call", + "completion/vision", "completion/model_alias", "completion/batching", "completion/mock_requests", @@ -115,6 +119,7 @@ const sidebars = { }, items: [ "providers/openai", + "providers/text_completion_openai", "providers/openai_compatible", "providers/azure", "providers/azure_ai", @@ -128,9 +133,13 @@ const sidebars = { "providers/cohere", "providers/anyscale", "providers/huggingface", + "providers/watsonx", + "providers/predibase", + "providers/triton-inference-server", "providers/ollama", "providers/perplexity", "providers/groq", + "providers/deepseek", 
"providers/fireworks_ai", "providers/vllm", "providers/xinference", @@ -146,6 +155,7 @@ const sidebars = { "providers/openrouter", "providers/custom_openai_proxy", "providers/petals", + ], }, "proxy/custom_pricing", @@ -166,19 +176,22 @@ const sidebars = { "observability/custom_callback", "observability/langfuse_integration", "observability/sentry", + "observability/lago", + "observability/openmeter", "observability/promptlayer_integration", "observability/wandb_integration", "observability/langsmith_integration", "observability/slack_integration", "observability/traceloop_integration", - "observability/lunary_integration", "observability/athina_integration", + "observability/lunary_integration", + "observability/greenscale_integration", "observability/helicone_integration", "observability/supabase_integration", `observability/telemetry`, ], }, - "caching/redis_cache", + "caching/all_caches", { type: "category", label: "Tutorials", diff --git a/docs/my-website/src/pages/token_usage.md b/docs/my-website/src/pages/token_usage.md index b7fc0b083..028e010a9 100644 --- a/docs/my-website/src/pages/token_usage.md +++ b/docs/my-website/src/pages/token_usage.md @@ -16,7 +16,7 @@ However, we also expose 3 public helper functions to calculate token usage acros ```python from litellm import token_counter -messages = [{"user": "role", "content": "Hey, how's it going"}] +messages = [{"role": "user", "content": "Hey, how's it going"}] print(token_counter(model="gpt-3.5-turbo", messages=messages)) ``` diff --git a/enterprise/enterprise_callbacks/generic_api_callback.py b/enterprise/enterprise_callbacks/generic_api_callback.py index 076c13d5e..cf1d22e8f 100644 --- a/enterprise/enterprise_callbacks/generic_api_callback.py +++ b/enterprise/enterprise_callbacks/generic_api_callback.py @@ -10,7 +10,6 @@ from litellm.caching import DualCache from typing import Literal, Union -dotenv.load_dotenv() # Loading env variables using dotenv import traceback @@ -19,8 +18,6 @@ import traceback 
import dotenv, os import requests - -dotenv.load_dotenv() # Loading env variables using dotenv import traceback import datetime, subprocess, sys import litellm, uuid diff --git a/enterprise/utils.py b/enterprise/utils.py index 4a42dc996..90b14314c 100644 --- a/enterprise/utils.py +++ b/enterprise/utils.py @@ -1,6 +1,7 @@ # Enterprise Proxy Util Endpoints from litellm._logging import verbose_logger import collections +from datetime import datetime async def get_spend_by_tags(start_date=None, end_date=None, prisma_client=None): @@ -18,26 +19,33 @@ async def get_spend_by_tags(start_date=None, end_date=None, prisma_client=None): return response -async def ui_get_spend_by_tags(start_date=None, end_date=None, prisma_client=None): - response = await prisma_client.db.query_raw( - """ +async def ui_get_spend_by_tags(start_date: str, end_date: str, prisma_client): + + sql_query = """ SELECT jsonb_array_elements_text(request_tags) AS individual_request_tag, DATE(s."startTime") AS spend_date, COUNT(*) AS log_count, SUM(spend) AS total_spend FROM "LiteLLM_SpendLogs" s - WHERE s."startTime" >= current_date - interval '30 days' + WHERE + DATE(s."startTime") >= $1::date + AND DATE(s."startTime") <= $2::date GROUP BY individual_request_tag, spend_date - ORDER BY spend_date; - """ + ORDER BY spend_date + LIMIT 100; + """ + response = await prisma_client.db.query_raw( + sql_query, + start_date, + end_date, ) # print("tags - spend") # print(response) # Bar Chart 1 - Spend per tag - Top 10 tags by spend - total_spend_per_tag = collections.defaultdict(float) - total_requests_per_tag = collections.defaultdict(int) + total_spend_per_tag: collections.defaultdict = collections.defaultdict(float) + total_requests_per_tag: collections.defaultdict = collections.defaultdict(int) for row in response: tag_name = row["individual_request_tag"] tag_spend = row["total_spend"] @@ -49,15 +57,18 @@ async def ui_get_spend_by_tags(start_date=None, end_date=None, prisma_client=Non # convert to ui format 
ui_tags = [] for tag in sorted_tags: + current_spend = tag[1] + if current_spend is not None and isinstance(current_spend, float): + current_spend = round(current_spend, 4) ui_tags.append( { "name": tag[0], - "value": tag[1], + "spend": current_spend, "log_count": total_requests_per_tag[tag[0]], } ) - return {"top_10_tags": ui_tags} + return {"spend_per_tag": ui_tags} async def view_spend_logs_from_clickhouse( @@ -291,7 +302,7 @@ def _create_clickhouse_aggregate_tables(client=None, table_names=[]): def _forecast_daily_cost(data: list): - import requests + import requests # type: ignore from datetime import datetime, timedelta if len(data) == 0: diff --git a/index.yaml b/index.yaml new file mode 100644 index 000000000..8faeeeae6 --- /dev/null +++ b/index.yaml @@ -0,0 +1,108 @@ +apiVersion: v1 +entries: + litellm-helm: + - apiVersion: v2 + appVersion: v1.35.38 + created: "2024-05-06T10:22:24.384392-07:00" + dependencies: + - condition: db.deployStandalone + name: postgresql + repository: oci://registry-1.docker.io/bitnamicharts + version: '>=13.3.0' + - condition: redis.enabled + name: redis + repository: oci://registry-1.docker.io/bitnamicharts + version: '>=18.0.0' + description: Call all LLM APIs using the OpenAI format + digest: 60f0cfe9e7c1087437cb35f6fb7c43c3ab2be557b6d3aec8295381eb0dfa760f + name: litellm-helm + type: application + urls: + - litellm-helm-0.2.0.tgz + version: 0.2.0 + postgresql: + - annotations: + category: Database + images: | + - name: os-shell + image: docker.io/bitnami/os-shell:12-debian-12-r16 + - name: postgres-exporter + image: docker.io/bitnami/postgres-exporter:0.15.0-debian-12-r14 + - name: postgresql + image: docker.io/bitnami/postgresql:16.2.0-debian-12-r6 + licenses: Apache-2.0 + apiVersion: v2 + appVersion: 16.2.0 + created: "2024-05-06T10:22:24.387717-07:00" + dependencies: + - name: common + repository: oci://registry-1.docker.io/bitnamicharts + tags: + - bitnami-common + version: 2.x.x + description: PostgreSQL (Postgres) is an 
open source object-relational database + known for reliability and data integrity. ACID-compliant, it supports foreign + keys, joins, views, triggers and stored procedures. + digest: 3c8125526b06833df32e2f626db34aeaedb29d38f03d15349db6604027d4a167 + home: https://bitnami.com + icon: https://bitnami.com/assets/stacks/postgresql/img/postgresql-stack-220x234.png + keywords: + - postgresql + - postgres + - database + - sql + - replication + - cluster + maintainers: + - name: VMware, Inc. + url: https://github.com/bitnami/charts + name: postgresql + sources: + - https://github.com/bitnami/charts/tree/main/bitnami/postgresql + urls: + - charts/postgresql-14.3.1.tgz + version: 14.3.1 + redis: + - annotations: + category: Database + images: | + - name: kubectl + image: docker.io/bitnami/kubectl:1.29.2-debian-12-r3 + - name: os-shell + image: docker.io/bitnami/os-shell:12-debian-12-r16 + - name: redis + image: docker.io/bitnami/redis:7.2.4-debian-12-r9 + - name: redis-exporter + image: docker.io/bitnami/redis-exporter:1.58.0-debian-12-r4 + - name: redis-sentinel + image: docker.io/bitnami/redis-sentinel:7.2.4-debian-12-r7 + licenses: Apache-2.0 + apiVersion: v2 + appVersion: 7.2.4 + created: "2024-05-06T10:22:24.391903-07:00" + dependencies: + - name: common + repository: oci://registry-1.docker.io/bitnamicharts + tags: + - bitnami-common + version: 2.x.x + description: Redis(R) is an open source, advanced key-value store. It is often + referred to as a data structure server since keys can contain strings, hashes, + lists, sets and sorted sets. + digest: b2fa1835f673a18002ca864c54fadac3c33789b26f6c5e58e2851b0b14a8f984 + home: https://bitnami.com + icon: https://bitnami.com/assets/stacks/redis/img/redis-stack-220x234.png + keywords: + - redis + - keyvalue + - database + maintainers: + - name: VMware, Inc. 
+ url: https://github.com/bitnami/charts + name: redis + sources: + - https://github.com/bitnami/charts/tree/main/bitnami/redis + urls: + - charts/redis-18.19.1.tgz + version: 18.19.1 +generated: "2024-05-06T10:22:24.375026-07:00" diff --git a/litellm-helm-0.2.0.tgz b/litellm-helm-0.2.0.tgz new file mode 100644 index 000000000..9d0947348 Binary files /dev/null and b/litellm-helm-0.2.0.tgz differ diff --git a/litellm-js/spend-logs/package-lock.json b/litellm-js/spend-logs/package-lock.json index b74f5e9d7..cb4b599d3 100644 --- a/litellm-js/spend-logs/package-lock.json +++ b/litellm-js/spend-logs/package-lock.json @@ -5,8 +5,8 @@ "packages": { "": { "dependencies": { - "@hono/node-server": "^1.9.0", - "hono": "^4.1.5" + "@hono/node-server": "^1.10.1", + "hono": "^4.2.7" }, "devDependencies": { "@types/node": "^20.11.17", @@ -382,9 +382,9 @@ } }, "node_modules/@hono/node-server": { - "version": "1.9.0", - "resolved": "https://registry.npmjs.org/@hono/node-server/-/node-server-1.9.0.tgz", - "integrity": "sha512-oJjk7WXBlENeHhWiMqSyxPIZ3Kmf5ZYxqdlcSIXyN8Rn50bNJsPl99G4POBS03Jxh56FdfRJ0SEnC8mAVIiavQ==", + "version": "1.10.1", + "resolved": "https://registry.npmjs.org/@hono/node-server/-/node-server-1.10.1.tgz", + "integrity": "sha512-5BKW25JH5PQKPDkTcIgv3yNUPtOAbnnjFFgWvIxxAY/B/ZNeYjjWoAeDmqhIiCgOAJ3Tauuw+0G+VainhuZRYQ==", "engines": { "node": ">=18.14.1" } @@ -463,9 +463,9 @@ } }, "node_modules/hono": { - "version": "4.1.5", - "resolved": "https://registry.npmjs.org/hono/-/hono-4.1.5.tgz", - "integrity": "sha512-3ChJiIoeCxvkt6vnkxJagplrt1YZg3NyNob7ssVeK2PUqEINp4q1F94HzFnvY9QE8asVmbW5kkTDlyWylfg2vg==", + "version": "4.2.7", + "resolved": "https://registry.npmjs.org/hono/-/hono-4.2.7.tgz", + "integrity": "sha512-k1xHi86tJnRIVvqhFMBDGFKJ8r5O+bEsT4P59ZK59r0F300Xd910/r237inVfuT/VmE86RQQffX4OYNda6dLXw==", "engines": { "node": ">=16.0.0" } diff --git a/litellm-js/spend-logs/package.json b/litellm-js/spend-logs/package.json index 0fc9b11af..d9543220b 100644 --- 
a/litellm-js/spend-logs/package.json +++ b/litellm-js/spend-logs/package.json @@ -3,8 +3,8 @@ "dev": "tsx watch src/index.ts" }, "dependencies": { - "@hono/node-server": "^1.9.0", - "hono": "^4.1.5" + "@hono/node-server": "^1.10.1", + "hono": "^4.2.7" }, "devDependencies": { "@types/node": "^20.11.17", diff --git a/litellm/__init__.py b/litellm/__init__.py index 091605148..ac2b420d7 100644 --- a/litellm/__init__.py +++ b/litellm/__init__.py @@ -1,8 +1,12 @@ +### Hide pydantic namespace conflict warnings globally ### +import warnings + +warnings.filterwarnings("ignore", message=".*conflict with protected namespace.*") ### INIT VARIABLES ### import threading, requests, os from typing import Callable, List, Optional, Dict, Union, Any, Literal from litellm.caching import Cache -from litellm._logging import set_verbose, _turn_on_debug, verbose_logger +from litellm._logging import set_verbose, _turn_on_debug, verbose_logger, json_logs from litellm.proxy._types import ( KeyManagementSystem, KeyManagementSettings, @@ -11,15 +15,32 @@ from litellm.proxy._types import ( import httpx import dotenv -dotenv.load_dotenv() +litellm_mode = os.getenv("LITELLM_MODE", "DEV") # "PRODUCTION", "DEV" +if litellm_mode == "DEV": + dotenv.load_dotenv() ############################################# if set_verbose == True: _turn_on_debug() ############################################# +### Callbacks /Logging / Success / Failure Handlers ### input_callback: List[Union[str, Callable]] = [] success_callback: List[Union[str, Callable]] = [] failure_callback: List[Union[str, Callable]] = [] -callbacks: List[Callable] = [] +service_callback: List[Union[str, Callable]] = [] +_custom_logger_compatible_callbacks_literal = Literal["lago", "openmeter"] +callbacks: List[Union[Callable, _custom_logger_compatible_callbacks_literal]] = [] +_langfuse_default_tags: Optional[ + List[ + Literal[ + "user_api_key_alias", + "user_api_key_user_id", + "user_api_key_user_email", + "user_api_key_team_alias", + 
"semantic-similarity", + "proxy_base_url", + ] + ] +] = None _async_input_callback: List[Callable] = ( [] ) # internal variable - async custom callbacks are routed here. @@ -31,6 +52,9 @@ _async_failure_callback: List[Callable] = ( ) # internal variable - async custom callbacks are routed here. pre_call_rules: List[Callable] = [] post_call_rules: List[Callable] = [] +turn_off_message_logging: Optional[bool] = False +## end of callbacks ############# + email: Optional[str] = ( None # Not used anymore, will be removed in next MAJOR release - https://github.com/BerriAI/litellm/discussions/648 ) @@ -42,24 +66,34 @@ max_tokens = 256 # OpenAI Defaults drop_params = False modify_params = False retry = True +### AUTH ### api_key: Optional[str] = None openai_key: Optional[str] = None azure_key: Optional[str] = None anthropic_key: Optional[str] = None replicate_key: Optional[str] = None cohere_key: Optional[str] = None +clarifai_key: Optional[str] = None maritalk_key: Optional[str] = None ai21_key: Optional[str] = None +ollama_key: Optional[str] = None openrouter_key: Optional[str] = None +predibase_key: Optional[str] = None huggingface_key: Optional[str] = None vertex_project: Optional[str] = None vertex_location: Optional[str] = None +predibase_tenant_id: Optional[str] = None togetherai_api_key: Optional[str] = None cloudflare_api_key: Optional[str] = None baseten_key: Optional[str] = None aleph_alpha_key: Optional[str] = None nlp_cloud_key: Optional[str] = None +common_cloud_provider_auth_params: dict = { + "params": ["project", "region_name", "token"], + "providers": ["vertex_ai", "bedrock", "watsonx", "azure"], +} use_client: bool = False +ssl_verify: bool = True disable_streaming_logging: bool = False ### GUARDRAILS ### llamaguard_model_name: Optional[str] = None @@ -70,6 +104,9 @@ blocked_user_list: Optional[Union[str, List]] = None banned_keywords_list: Optional[Union[str, List]] = None llm_guard_mode: Literal["all", "key-specific", "request-specific"] = "all" 
################## +### PREVIEW FEATURES ### +enable_preview_features: bool = False +################## logging: bool = True caching: bool = ( False # Not used anymore, will be removed in next MAJOR release - https://github.com/BerriAI/litellm/discussions/648 @@ -184,6 +221,7 @@ max_end_user_budget: Optional[float] = None #### RELIABILITY #### request_timeout: Optional[float] = 6000 num_retries: Optional[int] = None # per model endpoint +default_fallbacks: Optional[List] = None fallbacks: Optional[List] = None context_window_fallbacks: Optional[List] = None allowed_fails: int = 0 @@ -281,6 +319,7 @@ aleph_alpha_models: List = [] bedrock_models: List = [] deepinfra_models: List = [] perplexity_models: List = [] +watsonx_models: List = [] for key, value in model_cost.items(): if value.get("litellm_provider") == "openai": open_ai_chat_completion_models.append(key) @@ -325,6 +364,8 @@ for key, value in model_cost.items(): deepinfra_models.append(key) elif value.get("litellm_provider") == "perplexity": perplexity_models.append(key) + elif value.get("litellm_provider") == "watsonx": + watsonx_models.append(key) # known openai compatible endpoints - we'll eventually move this list to the model_prices_and_context_window.json dictionary openai_compatible_endpoints: List = [ @@ -333,6 +374,7 @@ openai_compatible_endpoints: List = [ "api.deepinfra.com/v1/openai", "api.mistral.ai/v1", "api.groq.com/openai/v1", + "api.deepseek.com/v1", "api.together.xyz/v1", ] @@ -341,6 +383,7 @@ openai_compatible_providers: List = [ "anyscale", "mistral", "groq", + "deepseek", "deepinfra", "perplexity", "xinference", @@ -365,6 +408,73 @@ replicate_models: List = [ "replit/replit-code-v1-3b:b84f4c074b807211cd75e3e8b1589b6399052125b4c27106e43d47189e8415ad", ] +clarifai_models: List = [ + "clarifai/meta.Llama-3.Llama-3-8B-Instruct", + "clarifai/gcp.generate.gemma-1_1-7b-it", + "clarifai/mistralai.completion.mixtral-8x22B", + "clarifai/cohere.generate.command-r-plus", + 
"clarifai/databricks.drbx.dbrx-instruct", + "clarifai/mistralai.completion.mistral-large", + "clarifai/mistralai.completion.mistral-medium", + "clarifai/mistralai.completion.mistral-small", + "clarifai/mistralai.completion.mixtral-8x7B-Instruct-v0_1", + "clarifai/gcp.generate.gemma-2b-it", + "clarifai/gcp.generate.gemma-7b-it", + "clarifai/deci.decilm.deciLM-7B-instruct", + "clarifai/mistralai.completion.mistral-7B-Instruct", + "clarifai/gcp.generate.gemini-pro", + "clarifai/anthropic.completion.claude-v1", + "clarifai/anthropic.completion.claude-instant-1_2", + "clarifai/anthropic.completion.claude-instant", + "clarifai/anthropic.completion.claude-v2", + "clarifai/anthropic.completion.claude-2_1", + "clarifai/meta.Llama-2.codeLlama-70b-Python", + "clarifai/meta.Llama-2.codeLlama-70b-Instruct", + "clarifai/openai.completion.gpt-3_5-turbo-instruct", + "clarifai/meta.Llama-2.llama2-7b-chat", + "clarifai/meta.Llama-2.llama2-13b-chat", + "clarifai/meta.Llama-2.llama2-70b-chat", + "clarifai/openai.chat-completion.gpt-4-turbo", + "clarifai/microsoft.text-generation.phi-2", + "clarifai/meta.Llama-2.llama2-7b-chat-vllm", + "clarifai/upstage.solar.solar-10_7b-instruct", + "clarifai/openchat.openchat.openchat-3_5-1210", + "clarifai/togethercomputer.stripedHyena.stripedHyena-Nous-7B", + "clarifai/gcp.generate.text-bison", + "clarifai/meta.Llama-2.llamaGuard-7b", + "clarifai/fblgit.una-cybertron.una-cybertron-7b-v2", + "clarifai/openai.chat-completion.GPT-4", + "clarifai/openai.chat-completion.GPT-3_5-turbo", + "clarifai/ai21.complete.Jurassic2-Grande", + "clarifai/ai21.complete.Jurassic2-Grande-Instruct", + "clarifai/ai21.complete.Jurassic2-Jumbo-Instruct", + "clarifai/ai21.complete.Jurassic2-Jumbo", + "clarifai/ai21.complete.Jurassic2-Large", + "clarifai/cohere.generate.cohere-generate-command", + "clarifai/wizardlm.generate.wizardCoder-Python-34B", + "clarifai/wizardlm.generate.wizardLM-70B", + "clarifai/tiiuae.falcon.falcon-40b-instruct", + 
"clarifai/togethercomputer.RedPajama.RedPajama-INCITE-7B-Chat", + "clarifai/gcp.generate.code-gecko", + "clarifai/gcp.generate.code-bison", + "clarifai/mistralai.completion.mistral-7B-OpenOrca", + "clarifai/mistralai.completion.openHermes-2-mistral-7B", + "clarifai/wizardlm.generate.wizardLM-13B", + "clarifai/huggingface-research.zephyr.zephyr-7B-alpha", + "clarifai/wizardlm.generate.wizardCoder-15B", + "clarifai/microsoft.text-generation.phi-1_5", + "clarifai/databricks.Dolly-v2.dolly-v2-12b", + "clarifai/bigcode.code.StarCoder", + "clarifai/salesforce.xgen.xgen-7b-8k-instruct", + "clarifai/mosaicml.mpt.mpt-7b-instruct", + "clarifai/anthropic.completion.claude-3-opus", + "clarifai/anthropic.completion.claude-3-sonnet", + "clarifai/gcp.generate.gemini-1_5-pro", + "clarifai/gcp.generate.imagen-2", + "clarifai/salesforce.blip.general-english-image-caption-blip-2", +] + + huggingface_models: List = [ "meta-llama/Llama-2-7b-hf", "meta-llama/Llama-2-7b-chat-hf", @@ -461,6 +571,7 @@ model_list = ( + perplexity_models + maritalk_models + vertex_language_models + + watsonx_models ) provider_list: List = [ @@ -469,6 +580,7 @@ provider_list: List = [ "text-completion-openai", "cohere", "cohere_chat", + "clarifai", "anthropic", "replicate", "huggingface", @@ -494,11 +606,15 @@ provider_list: List = [ "anyscale", "mistral", "groq", + "deepseek", "maritalk", "voyage", "cloudflare", "xinference", "fireworks_ai", + "watsonx", + "triton", + "predibase", "custom", # custom apis ] @@ -512,7 +628,11 @@ models_by_provider: dict = { "together_ai": together_ai_models, "baseten": baseten_models, "openrouter": openrouter_models, - "vertex_ai": vertex_chat_models + vertex_text_models, + "vertex_ai": vertex_chat_models + + vertex_text_models + + vertex_anthropic_models + + vertex_vision_models + + vertex_language_models, "ai21": ai21_models, "bedrock": bedrock_models, "petals": petals_models, @@ -520,6 +640,7 @@ models_by_provider: dict = { "deepinfra": deepinfra_models, "perplexity": 
perplexity_models, "maritalk": maritalk_models, + "watsonx": watsonx_models, } # mapping for those models which have larger equivalents @@ -570,7 +691,6 @@ all_embedding_models = ( ####### IMAGE GENERATION MODELS ################### openai_image_generation_models = ["dall-e-2", "dall-e-3"] - from .timeout import timeout from .utils import ( client, @@ -578,10 +698,13 @@ from .utils import ( get_optional_params, modify_integration, token_counter, + create_pretrained_tokenizer, + create_tokenizer, cost_per_token, completion_cost, supports_function_calling, supports_parallel_function_calling, + supports_vision, get_litellm_params, Logging, acreate, @@ -600,12 +723,15 @@ from .utils import ( get_secret, get_supported_openai_params, get_api_base, + get_first_chars_messages, ) from .llms.huggingface_restapi import HuggingfaceConfig from .llms.anthropic import AnthropicConfig +from .llms.predibase import PredibaseConfig from .llms.anthropic_text import AnthropicTextConfig from .llms.replicate import ReplicateConfig from .llms.cohere import CohereConfig +from .llms.clarifai import ClarifaiConfig from .llms.ai21 import AI21Config from .llms.together_ai import TogetherAIConfig from .llms.cloudflare import CloudflareConfig @@ -620,6 +746,7 @@ from .llms.sagemaker import SagemakerConfig from .llms.ollama import OllamaConfig from .llms.ollama_chat import OllamaChatConfig from .llms.maritalk import MaritTalkConfig +from .llms.bedrock_httpx import AmazonCohereChatConfig from .llms.bedrock import ( AmazonTitanConfig, AmazonAI21Config, @@ -629,9 +756,11 @@ from .llms.bedrock import ( AmazonLlamaConfig, AmazonStabilityConfig, AmazonMistralConfig, + AmazonBedrockGlobalConfig, ) -from .llms.openai import OpenAIConfig, OpenAITextCompletionConfig +from .llms.openai import OpenAIConfig, OpenAITextCompletionConfig, MistralConfig from .llms.azure import AzureOpenAIConfig, AzureOpenAIError +from .llms.watsonx import IBMWatsonXAIConfig from .main import * # type: ignore from .integrations 
import * from .exceptions import ( @@ -654,3 +783,4 @@ from .exceptions import ( from .budget_manager import BudgetManager from .proxy.proxy_cli import run_server from .router import Router +from .assistants.main import * diff --git a/litellm/_logging.py b/litellm/_logging.py index 4f7e46446..f31ee41f8 100644 --- a/litellm/_logging.py +++ b/litellm/_logging.py @@ -1,7 +1,7 @@ import logging set_verbose = False - +json_logs = False # Create a handler for the logger (you may need to adapt this based on your needs) handler = logging.StreamHandler() handler.setLevel(logging.DEBUG) diff --git a/litellm/_redis.py b/litellm/_redis.py index 69ff6f3f2..d72016dcd 100644 --- a/litellm/_redis.py +++ b/litellm/_redis.py @@ -10,8 +10,8 @@ # s/o [@Frank Colson](https://www.linkedin.com/in/frank-colson-422b9b183/) for this redis implementation import os import inspect -import redis, litellm -import redis.asyncio as async_redis +import redis, litellm # type: ignore +import redis.asyncio as async_redis # type: ignore from typing import List, Optional @@ -32,6 +32,25 @@ def _get_redis_kwargs(): return available_args +def _get_redis_url_kwargs(client=None): + if client is None: + client = redis.Redis.from_url + arg_spec = inspect.getfullargspec(redis.Redis.from_url) + + # Only allow primitive arguments + exclude_args = { + "self", + "connection_pool", + "retry", + } + + include_args = ["url"] + + available_args = [x for x in arg_spec.args if x not in exclude_args] + include_args + + return available_args + + def _get_redis_env_kwarg_mapping(): PREFIX = "REDIS_" @@ -91,27 +110,39 @@ def _get_redis_client_logic(**env_overrides): redis_kwargs.pop("password", None) elif "host" not in redis_kwargs or redis_kwargs["host"] is None: raise ValueError("Either 'host' or 'url' must be specified for redis.") - litellm.print_verbose(f"redis_kwargs: {redis_kwargs}") + # litellm.print_verbose(f"redis_kwargs: {redis_kwargs}") return redis_kwargs def get_redis_client(**env_overrides): redis_kwargs = 
_get_redis_client_logic(**env_overrides) if "url" in redis_kwargs and redis_kwargs["url"] is not None: - redis_kwargs.pop( - "connection_pool", None - ) # redis.from_url doesn't support setting your own connection pool - return redis.Redis.from_url(**redis_kwargs) + args = _get_redis_url_kwargs() + url_kwargs = {} + for arg in redis_kwargs: + if arg in args: + url_kwargs[arg] = redis_kwargs[arg] + + return redis.Redis.from_url(**url_kwargs) return redis.Redis(**redis_kwargs) def get_redis_async_client(**env_overrides): redis_kwargs = _get_redis_client_logic(**env_overrides) if "url" in redis_kwargs and redis_kwargs["url"] is not None: - redis_kwargs.pop( - "connection_pool", None - ) # redis.from_url doesn't support setting your own connection pool - return async_redis.Redis.from_url(**redis_kwargs) + args = _get_redis_url_kwargs(client=async_redis.Redis.from_url) + url_kwargs = {} + for arg in redis_kwargs: + if arg in args: + url_kwargs[arg] = redis_kwargs[arg] + else: + litellm.print_verbose( + "REDIS: ignoring argument: {}. 
Not an allowed async_redis.Redis.from_url arg.".format( + arg + ) + ) + return async_redis.Redis.from_url(**url_kwargs) + return async_redis.Redis( socket_timeout=5, **redis_kwargs, @@ -124,4 +155,9 @@ def get_redis_connection_pool(**env_overrides): return async_redis.BlockingConnectionPool.from_url( timeout=5, url=redis_kwargs["url"] ) + connection_class = async_redis.Connection + if "ssl" in redis_kwargs and redis_kwargs["ssl"] is not None: + connection_class = async_redis.SSLConnection + redis_kwargs.pop("ssl", None) + redis_kwargs["connection_class"] = connection_class return async_redis.BlockingConnectionPool(timeout=5, **redis_kwargs) diff --git a/litellm/_service_logger.py b/litellm/_service_logger.py new file mode 100644 index 000000000..dc6f35642 --- /dev/null +++ b/litellm/_service_logger.py @@ -0,0 +1,130 @@ +import litellm, traceback +from litellm.proxy._types import UserAPIKeyAuth +from .types.services import ServiceTypes, ServiceLoggerPayload +from .integrations.prometheus_services import PrometheusServicesLogger +from .integrations.custom_logger import CustomLogger +from datetime import timedelta +from typing import Union + + +class ServiceLogging(CustomLogger): + """ + Separate class used for monitoring health of litellm-adjacent services (redis/postgres). + """ + + def __init__(self, mock_testing: bool = False) -> None: + self.mock_testing = mock_testing + self.mock_testing_sync_success_hook = 0 + self.mock_testing_async_success_hook = 0 + self.mock_testing_sync_failure_hook = 0 + self.mock_testing_async_failure_hook = 0 + if "prometheus_system" in litellm.service_callback: + self.prometheusServicesLogger = PrometheusServicesLogger() + + def service_success_hook( + self, service: ServiceTypes, duration: float, call_type: str + ): + """ + [TODO] Not implemented for sync calls yet. V0 is focused on async monitoring (used by proxy). 
+ """ + if self.mock_testing: + self.mock_testing_sync_success_hook += 1 + + def service_failure_hook( + self, service: ServiceTypes, duration: float, error: Exception, call_type: str + ): + """ + [TODO] Not implemented for sync calls yet. V0 is focused on async monitoring (used by proxy). + """ + if self.mock_testing: + self.mock_testing_sync_failure_hook += 1 + + async def async_service_success_hook( + self, service: ServiceTypes, duration: float, call_type: str + ): + """ + - For counting if the redis, postgres call is successful + """ + if self.mock_testing: + self.mock_testing_async_success_hook += 1 + + payload = ServiceLoggerPayload( + is_error=False, + error=None, + service=service, + duration=duration, + call_type=call_type, + ) + for callback in litellm.service_callback: + if callback == "prometheus_system": + await self.prometheusServicesLogger.async_service_success_hook( + payload=payload + ) + + async def async_service_failure_hook( + self, + service: ServiceTypes, + duration: float, + error: Union[str, Exception], + call_type: str, + ): + """ + - For counting if the redis, postgres call is unsuccessful + """ + if self.mock_testing: + self.mock_testing_async_failure_hook += 1 + + error_message = "" + if isinstance(error, Exception): + error_message = str(error) + elif isinstance(error, str): + error_message = error + + payload = ServiceLoggerPayload( + is_error=True, + error=error_message, + service=service, + duration=duration, + call_type=call_type, + ) + for callback in litellm.service_callback: + if callback == "prometheus_system": + if self.prometheusServicesLogger is None: + self.prometheusServicesLogger = self.prometheusServicesLogger() + await self.prometheusServicesLogger.async_service_failure_hook( + payload=payload + ) + + async def async_post_call_failure_hook( + self, original_exception: Exception, user_api_key_dict: UserAPIKeyAuth + ): + """ + Hook to track failed litellm-service calls + """ + return await 
super().async_post_call_failure_hook( + original_exception, user_api_key_dict + ) + + async def async_log_success_event(self, kwargs, response_obj, start_time, end_time): + """ + Hook to track latency for litellm proxy llm api calls + """ + try: + _duration = end_time - start_time + if isinstance(_duration, timedelta): + _duration = _duration.total_seconds() + elif isinstance(_duration, float): + pass + else: + raise Exception( + "Duration={} is not a float or timedelta object. type={}".format( + _duration, type(_duration) + ) + ) # invalid _duration value + await self.async_service_success_hook( + service=ServiceTypes.LITELLM, + duration=_duration, + call_type=kwargs["call_type"], + ) + except Exception as e: + raise e diff --git a/litellm/assistants/main.py b/litellm/assistants/main.py new file mode 100644 index 000000000..25d2433d7 --- /dev/null +++ b/litellm/assistants/main.py @@ -0,0 +1,495 @@ +# What is this? +## Main file for assistants API logic +from typing import Iterable +import os +import litellm +from openai import OpenAI +from litellm import client +from litellm.utils import supports_httpx_timeout +from ..llms.openai import OpenAIAssistantsAPI +from ..types.llms.openai import * +from ..types.router import * + +####### ENVIRONMENT VARIABLES ################### +openai_assistants_api = OpenAIAssistantsAPI() + +### ASSISTANTS ### + + +def get_assistants( + custom_llm_provider: Literal["openai"], + client: Optional[OpenAI] = None, + **kwargs, +) -> SyncCursorPage[Assistant]: + optional_params = GenericLiteLLMParams(**kwargs) + + ### TIMEOUT LOGIC ### + timeout = optional_params.timeout or kwargs.get("request_timeout", 600) or 600 + # set timeout for 10 minutes by default + + if ( + timeout is not None + and isinstance(timeout, httpx.Timeout) + and supports_httpx_timeout(custom_llm_provider) == False + ): + read_timeout = timeout.read or 600 + timeout = read_timeout # default 10 min timeout + elif timeout is not None and not isinstance(timeout, 
httpx.Timeout): + timeout = float(timeout) # type: ignore + elif timeout is None: + timeout = 600.0 + + response: Optional[SyncCursorPage[Assistant]] = None + if custom_llm_provider == "openai": + api_base = ( + optional_params.api_base # for deepinfra/perplexity/anyscale/groq we check in get_llm_provider and pass in the api base from there + or litellm.api_base + or os.getenv("OPENAI_API_BASE") + or "https://api.openai.com/v1" + ) + organization = ( + optional_params.organization + or litellm.organization + or os.getenv("OPENAI_ORGANIZATION", None) + or None # default - https://github.com/openai/openai-python/blob/284c1799070c723c6a553337134148a7ab088dd8/openai/util.py#L105 + ) + # set API KEY + api_key = ( + optional_params.api_key + or litellm.api_key # for deepinfra/perplexity/anyscale we check in get_llm_provider and pass in the api key from there + or litellm.openai_key + or os.getenv("OPENAI_API_KEY") + ) + response = openai_assistants_api.get_assistants( + api_base=api_base, + api_key=api_key, + timeout=timeout, + max_retries=optional_params.max_retries, + organization=organization, + client=client, + ) + else: + raise litellm.exceptions.BadRequestError( + message="LiteLLM doesn't support {} for 'get_assistants'. 
Only 'openai' is supported.".format( + custom_llm_provider + ), + model="n/a", + llm_provider=custom_llm_provider, + response=httpx.Response( + status_code=400, + content="Unsupported provider", + request=httpx.Request(method="create_thread", url="https://github.com/BerriAI/litellm"), # type: ignore + ), + ) + return response + + +### THREADS ### + + +def create_thread( + custom_llm_provider: Literal["openai"], + messages: Optional[Iterable[OpenAICreateThreadParamsMessage]] = None, + metadata: Optional[dict] = None, + tool_resources: Optional[OpenAICreateThreadParamsToolResources] = None, + client: Optional[OpenAI] = None, + **kwargs, +) -> Thread: + """ + - get the llm provider + - if openai - route it there + - pass through relevant params + + ``` + from litellm import create_thread + + create_thread( + custom_llm_provider="openai", + ### OPTIONAL ### + messages = { + "role": "user", + "content": "Hello, what is AI?" + }, + { + "role": "user", + "content": "How does AI work? Explain it in simple terms." 
+ }] + ) + ``` + """ + optional_params = GenericLiteLLMParams(**kwargs) + + ### TIMEOUT LOGIC ### + timeout = optional_params.timeout or kwargs.get("request_timeout", 600) or 600 + # set timeout for 10 minutes by default + + if ( + timeout is not None + and isinstance(timeout, httpx.Timeout) + and supports_httpx_timeout(custom_llm_provider) == False + ): + read_timeout = timeout.read or 600 + timeout = read_timeout # default 10 min timeout + elif timeout is not None and not isinstance(timeout, httpx.Timeout): + timeout = float(timeout) # type: ignore + elif timeout is None: + timeout = 600.0 + + response: Optional[Thread] = None + if custom_llm_provider == "openai": + api_base = ( + optional_params.api_base # for deepinfra/perplexity/anyscale/groq we check in get_llm_provider and pass in the api base from there + or litellm.api_base + or os.getenv("OPENAI_API_BASE") + or "https://api.openai.com/v1" + ) + organization = ( + optional_params.organization + or litellm.organization + or os.getenv("OPENAI_ORGANIZATION", None) + or None # default - https://github.com/openai/openai-python/blob/284c1799070c723c6a553337134148a7ab088dd8/openai/util.py#L105 + ) + # set API KEY + api_key = ( + optional_params.api_key + or litellm.api_key # for deepinfra/perplexity/anyscale we check in get_llm_provider and pass in the api key from there + or litellm.openai_key + or os.getenv("OPENAI_API_KEY") + ) + response = openai_assistants_api.create_thread( + messages=messages, + metadata=metadata, + api_base=api_base, + api_key=api_key, + timeout=timeout, + max_retries=optional_params.max_retries, + organization=organization, + client=client, + ) + else: + raise litellm.exceptions.BadRequestError( + message="LiteLLM doesn't support {} for 'create_thread'. 
Only 'openai' is supported.".format( + custom_llm_provider + ), + model="n/a", + llm_provider=custom_llm_provider, + response=httpx.Response( + status_code=400, + content="Unsupported provider", + request=httpx.Request(method="create_thread", url="https://github.com/BerriAI/litellm"), # type: ignore + ), + ) + return response + + +def get_thread( + custom_llm_provider: Literal["openai"], + thread_id: str, + client: Optional[OpenAI] = None, + **kwargs, +) -> Thread: + """Get the thread object, given a thread_id""" + optional_params = GenericLiteLLMParams(**kwargs) + + ### TIMEOUT LOGIC ### + timeout = optional_params.timeout or kwargs.get("request_timeout", 600) or 600 + # set timeout for 10 minutes by default + + if ( + timeout is not None + and isinstance(timeout, httpx.Timeout) + and supports_httpx_timeout(custom_llm_provider) == False + ): + read_timeout = timeout.read or 600 + timeout = read_timeout # default 10 min timeout + elif timeout is not None and not isinstance(timeout, httpx.Timeout): + timeout = float(timeout) # type: ignore + elif timeout is None: + timeout = 600.0 + + response: Optional[Thread] = None + if custom_llm_provider == "openai": + api_base = ( + optional_params.api_base # for deepinfra/perplexity/anyscale/groq we check in get_llm_provider and pass in the api base from there + or litellm.api_base + or os.getenv("OPENAI_API_BASE") + or "https://api.openai.com/v1" + ) + organization = ( + optional_params.organization + or litellm.organization + or os.getenv("OPENAI_ORGANIZATION", None) + or None # default - https://github.com/openai/openai-python/blob/284c1799070c723c6a553337134148a7ab088dd8/openai/util.py#L105 + ) + # set API KEY + api_key = ( + optional_params.api_key + or litellm.api_key # for deepinfra/perplexity/anyscale we check in get_llm_provider and pass in the api key from there + or litellm.openai_key + or os.getenv("OPENAI_API_KEY") + ) + response = openai_assistants_api.get_thread( + thread_id=thread_id, + api_base=api_base, + 
api_key=api_key, + timeout=timeout, + max_retries=optional_params.max_retries, + organization=organization, + client=client, + ) + else: + raise litellm.exceptions.BadRequestError( + message="LiteLLM doesn't support {} for 'get_thread'. Only 'openai' is supported.".format( + custom_llm_provider + ), + model="n/a", + llm_provider=custom_llm_provider, + response=httpx.Response( + status_code=400, + content="Unsupported provider", + request=httpx.Request(method="create_thread", url="https://github.com/BerriAI/litellm"), # type: ignore + ), + ) + return response + + +### MESSAGES ### + + +def add_message( + custom_llm_provider: Literal["openai"], + thread_id: str, + role: Literal["user", "assistant"], + content: str, + attachments: Optional[List[Attachment]] = None, + metadata: Optional[dict] = None, + client: Optional[OpenAI] = None, + **kwargs, +) -> OpenAIMessage: + ### COMMON OBJECTS ### + message_data = MessageData( + role=role, content=content, attachments=attachments, metadata=metadata + ) + optional_params = GenericLiteLLMParams(**kwargs) + + ### TIMEOUT LOGIC ### + timeout = optional_params.timeout or kwargs.get("request_timeout", 600) or 600 + # set timeout for 10 minutes by default + + if ( + timeout is not None + and isinstance(timeout, httpx.Timeout) + and supports_httpx_timeout(custom_llm_provider) == False + ): + read_timeout = timeout.read or 600 + timeout = read_timeout # default 10 min timeout + elif timeout is not None and not isinstance(timeout, httpx.Timeout): + timeout = float(timeout) # type: ignore + elif timeout is None: + timeout = 600.0 + + response: Optional[OpenAIMessage] = None + if custom_llm_provider == "openai": + api_base = ( + optional_params.api_base # for deepinfra/perplexity/anyscale/groq we check in get_llm_provider and pass in the api base from there + or litellm.api_base + or os.getenv("OPENAI_API_BASE") + or "https://api.openai.com/v1" + ) + organization = ( + optional_params.organization + or litellm.organization + or 
os.getenv("OPENAI_ORGANIZATION", None) + or None # default - https://github.com/openai/openai-python/blob/284c1799070c723c6a553337134148a7ab088dd8/openai/util.py#L105 + ) + # set API KEY + api_key = ( + optional_params.api_key + or litellm.api_key # for deepinfra/perplexity/anyscale we check in get_llm_provider and pass in the api key from there + or litellm.openai_key + or os.getenv("OPENAI_API_KEY") + ) + response = openai_assistants_api.add_message( + thread_id=thread_id, + message_data=message_data, + api_base=api_base, + api_key=api_key, + timeout=timeout, + max_retries=optional_params.max_retries, + organization=organization, + client=client, + ) + else: + raise litellm.exceptions.BadRequestError( + message="LiteLLM doesn't support {} for 'create_thread'. Only 'openai' is supported.".format( + custom_llm_provider + ), + model="n/a", + llm_provider=custom_llm_provider, + response=httpx.Response( + status_code=400, + content="Unsupported provider", + request=httpx.Request(method="create_thread", url="https://github.com/BerriAI/litellm"), # type: ignore + ), + ) + + return response + + +def get_messages( + custom_llm_provider: Literal["openai"], + thread_id: str, + client: Optional[OpenAI] = None, + **kwargs, +) -> SyncCursorPage[OpenAIMessage]: + optional_params = GenericLiteLLMParams(**kwargs) + + ### TIMEOUT LOGIC ### + timeout = optional_params.timeout or kwargs.get("request_timeout", 600) or 600 + # set timeout for 10 minutes by default + + if ( + timeout is not None + and isinstance(timeout, httpx.Timeout) + and supports_httpx_timeout(custom_llm_provider) == False + ): + read_timeout = timeout.read or 600 + timeout = read_timeout # default 10 min timeout + elif timeout is not None and not isinstance(timeout, httpx.Timeout): + timeout = float(timeout) # type: ignore + elif timeout is None: + timeout = 600.0 + + response: Optional[SyncCursorPage[OpenAIMessage]] = None + if custom_llm_provider == "openai": + api_base = ( + optional_params.api_base # for 
deepinfra/perplexity/anyscale/groq we check in get_llm_provider and pass in the api base from there + or litellm.api_base + or os.getenv("OPENAI_API_BASE") + or "https://api.openai.com/v1" + ) + organization = ( + optional_params.organization + or litellm.organization + or os.getenv("OPENAI_ORGANIZATION", None) + or None # default - https://github.com/openai/openai-python/blob/284c1799070c723c6a553337134148a7ab088dd8/openai/util.py#L105 + ) + # set API KEY + api_key = ( + optional_params.api_key + or litellm.api_key # for deepinfra/perplexity/anyscale we check in get_llm_provider and pass in the api key from there + or litellm.openai_key + or os.getenv("OPENAI_API_KEY") + ) + response = openai_assistants_api.get_messages( + thread_id=thread_id, + api_base=api_base, + api_key=api_key, + timeout=timeout, + max_retries=optional_params.max_retries, + organization=organization, + client=client, + ) + else: + raise litellm.exceptions.BadRequestError( + message="LiteLLM doesn't support {} for 'get_messages'. 
Only 'openai' is supported.".format( + custom_llm_provider + ), + model="n/a", + llm_provider=custom_llm_provider, + response=httpx.Response( + status_code=400, + content="Unsupported provider", + request=httpx.Request(method="create_thread", url="https://github.com/BerriAI/litellm"), # type: ignore + ), + ) + + return response + + +### RUNS ### + + +def run_thread( + custom_llm_provider: Literal["openai"], + thread_id: str, + assistant_id: str, + additional_instructions: Optional[str] = None, + instructions: Optional[str] = None, + metadata: Optional[dict] = None, + model: Optional[str] = None, + stream: Optional[bool] = None, + tools: Optional[Iterable[AssistantToolParam]] = None, + client: Optional[OpenAI] = None, + **kwargs, +) -> Run: + """Run a given thread + assistant.""" + optional_params = GenericLiteLLMParams(**kwargs) + + ### TIMEOUT LOGIC ### + timeout = optional_params.timeout or kwargs.get("request_timeout", 600) or 600 + # set timeout for 10 minutes by default + + if ( + timeout is not None + and isinstance(timeout, httpx.Timeout) + and supports_httpx_timeout(custom_llm_provider) == False + ): + read_timeout = timeout.read or 600 + timeout = read_timeout # default 10 min timeout + elif timeout is not None and not isinstance(timeout, httpx.Timeout): + timeout = float(timeout) # type: ignore + elif timeout is None: + timeout = 600.0 + + response: Optional[Run] = None + if custom_llm_provider == "openai": + api_base = ( + optional_params.api_base # for deepinfra/perplexity/anyscale/groq we check in get_llm_provider and pass in the api base from there + or litellm.api_base + or os.getenv("OPENAI_API_BASE") + or "https://api.openai.com/v1" + ) + organization = ( + optional_params.organization + or litellm.organization + or os.getenv("OPENAI_ORGANIZATION", None) + or None # default - https://github.com/openai/openai-python/blob/284c1799070c723c6a553337134148a7ab088dd8/openai/util.py#L105 + ) + # set API KEY + api_key = ( + optional_params.api_key + or 
litellm.api_key # for deepinfra/perplexity/anyscale we check in get_llm_provider and pass in the api key from there + or litellm.openai_key + or os.getenv("OPENAI_API_KEY") + ) + response = openai_assistants_api.run_thread( + thread_id=thread_id, + assistant_id=assistant_id, + additional_instructions=additional_instructions, + instructions=instructions, + metadata=metadata, + model=model, + stream=stream, + tools=tools, + api_base=api_base, + api_key=api_key, + timeout=timeout, + max_retries=optional_params.max_retries, + organization=organization, + client=client, + ) + else: + raise litellm.exceptions.BadRequestError( + message="LiteLLM doesn't support {} for 'run_thread'. Only 'openai' is supported.".format( + custom_llm_provider + ), + model="n/a", + llm_provider=custom_llm_provider, + response=httpx.Response( + status_code=400, + content="Unsupported provider", + request=httpx.Request(method="create_thread", url="https://github.com/BerriAI/litellm"), # type: ignore + ), + ) + return response diff --git a/litellm/budget_manager.py b/litellm/budget_manager.py index 841015753..9ef4bfafa 100644 --- a/litellm/budget_manager.py +++ b/litellm/budget_manager.py @@ -10,7 +10,7 @@ import os, json, time import litellm from litellm.utils import ModelResponse -import requests, threading +import requests, threading # type: ignore from typing import Optional, Union, Literal diff --git a/litellm/caching.py b/litellm/caching.py index 2401d9708..8c9157e53 100644 --- a/litellm/caching.py +++ b/litellm/caching.py @@ -13,6 +13,7 @@ import json, traceback, ast, hashlib from typing import Optional, Literal, List, Union, Any, BinaryIO from openai._models import BaseModel as OpenAIObject from litellm._logging import verbose_logger +from litellm.types.services import ServiceLoggerPayload, ServiceTypes import traceback @@ -88,6 +89,13 @@ class InMemoryCache(BaseCache): return_val.append(val) return return_val + def increment_cache(self, key, value: int, **kwargs) -> int: + # get the 
value + init_value = self.get_cache(key=key) or 0 + value = init_value + value + self.set_cache(key, value, **kwargs) + return value + async def async_get_cache(self, key, **kwargs): return self.get_cache(key=key, **kwargs) @@ -98,11 +106,12 @@ class InMemoryCache(BaseCache): return_val.append(val) return return_val - async def async_increment(self, key, value: int, **kwargs): + async def async_increment(self, key, value: float, **kwargs) -> float: # get the value init_value = await self.async_get_cache(key=key) or 0 value = init_value + value await self.async_set_cache(key, value, **kwargs) + return value def flush_cache(self): self.cache_dict.clear() @@ -129,6 +138,7 @@ class RedisCache(BaseCache): **kwargs, ): from ._redis import get_redis_client, get_redis_connection_pool + from litellm._service_logger import ServiceLogging import redis redis_kwargs = {} @@ -139,18 +149,19 @@ class RedisCache(BaseCache): if password is not None: redis_kwargs["password"] = password + ### HEALTH MONITORING OBJECT ### + if kwargs.get("service_logger_obj", None) is not None and isinstance( + kwargs["service_logger_obj"], ServiceLogging + ): + self.service_logger_obj = kwargs.pop("service_logger_obj") + else: + self.service_logger_obj = ServiceLogging() + redis_kwargs.update(kwargs) self.redis_client = get_redis_client(**redis_kwargs) self.redis_kwargs = redis_kwargs self.async_redis_conn_pool = get_redis_connection_pool(**redis_kwargs) - if "url" in redis_kwargs and redis_kwargs["url"] is not None: - parsed_kwargs = redis.connection.parse_url(redis_kwargs["url"]) - redis_kwargs.update(parsed_kwargs) - self.redis_kwargs.update(parsed_kwargs) - # pop url - self.redis_kwargs.pop("url") - # redis namespaces self.namespace = namespace # for high traffic, we store the redis results in memory and then batch write to redis @@ -162,6 +173,23 @@ class RedisCache(BaseCache): except Exception as e: pass + ### ASYNC HEALTH PING ### + try: + # asyncio.get_running_loop().create_task(self.ping()) 
+ result = asyncio.get_running_loop().create_task(self.ping()) + except Exception as e: + verbose_logger.error( + "Error connecting to Async Redis client", extra={"error": str(e)} + ) + + ### SYNC HEALTH PING ### + try: + self.redis_client.ping() + except Exception as e: + verbose_logger.error( + "Error connecting to Sync Redis client", extra={"error": str(e)} + ) + def init_async_client(self): from ._redis import get_redis_async_client @@ -192,18 +220,101 @@ class RedisCache(BaseCache): f"LiteLLM Caching: set() - Got exception from REDIS : {str(e)}" ) + def increment_cache(self, key, value: int, **kwargs) -> int: + _redis_client = self.redis_client + start_time = time.time() + try: + result = _redis_client.incr(name=key, amount=value) + ## LOGGING ## + end_time = time.time() + _duration = end_time - start_time + asyncio.create_task( + self.service_logger_obj.service_success_hook( + service=ServiceTypes.REDIS, + duration=_duration, + call_type="increment_cache", + ) + ) + return result + except Exception as e: + ## LOGGING ## + end_time = time.time() + _duration = end_time - start_time + asyncio.create_task( + self.service_logger_obj.async_service_failure_hook( + service=ServiceTypes.REDIS, + duration=_duration, + error=e, + call_type="increment_cache", + ) + ) + verbose_logger.error( + "LiteLLM Redis Caching: increment_cache() - Got exception from REDIS %s, Writing value=%s", + str(e), + value, + ) + traceback.print_exc() + raise e + async def async_scan_iter(self, pattern: str, count: int = 100) -> list: - keys = [] - _redis_client = self.init_async_client() - async with _redis_client as redis_client: - async for key in redis_client.scan_iter(match=pattern + "*", count=count): - keys.append(key) - if len(keys) >= count: - break - return keys + start_time = time.time() + try: + keys = [] + _redis_client = self.init_async_client() + async with _redis_client as redis_client: + async for key in redis_client.scan_iter( + match=pattern + "*", count=count + ): + 
keys.append(key) + if len(keys) >= count: + break + + ## LOGGING ## + end_time = time.time() + _duration = end_time - start_time + asyncio.create_task( + self.service_logger_obj.async_service_success_hook( + service=ServiceTypes.REDIS, + duration=_duration, + call_type="async_scan_iter", + ) + ) # DO NOT SLOW DOWN CALL B/C OF THIS + return keys + except Exception as e: + # NON blocking - notify users Redis is throwing an exception + ## LOGGING ## + end_time = time.time() + _duration = end_time - start_time + asyncio.create_task( + self.service_logger_obj.async_service_failure_hook( + service=ServiceTypes.REDIS, + duration=_duration, + error=e, + call_type="async_scan_iter", + ) + ) + raise e async def async_set_cache(self, key, value, **kwargs): - _redis_client = self.init_async_client() + start_time = time.time() + try: + _redis_client = self.init_async_client() + except Exception as e: + end_time = time.time() + _duration = end_time - start_time + asyncio.create_task( + self.service_logger_obj.async_service_failure_hook( + service=ServiceTypes.REDIS, duration=_duration, error=e + ) + ) + # NON blocking - notify users Redis is throwing an exception + verbose_logger.error( + "LiteLLM Redis Caching: async set() - Got exception from REDIS %s, Writing value=%s", + str(e), + value, + ) + traceback.print_exc() + key = self.check_and_fix_namespace(key=key) async with _redis_client as redis_client: ttl = kwargs.get("ttl", None) @@ -215,7 +326,26 @@ class RedisCache(BaseCache): print_verbose( f"Successfully Set ASYNC Redis Cache: key: {key}\nValue {value}\nttl={ttl}" ) + end_time = time.time() + _duration = end_time - start_time + asyncio.create_task( + self.service_logger_obj.async_service_success_hook( + service=ServiceTypes.REDIS, + duration=_duration, + call_type="async_set_cache", + ) + ) except Exception as e: + end_time = time.time() + _duration = end_time - start_time + asyncio.create_task( + self.service_logger_obj.async_service_failure_hook( + 
service=ServiceTypes.REDIS, + duration=_duration, + error=e, + call_type="async_set_cache", + ) + ) # NON blocking - notify users Redis is throwing an exception verbose_logger.error( "LiteLLM Redis Caching: async set() - Got exception from REDIS %s, Writing value=%s", @@ -229,6 +359,11 @@ class RedisCache(BaseCache): Use Redis Pipelines for bulk write operations """ _redis_client = self.init_async_client() + start_time = time.time() + + print_verbose( + f"Set Async Redis Cache: key list: {cache_list}\nttl={ttl}, redis_version={self.redis_version}" + ) try: async with _redis_client as redis_client: async with redis_client.pipeline(transaction=True) as pipe: @@ -238,18 +373,41 @@ class RedisCache(BaseCache): print_verbose( f"Set ASYNC Redis Cache PIPELINE: key: {cache_key}\nValue {cache_value}\nttl={ttl}" ) + json_cache_value = json.dumps(cache_value) # Set the value with a TTL if it's provided. if ttl is not None: - pipe.setex(cache_key, ttl, json.dumps(cache_value)) + pipe.setex(cache_key, ttl, json_cache_value) else: - pipe.set(cache_key, json.dumps(cache_value)) + pipe.set(cache_key, json_cache_value) # Execute the pipeline and return the results. results = await pipe.execute() print_verbose(f"pipeline results: {results}") # Optionally, you could process 'results' to make sure that all set operations were successful. 
+ ## LOGGING ## + end_time = time.time() + _duration = end_time - start_time + asyncio.create_task( + self.service_logger_obj.async_service_success_hook( + service=ServiceTypes.REDIS, + duration=_duration, + call_type="async_set_cache_pipeline", + ) + ) return results except Exception as e: + ## LOGGING ## + end_time = time.time() + _duration = end_time - start_time + asyncio.create_task( + self.service_logger_obj.async_service_failure_hook( + service=ServiceTypes.REDIS, + duration=_duration, + error=e, + call_type="async_set_cache_pipeline", + ) + ) + verbose_logger.error( "LiteLLM Redis Caching: async set_cache_pipeline() - Got exception from REDIS %s, Writing value=%s", str(e), @@ -264,20 +422,44 @@ class RedisCache(BaseCache): key = self.check_and_fix_namespace(key=key) self.redis_batch_writing_buffer.append((key, value)) if len(self.redis_batch_writing_buffer) >= self.redis_flush_size: - await self.flush_cache_buffer() + await self.flush_cache_buffer() # logging done in here - async def async_increment(self, key, value: int, **kwargs): + async def async_increment(self, key, value: float, **kwargs) -> float: _redis_client = self.init_async_client() + start_time = time.time() try: async with _redis_client as redis_client: - await redis_client.incr(name=key, amount=value) + result = await redis_client.incrbyfloat(name=key, amount=value) + ## LOGGING ## + end_time = time.time() + _duration = end_time - start_time + asyncio.create_task( + self.service_logger_obj.async_service_success_hook( + service=ServiceTypes.REDIS, + duration=_duration, + call_type="async_increment", + ) + ) + return result except Exception as e: + ## LOGGING ## + end_time = time.time() + _duration = end_time - start_time + asyncio.create_task( + self.service_logger_obj.async_service_failure_hook( + service=ServiceTypes.REDIS, + duration=_duration, + error=e, + call_type="async_increment", + ) + ) verbose_logger.error( "LiteLLM Redis Caching: async async_increment() - Got exception from REDIS 
%s, Writing value=%s", str(e), value, ) traceback.print_exc() + raise e async def flush_cache_buffer(self): print_verbose( @@ -345,6 +527,7 @@ class RedisCache(BaseCache): async def async_get_cache(self, key, **kwargs): _redis_client = self.init_async_client() key = self.check_and_fix_namespace(key=key) + start_time = time.time() async with _redis_client as redis_client: try: print_verbose(f"Get Async Redis Cache: key: {key}") @@ -353,8 +536,29 @@ class RedisCache(BaseCache): f"Got Async Redis Cache: key: {key}, cached_response {cached_response}" ) response = self._get_cache_logic(cached_response=cached_response) + ## LOGGING ## + end_time = time.time() + _duration = end_time - start_time + asyncio.create_task( + self.service_logger_obj.async_service_success_hook( + service=ServiceTypes.REDIS, + duration=_duration, + call_type="async_get_cache", + ) + ) return response except Exception as e: + ## LOGGING ## + end_time = time.time() + _duration = end_time - start_time + asyncio.create_task( + self.service_logger_obj.async_service_failure_hook( + service=ServiceTypes.REDIS, + duration=_duration, + error=e, + call_type="async_get_cache", + ) + ) # NON blocking - notify users Redis is throwing an exception print_verbose( f"LiteLLM Caching: async get() - Got exception from REDIS: {str(e)}" @@ -366,6 +570,7 @@ class RedisCache(BaseCache): """ _redis_client = await self.init_async_client() key_value_dict = {} + start_time = time.time() try: async with _redis_client as redis_client: _keys = [] @@ -374,29 +579,110 @@ class RedisCache(BaseCache): _keys.append(cache_key) results = await redis_client.mget(keys=_keys) + ## LOGGING ## + end_time = time.time() + _duration = end_time - start_time + asyncio.create_task( + self.service_logger_obj.async_service_success_hook( + service=ServiceTypes.REDIS, + duration=_duration, + call_type="async_batch_get_cache", + ) + ) + # Associate the results back with their keys. 
# 'results' is a list of values corresponding to the order of keys in 'key_list'. key_value_dict = dict(zip(key_list, results)) - decoded_results = { - k.decode("utf-8"): self._get_cache_logic(v) - for k, v in key_value_dict.items() - } + decoded_results = {} + for k, v in key_value_dict.items(): + if isinstance(k, bytes): + k = k.decode("utf-8") + v = self._get_cache_logic(v) + decoded_results[k] = v return decoded_results except Exception as e: + ## LOGGING ## + end_time = time.time() + _duration = end_time - start_time + asyncio.create_task( + self.service_logger_obj.async_service_failure_hook( + service=ServiceTypes.REDIS, + duration=_duration, + error=e, + call_type="async_batch_get_cache", + ) + ) print_verbose(f"Error occurred in pipeline read - {str(e)}") return key_value_dict - async def ping(self): + def sync_ping(self) -> bool: + """ + Tests if the sync redis client is correctly setup. + """ + print_verbose(f"Pinging Sync Redis Cache") + start_time = time.time() + try: + response = self.redis_client.ping() + print_verbose(f"Redis Cache PING: {response}") + ## LOGGING ## + end_time = time.time() + _duration = end_time - start_time + self.service_logger_obj.service_success_hook( + service=ServiceTypes.REDIS, + duration=_duration, + call_type="sync_ping", + ) + return response + except Exception as e: + # NON blocking - notify users Redis is throwing an exception + ## LOGGING ## + end_time = time.time() + _duration = end_time - start_time + self.service_logger_obj.service_failure_hook( + service=ServiceTypes.REDIS, + duration=_duration, + error=e, + call_type="sync_ping", + ) + print_verbose( + f"LiteLLM Redis Cache PING: - Got exception from REDIS : {str(e)}" + ) + traceback.print_exc() + raise e + + async def ping(self) -> bool: _redis_client = self.init_async_client() + start_time = time.time() async with _redis_client as redis_client: print_verbose(f"Pinging Async Redis Cache") try: response = await redis_client.ping() - print_verbose(f"Redis Cache 
PING: {response}") + ## LOGGING ## + end_time = time.time() + _duration = end_time - start_time + asyncio.create_task( + self.service_logger_obj.async_service_success_hook( + service=ServiceTypes.REDIS, + duration=_duration, + call_type="async_ping", + ) + ) + return response except Exception as e: # NON blocking - notify users Redis is throwing an exception + ## LOGGING ## + end_time = time.time() + _duration = end_time - start_time + asyncio.create_task( + self.service_logger_obj.async_service_failure_hook( + service=ServiceTypes.REDIS, + duration=_duration, + error=e, + call_type="async_ping", + ) + ) print_verbose( f"LiteLLM Redis Cache PING: - Got exception from REDIS : {str(e)}" ) @@ -525,9 +811,7 @@ class RedisSemanticCache(BaseCache): # get the prompt messages = kwargs["messages"] - prompt = "" - for message in messages: - prompt += message["content"] + prompt = "".join(message["content"] for message in messages) # create an embedding for prompt embedding_response = litellm.embedding( @@ -562,9 +846,7 @@ class RedisSemanticCache(BaseCache): # get the messages messages = kwargs["messages"] - prompt = "" - for message in messages: - prompt += message["content"] + prompt = "".join(message["content"] for message in messages) # convert to embedding embedding_response = litellm.embedding( @@ -624,9 +906,7 @@ class RedisSemanticCache(BaseCache): # get the prompt messages = kwargs["messages"] - prompt = "" - for message in messages: - prompt += message["content"] + prompt = "".join(message["content"] for message in messages) # create an embedding for prompt router_model_names = ( [m["model_name"] for m in llm_model_list] @@ -679,9 +959,7 @@ class RedisSemanticCache(BaseCache): # get the messages messages = kwargs["messages"] - prompt = "" - for message in messages: - prompt += message["content"] + prompt = "".join(message["content"] for message in messages) router_model_names = ( [m["model_name"] for m in llm_model_list] @@ -927,6 +1205,30 @@ class 
DualCache(BaseCache): except Exception as e: print_verbose(e) + def increment_cache( + self, key, value: int, local_only: bool = False, **kwargs + ) -> int: + """ + Key - the key in cache + + Value - int - the value you want to increment by + + Returns - int - the incremented value + """ + try: + result: int = value + if self.in_memory_cache is not None: + result = self.in_memory_cache.increment_cache(key, value, **kwargs) + + if self.redis_cache is not None and local_only == False: + result = self.redis_cache.increment_cache(key, value, **kwargs) + + return result + except Exception as e: + print_verbose(f"LiteLLM Cache: Excepton async add_cache: {str(e)}") + traceback.print_exc() + raise e + def get_cache(self, key, local_only: bool = False, **kwargs): # Try to fetch from in-memory cache first try: @@ -979,7 +1281,7 @@ class DualCache(BaseCache): self.in_memory_cache.set_cache(key, redis_result[key], **kwargs) for key, value in redis_result.items(): - result[sublist_keys.index(key)] = value + result[keys.index(key)] = value print_verbose(f"async batch get cache: cache result: {result}") return result @@ -1029,10 +1331,8 @@ class DualCache(BaseCache): keys, **kwargs ) - print_verbose(f"in_memory_result: {in_memory_result}") if in_memory_result is not None: result = in_memory_result - if None in result and self.redis_cache is not None and local_only == False: """ - for the none values in the result @@ -1048,22 +1348,23 @@ class DualCache(BaseCache): if redis_result is not None: # Update in-memory cache with the value from Redis - for key in redis_result: - await self.in_memory_cache.async_set_cache( - key, redis_result[key], **kwargs - ) + for key, value in redis_result.items(): + if value is not None: + await self.in_memory_cache.async_set_cache( + key, redis_result[key], **kwargs + ) + for key, value in redis_result.items(): + index = keys.index(key) + result[index] = value - sublist_dict = dict(zip(sublist_keys, redis_result)) - - for key, value in 
sublist_dict.items(): - result[sublist_keys.index(key)] = value - - print_verbose(f"async batch get cache: cache result: {result}") return result except Exception as e: traceback.print_exc() async def async_set_cache(self, key, value, local_only: bool = False, **kwargs): + print_verbose( + f"async set cache: cache key: {key}; local_only: {local_only}; value: {value}" + ) try: if self.in_memory_cache is not None: await self.in_memory_cache.async_set_cache(key, value, **kwargs) @@ -1074,24 +1375,55 @@ class DualCache(BaseCache): print_verbose(f"LiteLLM Cache: Excepton async add_cache: {str(e)}") traceback.print_exc() - async def async_increment_cache( - self, key, value: int, local_only: bool = False, **kwargs + async def async_batch_set_cache( + self, cache_list: list, local_only: bool = False, **kwargs ): """ - Key - the key in cache - - Value - int - the value you want to increment by + Batch write values to the cache """ + print_verbose( + f"async batch set cache: cache keys: {cache_list}; local_only: {local_only}" + ) try: if self.in_memory_cache is not None: - await self.in_memory_cache.async_increment(key, value, **kwargs) + await self.in_memory_cache.async_set_cache_pipeline( + cache_list=cache_list, **kwargs + ) if self.redis_cache is not None and local_only == False: - await self.redis_cache.async_increment(key, value, **kwargs) + await self.redis_cache.async_set_cache_pipeline( + cache_list=cache_list, ttl=kwargs.get("ttl", None) + ) except Exception as e: print_verbose(f"LiteLLM Cache: Excepton async add_cache: {str(e)}") traceback.print_exc() + async def async_increment_cache( + self, key, value: float, local_only: bool = False, **kwargs + ) -> float: + """ + Key - the key in cache + + Value - float - the value you want to increment by + + Returns - float - the incremented value + """ + try: + result: float = value + if self.in_memory_cache is not None: + result = await self.in_memory_cache.async_increment( + key, value, **kwargs + ) + + if 
self.redis_cache is not None and local_only == False: + result = await self.redis_cache.async_increment(key, value, **kwargs) + + return result + except Exception as e: + print_verbose(f"LiteLLM Cache: Excepton async add_cache: {str(e)}") + traceback.print_exc() + raise e + def flush_cache(self): if self.in_memory_cache is not None: self.in_memory_cache.flush_cache() @@ -1109,7 +1441,7 @@ class DualCache(BaseCache): class Cache: def __init__( self, - type: Optional[Literal["local", "redis", "redis-semantic", "s3"]] = "local", + type: Optional[Literal["local", "redis", "redis-semantic", "s3", "disk"]] = "local", host: Optional[str] = None, port: Optional[str] = None, password: Optional[str] = None, @@ -1152,13 +1484,14 @@ class Cache: redis_semantic_cache_use_async=False, redis_semantic_cache_embedding_model="text-embedding-ada-002", redis_flush_size=None, + disk_cache_dir=None, **kwargs, ): """ Initializes the cache based on the given type. Args: - type (str, optional): The type of cache to initialize. Can be "local", "redis", "redis-semantic", or "s3". Defaults to "local". + type (str, optional): The type of cache to initialize. Can be "local", "redis", "redis-semantic", "s3" or "disk". Defaults to "local". host (str, optional): The host address for the Redis cache. Required if type is "redis". port (int, optional): The port number for the Redis cache. Required if type is "redis". password (str, optional): The password for the Redis cache. Required if type is "redis". 
@@ -1204,6 +1537,8 @@ class Cache: s3_path=s3_path, **kwargs, ) + elif type == "disk": + self.cache = DiskCache(disk_cache_dir=disk_cache_dir) if "cache" not in litellm.input_callback: litellm.input_callback.append("cache") if "cache" not in litellm.success_callback: @@ -1575,8 +1910,86 @@ class Cache: await self.cache.disconnect() +class DiskCache(BaseCache): + def __init__(self, disk_cache_dir: Optional[str] = None): + import diskcache as dc + + # if users don't provider one, use the default litellm cache + if disk_cache_dir is None: + self.disk_cache = dc.Cache(".litellm_cache") + else: + self.disk_cache = dc.Cache(disk_cache_dir) + + def set_cache(self, key, value, **kwargs): + print_verbose("DiskCache: set_cache") + if "ttl" in kwargs: + self.disk_cache.set(key, value, expire=kwargs["ttl"]) + else: + self.disk_cache.set(key, value) + + async def async_set_cache(self, key, value, **kwargs): + self.set_cache(key=key, value=value, **kwargs) + + async def async_set_cache_pipeline(self, cache_list, ttl=None): + for cache_key, cache_value in cache_list: + if ttl is not None: + self.set_cache(key=cache_key, value=cache_value, ttl=ttl) + else: + self.set_cache(key=cache_key, value=cache_value) + + def get_cache(self, key, **kwargs): + original_cached_response = self.disk_cache.get(key) + if original_cached_response: + try: + cached_response = json.loads(original_cached_response) + except: + cached_response = original_cached_response + return cached_response + return None + + def batch_get_cache(self, keys: list, **kwargs): + return_val = [] + for k in keys: + val = self.get_cache(key=k, **kwargs) + return_val.append(val) + return return_val + + def increment_cache(self, key, value: int, **kwargs) -> int: + # get the value + init_value = self.get_cache(key=key) or 0 + value = init_value + value + self.set_cache(key, value, **kwargs) + return value + + async def async_get_cache(self, key, **kwargs): + return self.get_cache(key=key, **kwargs) + + async def 
async_batch_get_cache(self, keys: list, **kwargs): + return_val = [] + for k in keys: + val = self.get_cache(key=k, **kwargs) + return_val.append(val) + return return_val + + async def async_increment(self, key, value: int, **kwargs) -> int: + # get the value + init_value = await self.async_get_cache(key=key) or 0 + value = init_value + value + await self.async_set_cache(key, value, **kwargs) + return value + + def flush_cache(self): + self.disk_cache.clear() + + async def disconnect(self): + pass + + def delete_cache(self, key): + self.disk_cache.pop(key) + + def enable_cache( - type: Optional[Literal["local", "redis", "s3"]] = "local", + type: Optional[Literal["local", "redis", "s3", "disk"]] = "local", host: Optional[str] = None, port: Optional[str] = None, password: Optional[str] = None, @@ -1605,7 +2018,7 @@ def enable_cache( Enable cache with the specified configuration. Args: - type (Optional[Literal["local", "redis"]]): The type of cache to enable. Defaults to "local". + type (Optional[Literal["local", "redis", "s3", "disk"]]): The type of cache to enable. Defaults to "local". host (Optional[str]): The host address of the cache server. Defaults to None. port (Optional[str]): The port number of the cache server. Defaults to None. password (Optional[str]): The password for the cache server. Defaults to None. @@ -1641,7 +2054,7 @@ def enable_cache( def update_cache( - type: Optional[Literal["local", "redis"]] = "local", + type: Optional[Literal["local", "redis", "s3", "disk"]] = "local", host: Optional[str] = None, port: Optional[str] = None, password: Optional[str] = None, @@ -1670,7 +2083,7 @@ def update_cache( Update the cache for LiteLLM. Args: - type (Optional[Literal["local", "redis"]]): The type of cache. Defaults to "local". + type (Optional[Literal["local", "redis", "s3", "disk"]]): The type of cache. Defaults to "local". host (Optional[str]): The host of the cache. Defaults to None. port (Optional[str]): The port of the cache. Defaults to None. 
password (Optional[str]): The password for the cache. Defaults to None. diff --git a/litellm/exceptions.py b/litellm/exceptions.py index a7bf394f6..5eb66743b 100644 --- a/litellm/exceptions.py +++ b/litellm/exceptions.py @@ -9,55 +9,64 @@ ## LiteLLM versions of the OpenAI Exception Types -from openai import ( - AuthenticationError, - BadRequestError, - NotFoundError, - RateLimitError, - APIStatusError, - OpenAIError, - APIError, - APITimeoutError, - APIConnectionError, - APIResponseValidationError, - UnprocessableEntityError, - PermissionDeniedError, -) +import openai import httpx from typing import Optional -class AuthenticationError(AuthenticationError): # type: ignore - def __init__(self, message, llm_provider, model, response: httpx.Response): +class AuthenticationError(openai.AuthenticationError): # type: ignore + def __init__( + self, + message, + llm_provider, + model, + response: httpx.Response, + litellm_debug_info: Optional[str] = None, + ): self.status_code = 401 self.message = message self.llm_provider = llm_provider self.model = model + self.litellm_debug_info = litellm_debug_info super().__init__( self.message, response=response, body=None ) # Call the base class constructor with the parameters it needs # raise when invalid models passed, example gpt-8 -class NotFoundError(NotFoundError): # type: ignore - def __init__(self, message, model, llm_provider, response: httpx.Response): +class NotFoundError(openai.NotFoundError): # type: ignore + def __init__( + self, + message, + model, + llm_provider, + response: httpx.Response, + litellm_debug_info: Optional[str] = None, + ): self.status_code = 404 self.message = message self.model = model self.llm_provider = llm_provider + self.litellm_debug_info = litellm_debug_info super().__init__( self.message, response=response, body=None ) # Call the base class constructor with the parameters it needs -class BadRequestError(BadRequestError): # type: ignore +class BadRequestError(openai.BadRequestError): # type: 
ignore def __init__( - self, message, model, llm_provider, response: Optional[httpx.Response] = None + self, + message, + model, + llm_provider, + response: Optional[httpx.Response] = None, + litellm_debug_info: Optional[str] = None, ): self.status_code = 400 self.message = message self.model = model self.llm_provider = llm_provider + self.litellm_debug_info = litellm_debug_info response = response or httpx.Response( status_code=self.status_code, request=httpx.Request( @@ -69,46 +78,77 @@ class BadRequestError(BadRequestError): # type: ignore ) # Call the base class constructor with the parameters it needs -class UnprocessableEntityError(UnprocessableEntityError): # type: ignore - def __init__(self, message, model, llm_provider, response: httpx.Response): +class UnprocessableEntityError(openai.UnprocessableEntityError): # type: ignore + def __init__( + self, + message, + model, + llm_provider, + response: httpx.Response, + litellm_debug_info: Optional[str] = None, + ): self.status_code = 422 self.message = message self.model = model self.llm_provider = llm_provider + self.litellm_debug_info = litellm_debug_info super().__init__( self.message, response=response, body=None ) # Call the base class constructor with the parameters it needs -class Timeout(APITimeoutError): # type: ignore - def __init__(self, message, model, llm_provider): - self.status_code = 408 - self.message = message - self.model = model - self.llm_provider = llm_provider +class Timeout(openai.APITimeoutError): # type: ignore + def __init__( + self, message, model, llm_provider, litellm_debug_info: Optional[str] = None + ): request = httpx.Request(method="POST", url="https://api.openai.com/v1") super().__init__( request=request ) # Call the base class constructor with the parameters it needs + self.status_code = 408 + self.message = message + self.model = model + self.llm_provider = llm_provider + self.litellm_debug_info = litellm_debug_info + + # custom function to convert to str + def 
__str__(self): + return str(self.message) -class PermissionDeniedError(PermissionDeniedError): # type:ignore - def __init__(self, message, llm_provider, model, response: httpx.Response): +class PermissionDeniedError(openai.PermissionDeniedError): # type:ignore + def __init__( + self, + message, + llm_provider, + model, + response: httpx.Response, + litellm_debug_info: Optional[str] = None, + ): self.status_code = 403 self.message = message self.llm_provider = llm_provider self.model = model + self.litellm_debug_info = litellm_debug_info super().__init__( self.message, response=response, body=None ) # Call the base class constructor with the parameters it needs -class RateLimitError(RateLimitError): # type: ignore - def __init__(self, message, llm_provider, model, response: httpx.Response): +class RateLimitError(openai.RateLimitError): # type: ignore + def __init__( + self, + message, + llm_provider, + model, + response: httpx.Response, + litellm_debug_info: Optional[str] = None, + ): self.status_code = 429 self.message = message self.llm_provider = llm_provider self.modle = model + self.litellm_debug_info = litellm_debug_info super().__init__( self.message, response=response, body=None ) # Call the base class constructor with the parameters it needs @@ -116,11 +156,19 @@ class RateLimitError(RateLimitError): # type: ignore # sub class of rate limit error - meant to give more granularity for error handling context window exceeded errors class ContextWindowExceededError(BadRequestError): # type: ignore - def __init__(self, message, model, llm_provider, response: httpx.Response): + def __init__( + self, + message, + model, + llm_provider, + response: httpx.Response, + litellm_debug_info: Optional[str] = None, + ): self.status_code = 400 self.message = message self.model = model self.llm_provider = llm_provider + self.litellm_debug_info = litellm_debug_info super().__init__( message=self.message, model=self.model, # type: ignore @@ -131,11 +179,19 @@ class 
ContextWindowExceededError(BadRequestError): # type: ignore class ContentPolicyViolationError(BadRequestError): # type: ignore # Error code: 400 - {'error': {'code': 'content_policy_violation', 'message': 'Your request was rejected as a result of our safety system. Image descriptions generated from your prompt may contain text that is not allowed by our safety system. If you believe this was done in error, your request may succeed if retried, or by adjusting your prompt.', 'param': None, 'type': 'invalid_request_error'}} - def __init__(self, message, model, llm_provider, response: httpx.Response): + def __init__( + self, + message, + model, + llm_provider, + response: httpx.Response, + litellm_debug_info: Optional[str] = None, + ): self.status_code = 400 self.message = message self.model = model self.llm_provider = llm_provider + self.litellm_debug_info = litellm_debug_info super().__init__( message=self.message, model=self.model, # type: ignore @@ -144,51 +200,77 @@ class ContentPolicyViolationError(BadRequestError): # type: ignore ) # Call the base class constructor with the parameters it needs -class ServiceUnavailableError(APIStatusError): # type: ignore - def __init__(self, message, llm_provider, model, response: httpx.Response): +class ServiceUnavailableError(openai.APIStatusError): # type: ignore + def __init__( + self, + message, + llm_provider, + model, + response: httpx.Response, + litellm_debug_info: Optional[str] = None, + ): self.status_code = 503 self.message = message self.llm_provider = llm_provider self.model = model + self.litellm_debug_info = litellm_debug_info super().__init__( self.message, response=response, body=None ) # Call the base class constructor with the parameters it needs # raise this when the API returns an invalid response object - https://github.com/openai/openai-python/blob/1be14ee34a0f8e42d3f9aa5451aa4cb161f1781f/openai/api_requestor.py#L401 -class APIError(APIError): # type: ignore +class APIError(openai.APIError): # type: 
ignore def __init__( - self, status_code, message, llm_provider, model, request: httpx.Request + self, + status_code, + message, + llm_provider, + model, + request: httpx.Request, + litellm_debug_info: Optional[str] = None, ): self.status_code = status_code self.message = message self.llm_provider = llm_provider self.model = model + self.litellm_debug_info = litellm_debug_info super().__init__(self.message, request=request, body=None) # type: ignore # raised if an invalid request (not get, delete, put, post) is made -class APIConnectionError(APIConnectionError): # type: ignore - def __init__(self, message, llm_provider, model, request: httpx.Request): +class APIConnectionError(openai.APIConnectionError): # type: ignore + def __init__( + self, + message, + llm_provider, + model, + request: httpx.Request, + litellm_debug_info: Optional[str] = None, + ): self.message = message self.llm_provider = llm_provider self.model = model self.status_code = 500 + self.litellm_debug_info = litellm_debug_info super().__init__(message=self.message, request=request) # raised if an invalid request (not get, delete, put, post) is made -class APIResponseValidationError(APIResponseValidationError): # type: ignore - def __init__(self, message, llm_provider, model): +class APIResponseValidationError(openai.APIResponseValidationError): # type: ignore + def __init__( + self, message, llm_provider, model, litellm_debug_info: Optional[str] = None + ): self.message = message self.llm_provider = llm_provider self.model = model request = httpx.Request(method="POST", url="https://api.openai.com/v1") response = httpx.Response(status_code=500, request=request) + self.litellm_debug_info = litellm_debug_info super().__init__(response=response, body=None, message=message) -class OpenAIError(OpenAIError): # type: ignore +class OpenAIError(openai.OpenAIError): # type: ignore def __init__(self, original_exception): self.status_code = original_exception.http_status super().__init__( @@ -210,7 +292,7 @@ 
class BudgetExceededError(Exception): ## DEPRECATED ## -class InvalidRequestError(BadRequestError): # type: ignore +class InvalidRequestError(openai.BadRequestError): # type: ignore def __init__(self, message, model, llm_provider): self.status_code = 400 self.message = message diff --git a/litellm/integrations/aispend.py b/litellm/integrations/aispend.py index 2015d45dd..2fe8ea0df 100644 --- a/litellm/integrations/aispend.py +++ b/litellm/integrations/aispend.py @@ -1,9 +1,6 @@ #### What this does #### # On success + failure, log events to aispend.io import dotenv, os -import requests - -dotenv.load_dotenv() # Loading env variables using dotenv import traceback import datetime diff --git a/litellm/integrations/athina.py b/litellm/integrations/athina.py index 897cf6c8d..660dd51ef 100644 --- a/litellm/integrations/athina.py +++ b/litellm/integrations/athina.py @@ -4,18 +4,30 @@ import datetime class AthinaLogger: def __init__(self): import os + self.athina_api_key = os.getenv("ATHINA_API_KEY") self.headers = { "athina-api-key": self.athina_api_key, - "Content-Type": "application/json" + "Content-Type": "application/json", } self.athina_logging_url = "https://log.athina.ai/api/v1/log/inference" - self.additional_keys = ["environment", "prompt_slug", "customer_id", "customer_user_id", "session_id", "external_reference_id", "context", "expected_response", "user_query"] + self.additional_keys = [ + "environment", + "prompt_slug", + "customer_id", + "customer_user_id", + "session_id", + "external_reference_id", + "context", + "expected_response", + "user_query", + ] def log_event(self, kwargs, response_obj, start_time, end_time, print_verbose): - import requests + import requests # type: ignore import json import traceback + try: response_json = response_obj.model_dump() if response_obj else {} data = { @@ -23,32 +35,51 @@ class AthinaLogger: "request": kwargs, "response": response_json, "prompt_tokens": response_json.get("usage", {}).get("prompt_tokens"), - 
"completion_tokens": response_json.get("usage", {}).get("completion_tokens"), + "completion_tokens": response_json.get("usage", {}).get( + "completion_tokens" + ), "total_tokens": response_json.get("usage", {}).get("total_tokens"), } - - if type(end_time) == datetime.datetime and type(start_time) == datetime.datetime: - data["response_time"] = int((end_time - start_time).total_seconds() * 1000) + + if ( + type(end_time) == datetime.datetime + and type(start_time) == datetime.datetime + ): + data["response_time"] = int( + (end_time - start_time).total_seconds() * 1000 + ) if "messages" in kwargs: data["prompt"] = kwargs.get("messages", None) # Directly add tools or functions if present optional_params = kwargs.get("optional_params", {}) - data.update((k, v) for k, v in optional_params.items() if k in ["tools", "functions"]) + data.update( + (k, v) + for k, v in optional_params.items() + if k in ["tools", "functions"] + ) # Add additional metadata keys - metadata = kwargs.get("litellm_params", {}).get("metadata", {}) + metadata = kwargs.get("litellm_params", {}).get("metadata", {}) if metadata: for key in self.additional_keys: if key in metadata: data[key] = metadata[key] - response = requests.post(self.athina_logging_url, headers=self.headers, data=json.dumps(data, default=str)) + response = requests.post( + self.athina_logging_url, + headers=self.headers, + data=json.dumps(data, default=str), + ) if response.status_code != 200: - print_verbose(f"Athina Logger Error - {response.text}, {response.status_code}") + print_verbose( + f"Athina Logger Error - {response.text}, {response.status_code}" + ) else: print_verbose(f"Athina Logger Succeeded - {response.text}") except Exception as e: - print_verbose(f"Athina Logger Error - {e}, Stack trace: {traceback.format_exc()}") - pass \ No newline at end of file + print_verbose( + f"Athina Logger Error - {e}, Stack trace: {traceback.format_exc()}" + ) + pass diff --git a/litellm/integrations/berrispend.py 
b/litellm/integrations/berrispend.py index 7d91ffca7..7d30b706c 100644 --- a/litellm/integrations/berrispend.py +++ b/litellm/integrations/berrispend.py @@ -1,9 +1,8 @@ #### What this does #### # On success + failure, log events to aispend.io import dotenv, os -import requests +import requests # type: ignore -dotenv.load_dotenv() # Loading env variables using dotenv import traceback import datetime diff --git a/litellm/integrations/clickhouse.py b/litellm/integrations/clickhouse.py index d5000e5c4..0c38b8626 100644 --- a/litellm/integrations/clickhouse.py +++ b/litellm/integrations/clickhouse.py @@ -3,14 +3,11 @@ #### What this does #### # On success, logs events to Promptlayer import dotenv, os -import requests from litellm.proxy._types import UserAPIKeyAuth from litellm.caching import DualCache from typing import Literal, Union - -dotenv.load_dotenv() # Loading env variables using dotenv import traceback @@ -19,8 +16,6 @@ import traceback import dotenv, os import requests - -dotenv.load_dotenv() # Loading env variables using dotenv import traceback import datetime, subprocess, sys import litellm, uuid diff --git a/litellm/integrations/custom_logger.py b/litellm/integrations/custom_logger.py index 503b3ff9d..d50882592 100644 --- a/litellm/integrations/custom_logger.py +++ b/litellm/integrations/custom_logger.py @@ -1,14 +1,11 @@ #### What this does #### # On success, logs events to Promptlayer import dotenv, os -import requests from litellm.proxy._types import UserAPIKeyAuth from litellm.caching import DualCache -from typing import Literal, Union - -dotenv.load_dotenv() # Loading env variables using dotenv +from typing import Literal, Union, Optional import traceback @@ -46,6 +43,17 @@ class CustomLogger: # https://docs.litellm.ai/docs/observability/custom_callbac async def async_log_failure_event(self, kwargs, response_obj, start_time, end_time): pass + #### PRE-CALL CHECKS - router/proxy only #### + """ + Allows usage-based-routing-v2 to run pre-call rpm checks 
within the picked deployment's semaphore (concurrency-safe tpm/rpm checks). + """ + + async def async_pre_call_check(self, deployment: dict) -> Optional[dict]: + pass + + def pre_call_check(self, deployment: dict) -> Optional[dict]: + pass + #### CALL HOOKS - proxy only #### """ Control the modify incoming / outgoung data before calling the model diff --git a/litellm/integrations/datadog.py b/litellm/integrations/datadog.py index f5db5bf1f..6d5e08faf 100644 --- a/litellm/integrations/datadog.py +++ b/litellm/integrations/datadog.py @@ -2,9 +2,7 @@ # On success + failure, log events to Supabase import dotenv, os -import requests - -dotenv.load_dotenv() # Loading env variables using dotenv +import requests # type: ignore import traceback import datetime, subprocess, sys import litellm, uuid diff --git a/litellm/integrations/dynamodb.py b/litellm/integrations/dynamodb.py index 2ed6c3f9f..21ccabe4b 100644 --- a/litellm/integrations/dynamodb.py +++ b/litellm/integrations/dynamodb.py @@ -2,9 +2,7 @@ # On success + failure, log events to Supabase import dotenv, os -import requests - -dotenv.load_dotenv() # Loading env variables using dotenv +import requests # type: ignore import traceback import datetime, subprocess, sys import litellm, uuid diff --git a/litellm/integrations/greenscale.py b/litellm/integrations/greenscale.py new file mode 100644 index 000000000..78190d69d --- /dev/null +++ b/litellm/integrations/greenscale.py @@ -0,0 +1,68 @@ +import requests # type: ignore +import json +import traceback +from datetime import datetime, timezone + + +class GreenscaleLogger: + def __init__(self): + import os + + self.greenscale_api_key = os.getenv("GREENSCALE_API_KEY") + self.headers = { + "api-key": self.greenscale_api_key, + "Content-Type": "application/json", + } + self.greenscale_logging_url = os.getenv("GREENSCALE_ENDPOINT") + + def log_event(self, kwargs, response_obj, start_time, end_time, print_verbose): + try: + response_json = response_obj.model_dump() if 
response_obj else {} + data = { + "modelId": kwargs.get("model"), + "inputTokenCount": response_json.get("usage", {}).get("prompt_tokens"), + "outputTokenCount": response_json.get("usage", {}).get( + "completion_tokens" + ), + } + data["timestamp"] = datetime.now(timezone.utc).strftime( + "%Y-%m-%dT%H:%M:%SZ" + ) + + if type(end_time) == datetime and type(start_time) == datetime: + data["invocationLatency"] = int( + (end_time - start_time).total_seconds() * 1000 + ) + + # Add additional metadata keys to tags + tags = [] + metadata = kwargs.get("litellm_params", {}).get("metadata", {}) + for key, value in metadata.items(): + if key.startswith("greenscale"): + if key == "greenscale_project": + data["project"] = value + elif key == "greenscale_application": + data["application"] = value + else: + tags.append( + {"key": key.replace("greenscale_", ""), "value": str(value)} + ) + + data["tags"] = tags + + response = requests.post( + self.greenscale_logging_url, + headers=self.headers, + data=json.dumps(data, default=str), + ) + if response.status_code != 200: + print_verbose( + f"Greenscale Logger Error - {response.text}, {response.status_code}" + ) + else: + print_verbose(f"Greenscale Logger Succeeded - {response.text}") + except Exception as e: + print_verbose( + f"Greenscale Logger Error - {e}, Stack trace: {traceback.format_exc()}" + ) + pass diff --git a/litellm/integrations/helicone.py b/litellm/integrations/helicone.py index cb8663773..85e73258e 100644 --- a/litellm/integrations/helicone.py +++ b/litellm/integrations/helicone.py @@ -1,10 +1,8 @@ #### What this does #### # On success, logs events to Helicone import dotenv, os -import requests +import requests # type: ignore import litellm - -dotenv.load_dotenv() # Loading env variables using dotenv import traceback diff --git a/litellm/integrations/lago.py b/litellm/integrations/lago.py new file mode 100644 index 000000000..e6d38f530 --- /dev/null +++ b/litellm/integrations/lago.py @@ -0,0 +1,179 @@ +# What is 
this? +## On Success events log cost to Lago - https://github.com/BerriAI/litellm/issues/3639 + +import dotenv, os, json +import litellm +import traceback, httpx +from litellm.integrations.custom_logger import CustomLogger +from litellm.llms.custom_httpx.http_handler import AsyncHTTPHandler, HTTPHandler +import uuid +from typing import Optional, Literal + + +def get_utc_datetime(): + import datetime as dt + from datetime import datetime + + if hasattr(dt, "UTC"): + return datetime.now(dt.UTC) # type: ignore + else: + return datetime.utcnow() # type: ignore + + +class LagoLogger(CustomLogger): + def __init__(self) -> None: + super().__init__() + self.validate_environment() + self.async_http_handler = AsyncHTTPHandler() + self.sync_http_handler = HTTPHandler() + + def validate_environment(self): + """ + Expects + LAGO_API_BASE, + LAGO_API_KEY, + LAGO_API_EVENT_CODE, + + Optional: + LAGO_API_CHARGE_BY + + in the environment + """ + missing_keys = [] + if os.getenv("LAGO_API_KEY", None) is None: + missing_keys.append("LAGO_API_KEY") + + if os.getenv("LAGO_API_BASE", None) is None: + missing_keys.append("LAGO_API_BASE") + + if os.getenv("LAGO_API_EVENT_CODE", None) is None: + missing_keys.append("LAGO_API_EVENT_CODE") + + if len(missing_keys) > 0: + raise Exception("Missing keys={} in environment.".format(missing_keys)) + + def _common_logic(self, kwargs: dict, response_obj) -> dict: + call_id = response_obj.get("id", kwargs.get("litellm_call_id")) + dt = get_utc_datetime().isoformat() + cost = kwargs.get("response_cost", None) + model = kwargs.get("model") + usage = {} + + if ( + isinstance(response_obj, litellm.ModelResponse) + or isinstance(response_obj, litellm.EmbeddingResponse) + ) and hasattr(response_obj, "usage"): + usage = { + "prompt_tokens": response_obj["usage"].get("prompt_tokens", 0), + "completion_tokens": response_obj["usage"].get("completion_tokens", 0), + "total_tokens": response_obj["usage"].get("total_tokens"), + } + + litellm_params = 
kwargs.get("litellm_params", {}) or {} + proxy_server_request = litellm_params.get("proxy_server_request") or {} + end_user_id = proxy_server_request.get("body", {}).get("user", None) + user_id = litellm_params["metadata"].get("user_api_key_user_id", None) + team_id = litellm_params["metadata"].get("user_api_key_team_id", None) + org_id = litellm_params["metadata"].get("user_api_key_org_id", None) + + charge_by: Literal["end_user_id", "team_id", "user_id"] = "end_user_id" + external_customer_id: Optional[str] = None + + if os.getenv("LAGO_API_CHARGE_BY", None) is not None and isinstance( + os.environ["LAGO_API_CHARGE_BY"], str + ): + if os.environ["LAGO_API_CHARGE_BY"] in [ + "end_user_id", + "user_id", + "team_id", + ]: + charge_by = os.environ["LAGO_API_CHARGE_BY"] # type: ignore + else: + raise Exception("invalid LAGO_API_CHARGE_BY set") + + if charge_by == "end_user_id": + external_customer_id = end_user_id + elif charge_by == "team_id": + external_customer_id = team_id + elif charge_by == "user_id": + external_customer_id = user_id + + if external_customer_id is None: + raise Exception("External Customer ID is not set") + + return { + "event": { + "transaction_id": str(uuid.uuid4()), + "external_customer_id": external_customer_id, + "code": os.getenv("LAGO_API_EVENT_CODE"), + "properties": {"model": model, "response_cost": cost, **usage}, + } + } + + def log_success_event(self, kwargs, response_obj, start_time, end_time): + _url = os.getenv("LAGO_API_BASE") + assert _url is not None and isinstance( + _url, str + ), "LAGO_API_BASE missing or not set correctly. 
LAGO_API_BASE={}".format(_url) + if _url.endswith("/"): + _url += "api/v1/events" + else: + _url += "/api/v1/events" + + api_key = os.getenv("LAGO_API_KEY") + + _data = self._common_logic(kwargs=kwargs, response_obj=response_obj) + _headers = { + "Content-Type": "application/json", + "Authorization": "Bearer {}".format(api_key), + } + + try: + response = self.sync_http_handler.post( + url=_url, + data=json.dumps(_data), + headers=_headers, + ) + + response.raise_for_status() + except Exception as e: + if hasattr(response, "text"): + litellm.print_verbose(f"\nError Message: {response.text}") + raise e + + async def async_log_success_event(self, kwargs, response_obj, start_time, end_time): + try: + _url = os.getenv("LAGO_API_BASE") + assert _url is not None and isinstance( + _url, str + ), "LAGO_API_BASE missing or not set correctly. LAGO_API_BASE={}".format( + _url + ) + if _url.endswith("/"): + _url += "api/v1/events" + else: + _url += "/api/v1/events" + + api_key = os.getenv("LAGO_API_KEY") + + _data = self._common_logic(kwargs=kwargs, response_obj=response_obj) + _headers = { + "Content-Type": "application/json", + "Authorization": "Bearer {}".format(api_key), + } + except Exception as e: + raise e + + response: Optional[httpx.Response] = None + try: + response = await self.async_http_handler.post( + url=_url, + data=json.dumps(_data), + headers=_headers, + ) + + response.raise_for_status() + except Exception as e: + if response is not None and hasattr(response, "text"): + litellm.print_verbose(f"\nError Message: {response.text}") + raise e diff --git a/litellm/integrations/langfuse.py b/litellm/integrations/langfuse.py index 91fcb8cb7..f4a581eb9 100644 --- a/litellm/integrations/langfuse.py +++ b/litellm/integrations/langfuse.py @@ -1,8 +1,6 @@ #### What this does #### # On success, logs events to Langfuse -import dotenv, os - -dotenv.load_dotenv() # Loading env variables using dotenv +import os import copy import traceback from packaging.version import Version 
@@ -12,9 +10,12 @@ import litellm class LangFuseLogger: # Class variables or attributes - def __init__(self, langfuse_public_key=None, langfuse_secret=None): + def __init__( + self, langfuse_public_key=None, langfuse_secret=None, flush_interval=1 + ): try: from langfuse import Langfuse + import langfuse except Exception as e: raise Exception( f"\033[91mLangfuse not installed, try running 'pip install langfuse' to fix this error: {e}\n{traceback.format_exc()}\033[0m" @@ -25,14 +26,28 @@ class LangFuseLogger: self.langfuse_host = os.getenv("LANGFUSE_HOST", "https://cloud.langfuse.com") self.langfuse_release = os.getenv("LANGFUSE_RELEASE") self.langfuse_debug = os.getenv("LANGFUSE_DEBUG") - self.Langfuse = Langfuse( - public_key=self.public_key, - secret_key=self.secret_key, - host=self.langfuse_host, - release=self.langfuse_release, - debug=self.langfuse_debug, - flush_interval=1, # flush interval in seconds - ) + + parameters = { + "public_key": self.public_key, + "secret_key": self.secret_key, + "host": self.langfuse_host, + "release": self.langfuse_release, + "debug": self.langfuse_debug, + "flush_interval": flush_interval, # flush interval in seconds + } + + if Version(langfuse.version.__version__) >= Version("2.6.0"): + parameters["sdk_integration"] = "litellm" + + self.Langfuse = Langfuse(**parameters) + + # set the current langfuse project id in the environ + # this is used by Alerting to link to the correct project + try: + project_id = self.Langfuse.client.projects.get().data[0].id + os.environ["LANGFUSE_PROJECT_ID"] = project_id + except: + project_id = None if os.getenv("UPSTREAM_LANGFUSE_SECRET_KEY") is not None: self.upstream_langfuse_secret_key = os.getenv( @@ -69,13 +84,14 @@ class LangFuseLogger: print_verbose, level="DEFAULT", status_message=None, - ): + ) -> dict: # Method definition try: print_verbose( f"Langfuse Logging - Enters logging function for model {kwargs}" ) + litellm_params = kwargs.get("litellm_params", {}) metadata = ( 
litellm_params.get("metadata", {}) or {} @@ -129,10 +145,13 @@ class LangFuseLogger: input = prompt output = response_obj["data"] print_verbose(f"OUTPUT IN LANGFUSE: {output}; original: {response_obj}") + trace_id = None + generation_id = None if self._is_langfuse_v2(): - self._log_langfuse_v2( + trace_id, generation_id = self._log_langfuse_v2( user_id, metadata, + litellm_params, output, start_time, end_time, @@ -159,10 +178,12 @@ class LangFuseLogger: f"Langfuse Layer Logging - final response object: {response_obj}" ) verbose_logger.info(f"Langfuse Layer Logging - logging success") + + return {"trace_id": trace_id, "generation_id": generation_id} except: traceback.print_exc() verbose_logger.debug(f"Langfuse Layer Error - {traceback.format_exc()}") - pass + return {"trace_id": None, "generation_id": None} async def _async_log_event( self, kwargs, response_obj, start_time, end_time, user_id, print_verbose @@ -224,6 +245,7 @@ class LangFuseLogger: self, user_id, metadata, + litellm_params, output, start_time, end_time, @@ -233,11 +255,28 @@ class LangFuseLogger: response_obj, level, print_verbose, - ): + ) -> tuple: import langfuse try: tags = [] + try: + metadata = copy.deepcopy( + metadata + ) # Avoid modifying the original metadata + except: + new_metadata = {} + for key, value in metadata.items(): + if ( + isinstance(value, list) + or isinstance(value, dict) + or isinstance(value, str) + or isinstance(value, int) + or isinstance(value, float) + ): + new_metadata[key] = copy.deepcopy(value) + metadata = new_metadata + supports_tags = Version(langfuse.version.__version__) >= Version("2.6.3") supports_prompt = Version(langfuse.version.__version__) >= Version("2.7.3") supports_costs = Version(langfuse.version.__version__) >= Version("2.7.3") @@ -248,43 +287,22 @@ class LangFuseLogger: print_verbose(f"Langfuse Layer Logging - logging to langfuse v2 ") if supports_tags: - metadata_tags = metadata.get("tags", []) + metadata_tags = metadata.pop("tags", []) tags = 
metadata_tags - trace_name = metadata.get("trace_name", None) - if trace_name is None: - # just log `litellm-{call_type}` as the trace name - trace_name = f"litellm-{kwargs.get('call_type', 'completion')}" - - trace_params = { - "name": trace_name, - "input": input, - "user_id": metadata.get("trace_user_id", user_id), - "id": metadata.get("trace_id", None), - "session_id": metadata.get("session_id", None), - } - - if level == "ERROR": - trace_params["status_message"] = output - else: - trace_params["output"] = output - - cost = kwargs.get("response_cost", None) - print_verbose(f"trace: {cost}") - # Clean Metadata before logging - never log raw metadata # the raw metadata can contain circular references which leads to infinite recursion # we clean out all extra litellm metadata params before logging clean_metadata = {} if isinstance(metadata, dict): for key, value in metadata.items(): - # generate langfuse tags - if key in [ - "user_api_key", - "user_api_key_user_id", - "user_api_key_team_id", - "semantic-similarity", - ]: + + # generate langfuse tags - Default Tags sent to Langfuse from LiteLLM Proxy + if ( + litellm._langfuse_default_tags is not None + and isinstance(litellm._langfuse_default_tags, list) + and key in litellm._langfuse_default_tags + ): tags.append(f"{key}:{value}") # clean litellm metadata before logging @@ -298,13 +316,124 @@ class LangFuseLogger: else: clean_metadata[key] = value + session_id = clean_metadata.pop("session_id", None) + trace_name = clean_metadata.pop("trace_name", None) + trace_id = clean_metadata.pop("trace_id", None) + existing_trace_id = clean_metadata.pop("existing_trace_id", None) + update_trace_keys = clean_metadata.pop("update_trace_keys", []) + debug = clean_metadata.pop("debug_langfuse", None) + mask_input = clean_metadata.pop("mask_input", False) + mask_output = clean_metadata.pop("mask_output", False) + + if trace_name is None and existing_trace_id is None: + # just log `litellm-{call_type}` as the trace name + ## DO 
NOT SET TRACE_NAME if trace-id set. this can lead to overwriting of past traces. + trace_name = f"litellm-{kwargs.get('call_type', 'completion')}" + + if existing_trace_id is not None: + trace_params = {"id": existing_trace_id} + + # Update the following keys for this trace + for metadata_param_key in update_trace_keys: + trace_param_key = metadata_param_key.replace("trace_", "") + if trace_param_key not in trace_params: + updated_trace_value = clean_metadata.pop( + metadata_param_key, None + ) + if updated_trace_value is not None: + trace_params[trace_param_key] = updated_trace_value + + # Pop the trace specific keys that would have been popped if there were a new trace + for key in list( + filter(lambda key: key.startswith("trace_"), clean_metadata.keys()) + ): + clean_metadata.pop(key, None) + + # Special keys that are found in the function arguments and not the metadata + if "input" in update_trace_keys: + trace_params["input"] = input if not mask_input else "redacted-by-litellm" + if "output" in update_trace_keys: + trace_params["output"] = output if not mask_output else "redacted-by-litellm" + else: # don't overwrite an existing trace + trace_params = { + "id": trace_id, + "name": trace_name, + "session_id": session_id, + "input": input if not mask_input else "redacted-by-litellm", + "version": clean_metadata.pop( + "trace_version", clean_metadata.get("version", None) + ), # If provided just version, it will applied to the trace as well, if applied a trace version it will take precedence + "user_id": user_id, + } + for key in list( + filter(lambda key: key.startswith("trace_"), clean_metadata.keys()) + ): + trace_params[key.replace("trace_", "")] = clean_metadata.pop( + key, None + ) + + if level == "ERROR": + trace_params["status_message"] = output + else: + trace_params["output"] = output if not mask_output else "redacted-by-litellm" + + if debug == True or (isinstance(debug, str) and debug.lower() == "true"): + if "metadata" in trace_params: + # log the 
raw_metadata in the trace + trace_params["metadata"]["metadata_passed_to_litellm"] = metadata + else: + trace_params["metadata"] = {"metadata_passed_to_litellm": metadata} + + cost = kwargs.get("response_cost", None) + print_verbose(f"trace: {cost}") + + if ( + litellm._langfuse_default_tags is not None + and isinstance(litellm._langfuse_default_tags, list) + and "proxy_base_url" in litellm._langfuse_default_tags + ): + proxy_base_url = os.environ.get("PROXY_BASE_URL", None) + if proxy_base_url is not None: + tags.append(f"proxy_base_url:{proxy_base_url}") + + api_base = litellm_params.get("api_base", None) + if api_base: + clean_metadata["api_base"] = api_base + + vertex_location = kwargs.get("vertex_location", None) + if vertex_location: + clean_metadata["vertex_location"] = vertex_location + + aws_region_name = kwargs.get("aws_region_name", None) + if aws_region_name: + clean_metadata["aws_region_name"] = aws_region_name + if supports_tags: if "cache_hit" in kwargs: if kwargs["cache_hit"] is None: kwargs["cache_hit"] = False tags.append(f"cache_hit:{kwargs['cache_hit']}") - trace_params.update({"tags": tags}) + clean_metadata["cache_hit"] = kwargs["cache_hit"] + if existing_trace_id is None: + trace_params.update({"tags": tags}) + proxy_server_request = litellm_params.get("proxy_server_request", None) + if proxy_server_request: + method = proxy_server_request.get("method", None) + url = proxy_server_request.get("url", None) + headers = proxy_server_request.get("headers", None) + clean_headers = {} + if headers: + for key, value in headers.items(): + # these headers can leak our API keys and/or JWT tokens + if key.lower() not in ["authorization", "cookie", "referer"]: + clean_headers[key] = value + + clean_metadata["request"] = { + "method": method, + "url": url, + "headers": clean_headers, + } trace = self.Langfuse.trace(**trace_params) generation_id = None @@ -316,36 +445,70 @@ class LangFuseLogger: "completion_tokens": 
response_obj["usage"]["completion_tokens"], "total_cost": cost if supports_costs else None, } - generation_name = metadata.get("generation_name", None) + generation_name = clean_metadata.pop("generation_name", None) if generation_name is None: # just log `litellm-{call_type}` as the generation name generation_name = f"litellm-{kwargs.get('call_type', 'completion')}" + if response_obj is not None and "system_fingerprint" in response_obj: + system_fingerprint = response_obj.get("system_fingerprint", None) + else: + system_fingerprint = None + + if system_fingerprint is not None: + optional_params["system_fingerprint"] = system_fingerprint + generation_params = { "name": generation_name, - "id": metadata.get("generation_id", generation_id), - "startTime": start_time, - "endTime": end_time, + "id": clean_metadata.pop("generation_id", generation_id), + "start_time": start_time, + "end_time": end_time, "model": kwargs["model"], - "modelParameters": optional_params, - "input": input, - "output": output, + "model_parameters": optional_params, + "input": input if not mask_input else "redacted-by-litellm", + "output": output if not mask_output else "redacted-by-litellm", "usage": usage, "metadata": clean_metadata, "level": level, + "version": clean_metadata.pop("version", None), } if supports_prompt: - generation_params["prompt"] = metadata.get("prompt", None) + user_prompt = clean_metadata.pop("prompt", None) + if user_prompt is None: + pass + elif isinstance(user_prompt, dict): + from langfuse.model import ( + TextPromptClient, + ChatPromptClient, + Prompt_Text, + Prompt_Chat, + ) + + if user_prompt.get("type", "") == "chat": + _prompt_chat = Prompt_Chat(**user_prompt) + generation_params["prompt"] = ChatPromptClient( + prompt=_prompt_chat + ) + elif user_prompt.get("type", "") == "text": + _prompt_text = Prompt_Text(**user_prompt) + generation_params["prompt"] = TextPromptClient( + prompt=_prompt_text + ) + else: + generation_params["prompt"] = user_prompt if output is 
not None and isinstance(output, str) and level == "ERROR": - generation_params["statusMessage"] = output + generation_params["status_message"] = output if supports_completion_start_time: generation_params["completion_start_time"] = kwargs.get( "completion_start_time", None ) - trace.generation(**generation_params) + generation_client = trace.generation(**generation_params) + + return generation_client.trace_id, generation_id except Exception as e: verbose_logger.debug(f"Langfuse Layer Error - {traceback.format_exc()}") + return None, None diff --git a/litellm/integrations/langsmith.py b/litellm/integrations/langsmith.py index 72eb7a27e..3e25b4ee7 100644 --- a/litellm/integrations/langsmith.py +++ b/litellm/integrations/langsmith.py @@ -1,12 +1,22 @@ #### What this does #### # On success, logs events to Langsmith -import dotenv, os -import requests -import requests +import dotenv, os # type: ignore +import requests # type: ignore from datetime import datetime - -dotenv.load_dotenv() # Loading env variables using dotenv import traceback +import asyncio +import types +from pydantic import BaseModel # type: ignore + + +def is_serializable(value): + non_serializable_types = ( + types.CoroutineType, + types.FunctionType, + types.GeneratorType, + BaseModel, + ) + return not isinstance(value, non_serializable_types) class LangsmithLogger: @@ -21,7 +31,9 @@ class LangsmithLogger: def log_event(self, kwargs, response_obj, start_time, end_time, print_verbose): # Method definition # inspired by Langsmith http api here: https://github.com/langchain-ai/langsmith-cookbook/blob/main/tracing-examples/rest/rest.ipynb - metadata = kwargs.get('litellm_params', {}).get("metadata", {}) or {} # if metadata is None + metadata = ( + kwargs.get("litellm_params", {}).get("metadata", {}) or {} + ) # if metadata is None # set project name and run_name for langsmith logging # users can pass project_name and run name to litellm.completion() @@ -32,6 +44,8 @@ class LangsmithLogger: print_verbose( 
f"Langsmith Logging - project_name: {project_name}, run_name {run_name}" ) + langsmith_base_url = os.getenv("LANGSMITH_BASE_URL", "https://api.smith.langchain.com") + try: print_verbose( f"Langsmith Logging - Enters logging function for model {kwargs}" @@ -51,26 +65,43 @@ class LangsmithLogger: new_kwargs = {} for key in kwargs: value = kwargs[key] - if key == "start_time" or key == "end_time": + if key == "start_time" or key == "end_time" or value is None: pass elif type(value) == datetime.datetime: new_kwargs[key] = value.isoformat() - elif type(value) != dict: + elif type(value) != dict and is_serializable(value=value): new_kwargs[key] = value - requests.post( - "https://api.smith.langchain.com/runs", - json={ - "name": run_name, - "run_type": "llm", # this should always be llm, since litellm always logs llm calls. Langsmith allow us to log "chain" - "inputs": {**new_kwargs}, - "outputs": response_obj.json(), - "session_name": project_name, - "start_time": start_time, - "end_time": end_time, - }, + if isinstance(response_obj, BaseModel): + try: + response_obj = response_obj.model_dump() + except: + response_obj = response_obj.dict() # type: ignore + + data = { + "name": run_name, + "run_type": "llm", # this should always be llm, since litellm always logs llm calls. Langsmith allow us to log "chain" + "inputs": new_kwargs, + "outputs": response_obj, + "session_name": project_name, + "start_time": start_time, + "end_time": end_time, + } + + url = f"{langsmith_base_url}/runs" + print_verbose( + f"Langsmith Logging - About to send data to {url} ..." 
+ ) + response = requests.post( + url=url, + json=data, headers={"x-api-key": self.langsmith_api_key}, ) + + if response.status_code >= 300: + print_verbose(f"Error: {response.status_code}") + else: + print_verbose("Run successfully created") print_verbose( f"Langsmith Layer Logging - final response object: {response_obj}" ) diff --git a/litellm/integrations/lunary.py b/litellm/integrations/lunary.py index 6ddf2ca59..2e16e44a1 100644 --- a/litellm/integrations/lunary.py +++ b/litellm/integrations/lunary.py @@ -2,14 +2,10 @@ # On success + failure, log events to lunary.ai from datetime import datetime, timezone import traceback -import dotenv import importlib -import sys import packaging -dotenv.load_dotenv() - # convert to {completion: xx, tokens: xx} def parse_usage(usage): @@ -18,13 +14,33 @@ def parse_usage(usage): "prompt": usage["prompt_tokens"] if "prompt_tokens" in usage else 0, } +def parse_tool_calls(tool_calls): + if tool_calls is None: + return None + + def clean_tool_call(tool_call): + + serialized = { + "type": tool_call.type, + "id": tool_call.id, + "function": { + "name": tool_call.function.name, + "arguments": tool_call.function.arguments, + } + } + + return serialized + + return [clean_tool_call(tool_call) for tool_call in tool_calls] + def parse_messages(input): + if input is None: return None def clean_message(message): - # if is strin, return as is + # if is string, return as is if isinstance(message, str): return message @@ -38,9 +54,7 @@ def parse_messages(input): # Only add tool_calls and function_call to res if they are set if message.get("tool_calls"): - serialized["tool_calls"] = message.get("tool_calls") - if message.get("function_call"): - serialized["function_call"] = message.get("function_call") + serialized["tool_calls"] = parse_tool_calls(message.get("tool_calls")) return serialized @@ -62,14 +76,16 @@ class LunaryLogger: version = importlib.metadata.version("lunary") # if version < 0.1.43 then raise ImportError if 
packaging.version.Version(version) < packaging.version.Version("0.1.43"): - print( + print( # noqa "Lunary version outdated. Required: >= 0.1.43. Upgrade via 'pip install lunary --upgrade'" ) raise ImportError self.lunary_client = lunary except ImportError: - print("Lunary not installed. Please install it using 'pip install lunary'") + print( # noqa + "Lunary not installed. Please install it using 'pip install lunary'" + ) # noqa raise ImportError def log_event( @@ -93,8 +109,13 @@ class LunaryLogger: print_verbose(f"Lunary Logging - Logging request for model {model}") litellm_params = kwargs.get("litellm_params", {}) + optional_params = kwargs.get("optional_params", {}) metadata = litellm_params.get("metadata", {}) or {} + if optional_params: + # merge into extra + extra = {**extra, **optional_params} + tags = litellm_params.pop("tags", None) or [] if extra: @@ -104,7 +125,7 @@ class LunaryLogger: # keep only serializable types for param, value in extra.items(): - if not isinstance(value, (str, int, bool, float)): + if not isinstance(value, (str, int, bool, float)) and param != "tools": try: extra[param] = str(value) except: @@ -140,7 +161,7 @@ class LunaryLogger: metadata=metadata, runtime="litellm", tags=tags, - extra=extra, + params=extra, ) self.lunary_client.track_event( diff --git a/litellm/integrations/openmeter.py b/litellm/integrations/openmeter.py new file mode 100644 index 000000000..2c470d6f4 --- /dev/null +++ b/litellm/integrations/openmeter.py @@ -0,0 +1,128 @@ +# What is this? 
+## On Success events log cost to OpenMeter - https://github.com/BerriAI/litellm/issues/1268 + +import dotenv, os, json +import litellm +import traceback +from litellm.integrations.custom_logger import CustomLogger +from litellm.llms.custom_httpx.http_handler import AsyncHTTPHandler, HTTPHandler +import uuid + + +def get_utc_datetime(): + import datetime as dt + from datetime import datetime + + if hasattr(dt, "UTC"): + return datetime.now(dt.UTC) # type: ignore + else: + return datetime.utcnow() # type: ignore + + +class OpenMeterLogger(CustomLogger): + def __init__(self) -> None: + super().__init__() + self.validate_environment() + self.async_http_handler = AsyncHTTPHandler() + self.sync_http_handler = HTTPHandler() + + def validate_environment(self): + """ + Expects + OPENMETER_API_ENDPOINT, + OPENMETER_API_KEY, + + in the environment + """ + missing_keys = [] + if os.getenv("OPENMETER_API_KEY", None) is None: + missing_keys.append("OPENMETER_API_KEY") + + if len(missing_keys) > 0: + raise Exception("Missing keys={} in environment.".format(missing_keys)) + + def _common_logic(self, kwargs: dict, response_obj): + call_id = response_obj.get("id", kwargs.get("litellm_call_id")) + dt = get_utc_datetime().isoformat() + cost = kwargs.get("response_cost", None) + model = kwargs.get("model") + usage = {} + if ( + isinstance(response_obj, litellm.ModelResponse) + or isinstance(response_obj, litellm.EmbeddingResponse) + ) and hasattr(response_obj, "usage"): + usage = { + "prompt_tokens": response_obj["usage"].get("prompt_tokens", 0), + "completion_tokens": response_obj["usage"].get("completion_tokens", 0), + "total_tokens": response_obj["usage"].get("total_tokens"), + } + + subject = (kwargs.get("user", None),) # end-user passed in via 'user' param + if not subject: + raise Exception("OpenMeter: user is required") + + return { + "specversion": "1.0", + "type": os.getenv("OPENMETER_EVENT_TYPE", "litellm_tokens"), + "id": call_id, + "time": dt, + "subject": subject, + 
"source": "litellm-proxy", + "data": {"model": model, "cost": cost, **usage}, + } + + def log_success_event(self, kwargs, response_obj, start_time, end_time): + _url = os.getenv("OPENMETER_API_ENDPOINT", "https://openmeter.cloud") + if _url.endswith("/"): + _url += "api/v1/events" + else: + _url += "/api/v1/events" + + api_key = os.getenv("OPENMETER_API_KEY") + + _data = self._common_logic(kwargs=kwargs, response_obj=response_obj) + _headers = { + "Content-Type": "application/cloudevents+json", + "Authorization": "Bearer {}".format(api_key), + } + + try: + response = self.sync_http_handler.post( + url=_url, + data=json.dumps(_data), + headers=_headers, + ) + + response.raise_for_status() + except Exception as e: + if hasattr(response, "text"): + litellm.print_verbose(f"\nError Message: {response.text}") + raise e + + async def async_log_success_event(self, kwargs, response_obj, start_time, end_time): + _url = os.getenv("OPENMETER_API_ENDPOINT", "https://openmeter.cloud") + if _url.endswith("/"): + _url += "api/v1/events" + else: + _url += "/api/v1/events" + + api_key = os.getenv("OPENMETER_API_KEY") + + _data = self._common_logic(kwargs=kwargs, response_obj=response_obj) + _headers = { + "Content-Type": "application/cloudevents+json", + "Authorization": "Bearer {}".format(api_key), + } + + try: + response = await self.async_http_handler.post( + url=_url, + data=json.dumps(_data), + headers=_headers, + ) + + response.raise_for_status() + except Exception as e: + if hasattr(response, "text"): + litellm.print_verbose(f"\nError Message: {response.text}") + raise e diff --git a/litellm/integrations/prometheus.py b/litellm/integrations/prometheus.py index 28e84e6f8..6fbc6ca4c 100644 --- a/litellm/integrations/prometheus.py +++ b/litellm/integrations/prometheus.py @@ -1,11 +1,9 @@ # used for /metrics endpoint on LiteLLM Proxy #### What this does #### -# On success + failure, log events to Supabase +# On success, log events to Prometheus import dotenv, os -import requests 
- -dotenv.load_dotenv() # Loading env variables using dotenv +import requests # type: ignore import traceback import datetime, subprocess, sys import litellm, uuid @@ -19,27 +17,32 @@ class PrometheusLogger: **kwargs, ): try: - verbose_logger.debug(f"in init prometheus metrics") from prometheus_client import Counter + self.litellm_llm_api_failed_requests_metric = Counter( + name="litellm_llm_api_failed_requests_metric", + documentation="Total number of failed LLM API calls via litellm", + labelnames=["end_user", "hashed_api_key", "model", "team", "user"], + ) + self.litellm_requests_metric = Counter( name="litellm_requests_metric", documentation="Total number of LLM calls to litellm", - labelnames=["end_user", "key", "model", "team"], + labelnames=["end_user", "hashed_api_key", "model", "team", "user"], ) # Counter for spend self.litellm_spend_metric = Counter( "litellm_spend_metric", "Total spend on LLM requests", - labelnames=["end_user", "key", "model", "team"], + labelnames=["end_user", "hashed_api_key", "model", "team", "user"], ) # Counter for total_output_tokens self.litellm_tokens_metric = Counter( "litellm_total_tokens", "Total number of input + output tokens from LLM requests", - labelnames=["end_user", "key", "model", "team"], + labelnames=["end_user", "hashed_api_key", "model", "team", "user"], ) except Exception as e: print_verbose(f"Got exception on init prometheus client {str(e)}") @@ -61,29 +64,50 @@ class PrometheusLogger: # unpack kwargs model = kwargs.get("model", "") - response_cost = kwargs.get("response_cost", 0.0) + response_cost = kwargs.get("response_cost", 0.0) or 0 litellm_params = kwargs.get("litellm_params", {}) or {} proxy_server_request = litellm_params.get("proxy_server_request") or {} end_user_id = proxy_server_request.get("body", {}).get("user", None) + user_id = litellm_params.get("metadata", {}).get( + "user_api_key_user_id", None + ) user_api_key = litellm_params.get("metadata", {}).get("user_api_key", None) user_api_team = 
litellm_params.get("metadata", {}).get( "user_api_key_team_id", None ) - tokens_used = response_obj.get("usage", {}).get("total_tokens", 0) + if response_obj is not None: + tokens_used = response_obj.get("usage", {}).get("total_tokens", 0) + else: + tokens_used = 0 print_verbose( f"inside track_prometheus_metrics, model {model}, response_cost {response_cost}, tokens_used {tokens_used}, end_user_id {end_user_id}, user_api_key {user_api_key}" ) + if ( + user_api_key is not None + and isinstance(user_api_key, str) + and user_api_key.startswith("sk-") + ): + from litellm.proxy.utils import hash_token + + user_api_key = hash_token(user_api_key) + self.litellm_requests_metric.labels( - end_user_id, user_api_key, model, user_api_team + end_user_id, user_api_key, model, user_api_team, user_id ).inc() self.litellm_spend_metric.labels( - end_user_id, user_api_key, model, user_api_team + end_user_id, user_api_key, model, user_api_team, user_id ).inc(response_cost) self.litellm_tokens_metric.labels( - end_user_id, user_api_key, model, user_api_team + end_user_id, user_api_key, model, user_api_team, user_id ).inc(tokens_used) + + ### FAILURE INCREMENT ### + if "exception" in kwargs: + self.litellm_llm_api_failed_requests_metric.labels( + end_user_id, user_api_key, model, user_api_team, user_id + ).inc() except Exception as e: traceback.print_exc() verbose_logger.debug( diff --git a/litellm/integrations/prometheus_services.py b/litellm/integrations/prometheus_services.py new file mode 100644 index 000000000..8fce8930d --- /dev/null +++ b/litellm/integrations/prometheus_services.py @@ -0,0 +1,195 @@ +# used for monitoring litellm services health on `/metrics` endpoint on LiteLLM Proxy +#### What this does #### +# On success + failure, log events to Prometheus for litellm / adjacent services (litellm, redis, postgres, llm api providers) + + +import dotenv, os +import requests # type: ignore +import traceback +import datetime, subprocess, sys +import litellm, uuid +from 
litellm._logging import print_verbose, verbose_logger +from litellm.types.services import ServiceLoggerPayload, ServiceTypes + + +class PrometheusServicesLogger: + # Class variables or attributes + litellm_service_latency = None # Class-level attribute to store the Histogram + + def __init__( + self, + mock_testing: bool = False, + **kwargs, + ): + try: + try: + from prometheus_client import Counter, Histogram, REGISTRY + except ImportError: + raise Exception( + "Missing prometheus_client. Run `pip install prometheus-client`" + ) + + self.Histogram = Histogram + self.Counter = Counter + self.REGISTRY = REGISTRY + + verbose_logger.debug(f"in init prometheus services metrics") + + self.services = [item.value for item in ServiceTypes] + + self.payload_to_prometheus_map = ( + {} + ) # store the prometheus histogram/counter we need to call for each field in payload + + for service in self.services: + histogram = self.create_histogram(service, type_of_request="latency") + counter_failed_request = self.create_counter( + service, type_of_request="failed_requests" + ) + counter_total_requests = self.create_counter( + service, type_of_request="total_requests" + ) + self.payload_to_prometheus_map[service] = [ + histogram, + counter_failed_request, + counter_total_requests, + ] + + self.prometheus_to_amount_map: dict = ( + {} + ) # the field / value in ServiceLoggerPayload the object needs to be incremented by + + ### MOCK TESTING ### + self.mock_testing = mock_testing + self.mock_testing_success_calls = 0 + self.mock_testing_failure_calls = 0 + + except Exception as e: + print_verbose(f"Got exception on init prometheus client {str(e)}") + raise e + + def is_metric_registered(self, metric_name) -> bool: + for metric in self.REGISTRY.collect(): + if metric_name == metric.name: + return True + return False + + def get_metric(self, metric_name): + for metric in self.REGISTRY.collect(): + for sample in metric.samples: + if metric_name == sample.name: + return metric + return None 
+ + def create_histogram(self, service: str, type_of_request: str): + metric_name = "litellm_{}_{}".format(service, type_of_request) + is_registered = self.is_metric_registered(metric_name) + if is_registered: + return self.get_metric(metric_name) + return self.Histogram( + metric_name, + "Latency for {} service".format(service), + labelnames=[service], + ) + + def create_counter(self, service: str, type_of_request: str): + metric_name = "litellm_{}_{}".format(service, type_of_request) + is_registered = self.is_metric_registered(metric_name) + if is_registered: + return self.get_metric(metric_name) + return self.Counter( + metric_name, + "Total {} for {} service".format(type_of_request, service), + labelnames=[service], + ) + + def observe_histogram( + self, + histogram, + labels: str, + amount: float, + ): + assert isinstance(histogram, self.Histogram) + + histogram.labels(labels).observe(amount) + + def increment_counter( + self, + counter, + labels: str, + amount: float, + ): + assert isinstance(counter, self.Counter) + + counter.labels(labels).inc(amount) + + def service_success_hook(self, payload: ServiceLoggerPayload): + if self.mock_testing: + self.mock_testing_success_calls += 1 + + if payload.service.value in self.payload_to_prometheus_map: + prom_objects = self.payload_to_prometheus_map[payload.service.value] + for obj in prom_objects: + if isinstance(obj, self.Histogram): + self.observe_histogram( + histogram=obj, + labels=payload.service.value, + amount=payload.duration, + ) + elif isinstance(obj, self.Counter) and "total_requests" in obj._name: + self.increment_counter( + counter=obj, + labels=payload.service.value, + amount=1, # LOG TOTAL REQUESTS TO PROMETHEUS + ) + + def service_failure_hook(self, payload: ServiceLoggerPayload): + if self.mock_testing: + self.mock_testing_failure_calls += 1 + + if payload.service.value in self.payload_to_prometheus_map: + prom_objects = self.payload_to_prometheus_map[payload.service.value] + for obj in prom_objects: 
+ if isinstance(obj, self.Counter): + self.increment_counter( + counter=obj, + labels=payload.service.value, + amount=1, # LOG ERROR COUNT / TOTAL REQUESTS TO PROMETHEUS + ) + + async def async_service_success_hook(self, payload: ServiceLoggerPayload): + """ + Log successful call to prometheus + """ + if self.mock_testing: + self.mock_testing_success_calls += 1 + + if payload.service.value in self.payload_to_prometheus_map: + prom_objects = self.payload_to_prometheus_map[payload.service.value] + for obj in prom_objects: + if isinstance(obj, self.Histogram): + self.observe_histogram( + histogram=obj, + labels=payload.service.value, + amount=payload.duration, + ) + elif isinstance(obj, self.Counter) and "total_requests" in obj._name: + self.increment_counter( + counter=obj, + labels=payload.service.value, + amount=1, # LOG TOTAL REQUESTS TO PROMETHEUS + ) + + async def async_service_failure_hook(self, payload: ServiceLoggerPayload): + if self.mock_testing: + self.mock_testing_failure_calls += 1 + + if payload.service.value in self.payload_to_prometheus_map: + prom_objects = self.payload_to_prometheus_map[payload.service.value] + for obj in prom_objects: + if isinstance(obj, self.Counter): + self.increment_counter( + counter=obj, + labels=payload.service.value, + amount=1, # LOG ERROR COUNT TO PROMETHEUS + ) diff --git a/litellm/integrations/prompt_layer.py b/litellm/integrations/prompt_layer.py index 39a80940b..531ed75fe 100644 --- a/litellm/integrations/prompt_layer.py +++ b/litellm/integrations/prompt_layer.py @@ -1,12 +1,11 @@ #### What this does #### # On success, logs events to Promptlayer import dotenv, os -import requests +import requests # type: ignore from pydantic import BaseModel - -dotenv.load_dotenv() # Loading env variables using dotenv import traceback + class PromptLayerLogger: # Class variables or attributes def __init__(self): @@ -32,7 +31,11 @@ class PromptLayerLogger: tags = kwargs["litellm_params"]["metadata"]["pl_tags"] # Remove "pl_tags" from 
metadata - metadata = {k:v for k, v in kwargs["litellm_params"]["metadata"].items() if k != "pl_tags"} + metadata = { + k: v + for k, v in kwargs["litellm_params"]["metadata"].items() + if k != "pl_tags" + } print_verbose( f"Prompt Layer Logging - Enters logging function for model kwargs: {new_kwargs}\n, response: {response_obj}" diff --git a/litellm/integrations/s3.py b/litellm/integrations/s3.py index dc35430bc..d131e44f0 100644 --- a/litellm/integrations/s3.py +++ b/litellm/integrations/s3.py @@ -1,10 +1,7 @@ #### What this does #### # On success + failure, log events to Supabase -import dotenv, os -import requests - -dotenv.load_dotenv() # Loading env variables using dotenv +import os import traceback import datetime, subprocess, sys import litellm, uuid diff --git a/litellm/integrations/slack_alerting.py b/litellm/integrations/slack_alerting.py new file mode 100644 index 000000000..015278c55 --- /dev/null +++ b/litellm/integrations/slack_alerting.py @@ -0,0 +1,1009 @@ +#### What this does #### +# Class for sending Slack Alerts # +import dotenv, os +from litellm.proxy._types import UserAPIKeyAuth +from litellm._logging import verbose_logger, verbose_proxy_logger +import litellm, threading +from typing import List, Literal, Any, Union, Optional, Dict +from litellm.caching import DualCache +import asyncio +import aiohttp +from litellm.llms.custom_httpx.http_handler import AsyncHTTPHandler +import datetime +from pydantic import BaseModel +from enum import Enum +from datetime import datetime as dt, timedelta, timezone +from litellm.integrations.custom_logger import CustomLogger +import random + + +class LiteLLMBase(BaseModel): + """ + Implements default functions, all pydantic objects should have. 
+ """ + + def json(self, **kwargs): + try: + return self.model_dump() # noqa + except: + # if using pydantic v1 + return self.dict() + + +class SlackAlertingArgs(LiteLLMBase): + default_daily_report_frequency: int = 12 * 60 * 60 # 12 hours + daily_report_frequency: int = int( + os.getenv("SLACK_DAILY_REPORT_FREQUENCY", default_daily_report_frequency) + ) + report_check_interval: int = 5 * 60 # 5 minutes + + +class DeploymentMetrics(LiteLLMBase): + """ + Metrics per deployment, stored in cache + + Used for daily reporting + """ + + id: str + """id of deployment in router model list""" + + failed_request: bool + """did it fail the request?""" + + latency_per_output_token: Optional[float] + """latency/output token of deployment""" + + updated_at: dt + """Current time of deployment being updated""" + + +class SlackAlertingCacheKeys(Enum): + """ + Enum for deployment daily metrics keys - {deployment_id}:{enum} + """ + + failed_requests_key = "failed_requests_daily_metrics" + latency_key = "latency_daily_metrics" + report_sent_key = "daily_metrics_report_sent" + + +class SlackAlerting(CustomLogger): + """ + Class for sending Slack Alerts + """ + + # Class variables or attributes + def __init__( + self, + internal_usage_cache: Optional[DualCache] = None, + alerting_threshold: float = 300, # threshold for slow / hanging llm responses (in seconds) + alerting: Optional[List] = [], + alert_types: List[ + Literal[ + "llm_exceptions", + "llm_too_slow", + "llm_requests_hanging", + "budget_alerts", + "db_exceptions", + "daily_reports", + "spend_reports", + "cooldown_deployment", + "new_model_added", + ] + ] = [ + "llm_exceptions", + "llm_too_slow", + "llm_requests_hanging", + "budget_alerts", + "db_exceptions", + "daily_reports", + "spend_reports", + "cooldown_deployment", + "new_model_added", + ], + alert_to_webhook_url: Optional[ + Dict + ] = None, # if user wants to separate alerts to diff channels + alerting_args={}, + default_webhook_url: Optional[str] = None, + ): + 
self.alerting_threshold = alerting_threshold + self.alerting = alerting + self.alert_types = alert_types + self.internal_usage_cache = internal_usage_cache or DualCache() + self.async_http_handler = AsyncHTTPHandler() + self.alert_to_webhook_url = alert_to_webhook_url + self.is_running = False + self.alerting_args = SlackAlertingArgs(**alerting_args) + self.default_webhook_url = default_webhook_url + + def update_values( + self, + alerting: Optional[List] = None, + alerting_threshold: Optional[float] = None, + alert_types: Optional[List] = None, + alert_to_webhook_url: Optional[Dict] = None, + alerting_args: Optional[Dict] = None, + ): + if alerting is not None: + self.alerting = alerting + if alerting_threshold is not None: + self.alerting_threshold = alerting_threshold + if alert_types is not None: + self.alert_types = alert_types + if alerting_args is not None: + self.alerting_args = SlackAlertingArgs(**alerting_args) + if alert_to_webhook_url is not None: + # update the dict + if self.alert_to_webhook_url is None: + self.alert_to_webhook_url = alert_to_webhook_url + else: + self.alert_to_webhook_url.update(alert_to_webhook_url) + + async def deployment_in_cooldown(self): + pass + + async def deployment_removed_from_cooldown(self): + pass + + def _all_possible_alert_types(self): + # used by the UI to show all supported alert types + # Note: This is not the alerts the user has configured, instead it's all possible alert types a user can select + return [ + "llm_exceptions", + "llm_too_slow", + "llm_requests_hanging", + "budget_alerts", + "db_exceptions", + ] + + def _add_langfuse_trace_id_to_alert( + self, + request_data: Optional[dict] = None, + ) -> Optional[str]: + """ + Returns langfuse trace url + """ + # do nothing for now + if ( + request_data is not None + and request_data.get("metadata", {}).get("trace_id", None) is not None + ): + trace_id = request_data["metadata"]["trace_id"] + if litellm.utils.langFuseLogger is not None: + base_url = 
litellm.utils.langFuseLogger.Langfuse.base_url + return f"{base_url}/trace/{trace_id}" + return None + + def _response_taking_too_long_callback_helper( + self, + kwargs, # kwargs to completion + start_time, + end_time, # start/end time + ): + try: + time_difference = end_time - start_time + # Convert the timedelta to float (in seconds) + time_difference_float = time_difference.total_seconds() + litellm_params = kwargs.get("litellm_params", {}) + model = kwargs.get("model", "") + api_base = litellm.get_api_base(model=model, optional_params=litellm_params) + messages = kwargs.get("messages", None) + # if messages does not exist fallback to "input" + if messages is None: + messages = kwargs.get("input", None) + + # only use first 100 chars for alerting + _messages = str(messages)[:100] + + return time_difference_float, model, api_base, _messages + except Exception as e: + raise e + + def _get_deployment_latencies_to_alert(self, metadata=None): + if metadata is None: + return None + + if "_latency_per_deployment" in metadata: + # Translate model_id to -> api_base + # _latency_per_deployment is a dictionary that looks like this: + """ + _latency_per_deployment: { + api_base: 0.01336697916666667 + } + """ + _message_to_send = "" + _deployment_latencies = metadata["_latency_per_deployment"] + if len(_deployment_latencies) == 0: + return None + try: + # try sorting deployments by latency + _deployment_latencies = sorted( + _deployment_latencies.items(), key=lambda x: x[1] + ) + _deployment_latencies = dict(_deployment_latencies) + except: + pass + for api_base, latency in _deployment_latencies.items(): + _message_to_send += f"\n{api_base}: {round(latency,2)}s" + _message_to_send = "```" + _message_to_send + "```" + return _message_to_send + + async def response_taking_too_long_callback( + self, + kwargs, # kwargs to completion + completion_response, # response from completion + start_time, + end_time, # start/end time + ): + if self.alerting is None or self.alert_types is 
None: + return + + time_difference_float, model, api_base, messages = ( + self._response_taking_too_long_callback_helper( + kwargs=kwargs, + start_time=start_time, + end_time=end_time, + ) + ) + if litellm.turn_off_message_logging: + messages = "Message not logged. `litellm.turn_off_message_logging=True`." + request_info = f"\nRequest Model: `{model}`\nAPI Base: `{api_base}`\nMessages: `{messages}`" + slow_message = f"`Responses are slow - {round(time_difference_float,2)}s response time > Alerting threshold: {self.alerting_threshold}s`" + if time_difference_float > self.alerting_threshold: + # add deployment latencies to alert + if ( + kwargs is not None + and "litellm_params" in kwargs + and "metadata" in kwargs["litellm_params"] + ): + _metadata = kwargs["litellm_params"]["metadata"] + request_info = litellm.utils._add_key_name_and_team_to_alert( + request_info=request_info, metadata=_metadata + ) + + _deployment_latency_map = self._get_deployment_latencies_to_alert( + metadata=_metadata + ) + if _deployment_latency_map is not None: + request_info += ( + f"\nAvailable Deployment Latencies\n{_deployment_latency_map}" + ) + await self.send_alert( + message=slow_message + request_info, + level="Low", + alert_type="llm_too_slow", + ) + + async def async_update_daily_reports( + self, deployment_metrics: DeploymentMetrics + ) -> int: + """ + Store the perf by deployment in cache + - Number of failed requests per deployment + - Latency / output tokens per deployment + + 'deployment_id:daily_metrics:failed_requests' + 'deployment_id:daily_metrics:latency_per_output_token' + + Returns + int - count of metrics set (1 - if just latency, 2 - if failed + latency) + """ + + return_val = 0 + try: + ## FAILED REQUESTS ## + if deployment_metrics.failed_request: + await self.internal_usage_cache.async_increment_cache( + key="{}:{}".format( + deployment_metrics.id, + SlackAlertingCacheKeys.failed_requests_key.value, + ), + value=1, + ) + + return_val += 1 + + ## LATENCY ## + if 
deployment_metrics.latency_per_output_token is not None: + await self.internal_usage_cache.async_increment_cache( + key="{}:{}".format( + deployment_metrics.id, SlackAlertingCacheKeys.latency_key.value + ), + value=deployment_metrics.latency_per_output_token, + ) + + return_val += 1 + + return return_val + except Exception as e: + return 0 + + async def send_daily_reports(self, router) -> bool: + """ + Send a daily report on: + - Top 5 deployments with most failed requests + - Top 5 slowest deployments (normalized by latency/output tokens) + + Get the value from redis cache (if available) or in-memory and send it + + Cleanup: + - reset values in cache -> prevent memory leak + + Returns: + True -> if successfuly sent + False -> if not sent + """ + + ids = router.get_model_ids() + + # get keys + failed_request_keys = [ + "{}:{}".format(id, SlackAlertingCacheKeys.failed_requests_key.value) + for id in ids + ] + latency_keys = [ + "{}:{}".format(id, SlackAlertingCacheKeys.latency_key.value) for id in ids + ] + + combined_metrics_keys = failed_request_keys + latency_keys # reduce cache calls + + combined_metrics_values = await self.internal_usage_cache.async_batch_get_cache( + keys=combined_metrics_keys + ) # [1, 2, None, ..] + + all_none = True + for val in combined_metrics_values: + if val is not None and val > 0: + all_none = False + break + + if all_none: + return False + + failed_request_values = combined_metrics_values[ + : len(failed_request_keys) + ] # # [1, 2, None, ..] 
+ latency_values = combined_metrics_values[len(failed_request_keys) :] + + # find top 5 failed + ## Replace None values with a placeholder value (-1 in this case) + placeholder_value = 0 + replaced_failed_values = [ + value if value is not None else placeholder_value + for value in failed_request_values + ] + + ## Get the indices of top 5 keys with the highest numerical values (ignoring None and 0 values) + top_5_failed = sorted( + range(len(replaced_failed_values)), + key=lambda i: replaced_failed_values[i], + reverse=True, + )[:5] + top_5_failed = [ + index for index in top_5_failed if replaced_failed_values[index] > 0 + ] + + # find top 5 slowest + # Replace None values with a placeholder value (-1 in this case) + placeholder_value = 0 + replaced_slowest_values = [ + value if value is not None else placeholder_value + for value in latency_values + ] + + # Get the indices of top 5 values with the highest numerical values (ignoring None and 0 values) + top_5_slowest = sorted( + range(len(replaced_slowest_values)), + key=lambda i: replaced_slowest_values[i], + reverse=True, + )[:5] + top_5_slowest = [ + index for index in top_5_slowest if replaced_slowest_values[index] > 0 + ] + + # format alert -> return the litellm model name + api base + message = f"\n\nHere are today's key metrics 📈: \n\n" + + message += "\n\n*❗️ Top Deployments with Most Failed Requests:*\n\n" + if not top_5_failed: + message += "\tNone\n" + for i in range(len(top_5_failed)): + key = failed_request_keys[top_5_failed[i]].split(":")[0] + _deployment = router.get_model_info(key) + if isinstance(_deployment, dict): + deployment_name = _deployment["litellm_params"].get("model", "") + else: + return False + + api_base = litellm.get_api_base( + model=deployment_name, + optional_params=( + _deployment["litellm_params"] if _deployment is not None else {} + ), + ) + if api_base is None: + api_base = "" + value = replaced_failed_values[top_5_failed[i]] + message += f"\t{i+1}. 
Deployment: `{deployment_name}`, Failed Requests: `{value}`, API Base: `{api_base}`\n" + + message += "\n\n*😅 Top Slowest Deployments:*\n\n" + if not top_5_slowest: + message += "\tNone\n" + for i in range(len(top_5_slowest)): + key = latency_keys[top_5_slowest[i]].split(":")[0] + _deployment = router.get_model_info(key) + if _deployment is not None: + deployment_name = _deployment["litellm_params"].get("model", "") + else: + deployment_name = "" + api_base = litellm.get_api_base( + model=deployment_name, + optional_params=( + _deployment["litellm_params"] if _deployment is not None else {} + ), + ) + value = round(replaced_slowest_values[top_5_slowest[i]], 3) + message += f"\t{i+1}. Deployment: `{deployment_name}`, Latency per output token: `{value}s/token`, API Base: `{api_base}`\n\n" + + # cache cleanup -> reset values to 0 + latency_cache_keys = [(key, 0) for key in latency_keys] + failed_request_cache_keys = [(key, 0) for key in failed_request_keys] + combined_metrics_cache_keys = latency_cache_keys + failed_request_cache_keys + await self.internal_usage_cache.async_batch_set_cache( + cache_list=combined_metrics_cache_keys + ) + + # send alert + await self.send_alert(message=message, level="Low", alert_type="daily_reports") + + return True + + async def response_taking_too_long( + self, + start_time: Optional[datetime.datetime] = None, + end_time: Optional[datetime.datetime] = None, + type: Literal["hanging_request", "slow_response"] = "hanging_request", + request_data: Optional[dict] = None, + ): + if self.alerting is None or self.alert_types is None: + return + if request_data is not None: + model = request_data.get("model", "") + messages = request_data.get("messages", None) + if messages is None: + # if messages does not exist fallback to "input" + messages = request_data.get("input", None) + + # try casting messages to str and get the first 100 characters, else mark as None + try: + messages = str(messages) + messages = messages[:100] + except: + messages 
= "" + + if litellm.turn_off_message_logging: + messages = ( + "Message not logged. `litellm.turn_off_message_logging=True`." + ) + request_info = f"\nRequest Model: `{model}`\nMessages: `{messages}`" + else: + request_info = "" + + if type == "hanging_request": + await asyncio.sleep( + self.alerting_threshold + ) # Set it to 5 minutes - i'd imagine this might be different for streaming, non-streaming, non-completion (embedding + img) requests + if ( + request_data is not None + and request_data.get("litellm_status", "") != "success" + and request_data.get("litellm_status", "") != "fail" + ): + if request_data.get("deployment", None) is not None and isinstance( + request_data["deployment"], dict + ): + _api_base = litellm.get_api_base( + model=model, + optional_params=request_data["deployment"].get( + "litellm_params", {} + ), + ) + + if _api_base is None: + _api_base = "" + + request_info += f"\nAPI Base: {_api_base}" + elif request_data.get("metadata", None) is not None and isinstance( + request_data["metadata"], dict + ): + # In hanging requests sometime it has not made it to the point where the deployment is passed to the `request_data`` + # in that case we fallback to the api base set in the request metadata + _metadata = request_data["metadata"] + _api_base = _metadata.get("api_base", "") + + request_info = litellm.utils._add_key_name_and_team_to_alert( + request_info=request_info, metadata=_metadata + ) + + if _api_base is None: + _api_base = "" + request_info += f"\nAPI Base: `{_api_base}`" + # only alert hanging responses if they have not been marked as success + alerting_message = ( + f"`Requests are hanging - {self.alerting_threshold}s+ request time`" + ) + + if "langfuse" in litellm.success_callback: + langfuse_url = self._add_langfuse_trace_id_to_alert( + request_data=request_data, + ) + + if langfuse_url is not None: + request_info += "\n🪢 Langfuse Trace: {}".format(langfuse_url) + + # add deployment latencies to alert + _deployment_latency_map = 
self._get_deployment_latencies_to_alert( + metadata=request_data.get("metadata", {}) + ) + if _deployment_latency_map is not None: + request_info += f"\nDeployment Latencies\n{_deployment_latency_map}" + + await self.send_alert( + message=alerting_message + request_info, + level="Medium", + alert_type="llm_requests_hanging", + ) + + async def budget_alerts( + self, + type: Literal[ + "token_budget", + "user_budget", + "user_and_proxy_budget", + "failed_budgets", + "failed_tracking", + "projected_limit_exceeded", + ], + user_max_budget: float, + user_current_spend: float, + user_info=None, + error_message="", + ): + if self.alerting is None or self.alert_types is None: + # do nothing if alerting is not switched on + return + if "budget_alerts" not in self.alert_types: + return + _id: str = "default_id" # used for caching + if type == "user_and_proxy_budget": + user_info = dict(user_info) + user_id = user_info["user_id"] + _id = user_id + max_budget = user_info["max_budget"] + spend = user_info["spend"] + user_email = user_info["user_email"] + user_info = f"""\nUser ID: {user_id}\nMax Budget: ${max_budget}\nSpend: ${spend}\nUser Email: {user_email}""" + elif type == "token_budget": + token_info = dict(user_info) + token = token_info["token"] + _id = token + spend = token_info["spend"] + max_budget = token_info["max_budget"] + user_id = token_info["user_id"] + user_info = f"""\nToken: {token}\nSpend: ${spend}\nMax Budget: ${max_budget}\nUser ID: {user_id}""" + elif type == "failed_tracking": + user_id = str(user_info) + _id = user_id + user_info = f"\nUser ID: {user_id}\n Error {error_message}" + message = "Failed Tracking Cost for" + user_info + await self.send_alert( + message=message, level="High", alert_type="budget_alerts" + ) + return + elif type == "projected_limit_exceeded" and user_info is not None: + """ + Input variables: + user_info = { + "key_alias": key_alias, + "projected_spend": projected_spend, + "projected_exceeded_date": projected_exceeded_date, + } 
+ user_max_budget=soft_limit, + user_current_spend=new_spend + """ + message = f"""\n🚨 `ProjectedLimitExceededError` 💸\n\n`Key Alias:` {user_info["key_alias"]} \n`Expected Day of Error`: {user_info["projected_exceeded_date"]} \n`Current Spend`: {user_current_spend} \n`Projected Spend at end of month`: {user_info["projected_spend"]} \n`Soft Limit`: {user_max_budget}""" + await self.send_alert( + message=message, level="High", alert_type="budget_alerts" + ) + return + else: + user_info = str(user_info) + + # percent of max_budget left to spend + if user_max_budget > 0: + percent_left = (user_max_budget - user_current_spend) / user_max_budget + else: + percent_left = 0 + verbose_proxy_logger.debug( + f"Budget Alerts: Percent left: {percent_left} for {user_info}" + ) + + ## PREVENTITIVE ALERTING ## - https://github.com/BerriAI/litellm/issues/2727 + # - Alert once within 28d period + # - Cache this information + # - Don't re-alert, if alert already sent + _cache: DualCache = self.internal_usage_cache + + # check if crossed budget + if user_current_spend >= user_max_budget: + verbose_proxy_logger.debug("Budget Crossed for %s", user_info) + message = "Budget Crossed for" + user_info + result = await _cache.async_get_cache(key=message) + if result is None: + await self.send_alert( + message=message, level="High", alert_type="budget_alerts" + ) + await _cache.async_set_cache(key=message, value="SENT", ttl=2419200) + return + + # check if 5% of max budget is left + if percent_left <= 0.05: + message = "5% budget left for" + user_info + cache_key = "alerting:{}".format(_id) + result = await _cache.async_get_cache(key=cache_key) + if result is None: + await self.send_alert( + message=message, level="Medium", alert_type="budget_alerts" + ) + + await _cache.async_set_cache(key=cache_key, value="SENT", ttl=2419200) + + return + + # check if 15% of max budget is left + if percent_left <= 0.15: + message = "15% budget left for" + user_info + result = await 
_cache.async_get_cache(key=message) + if result is None: + await self.send_alert( + message=message, level="Low", alert_type="budget_alerts" + ) + await _cache.async_set_cache(key=message, value="SENT", ttl=2419200) + return + + return + + async def model_added_alert(self, model_name: str, litellm_model_name: str): + model_info = litellm.model_cost.get(litellm_model_name, {}) + model_info_str = "" + for k, v in model_info.items(): + if k == "input_cost_per_token" or k == "output_cost_per_token": + # when converting to string it should not be 1.63e-06 + v = "{:.8f}".format(v) + + model_info_str += f"{k}: {v}\n" + + message = f""" +*🚅 New Model Added* +Model Name: `{model_name}` + +Usage OpenAI Python SDK: +``` +import openai +client = openai.OpenAI( + api_key="your_api_key", + base_url={os.getenv("PROXY_BASE_URL", "http://0.0.0.0:4000")} +) + +response = client.chat.completions.create( + model="{model_name}", # model to send to the proxy + messages = [ + {{ + "role": "user", + "content": "this is a test request, write a short poem" + }} + ] +) +``` + +Model Info: +``` +{model_info_str} +``` +""" + + await self.send_alert( + message=message, level="Low", alert_type="new_model_added" + ) + pass + + async def model_removed_alert(self, model_name: str): + pass + + async def send_alert( + self, + message: str, + level: Literal["Low", "Medium", "High"], + alert_type: Literal[ + "llm_exceptions", + "llm_too_slow", + "llm_requests_hanging", + "budget_alerts", + "db_exceptions", + "daily_reports", + "spend_reports", + "new_model_added", + "cooldown_deployment", + ], + **kwargs, + ): + """ + Alerting based on thresholds: - https://github.com/BerriAI/litellm/issues/1298 + + - Responses taking too long + - Requests are hanging + - Calls are failing + - DB Read/Writes are failing + - Proxy Close to max budget + - Key Close to max budget + + Parameters: + level: str - Low|Medium|High - if calls might fail (Medium) or are failing (High); Currently, no alerts would be 'Low'. 
+ message: str - what is the alert about + """ + if self.alerting is None: + return + + if alert_type not in self.alert_types: + return + + from datetime import datetime + import json + + # Get the current timestamp + current_time = datetime.now().strftime("%H:%M:%S") + _proxy_base_url = os.getenv("PROXY_BASE_URL", None) + if alert_type == "daily_reports" or alert_type == "new_model_added": + formatted_message = message + else: + formatted_message = ( + f"Level: `{level}`\nTimestamp: `{current_time}`\n\nMessage: {message}" + ) + + if kwargs: + for key, value in kwargs.items(): + formatted_message += f"\n\n{key}: `{value}`\n\n" + if _proxy_base_url is not None: + formatted_message += f"\n\nProxy URL: `{_proxy_base_url}`" + + # check if we find the slack webhook url in self.alert_to_webhook_url + if ( + self.alert_to_webhook_url is not None + and alert_type in self.alert_to_webhook_url + ): + slack_webhook_url = self.alert_to_webhook_url[alert_type] + elif self.default_webhook_url is not None: + slack_webhook_url = self.default_webhook_url + else: + slack_webhook_url = os.getenv("SLACK_WEBHOOK_URL", None) + + if slack_webhook_url is None: + raise Exception("Missing SLACK_WEBHOOK_URL from environment") + payload = {"text": formatted_message} + headers = {"Content-type": "application/json"} + + response = await self.async_http_handler.post( + url=slack_webhook_url, + headers=headers, + data=json.dumps(payload), + ) + if response.status_code == 200: + pass + else: + print("Error sending slack alert. 
Error=", response.text) # noqa + + async def async_log_success_event(self, kwargs, response_obj, start_time, end_time): + """Log deployment latency""" + if "daily_reports" in self.alert_types: + model_id = ( + kwargs.get("litellm_params", {}).get("model_info", {}).get("id", "") + ) + response_s: timedelta = end_time - start_time + + final_value = response_s + total_tokens = 0 + + if isinstance(response_obj, litellm.ModelResponse): + completion_tokens = response_obj.usage.completion_tokens + final_value = float(response_s.total_seconds() / completion_tokens) + + await self.async_update_daily_reports( + DeploymentMetrics( + id=model_id, + failed_request=False, + latency_per_output_token=final_value, + updated_at=litellm.utils.get_utc_datetime(), + ) + ) + + async def async_log_failure_event(self, kwargs, response_obj, start_time, end_time): + """Log failure + deployment latency""" + if "daily_reports" in self.alert_types: + model_id = ( + kwargs.get("litellm_params", {}).get("model_info", {}).get("id", "") + ) + await self.async_update_daily_reports( + DeploymentMetrics( + id=model_id, + failed_request=True, + latency_per_output_token=None, + updated_at=litellm.utils.get_utc_datetime(), + ) + ) + + async def _run_scheduler_helper(self, llm_router) -> bool: + """ + Returns: + - True -> report sent + - False -> report not sent + """ + report_sent_bool = False + + report_sent = await self.internal_usage_cache.async_get_cache( + key=SlackAlertingCacheKeys.report_sent_key.value + ) # None | datetime + + current_time = litellm.utils.get_utc_datetime() + + if report_sent is None: + _current_time = current_time.isoformat() + await self.internal_usage_cache.async_set_cache( + key=SlackAlertingCacheKeys.report_sent_key.value, + value=_current_time, + ) + else: + # Check if current time - interval >= time last sent + delta_naive = timedelta(seconds=self.alerting_args.daily_report_frequency) + if isinstance(report_sent, str): + report_sent = dt.fromisoformat(report_sent) + + # 
Ensure report_sent is an aware datetime object + if report_sent.tzinfo is None: + report_sent = report_sent.replace(tzinfo=timezone.utc) + + # Calculate delta as an aware datetime object with the same timezone as report_sent + delta = report_sent - delta_naive + + current_time_utc = current_time.astimezone(timezone.utc) + delta_utc = delta.astimezone(timezone.utc) + + if current_time_utc >= delta_utc: + # Sneak in the reporting logic here + await self.send_daily_reports(router=llm_router) + # Also, don't forget to update the report_sent time after sending the report! + _current_time = current_time.isoformat() + await self.internal_usage_cache.async_set_cache( + key=SlackAlertingCacheKeys.report_sent_key.value, + value=_current_time, + ) + report_sent_bool = True + + return report_sent_bool + + async def _run_scheduled_daily_report(self, llm_router: Optional[Any] = None): + """ + If 'daily_reports' enabled + + Ping redis cache every 5 minutes to check if we should send the report + + If yes -> call send_daily_report() + """ + if llm_router is None or self.alert_types is None: + return + + if "daily_reports" in self.alert_types: + while True: + await self._run_scheduler_helper(llm_router=llm_router) + interval = random.randint( + self.alerting_args.report_check_interval - 3, + self.alerting_args.report_check_interval + 3, + ) # shuffle to prevent collisions + await asyncio.sleep(interval) + return + + async def send_weekly_spend_report(self): + """ """ + try: + from litellm.proxy.proxy_server import _get_spend_report_for_time_range + + todays_date = datetime.datetime.now().date() + week_before = todays_date - datetime.timedelta(days=7) + + weekly_spend_per_team, weekly_spend_per_tag = ( + await _get_spend_report_for_time_range( + start_date=week_before.strftime("%Y-%m-%d"), + end_date=todays_date.strftime("%Y-%m-%d"), + ) + ) + + _weekly_spend_message = f"*💸 Weekly Spend Report for `{week_before.strftime('%m-%d-%Y')} - {todays_date.strftime('%m-%d-%Y')}` *\n" + + if 
weekly_spend_per_team is not None: + _weekly_spend_message += "\n*Team Spend Report:*\n" + for spend in weekly_spend_per_team: + _team_spend = spend["total_spend"] + _team_spend = float(_team_spend) + # round to 4 decimal places + _team_spend = round(_team_spend, 4) + _weekly_spend_message += ( + f"Team: `{spend['team_alias']}` | Spend: `${_team_spend}`\n" + ) + + if weekly_spend_per_tag is not None: + _weekly_spend_message += "\n*Tag Spend Report:*\n" + for spend in weekly_spend_per_tag: + _tag_spend = spend["total_spend"] + _tag_spend = float(_tag_spend) + # round to 4 decimal places + _tag_spend = round(_tag_spend, 4) + _weekly_spend_message += f"Tag: `{spend['individual_request_tag']}` | Spend: `${_tag_spend}`\n" + + await self.send_alert( + message=_weekly_spend_message, + level="Low", + alert_type="spend_reports", + ) + except Exception as e: + verbose_proxy_logger.error("Error sending weekly spend report", e) + + async def send_monthly_spend_report(self): + """ """ + try: + from calendar import monthrange + + from litellm.proxy.proxy_server import _get_spend_report_for_time_range + + todays_date = datetime.datetime.now().date() + first_day_of_month = todays_date.replace(day=1) + _, last_day_of_month = monthrange(todays_date.year, todays_date.month) + last_day_of_month = first_day_of_month + datetime.timedelta( + days=last_day_of_month - 1 + ) + + monthly_spend_per_team, monthly_spend_per_tag = ( + await _get_spend_report_for_time_range( + start_date=first_day_of_month.strftime("%Y-%m-%d"), + end_date=last_day_of_month.strftime("%Y-%m-%d"), + ) + ) + + _spend_message = f"*💸 Monthly Spend Report for `{first_day_of_month.strftime('%m-%d-%Y')} - {last_day_of_month.strftime('%m-%d-%Y')}` *\n" + + if monthly_spend_per_team is not None: + _spend_message += "\n*Team Spend Report:*\n" + for spend in monthly_spend_per_team: + _team_spend = spend["total_spend"] + _team_spend = float(_team_spend) + # round to 4 decimal places + _team_spend = round(_team_spend, 4) + 
_spend_message += ( + f"Team: `{spend['team_alias']}` | Spend: `${_team_spend}`\n" + ) + + if monthly_spend_per_tag is not None: + _spend_message += "\n*Tag Spend Report:*\n" + for spend in monthly_spend_per_tag: + _tag_spend = spend["total_spend"] + _tag_spend = float(_tag_spend) + # round to 4 decimal places + _tag_spend = round(_tag_spend, 4) + _spend_message += f"Tag: `{spend['individual_request_tag']}` | Spend: `${_tag_spend}`\n" + + await self.send_alert( + message=_spend_message, + level="Low", + alert_type="spend_reports", + ) + except Exception as e: + verbose_proxy_logger.error("Error sending weekly spend report", e) diff --git a/litellm/integrations/supabase.py b/litellm/integrations/supabase.py index a99e4abc4..4e6bf517f 100644 --- a/litellm/integrations/supabase.py +++ b/litellm/integrations/supabase.py @@ -2,9 +2,7 @@ # On success + failure, log events to Supabase import dotenv, os -import requests - -dotenv.load_dotenv() # Loading env variables using dotenv +import requests # type: ignore import traceback import datetime, subprocess, sys import litellm diff --git a/litellm/integrations/weights_biases.py b/litellm/integrations/weights_biases.py index 53e6070a5..a56233b22 100644 --- a/litellm/integrations/weights_biases.py +++ b/litellm/integrations/weights_biases.py @@ -21,11 +21,11 @@ try: # contains a (known) object attribute object: Literal["chat.completion", "edit", "text_completion"] - def __getitem__(self, key: K) -> V: - ... # pragma: no cover + def __getitem__(self, key: K) -> V: ... # noqa - def get(self, key: K, default: Optional[V] = None) -> Optional[V]: - ... # pragma: no cover + def get( # noqa + self, key: K, default: Optional[V] = None + ) -> Optional[V]: ... 
# pragma: no cover class OpenAIRequestResponseResolver: def __call__( @@ -173,12 +173,11 @@ except: #### What this does #### # On success, logs events to Langfuse -import dotenv, os +import os import requests import requests from datetime import datetime -dotenv.load_dotenv() # Loading env variables using dotenv import traceback diff --git a/litellm/llms/ai21.py b/litellm/llms/ai21.py index 73d5afebe..a39a83f15 100644 --- a/litellm/llms/ai21.py +++ b/litellm/llms/ai21.py @@ -1,8 +1,8 @@ import os, types, traceback import json from enum import Enum -import requests -import time, httpx +import requests # type: ignore +import time, httpx # type: ignore from typing import Callable, Optional from litellm.utils import ModelResponse, Choices, Message import litellm diff --git a/litellm/llms/aleph_alpha.py b/litellm/llms/aleph_alpha.py index 3c1bd5dde..7edd11964 100644 --- a/litellm/llms/aleph_alpha.py +++ b/litellm/llms/aleph_alpha.py @@ -1,12 +1,12 @@ import os, types import json from enum import Enum -import requests +import requests # type: ignore import time from typing import Callable, Optional import litellm from litellm.utils import ModelResponse, Choices, Message, Usage -import httpx +import httpx # type: ignore class AlephAlphaError(Exception): @@ -298,7 +298,7 @@ def completion( completion_tokens=completion_tokens, total_tokens=prompt_tokens + completion_tokens, ) - model_response.usage = usage + setattr(model_response, "usage", usage) return model_response diff --git a/litellm/llms/anthropic.py b/litellm/llms/anthropic.py index d836ed8db..f14dabc03 100644 --- a/litellm/llms/anthropic.py +++ b/litellm/llms/anthropic.py @@ -1,15 +1,15 @@ import os, types import json from enum import Enum -import requests, copy +import requests, copy # type: ignore import time -from typing import Callable, Optional, List +from typing import Callable, Optional, List, Union from litellm.utils import ModelResponse, Usage, map_finish_reason, CustomStreamWrapper import litellm from 
.prompt_templates.factory import prompt_factory, custom_prompt from litellm.llms.custom_httpx.http_handler import AsyncHTTPHandler from .base import BaseLLM -import httpx +import httpx # type: ignore class AnthropicConstants(Enum): @@ -84,6 +84,52 @@ class AnthropicConfig: and v is not None } + def get_supported_openai_params(self): + return [ + "stream", + "stop", + "temperature", + "top_p", + "max_tokens", + "tools", + "tool_choice", + "extra_headers", + ] + + def map_openai_params(self, non_default_params: dict, optional_params: dict): + for param, value in non_default_params.items(): + if param == "max_tokens": + optional_params["max_tokens"] = value + if param == "tools": + optional_params["tools"] = value + if param == "stream" and value == True: + optional_params["stream"] = value + if param == "stop": + if isinstance(value, str): + if ( + value == "\n" + ) and litellm.drop_params == True: # anthropic doesn't allow whitespace characters as stop-sequences + continue + value = [value] + elif isinstance(value, list): + new_v = [] + for v in value: + if ( + v == "\n" + ) and litellm.drop_params == True: # anthropic doesn't allow whitespace characters as stop-sequences + continue + new_v.append(v) + if len(new_v) > 0: + value = new_v + else: + continue + optional_params["stop_sequences"] = value + if param == "temperature": + optional_params["temperature"] = value + if param == "top_p": + optional_params["top_p"] = value + return optional_params + # makes headers for API call def validate_environment(api_key, user_headers): @@ -106,19 +152,135 @@ class AnthropicChatCompletion(BaseLLM): def __init__(self) -> None: super().__init__() + def process_streaming_response( + self, + model: str, + response: Union[requests.Response, httpx.Response], + model_response: ModelResponse, + stream: bool, + logging_obj: litellm.utils.Logging, + optional_params: dict, + api_key: str, + data: Union[dict, str], + messages: List, + print_verbose, + encoding, + ) -> 
CustomStreamWrapper: + """ + Return stream object for tool-calling + streaming + """ + ## LOGGING + logging_obj.post_call( + input=messages, + api_key=api_key, + original_response=response.text, + additional_args={"complete_input_dict": data}, + ) + print_verbose(f"raw model_response: {response.text}") + ## RESPONSE OBJECT + try: + completion_response = response.json() + except: + raise AnthropicError( + message=response.text, status_code=response.status_code + ) + text_content = "" + tool_calls = [] + for content in completion_response["content"]: + if content["type"] == "text": + text_content += content["text"] + ## TOOL CALLING + elif content["type"] == "tool_use": + tool_calls.append( + { + "id": content["id"], + "type": "function", + "function": { + "name": content["name"], + "arguments": json.dumps(content["input"]), + }, + } + ) + if "error" in completion_response: + raise AnthropicError( + message=str(completion_response["error"]), + status_code=response.status_code, + ) + _message = litellm.Message( + tool_calls=tool_calls, + content=text_content or None, + ) + model_response.choices[0].message = _message # type: ignore + model_response._hidden_params["original_response"] = completion_response[ + "content" + ] # allow user to access raw anthropic tool calling response + + model_response.choices[0].finish_reason = map_finish_reason( + completion_response["stop_reason"] + ) + + print_verbose("INSIDE ANTHROPIC STREAMING TOOL CALLING CONDITION BLOCK") + # return an iterator + streaming_model_response = ModelResponse(stream=True) + streaming_model_response.choices[0].finish_reason = model_response.choices[ # type: ignore + 0 + ].finish_reason + # streaming_model_response.choices = [litellm.utils.StreamingChoices()] + streaming_choice = litellm.utils.StreamingChoices() + streaming_choice.index = model_response.choices[0].index + _tool_calls = [] + print_verbose( + f"type of model_response.choices[0]: {type(model_response.choices[0])}" + ) + print_verbose(f"type 
of streaming_choice: {type(streaming_choice)}") + if isinstance(model_response.choices[0], litellm.Choices): + if getattr( + model_response.choices[0].message, "tool_calls", None + ) is not None and isinstance( + model_response.choices[0].message.tool_calls, list + ): + for tool_call in model_response.choices[0].message.tool_calls: + _tool_call = {**tool_call.dict(), "index": 0} + _tool_calls.append(_tool_call) + delta_obj = litellm.utils.Delta( + content=getattr(model_response.choices[0].message, "content", None), + role=model_response.choices[0].message.role, + tool_calls=_tool_calls, + ) + streaming_choice.delta = delta_obj + streaming_model_response.choices = [streaming_choice] + completion_stream = ModelResponseIterator( + model_response=streaming_model_response + ) + print_verbose( + "Returns anthropic CustomStreamWrapper with 'cached_response' streaming object" + ) + return CustomStreamWrapper( + completion_stream=completion_stream, + model=model, + custom_llm_provider="cached_response", + logging_obj=logging_obj, + ) + else: + raise AnthropicError( + status_code=422, + message="Unprocessable response object - {}".format(response.text), + ) + def process_response( self, - model, - response, - model_response, - _is_function_call, - stream, - logging_obj, - api_key, - data, - messages, + model: str, + response: Union[requests.Response, httpx.Response], + model_response: ModelResponse, + stream: bool, + logging_obj: litellm.utils.Logging, + optional_params: dict, + api_key: str, + data: Union[dict, str], + messages: List, print_verbose, - ): + encoding, + ) -> ModelResponse: ## LOGGING logging_obj.post_call( input=messages, @@ -139,11 +301,6 @@ class AnthropicChatCompletion(BaseLLM): message=str(completion_response["error"]), status_code=response.status_code, ) - elif len(completion_response["content"]) == 0: - raise AnthropicError( - message="No content in response", - status_code=response.status_code, - ) else: text_content = "" tool_calls = [] @@ -176,51 
+333,6 @@ class AnthropicChatCompletion(BaseLLM): completion_response["stop_reason"] ) - print_verbose(f"_is_function_call: {_is_function_call}; stream: {stream}") - if _is_function_call and stream: - print_verbose("INSIDE ANTHROPIC STREAMING TOOL CALLING CONDITION BLOCK") - # return an iterator - streaming_model_response = ModelResponse(stream=True) - streaming_model_response.choices[0].finish_reason = model_response.choices[ - 0 - ].finish_reason - # streaming_model_response.choices = [litellm.utils.StreamingChoices()] - streaming_choice = litellm.utils.StreamingChoices() - streaming_choice.index = model_response.choices[0].index - _tool_calls = [] - print_verbose( - f"type of model_response.choices[0]: {type(model_response.choices[0])}" - ) - print_verbose(f"type of streaming_choice: {type(streaming_choice)}") - if isinstance(model_response.choices[0], litellm.Choices): - if getattr( - model_response.choices[0].message, "tool_calls", None - ) is not None and isinstance( - model_response.choices[0].message.tool_calls, list - ): - for tool_call in model_response.choices[0].message.tool_calls: - _tool_call = {**tool_call.dict(), "index": 0} - _tool_calls.append(_tool_call) - delta_obj = litellm.utils.Delta( - content=getattr(model_response.choices[0].message, "content", None), - role=model_response.choices[0].message.role, - tool_calls=_tool_calls, - ) - streaming_choice.delta = delta_obj - streaming_model_response.choices = [streaming_choice] - completion_stream = ModelResponseIterator( - model_response=streaming_model_response - ) - print_verbose( - "Returns anthropic CustomStreamWrapper with 'cached_response' streaming object" - ) - return CustomStreamWrapper( - completion_stream=completion_stream, - model=model, - custom_llm_provider="cached_response", - logging_obj=logging_obj, - ) - ## CALCULATING USAGE prompt_tokens = completion_response["usage"]["input_tokens"] completion_tokens = completion_response["usage"]["output_tokens"] @@ -233,7 +345,7 @@ class 
AnthropicChatCompletion(BaseLLM): completion_tokens=completion_tokens, total_tokens=total_tokens, ) - model_response.usage = usage + setattr(model_response, "usage", usage) # type: ignore return model_response async def acompletion_stream_function( @@ -249,7 +361,7 @@ class AnthropicChatCompletion(BaseLLM): logging_obj, stream, _is_function_call, - data=None, + data: dict, optional_params=None, litellm_params=None, logger_fn=None, @@ -258,8 +370,9 @@ class AnthropicChatCompletion(BaseLLM): self.async_handler = AsyncHTTPHandler( timeout=httpx.Timeout(timeout=600.0, connect=5.0) ) + data["stream"] = True response = await self.async_handler.post( - api_base, headers=headers, data=json.dumps(data) + api_base, headers=headers, data=json.dumps(data), stream=True ) if response.status_code != 200: @@ -290,29 +403,44 @@ class AnthropicChatCompletion(BaseLLM): logging_obj, stream, _is_function_call, - data=None, - optional_params=None, + data: dict, + optional_params: dict, litellm_params=None, logger_fn=None, headers={}, - ): + ) -> Union[ModelResponse, CustomStreamWrapper]: self.async_handler = AsyncHTTPHandler( timeout=httpx.Timeout(timeout=600.0, connect=5.0) ) response = await self.async_handler.post( api_base, headers=headers, data=json.dumps(data) ) + if stream and _is_function_call: + return self.process_streaming_response( + model=model, + response=response, + model_response=model_response, + stream=stream, + logging_obj=logging_obj, + api_key=api_key, + data=data, + messages=messages, + print_verbose=print_verbose, + optional_params=optional_params, + encoding=encoding, + ) return self.process_response( model=model, response=response, model_response=model_response, - _is_function_call=_is_function_call, stream=stream, logging_obj=logging_obj, api_key=api_key, data=data, messages=messages, print_verbose=print_verbose, + optional_params=optional_params, + encoding=encoding, ) def completion( @@ -326,7 +454,7 @@ class AnthropicChatCompletion(BaseLLM): encoding, 
api_key, logging_obj, - optional_params=None, + optional_params: dict, acompletion=None, litellm_params=None, logger_fn=None, @@ -377,7 +505,9 @@ class AnthropicChatCompletion(BaseLLM): ## Handle Tool Calling if "tools" in optional_params: _is_function_call = True - headers["anthropic-beta"] = "tools-2024-04-04" + if "anthropic-beta" not in headers: + # default to v1 of "anthropic-beta" + headers["anthropic-beta"] = "tools-2024-05-16" anthropic_tools = [] for tool in optional_params["tools"]: @@ -485,17 +615,33 @@ class AnthropicChatCompletion(BaseLLM): raise AnthropicError( status_code=response.status_code, message=response.text ) + + if stream and _is_function_call: + return self.process_streaming_response( + model=model, + response=response, + model_response=model_response, + stream=stream, + logging_obj=logging_obj, + api_key=api_key, + data=data, + messages=messages, + print_verbose=print_verbose, + optional_params=optional_params, + encoding=encoding, + ) return self.process_response( model=model, response=response, model_response=model_response, - _is_function_call=_is_function_call, stream=stream, logging_obj=logging_obj, api_key=api_key, data=data, messages=messages, print_verbose=print_verbose, + optional_params=optional_params, + encoding=encoding, ) def embedding(self): diff --git a/litellm/llms/anthropic_text.py b/litellm/llms/anthropic_text.py index c9a9adfc2..0093d9f35 100644 --- a/litellm/llms/anthropic_text.py +++ b/litellm/llms/anthropic_text.py @@ -8,6 +8,8 @@ from litellm.utils import ModelResponse, Usage, CustomStreamWrapper import litellm from .prompt_templates.factory import prompt_factory, custom_prompt import httpx +from .base import BaseLLM +from litellm.llms.custom_httpx.http_handler import AsyncHTTPHandler, HTTPHandler class AnthropicConstants(Enum): @@ -94,98 +96,13 @@ def validate_environment(api_key, user_headers): return headers -def completion( - model: str, - messages: list, - api_base: str, - custom_prompt_dict: dict, - 
model_response: ModelResponse, - print_verbose: Callable, - encoding, - api_key, - logging_obj, - optional_params=None, - litellm_params=None, - logger_fn=None, - headers={}, -): - headers = validate_environment(api_key, headers) - if model in custom_prompt_dict: - # check if the model has a registered custom prompt - model_prompt_details = custom_prompt_dict[model] - prompt = custom_prompt( - role_dict=model_prompt_details["roles"], - initial_prompt_value=model_prompt_details["initial_prompt_value"], - final_prompt_value=model_prompt_details["final_prompt_value"], - messages=messages, - ) - else: - prompt = prompt_factory( - model=model, messages=messages, custom_llm_provider="anthropic" - ) +class AnthropicTextCompletion(BaseLLM): + def __init__(self) -> None: + super().__init__() - ## Load Config - config = litellm.AnthropicTextConfig.get_config() - for k, v in config.items(): - if ( - k not in optional_params - ): # completion(top_k=3) > anthropic_config(top_k=3) <- allows for dynamic variables to be passed in - optional_params[k] = v - - data = { - "model": model, - "prompt": prompt, - **optional_params, - } - - ## LOGGING - logging_obj.pre_call( - input=prompt, - api_key=api_key, - additional_args={ - "complete_input_dict": data, - "api_base": api_base, - "headers": headers, - }, - ) - - ## COMPLETION CALL - if "stream" in optional_params and optional_params["stream"] == True: - response = requests.post( - api_base, - headers=headers, - data=json.dumps(data), - stream=optional_params["stream"], - ) - - if response.status_code != 200: - raise AnthropicError( - status_code=response.status_code, message=response.text - ) - completion_stream = response.iter_lines() - stream_response = CustomStreamWrapper( - completion_stream=completion_stream, - model=model, - custom_llm_provider="anthropic", - logging_obj=logging_obj, - ) - return stream_response - - else: - response = requests.post(api_base, headers=headers, data=json.dumps(data)) - if response.status_code != 
200: - raise AnthropicError( - status_code=response.status_code, message=response.text - ) - - ## LOGGING - logging_obj.post_call( - input=prompt, - api_key=api_key, - original_response=response.text, - additional_args={"complete_input_dict": data}, - ) - print_verbose(f"raw model_response: {response.text}") + def _process_response( + self, model_response: ModelResponse, response, encoding, prompt: str, model: str + ): ## RESPONSE OBJECT try: completion_response = response.json() @@ -220,10 +137,208 @@ def completion( completion_tokens=completion_tokens, total_tokens=prompt_tokens + completion_tokens, ) - model_response.usage = usage + + setattr(model_response, "usage", usage) + return model_response + async def async_completion( + self, + model: str, + model_response: ModelResponse, + api_base: str, + logging_obj, + encoding, + headers: dict, + data: dict, + client=None, + ): + if client is None: + client = AsyncHTTPHandler(timeout=httpx.Timeout(timeout=600.0, connect=5.0)) -def embedding(): - # logic for parsing in - calling - parsing out model embedding calls - pass + response = await client.post(api_base, headers=headers, data=json.dumps(data)) + + if response.status_code != 200: + raise AnthropicError( + status_code=response.status_code, message=response.text + ) + + ## LOGGING + logging_obj.post_call( + input=data["prompt"], + api_key=headers.get("x-api-key"), + original_response=response.text, + additional_args={"complete_input_dict": data}, + ) + + response = self._process_response( + model_response=model_response, + response=response, + encoding=encoding, + prompt=data["prompt"], + model=model, + ) + return response + + async def async_streaming( + self, + model: str, + api_base: str, + logging_obj, + headers: dict, + data: Optional[dict], + client=None, + ): + if client is None: + client = AsyncHTTPHandler(timeout=httpx.Timeout(timeout=600.0, connect=5.0)) + + response = await client.post(api_base, headers=headers, data=json.dumps(data)) + + if 
response.status_code != 200: + raise AnthropicError( + status_code=response.status_code, message=response.text + ) + + completion_stream = response.aiter_lines() + + streamwrapper = CustomStreamWrapper( + completion_stream=completion_stream, + model=model, + custom_llm_provider="anthropic_text", + logging_obj=logging_obj, + ) + return streamwrapper + + def completion( + self, + model: str, + messages: list, + api_base: str, + acompletion: str, + custom_prompt_dict: dict, + model_response: ModelResponse, + print_verbose: Callable, + encoding, + api_key, + logging_obj, + optional_params=None, + litellm_params=None, + logger_fn=None, + headers={}, + client=None, + ): + headers = validate_environment(api_key, headers) + if model in custom_prompt_dict: + # check if the model has a registered custom prompt + model_prompt_details = custom_prompt_dict[model] + prompt = custom_prompt( + role_dict=model_prompt_details["roles"], + initial_prompt_value=model_prompt_details["initial_prompt_value"], + final_prompt_value=model_prompt_details["final_prompt_value"], + messages=messages, + ) + else: + prompt = prompt_factory( + model=model, messages=messages, custom_llm_provider="anthropic" + ) + + ## Load Config + config = litellm.AnthropicTextConfig.get_config() + for k, v in config.items(): + if ( + k not in optional_params + ): # completion(top_k=3) > anthropic_config(top_k=3) <- allows for dynamic variables to be passed in + optional_params[k] = v + + data = { + "model": model, + "prompt": prompt, + **optional_params, + } + + ## LOGGING + logging_obj.pre_call( + input=prompt, + api_key=api_key, + additional_args={ + "complete_input_dict": data, + "api_base": api_base, + "headers": headers, + }, + ) + + ## COMPLETION CALL + if "stream" in optional_params and optional_params["stream"] == True: + if acompletion == True: + return self.async_streaming( + model=model, + api_base=api_base, + logging_obj=logging_obj, + headers=headers, + data=data, + client=None, + ) + + if client is 
None: + client = HTTPHandler(timeout=httpx.Timeout(timeout=600.0, connect=5.0)) + + response = client.post( + api_base, + headers=headers, + data=json.dumps(data), + # stream=optional_params["stream"], + ) + + if response.status_code != 200: + raise AnthropicError( + status_code=response.status_code, message=response.text + ) + completion_stream = response.iter_lines() + stream_response = CustomStreamWrapper( + completion_stream=completion_stream, + model=model, + custom_llm_provider="anthropic_text", + logging_obj=logging_obj, + ) + return stream_response + elif acompletion == True: + return self.async_completion( + model=model, + model_response=model_response, + api_base=api_base, + logging_obj=logging_obj, + encoding=encoding, + headers=headers, + data=data, + client=client, + ) + else: + if client is None: + client = HTTPHandler(timeout=httpx.Timeout(timeout=600.0, connect=5.0)) + response = client.post(api_base, headers=headers, data=json.dumps(data)) + if response.status_code != 200: + raise AnthropicError( + status_code=response.status_code, message=response.text + ) + + ## LOGGING + logging_obj.post_call( + input=prompt, + api_key=api_key, + original_response=response.text, + additional_args={"complete_input_dict": data}, + ) + print_verbose(f"raw model_response: {response.text}") + + response = self._process_response( + model_response=model_response, + response=response, + encoding=encoding, + prompt=data["prompt"], + model=model, + ) + return response + + def embedding(self): + # logic for parsing in - calling - parsing out model embedding calls + pass diff --git a/litellm/llms/azure.py b/litellm/llms/azure.py index 7f268c25a..02fe4a08f 100644 --- a/litellm/llms/azure.py +++ b/litellm/llms/azure.py @@ -1,4 +1,4 @@ -from typing import Optional, Union, Any +from typing import Optional, Union, Any, Literal import types, requests from .base import BaseLLM from litellm.utils import ( @@ -8,14 +8,16 @@ from litellm.utils import ( CustomStreamWrapper, 
convert_to_model_response_object, TranscriptionResponse, + get_secret, ) -from typing import Callable, Optional, BinaryIO +from typing import Callable, Optional, BinaryIO, List from litellm import OpenAIConfig import litellm, json -import httpx +import httpx # type: ignore from .custom_httpx.azure_dall_e_2 import CustomHTTPTransport, AsyncCustomHTTPTransport from openai import AzureOpenAI, AsyncAzureOpenAI import uuid +import os class AzureOpenAIError(Exception): @@ -96,6 +98,21 @@ class AzureOpenAIConfig(OpenAIConfig): top_p, ) + def get_mapped_special_auth_params(self) -> dict: + return {"token": "azure_ad_token"} + + def map_special_auth_params(self, non_default_params: dict, optional_params: dict): + for param, value in non_default_params.items(): + if param == "token": + optional_params["azure_ad_token"] = value + return optional_params + + def get_eu_regions(self) -> List[str]: + """ + Source: https://learn.microsoft.com/en-us/azure/ai-services/openai/concepts/models#gpt-4-and-gpt-4-turbo-model-availability + """ + return ["europe", "sweden", "switzerland", "france", "uk"] + def select_azure_base_url_or_endpoint(azure_client_params: dict): # azure_client_params = { @@ -117,6 +134,51 @@ def select_azure_base_url_or_endpoint(azure_client_params: dict): return azure_client_params +def get_azure_ad_token_from_oidc(azure_ad_token: str): + azure_client_id = os.getenv("AZURE_CLIENT_ID", None) + azure_tenant = os.getenv("AZURE_TENANT_ID", None) + + if azure_client_id is None or azure_tenant is None: + raise AzureOpenAIError( + status_code=422, + message="AZURE_CLIENT_ID and AZURE_TENANT_ID must be set", + ) + + oidc_token = get_secret(azure_ad_token) + + if oidc_token is None: + raise AzureOpenAIError( + status_code=401, + message="OIDC token could not be retrieved from secret manager.", + ) + + req_token = httpx.post( + f"https://login.microsoftonline.com/{azure_tenant}/oauth2/v2.0/token", + data={ + "client_id": azure_client_id, + "grant_type": 
"client_credentials", + "scope": "https://cognitiveservices.azure.com/.default", + "client_assertion_type": "urn:ietf:params:oauth:client-assertion-type:jwt-bearer", + "client_assertion": oidc_token, + }, + ) + + if req_token.status_code != 200: + raise AzureOpenAIError( + status_code=req_token.status_code, + message=req_token.text, + ) + + possible_azure_ad_token = req_token.json().get("access_token", None) + + if possible_azure_ad_token is None: + raise AzureOpenAIError( + status_code=422, message="Azure AD Token not returned" + ) + + return possible_azure_ad_token + + class AzureChatCompletion(BaseLLM): def __init__(self) -> None: super().__init__() @@ -128,6 +190,8 @@ class AzureChatCompletion(BaseLLM): if api_key is not None: headers["api-key"] = api_key elif azure_ad_token is not None: + if azure_ad_token.startswith("oidc/"): + azure_ad_token = get_azure_ad_token_from_oidc(azure_ad_token) headers["Authorization"] = f"Bearer {azure_ad_token}" return headers @@ -142,7 +206,7 @@ class AzureChatCompletion(BaseLLM): api_type: str, azure_ad_token: str, print_verbose: Callable, - timeout, + timeout: Union[float, httpx.Timeout], logging_obj, optional_params, litellm_params, @@ -180,6 +244,9 @@ class AzureChatCompletion(BaseLLM): if api_key is not None: azure_client_params["api_key"] = api_key elif azure_ad_token is not None: + if azure_ad_token.startswith("oidc/"): + azure_ad_token = get_azure_ad_token_from_oidc(azure_ad_token) + azure_client_params["azure_ad_token"] = azure_ad_token if acompletion is True: @@ -267,6 +334,8 @@ class AzureChatCompletion(BaseLLM): if api_key is not None: azure_client_params["api_key"] = api_key elif azure_ad_token is not None: + if azure_ad_token.startswith("oidc/"): + azure_ad_token = get_azure_ad_token_from_oidc(azure_ad_token) azure_client_params["azure_ad_token"] = azure_ad_token if client is None: azure_client = AzureOpenAI(**azure_client_params) @@ -342,6 +411,8 @@ class AzureChatCompletion(BaseLLM): if api_key is not None: 
azure_client_params["api_key"] = api_key elif azure_ad_token is not None: + if azure_ad_token.startswith("oidc/"): + azure_ad_token = get_azure_ad_token_from_oidc(azure_ad_token) azure_client_params["azure_ad_token"] = azure_ad_token # setting Azure client @@ -413,6 +484,8 @@ class AzureChatCompletion(BaseLLM): if api_key is not None: azure_client_params["api_key"] = api_key elif azure_ad_token is not None: + if azure_ad_token.startswith("oidc/"): + azure_ad_token = get_azure_ad_token_from_oidc(azure_ad_token) azure_client_params["azure_ad_token"] = azure_ad_token if client is None: azure_client = AzureOpenAI(**azure_client_params) @@ -469,6 +542,8 @@ class AzureChatCompletion(BaseLLM): if api_key is not None: azure_client_params["api_key"] = api_key elif azure_ad_token is not None: + if azure_ad_token.startswith("oidc/"): + azure_ad_token = get_azure_ad_token_from_oidc(azure_ad_token) azure_client_params["azure_ad_token"] = azure_ad_token if client is None: azure_client = AsyncAzureOpenAI(**azure_client_params) @@ -590,6 +665,8 @@ class AzureChatCompletion(BaseLLM): if api_key is not None: azure_client_params["api_key"] = api_key elif azure_ad_token is not None: + if azure_ad_token.startswith("oidc/"): + azure_ad_token = get_azure_ad_token_from_oidc(azure_ad_token) azure_client_params["azure_ad_token"] = azure_ad_token ## LOGGING @@ -746,6 +823,8 @@ class AzureChatCompletion(BaseLLM): if api_key is not None: azure_client_params["api_key"] = api_key elif azure_ad_token is not None: + if azure_ad_token.startswith("oidc/"): + azure_ad_token = get_azure_ad_token_from_oidc(azure_ad_token) azure_client_params["azure_ad_token"] = azure_ad_token if aimg_generation == True: @@ -824,6 +903,8 @@ class AzureChatCompletion(BaseLLM): if api_key is not None: azure_client_params["api_key"] = api_key elif azure_ad_token is not None: + if azure_ad_token.startswith("oidc/"): + azure_ad_token = get_azure_ad_token_from_oidc(azure_ad_token) azure_client_params["azure_ad_token"] = 
azure_ad_token if max_retries is not None: @@ -943,6 +1024,81 @@ class AzureChatCompletion(BaseLLM): ) raise e + def get_headers( + self, + model: Optional[str], + api_key: str, + api_base: str, + api_version: str, + timeout: float, + mode: str, + messages: Optional[list] = None, + input: Optional[list] = None, + prompt: Optional[str] = None, + ) -> dict: + client_session = litellm.client_session or httpx.Client( + transport=CustomHTTPTransport(), # handle dall-e-2 calls + ) + if "gateway.ai.cloudflare.com" in api_base: + ## build base url - assume api base includes resource name + if not api_base.endswith("/"): + api_base += "/" + api_base += f"{model}" + client = AzureOpenAI( + base_url=api_base, + api_version=api_version, + api_key=api_key, + timeout=timeout, + http_client=client_session, + ) + model = None + # cloudflare ai gateway, needs model=None + else: + client = AzureOpenAI( + api_version=api_version, + azure_endpoint=api_base, + api_key=api_key, + timeout=timeout, + http_client=client_session, + ) + + # only run this check if it's not cloudflare ai gateway + if model is None and mode != "image_generation": + raise Exception("model is not set") + + completion = None + + if messages is None: + messages = [{"role": "user", "content": "Hey"}] + try: + completion = client.chat.completions.with_raw_response.create( + model=model, # type: ignore + messages=messages, # type: ignore + ) + except Exception as e: + raise e + response = {} + + if completion is None or not hasattr(completion, "headers"): + raise Exception("invalid completion response") + + if ( + completion.headers.get("x-ratelimit-remaining-requests", None) is not None + ): # not provided for dall-e requests + response["x-ratelimit-remaining-requests"] = completion.headers[ + "x-ratelimit-remaining-requests" + ] + + if completion.headers.get("x-ratelimit-remaining-tokens", None) is not None: + response["x-ratelimit-remaining-tokens"] = completion.headers[ + "x-ratelimit-remaining-tokens" + ] + + if 
completion.headers.get("x-ms-region", None) is not None: + response["x-ms-region"] = completion.headers["x-ms-region"] + + return response + async def ahealth_check( self, model: Optional[str], @@ -954,7 +1110,7 @@ class AzureChatCompletion(BaseLLM): messages: Optional[list] = None, input: Optional[list] = None, prompt: Optional[str] = None, - ): + ) -> dict: client_session = litellm.aclient_session or httpx.AsyncClient( transport=AsyncCustomHTTPTransport(), # handle dall-e-2 calls ) @@ -1031,4 +1187,8 @@ class AzureChatCompletion(BaseLLM): response["x-ratelimit-remaining-tokens"] = completion.headers[ "x-ratelimit-remaining-tokens" ] + + if completion.headers.get("x-ms-region", None) is not None: + response["x-ms-region"] = completion.headers["x-ms-region"] + return response diff --git a/litellm/llms/azure_text.py b/litellm/llms/azure_text.py index e0d547477..640ab8222 100644 --- a/litellm/llms/azure_text.py +++ b/litellm/llms/azure_text.py @@ -1,5 +1,5 @@ from typing import Optional, Union, Any -import types, requests +import types, requests # type: ignore from .base import BaseLLM from litellm.utils import ( ModelResponse, diff --git a/litellm/llms/base.py b/litellm/llms/base.py index 62b8069f0..d940d9471 100644 --- a/litellm/llms/base.py +++ b/litellm/llms/base.py @@ -1,12 +1,32 @@ ## This is a template base class to be used for adding new LLM providers via API calls import litellm -import httpx -from typing import Optional +import httpx, requests +from typing import Optional, Union +from litellm.utils import Logging class BaseLLM: _client_session: Optional[httpx.Client] = None + def process_response( + self, + model: str, + response: Union[requests.Response, httpx.Response], + model_response: litellm.utils.ModelResponse, + stream: bool, + logging_obj: Logging, + optional_params: dict, + api_key: str, + data: Union[dict, str], + messages: list, + print_verbose, + encoding, + ) -> litellm.utils.ModelResponse: + """ + Helper function to process the response 
across sync + async completion calls + """ + return model_response + def create_client_session(self): if litellm.client_session: _client_session = litellm.client_session diff --git a/litellm/llms/baseten.py b/litellm/llms/baseten.py index b94491014..643dae530 100644 --- a/litellm/llms/baseten.py +++ b/litellm/llms/baseten.py @@ -1,7 +1,7 @@ import os import json from enum import Enum -import requests +import requests # type: ignore import time from typing import Callable from litellm.utils import ModelResponse, Usage @@ -55,9 +55,11 @@ def completion( "inputs": prompt, "prompt": prompt, "parameters": optional_params, - "stream": True - if "stream" in optional_params and optional_params["stream"] == True - else False, + "stream": ( + True + if "stream" in optional_params and optional_params["stream"] == True + else False + ), } ## LOGGING @@ -71,9 +73,11 @@ def completion( completion_url_fragment_1 + model + completion_url_fragment_2, headers=headers, data=json.dumps(data), - stream=True - if "stream" in optional_params and optional_params["stream"] == True - else False, + stream=( + True + if "stream" in optional_params and optional_params["stream"] == True + else False + ), ) if "text/event-stream" in response.headers["Content-Type"] or ( "stream" in optional_params and optional_params["stream"] == True @@ -102,28 +106,28 @@ def completion( and "data" in completion_response["model_output"] and isinstance(completion_response["model_output"]["data"], list) ): - model_response["choices"][0]["message"][ - "content" - ] = completion_response["model_output"]["data"][0] + model_response["choices"][0]["message"]["content"] = ( + completion_response["model_output"]["data"][0] + ) elif isinstance(completion_response["model_output"], str): - model_response["choices"][0]["message"][ - "content" - ] = completion_response["model_output"] + model_response["choices"][0]["message"]["content"] = ( + completion_response["model_output"] + ) elif "completion" in completion_response 
and isinstance( completion_response["completion"], str ): - model_response["choices"][0]["message"][ - "content" - ] = completion_response["completion"] + model_response["choices"][0]["message"]["content"] = ( + completion_response["completion"] + ) elif isinstance(completion_response, list) and len(completion_response) > 0: if "generated_text" not in completion_response: raise BasetenError( message=f"Unable to parse response. Original response: {response.text}", status_code=response.status_code, ) - model_response["choices"][0]["message"][ - "content" - ] = completion_response[0]["generated_text"] + model_response["choices"][0]["message"]["content"] = ( + completion_response[0]["generated_text"] + ) ## GETTING LOGPROBS if ( "details" in completion_response[0] @@ -155,7 +159,8 @@ def completion( completion_tokens=completion_tokens, total_tokens=prompt_tokens + completion_tokens, ) - model_response.usage = usage + + setattr(model_response, "usage", usage) return model_response diff --git a/litellm/llms/bedrock.py b/litellm/llms/bedrock.py index 2a50739a0..4314032e7 100644 --- a/litellm/llms/bedrock.py +++ b/litellm/llms/bedrock.py @@ -4,7 +4,13 @@ from enum import Enum import time, uuid from typing import Callable, Optional, Any, Union, List import litellm -from litellm.utils import ModelResponse, get_secret, Usage, ImageResponse +from litellm.utils import ( + ModelResponse, + get_secret, + Usage, + ImageResponse, + map_finish_reason, +) from .prompt_templates.factory import ( prompt_factory, custom_prompt, @@ -29,6 +35,34 @@ class BedrockError(Exception): ) # Call the base class constructor with the parameters it needs +class AmazonBedrockGlobalConfig: + def __init__(self): + pass + + def get_mapped_special_auth_params(self) -> dict: + """ + Mapping of common auth params across bedrock/vertex/azure/watsonx + """ + return {"region_name": "aws_region_name"} + + def map_special_auth_params(self, non_default_params: dict, optional_params: dict): + mapped_params = 
self.get_mapped_special_auth_params() + for param, value in non_default_params.items(): + if param in mapped_params: + optional_params[mapped_params[param]] = value + return optional_params + + def get_eu_regions(self) -> List[str]: + """ + Source: https://www.aws-services.info/bedrock.html + """ + return [ + "eu-west-1", + "eu-west-3", + "eu-central-1", + ] + + class AmazonTitanConfig: """ Reference: https://us-west-2.console.aws.amazon.com/bedrock/home?region=us-west-2#/providers?model=titan-text-express-v1 @@ -139,6 +173,7 @@ class AmazonAnthropicClaude3Config: "stop", "temperature", "top_p", + "extra_headers", ] def map_openai_params(self, non_default_params: dict, optional_params: dict): @@ -506,6 +541,17 @@ class AmazonStabilityConfig: } +def add_custom_header(headers): + """Closure to capture the headers and add them.""" + + def callback(request, **kwargs): + """Actual callback function that Boto3 will call.""" + for header_name, header_value in headers.items(): + request.headers.add_header(header_name, header_value) + + return callback + + def init_bedrock_client( region_name=None, aws_access_key_id: Optional[str] = None, @@ -515,12 +561,13 @@ def init_bedrock_client( aws_session_name: Optional[str] = None, aws_profile_name: Optional[str] = None, aws_role_name: Optional[str] = None, - timeout: Optional[int] = None, + aws_web_identity_token: Optional[str] = None, + extra_headers: Optional[dict] = None, + timeout: Optional[Union[float, httpx.Timeout]] = None, ): # check for custom AWS_REGION_NAME and use it if not passed to init_bedrock_client litellm_aws_region_name = get_secret("AWS_REGION_NAME", None) standard_aws_region_name = get_secret("AWS_REGION", None) - ## CHECK IS 'os.environ/' passed in # Define the list of parameters to check params_to_check = [ @@ -531,6 +578,7 @@ def init_bedrock_client( aws_session_name, aws_profile_name, aws_role_name, + aws_web_identity_token, ] # Iterate over parameters and update if needed @@ -546,6 +594,7 @@ def 
init_bedrock_client( aws_session_name, aws_profile_name, aws_role_name, + aws_web_identity_token, ) = params_to_check ### SET REGION NAME @@ -574,10 +623,48 @@ def init_bedrock_client( import boto3 - config = boto3.session.Config(connect_timeout=timeout, read_timeout=timeout) + if isinstance(timeout, float): + config = boto3.session.Config(connect_timeout=timeout, read_timeout=timeout) + elif isinstance(timeout, httpx.Timeout): + config = boto3.session.Config( + connect_timeout=timeout.connect, read_timeout=timeout.read + ) + else: + config = boto3.session.Config() ### CHECK STS ### - if aws_role_name is not None and aws_session_name is not None: + if aws_web_identity_token is not None and aws_role_name is not None and aws_session_name is not None: + oidc_token = get_secret(aws_web_identity_token) + + if oidc_token is None: + raise BedrockError( + message="OIDC token could not be retrieved from secret manager.", + status_code=401, + ) + + sts_client = boto3.client( + "sts" + ) + + # https://docs.aws.amazon.com/STS/latest/APIReference/API_AssumeRoleWithWebIdentity.html + # https://boto3.amazonaws.com/v1/documentation/api/latest/reference/services/sts/client/assume_role_with_web_identity.html + sts_response = sts_client.assume_role_with_web_identity( + RoleArn=aws_role_name, + RoleSessionName=aws_session_name, + WebIdentityToken=oidc_token, + DurationSeconds=3600, + ) + + client = boto3.client( + service_name="bedrock-runtime", + aws_access_key_id=sts_response["Credentials"]["AccessKeyId"], + aws_secret_access_key=sts_response["Credentials"]["SecretAccessKey"], + aws_session_token=sts_response["Credentials"]["SessionToken"], + region_name=region_name, + endpoint_url=endpoint_url, + config=config, + ) + elif aws_role_name is not None and aws_session_name is not None: # use sts if role name passed in sts_client = boto3.client( "sts", @@ -629,6 +716,10 @@ def init_bedrock_client( endpoint_url=endpoint_url, config=config, ) + if extra_headers: + 
client.meta.events.register( + "before-sign.bedrock-runtime.*", add_custom_header(extra_headers) + ) return client @@ -653,6 +744,10 @@ def convert_messages_to_prompt(model, messages, provider, custom_prompt_dict): prompt = prompt_factory( model=model, messages=messages, custom_llm_provider="bedrock" ) + elif provider == "meta": + prompt = prompt_factory( + model=model, messages=messages, custom_llm_provider="bedrock" + ) else: prompt = "" for message in messages: @@ -688,6 +783,7 @@ def completion( litellm_params=None, logger_fn=None, timeout=None, + extra_headers: Optional[dict] = None, ): exception_mapping_worked = False _is_function_call = False @@ -703,6 +799,7 @@ def completion( aws_bedrock_runtime_endpoint = optional_params.pop( "aws_bedrock_runtime_endpoint", None ) + aws_web_identity_token = optional_params.pop("aws_web_identity_token", None) # use passed in BedrockRuntime.Client if provided, otherwise create a new one client = optional_params.pop("aws_bedrock_client", None) @@ -717,6 +814,8 @@ def completion( aws_role_name=aws_role_name, aws_session_name=aws_session_name, aws_profile_name=aws_profile_name, + aws_web_identity_token=aws_web_identity_token, + extra_headers=extra_headers, timeout=timeout, ) @@ -930,7 +1029,7 @@ def completion( original_response=json.dumps(response_body), additional_args={"complete_input_dict": data}, ) - print_verbose(f"raw model_response: {response}") + print_verbose(f"raw model_response: {response_body}") ## RESPONSE OBJECT outputText = "default" if provider == "ai21": @@ -1021,14 +1120,16 @@ def completion( logging_obj=logging_obj, ) - model_response["finish_reason"] = response_body["stop_reason"] + model_response["finish_reason"] = map_finish_reason( + response_body["stop_reason"] + ) _usage = litellm.Usage( prompt_tokens=response_body["usage"]["input_tokens"], completion_tokens=response_body["usage"]["output_tokens"], total_tokens=response_body["usage"]["input_tokens"] + response_body["usage"]["output_tokens"], ) - 
model_response.usage = _usage + setattr(model_response, "usage", _usage) else: outputText = response_body["completion"] model_response["finish_reason"] = response_body["stop_reason"] @@ -1043,6 +1144,7 @@ def completion( outputText = response_body.get("results")[0].get("outputText") response_metadata = response.get("ResponseMetadata", {}) + if response_metadata.get("HTTPStatusCode", 500) >= 400: raise BedrockError( message=outputText, @@ -1071,16 +1173,20 @@ def completion( status_code=response_metadata.get("HTTPStatusCode", 500), ) - ## CALCULATING USAGE - baseten charges on time, not tokens - have some mapping of cost here. - if getattr(model_response.usage, "total_tokens", None) is None: + ## CALCULATING USAGE - bedrock charges on time, not tokens - have some mapping of cost here. + if not hasattr(model_response, "usage"): + setattr(model_response, "usage", Usage()) + if getattr(model_response.usage, "total_tokens", None) is None: # type: ignore prompt_tokens = response_metadata.get( "x-amzn-bedrock-input-token-count", len(encoding.encode(prompt)) ) + _text_response = model_response["choices"][0]["message"].get("content", "") completion_tokens = response_metadata.get( "x-amzn-bedrock-output-token-count", len( encoding.encode( - model_response["choices"][0]["message"].get("content", "") + _text_response, + disallowed_special=(), ) ), ) @@ -1089,7 +1195,7 @@ def completion( completion_tokens=completion_tokens, total_tokens=prompt_tokens + completion_tokens, ) - model_response.usage = usage + setattr(model_response, "usage", usage) model_response["created"] = int(time.time()) model_response["model"] = model @@ -1167,7 +1273,7 @@ def _embedding_func_single( "input_type", "search_document" ) # aws bedrock example default - https://us-east-1.console.aws.amazon.com/bedrock/home?region=us-east-1#/providers?model=cohere.embed-english-v3 data = {"texts": [input], **inference_params} # type: ignore - body = json.dumps(data).encode("utf-8") + body = 
json.dumps(data).encode("utf-8") # type: ignore ## LOGGING request_str = f""" response = client.invoke_model( @@ -1231,6 +1337,7 @@ def embedding( aws_bedrock_runtime_endpoint = optional_params.pop( "aws_bedrock_runtime_endpoint", None ) + aws_web_identity_token = optional_params.pop("aws_web_identity_token", None) # use passed in BedrockRuntime.Client if provided, otherwise create a new one client = init_bedrock_client( @@ -1238,6 +1345,7 @@ def embedding( aws_secret_access_key=aws_secret_access_key, aws_region_name=aws_region_name, aws_bedrock_runtime_endpoint=aws_bedrock_runtime_endpoint, + aws_web_identity_token=aws_web_identity_token, aws_role_name=aws_role_name, aws_session_name=aws_session_name, ) @@ -1320,6 +1428,7 @@ def image_generation( aws_bedrock_runtime_endpoint = optional_params.pop( "aws_bedrock_runtime_endpoint", None ) + aws_web_identity_token = optional_params.pop("aws_web_identity_token", None) # use passed in BedrockRuntime.Client if provided, otherwise create a new one client = init_bedrock_client( @@ -1327,6 +1436,7 @@ def image_generation( aws_secret_access_key=aws_secret_access_key, aws_region_name=aws_region_name, aws_bedrock_runtime_endpoint=aws_bedrock_runtime_endpoint, + aws_web_identity_token=aws_web_identity_token, aws_role_name=aws_role_name, aws_session_name=aws_session_name, timeout=timeout, @@ -1359,7 +1469,7 @@ def image_generation( ## LOGGING request_str = f""" response = client.invoke_model( - body={body}, + body={body}, # type: ignore modelId={modelId}, accept="application/json", contentType="application/json", diff --git a/litellm/llms/bedrock_httpx.py b/litellm/llms/bedrock_httpx.py new file mode 100644 index 000000000..1ff3767bd --- /dev/null +++ b/litellm/llms/bedrock_httpx.py @@ -0,0 +1,733 @@ +# What is this? +## Initial implementation of calling bedrock via httpx client (allows for async calls). 
+## V0 - just covers cohere command-r support + +import os, types +import json +from enum import Enum +import requests, copy # type: ignore +import time +from typing import ( + Callable, + Optional, + List, + Literal, + Union, + Any, + TypedDict, + Tuple, + Iterator, + AsyncIterator, +) +from litellm.utils import ( + ModelResponse, + Usage, + map_finish_reason, + CustomStreamWrapper, + Message, + Choices, + get_secret, + Logging, +) +import litellm +from .prompt_templates.factory import prompt_factory, custom_prompt, cohere_message_pt +from litellm.llms.custom_httpx.http_handler import AsyncHTTPHandler, HTTPHandler +from .base import BaseLLM +import httpx # type: ignore +from .bedrock import BedrockError, convert_messages_to_prompt +from litellm.types.llms.bedrock import * + + +class AmazonCohereChatConfig: + """ + Reference - https://docs.aws.amazon.com/bedrock/latest/userguide/model-parameters-cohere-command-r-plus.html + """ + + documents: Optional[List[Document]] = None + search_queries_only: Optional[bool] = None + preamble: Optional[str] = None + max_tokens: Optional[int] = None + temperature: Optional[float] = None + p: Optional[float] = None + k: Optional[float] = None + prompt_truncation: Optional[str] = None + frequency_penalty: Optional[float] = None + presence_penalty: Optional[float] = None + seed: Optional[int] = None + return_prompt: Optional[bool] = None + stop_sequences: Optional[List[str]] = None + raw_prompting: Optional[bool] = None + + def __init__( + self, + documents: Optional[List[Document]] = None, + search_queries_only: Optional[bool] = None, + preamble: Optional[str] = None, + max_tokens: Optional[int] = None, + temperature: Optional[float] = None, + p: Optional[float] = None, + k: Optional[float] = None, + prompt_truncation: Optional[str] = None, + frequency_penalty: Optional[float] = None, + presence_penalty: Optional[float] = None, + seed: Optional[int] = None, + return_prompt: Optional[bool] = None, + stop_sequences: Optional[str] = 
None, + raw_prompting: Optional[bool] = None, + ) -> None: + locals_ = locals() + for key, value in locals_.items(): + if key != "self" and value is not None: + setattr(self.__class__, key, value) + + @classmethod + def get_config(cls): + return { + k: v + for k, v in cls.__dict__.items() + if not k.startswith("__") + and not isinstance( + v, + ( + types.FunctionType, + types.BuiltinFunctionType, + classmethod, + staticmethod, + ), + ) + and v is not None + } + + def get_supported_openai_params(self) -> List[str]: + return [ + "max_tokens", + "stream", + "stop", + "temperature", + "top_p", + "frequency_penalty", + "presence_penalty", + "seed", + "stop", + ] + + def map_openai_params( + self, non_default_params: dict, optional_params: dict + ) -> dict: + for param, value in non_default_params.items(): + if param == "max_tokens": + optional_params["max_tokens"] = value + if param == "stream": + optional_params["stream"] = value + if param == "stop": + if isinstance(value, str): + value = [value] + optional_params["stop_sequences"] = value + if param == "temperature": + optional_params["temperature"] = value + if param == "top_p": + optional_params["p"] = value + if param == "frequency_penalty": + optional_params["frequency_penalty"] = value + if param == "presence_penalty": + optional_params["presence_penalty"] = value + if "seed": + optional_params["seed"] = value + return optional_params + + +class BedrockLLM(BaseLLM): + """ + Example call + + ``` + curl --location --request POST 'https://bedrock-runtime.{aws_region_name}.amazonaws.com/model/{bedrock_model_name}/invoke' \ + --header 'Content-Type: application/json' \ + --header 'Accept: application/json' \ + --user "$AWS_ACCESS_KEY_ID":"$AWS_SECRET_ACCESS_KEY" \ + --aws-sigv4 "aws:amz:us-east-1:bedrock" \ + --data-raw '{ + "prompt": "Hi", + "temperature": 0, + "p": 0.9, + "max_tokens": 4096 + }' + ``` + """ + + def __init__(self) -> None: + super().__init__() + + def convert_messages_to_prompt( + self, model, 
messages, provider, custom_prompt_dict + ) -> Tuple[str, Optional[list]]: + # handle anthropic prompts and amazon titan prompts + prompt = "" + chat_history: Optional[list] = None + if provider == "anthropic" or provider == "amazon": + if model in custom_prompt_dict: + # check if the model has a registered custom prompt + model_prompt_details = custom_prompt_dict[model] + prompt = custom_prompt( + role_dict=model_prompt_details["roles"], + initial_prompt_value=model_prompt_details["initial_prompt_value"], + final_prompt_value=model_prompt_details["final_prompt_value"], + messages=messages, + ) + else: + prompt = prompt_factory( + model=model, messages=messages, custom_llm_provider="bedrock" + ) + elif provider == "mistral": + prompt = prompt_factory( + model=model, messages=messages, custom_llm_provider="bedrock" + ) + elif provider == "meta": + prompt = prompt_factory( + model=model, messages=messages, custom_llm_provider="bedrock" + ) + elif provider == "cohere": + prompt, chat_history = cohere_message_pt(messages=messages) + else: + prompt = "" + for message in messages: + if "role" in message: + if message["role"] == "user": + prompt += f"{message['content']}" + else: + prompt += f"{message['content']}" + else: + prompt += f"{message['content']}" + return prompt, chat_history # type: ignore + + def get_credentials( + self, + aws_access_key_id: Optional[str] = None, + aws_secret_access_key: Optional[str] = None, + aws_region_name: Optional[str] = None, + aws_session_name: Optional[str] = None, + aws_profile_name: Optional[str] = None, + aws_role_name: Optional[str] = None, + ): + """ + Return a boto3.Credentials object + """ + import boto3 + + ## CHECK IS 'os.environ/' passed in + params_to_check: List[Optional[str]] = [ + aws_access_key_id, + aws_secret_access_key, + aws_region_name, + aws_session_name, + aws_profile_name, + aws_role_name, + ] + + # Iterate over parameters and update if needed + for i, param in enumerate(params_to_check): + if param and 
param.startswith("os.environ/"): + _v = get_secret(param) + if _v is not None and isinstance(_v, str): + params_to_check[i] = _v + # Assign updated values back to parameters + ( + aws_access_key_id, + aws_secret_access_key, + aws_region_name, + aws_session_name, + aws_profile_name, + aws_role_name, + ) = params_to_check + + ### CHECK STS ### + if aws_role_name is not None and aws_session_name is not None: + sts_client = boto3.client( + "sts", + aws_access_key_id=aws_access_key_id, # [OPTIONAL] + aws_secret_access_key=aws_secret_access_key, # [OPTIONAL] + ) + + sts_response = sts_client.assume_role( + RoleArn=aws_role_name, RoleSessionName=aws_session_name + ) + + return sts_response["Credentials"] + elif aws_profile_name is not None: ### CHECK SESSION ### + # uses auth values from AWS profile usually stored in ~/.aws/credentials + client = boto3.Session(profile_name=aws_profile_name) + + return client.get_credentials() + else: + session = boto3.Session( + aws_access_key_id=aws_access_key_id, + aws_secret_access_key=aws_secret_access_key, + region_name=aws_region_name, + ) + + return session.get_credentials() + + def process_response( + self, + model: str, + response: Union[requests.Response, httpx.Response], + model_response: ModelResponse, + stream: bool, + logging_obj: Logging, + optional_params: dict, + api_key: str, + data: Union[dict, str], + messages: List, + print_verbose, + encoding, + ) -> ModelResponse: + ## LOGGING + logging_obj.post_call( + input=messages, + api_key=api_key, + original_response=response.text, + additional_args={"complete_input_dict": data}, + ) + print_verbose(f"raw model_response: {response.text}") + + ## RESPONSE OBJECT + try: + completion_response = response.json() + except: + raise BedrockError(message=response.text, status_code=422) + + try: + model_response.choices[0].message.content = completion_response["text"] # type: ignore + except Exception as e: + raise BedrockError(message=response.text, status_code=422) + + ## CALCULATING 
USAGE - bedrock returns usage in the headers + prompt_tokens = int( + response.headers.get( + "x-amzn-bedrock-input-token-count", + len(encoding.encode("".join(m.get("content", "") for m in messages))), + ) + ) + completion_tokens = int( + response.headers.get( + "x-amzn-bedrock-output-token-count", + len( + encoding.encode( + model_response.choices[0].message.content, # type: ignore + disallowed_special=(), + ) + ), + ) + ) + + model_response["created"] = int(time.time()) + model_response["model"] = model + usage = Usage( + prompt_tokens=prompt_tokens, + completion_tokens=completion_tokens, + total_tokens=prompt_tokens + completion_tokens, + ) + setattr(model_response, "usage", usage) + + return model_response + + def completion( + self, + model: str, + messages: list, + custom_prompt_dict: dict, + model_response: ModelResponse, + print_verbose: Callable, + encoding, + logging_obj, + optional_params: dict, + acompletion: bool, + timeout: Optional[Union[float, httpx.Timeout]], + litellm_params=None, + logger_fn=None, + extra_headers: Optional[dict] = None, + client: Optional[Union[AsyncHTTPHandler, HTTPHandler]] = None, + ) -> Union[ModelResponse, CustomStreamWrapper]: + try: + import boto3 + + from botocore.auth import SigV4Auth + from botocore.awsrequest import AWSRequest + from botocore.credentials import Credentials + except ImportError as e: + raise ImportError("Missing boto3 to call bedrock. 
Run 'pip install boto3'.") + + ## SETUP ## + stream = optional_params.pop("stream", None) + + ## CREDENTIALS ## + # pop aws_secret_access_key, aws_access_key_id, aws_region_name from kwargs, since completion calls fail with them + aws_secret_access_key = optional_params.pop("aws_secret_access_key", None) + aws_access_key_id = optional_params.pop("aws_access_key_id", None) + aws_region_name = optional_params.pop("aws_region_name", None) + aws_role_name = optional_params.pop("aws_role_name", None) + aws_session_name = optional_params.pop("aws_session_name", None) + aws_profile_name = optional_params.pop("aws_profile_name", None) + aws_bedrock_runtime_endpoint = optional_params.pop( + "aws_bedrock_runtime_endpoint", None + ) # https://bedrock-runtime.{region_name}.amazonaws.com + + ### SET REGION NAME ### + if aws_region_name is None: + # check env # + litellm_aws_region_name = get_secret("AWS_REGION_NAME", None) + + if litellm_aws_region_name is not None and isinstance( + litellm_aws_region_name, str + ): + aws_region_name = litellm_aws_region_name + + standard_aws_region_name = get_secret("AWS_REGION", None) + if standard_aws_region_name is not None and isinstance( + standard_aws_region_name, str + ): + aws_region_name = standard_aws_region_name + + if aws_region_name is None: + aws_region_name = "us-west-2" + + credentials: Credentials = self.get_credentials( + aws_access_key_id=aws_access_key_id, + aws_secret_access_key=aws_secret_access_key, + aws_region_name=aws_region_name, + aws_session_name=aws_session_name, + aws_profile_name=aws_profile_name, + aws_role_name=aws_role_name, + ) + + ### SET RUNTIME ENDPOINT ### + endpoint_url = "" + env_aws_bedrock_runtime_endpoint = get_secret("AWS_BEDROCK_RUNTIME_ENDPOINT") + if aws_bedrock_runtime_endpoint is not None and isinstance( + aws_bedrock_runtime_endpoint, str + ): + endpoint_url = aws_bedrock_runtime_endpoint + elif env_aws_bedrock_runtime_endpoint and isinstance( + env_aws_bedrock_runtime_endpoint, str + ): + 
endpoint_url = env_aws_bedrock_runtime_endpoint + else: + endpoint_url = f"https://bedrock-runtime.{aws_region_name}.amazonaws.com" + + if stream is not None and stream == True: + endpoint_url = f"{endpoint_url}/model/{model}/invoke-with-response-stream" + else: + endpoint_url = f"{endpoint_url}/model/{model}/invoke" + + sigv4 = SigV4Auth(credentials, "bedrock", aws_region_name) + + provider = model.split(".")[0] + prompt, chat_history = self.convert_messages_to_prompt( + model, messages, provider, custom_prompt_dict + ) + inference_params = copy.deepcopy(optional_params) + + if provider == "cohere": + if model.startswith("cohere.command-r"): + ## LOAD CONFIG + config = litellm.AmazonCohereChatConfig().get_config() + for k, v in config.items(): + if ( + k not in inference_params + ): # completion(top_k=3) > anthropic_config(top_k=3) <- allows for dynamic variables to be passed in + inference_params[k] = v + _data = {"message": prompt, **inference_params} + if chat_history is not None: + _data["chat_history"] = chat_history + data = json.dumps(_data) + else: + ## LOAD CONFIG + config = litellm.AmazonCohereConfig.get_config() + for k, v in config.items(): + if ( + k not in inference_params + ): # completion(top_k=3) > anthropic_config(top_k=3) <- allows for dynamic variables to be passed in + inference_params[k] = v + if stream == True: + inference_params["stream"] = ( + True # cohere requires stream = True in inference params + ) + data = json.dumps({"prompt": prompt, **inference_params}) + else: + raise Exception("UNSUPPORTED PROVIDER") + + ## COMPLETION CALL + + headers = {"Content-Type": "application/json"} + if extra_headers is not None: + headers = {"Content-Type": "application/json", **extra_headers} + request = AWSRequest( + method="POST", url=endpoint_url, data=data, headers=headers + ) + sigv4.add_auth(request) + prepped = request.prepare() + + ## LOGGING + logging_obj.pre_call( + input=messages, + api_key="", + additional_args={ + "complete_input_dict": 
data, + "api_base": prepped.url, + "headers": prepped.headers, + }, + ) + + ### ROUTING (ASYNC, STREAMING, SYNC) + if acompletion: + if isinstance(client, HTTPHandler): + client = None + if stream: + return self.async_streaming( + model=model, + messages=messages, + data=data, + api_base=prepped.url, + model_response=model_response, + print_verbose=print_verbose, + encoding=encoding, + logging_obj=logging_obj, + optional_params=optional_params, + stream=True, + litellm_params=litellm_params, + logger_fn=logger_fn, + headers=prepped.headers, + timeout=timeout, + client=client, + ) # type: ignore + ### ASYNC COMPLETION + return self.async_completion( + model=model, + messages=messages, + data=data, + api_base=prepped.url, + model_response=model_response, + print_verbose=print_verbose, + encoding=encoding, + logging_obj=logging_obj, + optional_params=optional_params, + stream=False, + litellm_params=litellm_params, + logger_fn=logger_fn, + headers=prepped.headers, + timeout=timeout, + client=client, + ) # type: ignore + + if client is None or isinstance(client, AsyncHTTPHandler): + _params = {} + if timeout is not None: + if isinstance(timeout, float) or isinstance(timeout, int): + timeout = httpx.Timeout(timeout) + _params["timeout"] = timeout + self.client = HTTPHandler(**_params) # type: ignore + else: + self.client = client + if stream is not None and stream == True: + response = self.client.post( + url=prepped.url, + headers=prepped.headers, # type: ignore + data=data, + stream=stream, + ) + + if response.status_code != 200: + raise BedrockError( + status_code=response.status_code, message=response.text + ) + + decoder = AWSEventStreamDecoder() + + completion_stream = decoder.iter_bytes(response.iter_bytes(chunk_size=1024)) + streaming_response = CustomStreamWrapper( + completion_stream=completion_stream, + model=model, + custom_llm_provider="bedrock", + logging_obj=logging_obj, + ) + return streaming_response + + response = self.client.post(url=prepped.url, 
headers=prepped.headers, data=data) # type: ignore + + try: + response.raise_for_status() + except httpx.HTTPStatusError as err: + error_code = err.response.status_code + raise BedrockError(status_code=error_code, message=response.text) + + return self.process_response( + model=model, + response=response, + model_response=model_response, + stream=stream, + logging_obj=logging_obj, + optional_params=optional_params, + api_key="", + data=data, + messages=messages, + print_verbose=print_verbose, + encoding=encoding, + ) + + async def async_completion( + self, + model: str, + messages: list, + api_base: str, + model_response: ModelResponse, + print_verbose: Callable, + data: str, + timeout: Optional[Union[float, httpx.Timeout]], + encoding, + logging_obj, + stream, + optional_params: dict, + litellm_params=None, + logger_fn=None, + headers={}, + client: Optional[AsyncHTTPHandler] = None, + ) -> ModelResponse: + if client is None: + _params = {} + if timeout is not None: + if isinstance(timeout, float) or isinstance(timeout, int): + timeout = httpx.Timeout(timeout) + _params["timeout"] = timeout + self.client = AsyncHTTPHandler(**_params) # type: ignore + else: + self.client = client # type: ignore + + response = await self.client.post(api_base, headers=headers, data=data) # type: ignore + return self.process_response( + model=model, + response=response, + model_response=model_response, + stream=stream, + logging_obj=logging_obj, + api_key="", + data=data, + messages=messages, + print_verbose=print_verbose, + optional_params=optional_params, + encoding=encoding, + ) + + async def async_streaming( + self, + model: str, + messages: list, + api_base: str, + model_response: ModelResponse, + print_verbose: Callable, + data: str, + timeout: Optional[Union[float, httpx.Timeout]], + encoding, + logging_obj, + stream, + optional_params: dict, + litellm_params=None, + logger_fn=None, + headers={}, + client: Optional[AsyncHTTPHandler] = None, + ) -> CustomStreamWrapper: + if 
client is None: + _params = {} + if timeout is not None: + if isinstance(timeout, float) or isinstance(timeout, int): + timeout = httpx.Timeout(timeout) + _params["timeout"] = timeout + self.client = AsyncHTTPHandler(**_params) # type: ignore + else: + self.client = client # type: ignore + + response = await self.client.post(api_base, headers=headers, data=data, stream=True) # type: ignore + + if response.status_code != 200: + raise BedrockError(status_code=response.status_code, message=response.text) + + decoder = AWSEventStreamDecoder() + + completion_stream = decoder.aiter_bytes(response.aiter_bytes(chunk_size=1024)) + streaming_response = CustomStreamWrapper( + completion_stream=completion_stream, + model=model, + custom_llm_provider="bedrock", + logging_obj=logging_obj, + ) + return streaming_response + + def embedding(self, *args, **kwargs): + return super().embedding(*args, **kwargs) + + +def get_response_stream_shape(): + from botocore.model import ServiceModel + from botocore.loaders import Loader + + loader = Loader() + bedrock_service_dict = loader.load_service_model("bedrock-runtime", "service-2") + bedrock_service_model = ServiceModel(bedrock_service_dict) + return bedrock_service_model.shape_for("ResponseStream") + + +class AWSEventStreamDecoder: + def __init__(self) -> None: + from botocore.parsers import EventStreamJSONParser + + self.parser = EventStreamJSONParser() + + def iter_bytes(self, iterator: Iterator[bytes]) -> Iterator[GenericStreamingChunk]: + """Given an iterator that yields lines, iterate over it & yield every event encountered""" + from botocore.eventstream import EventStreamBuffer + + event_stream_buffer = EventStreamBuffer() + for chunk in iterator: + event_stream_buffer.add_data(chunk) + for event in event_stream_buffer: + message = self._parse_message_from_event(event) + if message: + # sse_event = ServerSentEvent(data=message, event="completion") + _data = json.loads(message) + streaming_chunk: GenericStreamingChunk = 
GenericStreamingChunk( + text=_data.get("text", ""), + is_finished=_data.get("is_finished", False), + finish_reason=_data.get("finish_reason", ""), + ) + yield streaming_chunk + + async def aiter_bytes( + self, iterator: AsyncIterator[bytes] + ) -> AsyncIterator[GenericStreamingChunk]: + """Given an async iterator that yields lines, iterate over it & yield every event encountered""" + from botocore.eventstream import EventStreamBuffer + + event_stream_buffer = EventStreamBuffer() + async for chunk in iterator: + event_stream_buffer.add_data(chunk) + for event in event_stream_buffer: + message = self._parse_message_from_event(event) + if message: + _data = json.loads(message) + streaming_chunk: GenericStreamingChunk = GenericStreamingChunk( + text=_data.get("text", ""), + is_finished=_data.get("is_finished", False), + finish_reason=_data.get("finish_reason", ""), + ) + yield streaming_chunk + + def _parse_message_from_event(self, event) -> Optional[str]: + response_dict = event.to_response_dict() + parsed_response = self.parser.parse(response_dict, get_response_stream_shape()) + if response_dict["status_code"] != 200: + raise ValueError(f"Bad response code, expected 200: {response_dict}") + + chunk = parsed_response.get("chunk") + if not chunk: + return None + + return chunk.get("bytes").decode() # type: ignore[no-any-return] diff --git a/litellm/llms/clarifai.py b/litellm/llms/clarifai.py new file mode 100644 index 000000000..e07a8d9e8 --- /dev/null +++ b/litellm/llms/clarifai.py @@ -0,0 +1,328 @@ +import os, types, traceback +import json +import requests +import time +from typing import Callable, Optional +from litellm.utils import ModelResponse, Usage, Choices, Message, CustomStreamWrapper +import litellm +import httpx +from litellm.llms.custom_httpx.http_handler import AsyncHTTPHandler +from .prompt_templates.factory import prompt_factory, custom_prompt + + +class ClarifaiError(Exception): + def __init__(self, status_code, message, url): + self.status_code = 
status_code + self.message = message + self.request = httpx.Request( + method="POST", url=url + ) + self.response = httpx.Response(status_code=status_code, request=self.request) + super().__init__( + self.message + ) + +class ClarifaiConfig: + """ + Reference: https://clarifai.com/meta/Llama-2/models/llama2-70b-chat + TODO fill in the details + """ + max_tokens: Optional[int] = None + temperature: Optional[int] = None + top_k: Optional[int] = None + + def __init__( + self, + max_tokens: Optional[int] = None, + temperature: Optional[int] = None, + top_k: Optional[int] = None, + ) -> None: + locals_ = locals() + for key, value in locals_.items(): + if key != "self" and value is not None: + setattr(self.__class__, key, value) + + @classmethod + def get_config(cls): + return { + k: v + for k, v in cls.__dict__.items() + if not k.startswith("__") + and not isinstance( + v, + ( + types.FunctionType, + types.BuiltinFunctionType, + classmethod, + staticmethod, + ), + ) + and v is not None + } + +def validate_environment(api_key): + headers = { + "accept": "application/json", + "content-type": "application/json", + } + if api_key: + headers["Authorization"] = f"Bearer {api_key}" + return headers + +def completions_to_model(payload): + # if payload["n"] != 1: + # raise HTTPException( + # status_code=422, + # detail="Only one generation is supported. 
Please set candidate_count to 1.", + # ) + + params = {} + if temperature := payload.get("temperature"): + params["temperature"] = temperature + if max_tokens := payload.get("max_tokens"): + params["max_tokens"] = max_tokens + return { + "inputs": [{"data": {"text": {"raw": payload["prompt"]}}}], + "model": {"output_info": {"params": params}}, +} + +def process_response( + model, + prompt, + response, + model_response, + api_key, + data, + encoding, + logging_obj + ): + logging_obj.post_call( + input=prompt, + api_key=api_key, + original_response=response.text, + additional_args={"complete_input_dict": data}, + ) + ## RESPONSE OBJECT + try: + completion_response = response.json() + except Exception: + raise ClarifaiError( + message=response.text, status_code=response.status_code, url=model + ) + # print(completion_response) + try: + choices_list = [] + for idx, item in enumerate(completion_response["outputs"]): + if len(item["data"]["text"]["raw"]) > 0: + message_obj = Message(content=item["data"]["text"]["raw"]) + else: + message_obj = Message(content=None) + choice_obj = Choices( + finish_reason="stop", + index=idx + 1, #check + message=message_obj, + ) + choices_list.append(choice_obj) + model_response["choices"] = choices_list + + except Exception as e: + raise ClarifaiError( + message=traceback.format_exc(), status_code=response.status_code, url=model + ) + + # Calculate Usage + prompt_tokens = len(encoding.encode(prompt)) + completion_tokens = len( + encoding.encode(model_response["choices"][0]["message"].get("content")) + ) + model_response["model"] = model + model_response["usage"] = Usage( + prompt_tokens=prompt_tokens, + completion_tokens=completion_tokens, + total_tokens=prompt_tokens + completion_tokens, + ) + return model_response + +def convert_model_to_url(model: str, api_base: str): + user_id, app_id, model_id = model.split(".") + return f"{api_base}/users/{user_id}/apps/{app_id}/models/{model_id}/outputs" + +def get_prompt_model_name(url: str): + 
clarifai_model_name = url.split("/")[-2] + if "claude" in clarifai_model_name: + return "anthropic", clarifai_model_name.replace("_", ".") + if ("llama" in clarifai_model_name)or ("mistral" in clarifai_model_name): + return "", "meta-llama/llama-2-chat" + else: + return "", clarifai_model_name + +async def async_completion( + model: str, + prompt: str, + api_base: str, + custom_prompt_dict: dict, + model_response: ModelResponse, + print_verbose: Callable, + encoding, + api_key, + logging_obj, + data=None, + optional_params=None, + litellm_params=None, + logger_fn=None, + headers={}): + + async_handler = AsyncHTTPHandler( + timeout=httpx.Timeout(timeout=600.0, connect=5.0) + ) + response = await async_handler.post( + api_base, headers=headers, data=json.dumps(data) + ) + + return process_response( + model=model, + prompt=prompt, + response=response, + model_response=model_response, + api_key=api_key, + data=data, + encoding=encoding, + logging_obj=logging_obj, + ) + +def completion( + model: str, + messages: list, + api_base: str, + model_response: ModelResponse, + print_verbose: Callable, + encoding, + api_key, + logging_obj, + custom_prompt_dict={}, + acompletion=False, + optional_params=None, + litellm_params=None, + logger_fn=None, +): + headers = validate_environment(api_key) + model = convert_model_to_url(model, api_base) + prompt = " ".join(message["content"] for message in messages) # TODO + + ## Load Config + config = litellm.ClarifaiConfig.get_config() + for k, v in config.items(): + if ( + k not in optional_params + ): + optional_params[k] = v + + custom_llm_provider, orig_model_name = get_prompt_model_name(model) + if custom_llm_provider == "anthropic": + prompt = prompt_factory( + model=orig_model_name, + messages=messages, + api_key=api_key, + custom_llm_provider="clarifai" + ) + else: + prompt = prompt_factory( + model=orig_model_name, + messages=messages, + api_key=api_key, + custom_llm_provider=custom_llm_provider + ) + # print(prompt); exit(0) + + 
data = { + "prompt": prompt, + **optional_params, + } + data = completions_to_model(data) + + + ## LOGGING + logging_obj.pre_call( + input=prompt, + api_key=api_key, + additional_args={ + "complete_input_dict": data, + "headers": headers, + "api_base": api_base, + }, + ) + if acompletion==True: + return async_completion( + model=model, + prompt=prompt, + api_base=api_base, + custom_prompt_dict=custom_prompt_dict, + model_response=model_response, + print_verbose=print_verbose, + encoding=encoding, + api_key=api_key, + logging_obj=logging_obj, + data=data, + optional_params=optional_params, + litellm_params=litellm_params, + logger_fn=logger_fn, + headers=headers, + ) + else: + ## COMPLETION CALL + response = requests.post( + model, + headers=headers, + data=json.dumps(data), + ) + # print(response.content); exit() + + if response.status_code != 200: + raise ClarifaiError(status_code=response.status_code, message=response.text, url=model) + + if "stream" in optional_params and optional_params["stream"] == True: + completion_stream = response.iter_lines() + stream_response = CustomStreamWrapper( + completion_stream=completion_stream, + model=model, + custom_llm_provider="clarifai", + logging_obj=logging_obj, + ) + return stream_response + + else: + return process_response( + model=model, + prompt=prompt, + response=response, + model_response=model_response, + api_key=api_key, + data=data, + encoding=encoding, + logging_obj=logging_obj) + + +class ModelResponseIterator: + def __init__(self, model_response): + self.model_response = model_response + self.is_done = False + + # Sync iterator + def __iter__(self): + return self + + def __next__(self): + if self.is_done: + raise StopIteration + self.is_done = True + return self.model_response + + # Async iterator + def __aiter__(self): + return self + + async def __anext__(self): + if self.is_done: + raise StopAsyncIteration + self.is_done = True + return self.model_response \ No newline at end of file diff --git 
a/litellm/llms/cloudflare.py b/litellm/llms/cloudflare.py index a9e60bb7e..5a24b3b44 100644 --- a/litellm/llms/cloudflare.py +++ b/litellm/llms/cloudflare.py @@ -1,11 +1,11 @@ import os, types import json from enum import Enum -import requests +import requests # type: ignore import time from typing import Callable, Optional import litellm -import httpx +import httpx # type: ignore from litellm.utils import ModelResponse, Usage from .prompt_templates.factory import prompt_factory, custom_prompt @@ -167,7 +167,7 @@ def completion( completion_tokens=completion_tokens, total_tokens=prompt_tokens + completion_tokens, ) - model_response.usage = usage + setattr(model_response, "usage", usage) return model_response diff --git a/litellm/llms/cohere.py b/litellm/llms/cohere.py index a09e249af..0ebdf38f1 100644 --- a/litellm/llms/cohere.py +++ b/litellm/llms/cohere.py @@ -1,12 +1,12 @@ import os, types import json from enum import Enum -import requests +import requests # type: ignore import time, traceback from typing import Callable, Optional from litellm.utils import ModelResponse, Choices, Message, Usage import litellm -import httpx +import httpx # type: ignore class CohereError(Exception): @@ -237,7 +237,7 @@ def completion( completion_tokens=completion_tokens, total_tokens=prompt_tokens + completion_tokens, ) - model_response.usage = usage + setattr(model_response, "usage", usage) return model_response diff --git a/litellm/llms/cohere_chat.py b/litellm/llms/cohere_chat.py index c51ef8ded..e4de6ddcb 100644 --- a/litellm/llms/cohere_chat.py +++ b/litellm/llms/cohere_chat.py @@ -1,12 +1,12 @@ import os, types import json from enum import Enum -import requests +import requests # type: ignore import time, traceback from typing import Callable, Optional from litellm.utils import ModelResponse, Choices, Message, Usage import litellm -import httpx +import httpx # type: ignore from .prompt_templates.factory import cohere_message_pt @@ -43,6 +43,7 @@ class CohereChatConfig: 
presence_penalty (float, optional): Used to reduce repetitiveness of generated tokens. tools (List[Dict[str, str]], optional): A list of available tools (functions) that the model may suggest invoking. tool_results (List[Dict[str, Any]], optional): A list of results from invoking tools. + seed (int, optional): A seed to assist reproducibility of the model's response. """ preamble: Optional[str] = None @@ -62,6 +63,7 @@ class CohereChatConfig: presence_penalty: Optional[int] = None tools: Optional[list] = None tool_results: Optional[list] = None + seed: Optional[int] = None def __init__( self, @@ -82,6 +84,7 @@ class CohereChatConfig: presence_penalty: Optional[int] = None, tools: Optional[list] = None, tool_results: Optional[list] = None, + seed: Optional[int] = None, ) -> None: locals_ = locals() for key, value in locals_.items(): @@ -302,5 +305,5 @@ def completion( completion_tokens=completion_tokens, total_tokens=prompt_tokens + completion_tokens, ) - model_response.usage = usage + setattr(model_response, "usage", usage) return model_response diff --git a/litellm/llms/custom_httpx/http_handler.py b/litellm/llms/custom_httpx/http_handler.py index 67e6c80da..0adbd95bf 100644 --- a/litellm/llms/custom_httpx/http_handler.py +++ b/litellm/llms/custom_httpx/http_handler.py @@ -41,13 +41,12 @@ class AsyncHTTPHandler: data: Optional[Union[dict, str]] = None, # type: ignore params: Optional[dict] = None, headers: Optional[dict] = None, + stream: bool = False, ): - response = await self.client.post( - url, - data=data, # type: ignore - params=params, - headers=headers, + req = self.client.build_request( + "POST", url, data=data, params=params, headers=headers # type: ignore ) + response = await self.client.send(req, stream=stream) return response def __del__(self) -> None: @@ -58,14 +57,26 @@ class AsyncHTTPHandler: class HTTPHandler: - def __init__(self, concurrent_limit=1000): - # Create a client with a connection pool - self.client = httpx.Client( - 
limits=httpx.Limits( - max_connections=concurrent_limit, - max_keepalive_connections=concurrent_limit, + def __init__( + self, + timeout: Optional[httpx.Timeout] = None, + concurrent_limit=1000, + client: Optional[httpx.Client] = None, + ): + if timeout is None: + timeout = _DEFAULT_TIMEOUT + + if client is None: + # Create a client with a connection pool + self.client = httpx.Client( + timeout=timeout, + limits=httpx.Limits( + max_connections=concurrent_limit, + max_keepalive_connections=concurrent_limit, + ), ) - ) + else: + self.client = client def close(self): # Close the client when you're done with it @@ -80,11 +91,15 @@ class HTTPHandler: def post( self, url: str, - data: Optional[dict] = None, + data: Optional[Union[dict, str]] = None, params: Optional[dict] = None, headers: Optional[dict] = None, + stream: bool = False, ): - response = self.client.post(url, data=data, params=params, headers=headers) + req = self.client.build_request( + "POST", url, data=data, params=params, headers=headers # type: ignore + ) + response = self.client.send(req, stream=stream) return response def __del__(self) -> None: diff --git a/litellm/llms/gemini.py b/litellm/llms/gemini.py index ee398d44e..a55b39aef 100644 --- a/litellm/llms/gemini.py +++ b/litellm/llms/gemini.py @@ -260,7 +260,7 @@ def completion( message_obj = Message(content=item.content.parts[0].text) else: message_obj = Message(content=None) - choice_obj = Choices(index=idx + 1, message=message_obj) + choice_obj = Choices(index=idx, message=message_obj) choices_list.append(choice_obj) model_response["choices"] = choices_list except Exception as e: @@ -311,7 +311,7 @@ def completion( completion_tokens=completion_tokens, total_tokens=prompt_tokens + completion_tokens, ) - model_response.usage = usage + setattr(model_response, "usage", usage) return model_response @@ -352,7 +352,7 @@ async def async_completion( message_obj = Message(content=item.content.parts[0].text) else: message_obj = Message(content=None) - 
choice_obj = Choices(index=idx + 1, message=message_obj) + choice_obj = Choices(index=idx, message=message_obj) choices_list.append(choice_obj) model_response["choices"] = choices_list except Exception as e: diff --git a/litellm/llms/huggingface_restapi.py b/litellm/llms/huggingface_restapi.py index 293773289..c54dba75f 100644 --- a/litellm/llms/huggingface_restapi.py +++ b/litellm/llms/huggingface_restapi.py @@ -6,10 +6,12 @@ import httpx, requests from .base import BaseLLM import time import litellm -from typing import Callable, Dict, List, Any +from typing import Callable, Dict, List, Any, Literal, Tuple from litellm.utils import ModelResponse, Choices, Message, CustomStreamWrapper, Usage from typing import Optional from .prompt_templates.factory import prompt_factory, custom_prompt +from litellm.types.completion import ChatCompletionMessageToolCallParam +import enum class HuggingfaceError(Exception): @@ -39,11 +41,29 @@ class HuggingfaceError(Exception): ) # Call the base class constructor with the parameters it needs +hf_task_list = [ + "text-generation-inference", + "conversational", + "text-classification", + "text-generation", +] + +hf_tasks = Literal[ + "text-generation-inference", + "conversational", + "text-classification", + "text-generation", +] + + class HuggingfaceConfig: """ Reference: https://huggingface.github.io/text-generation-inference/#/Text%20Generation%20Inference/compat_generate """ + hf_task: Optional[hf_tasks] = ( + None # litellm-specific param, used to know the api spec to use when calling huggingface api + ) best_of: Optional[int] = None decoder_input_details: Optional[bool] = None details: Optional[bool] = True # enables returning logprobs + best of @@ -101,6 +121,51 @@ class HuggingfaceConfig: and v is not None } + def get_supported_openai_params(self): + return [ + "stream", + "temperature", + "max_tokens", + "top_p", + "stop", + "n", + "echo", + ] + + def map_openai_params( + self, non_default_params: dict, optional_params: dict + 
) -> dict: + for param, value in non_default_params.items(): + # temperature, top_p, n, stream, stop, max_tokens, n, presence_penalty default to None + if param == "temperature": + if value == 0.0 or value == 0: + # hugging face exception raised when temp==0 + # Failed: Error occurred: HuggingfaceException - Input validation error: `temperature` must be strictly positive + value = 0.01 + optional_params["temperature"] = value + if param == "top_p": + optional_params["top_p"] = value + if param == "n": + optional_params["best_of"] = value + optional_params["do_sample"] = ( + True # Need to sample if you want best of for hf inference endpoints + ) + if param == "stream": + optional_params["stream"] = value + if param == "stop": + optional_params["stop"] = value + if param == "max_tokens": + # HF TGI raises the following exception when max_new_tokens==0 + # Failed: Error occurred: HuggingfaceException - Input validation error: `max_new_tokens` must be strictly positive + if value == 0: + value = 1 + optional_params["max_new_tokens"] = value + if param == "echo": + # https://huggingface.co/docs/huggingface_hub/main/en/package_reference/inference_client#huggingface_hub.InferenceClient.text_generation.decoder_input_details + # Return the decoder input token logprobs and ids. You must set details=True as well for it to be taken into account. 
Defaults to False + optional_params["decoder_input_details"] = True + return optional_params + def output_parser(generated_text: str): """ @@ -162,18 +227,21 @@ def read_tgi_conv_models(): return set(), set() -def get_hf_task_for_model(model): +def get_hf_task_for_model(model: str) -> Tuple[hf_tasks, str]: # read text file, cast it to set # read the file called "huggingface_llms_metadata/hf_text_generation_models.txt" + if model.split("/")[0] in hf_task_list: + split_model = model.split("/", 1) + return split_model[0], split_model[1] # type: ignore tgi_models, conversational_models = read_tgi_conv_models() if model in tgi_models: - return "text-generation-inference" + return "text-generation-inference", model elif model in conversational_models: - return "conversational" + return "conversational", model elif "roneneldan/TinyStories" in model: - return None + return "text-generation", model else: - return "text-generation-inference" # default to tgi + return "text-generation-inference", model # default to tgi class Huggingface(BaseLLM): @@ -202,7 +270,7 @@ class Huggingface(BaseLLM): self, completion_response, model_response, - task, + task: hf_tasks, optional_params, encoding, input_text, @@ -270,6 +338,10 @@ class Huggingface(BaseLLM): ) choices_list.append(choice_obj) model_response["choices"].extend(choices_list) + elif task == "text-classification": + model_response["choices"][0]["message"]["content"] = json.dumps( + completion_response + ) else: if len(completion_response[0]["generated_text"]) > 0: model_response["choices"][0]["message"]["content"] = output_parser( @@ -322,9 +394,9 @@ class Huggingface(BaseLLM): encoding, api_key, logging_obj, + optional_params: dict, custom_prompt_dict={}, acompletion: bool = False, - optional_params=None, litellm_params=None, logger_fn=None, ): @@ -332,7 +404,13 @@ class Huggingface(BaseLLM): exception_mapping_worked = False try: headers = self.validate_environment(api_key, headers) - task = get_hf_task_for_model(model) + 
task, model = get_hf_task_for_model(model) + ## VALIDATE API FORMAT + if task is None or not isinstance(task, str) or task not in hf_task_list: + raise Exception( + "Invalid hf task - {}. Valid formats - {}.".format(task, hf_tasks) + ) + print_verbose(f"{model}, {task}") completion_url = "" input_text = "" @@ -399,10 +477,11 @@ class Huggingface(BaseLLM): data = { "inputs": prompt, "parameters": optional_params, - "stream": ( + "stream": ( # type: ignore True if "stream" in optional_params - and optional_params["stream"] == True + and isinstance(optional_params["stream"], bool) + and optional_params["stream"] == True # type: ignore else False ), } @@ -432,14 +511,15 @@ class Huggingface(BaseLLM): inference_params.pop("return_full_text") data = { "inputs": prompt, - "parameters": inference_params, - "stream": ( - True + } + if task == "text-generation-inference": + data["parameters"] = inference_params + data["stream"] = ( # type: ignore + True # type: ignore if "stream" in optional_params and optional_params["stream"] == True else False - ), - } + ) input_text = prompt ## LOGGING logging_obj.pre_call( @@ -530,10 +610,10 @@ class Huggingface(BaseLLM): isinstance(completion_response, dict) and "error" in completion_response ): - print_verbose(f"completion error: {completion_response['error']}") + print_verbose(f"completion error: {completion_response['error']}") # type: ignore print_verbose(f"response.status_code: {response.status_code}") raise HuggingfaceError( - message=completion_response["error"], + message=completion_response["error"], # type: ignore status_code=response.status_code, ) return self.convert_to_model_response_object( @@ -562,7 +642,7 @@ class Huggingface(BaseLLM): data: dict, headers: dict, model_response: ModelResponse, - task: str, + task: hf_tasks, encoding: Any, input_text: str, model: str, diff --git a/litellm/llms/maritalk.py b/litellm/llms/maritalk.py index 77267b13d..dfe53e9df 100644 --- a/litellm/llms/maritalk.py +++ 
b/litellm/llms/maritalk.py @@ -1,7 +1,7 @@ import os, types import json from enum import Enum -import requests +import requests # type: ignore import time, traceback from typing import Callable, Optional, List from litellm.utils import ModelResponse, Choices, Message, Usage @@ -152,9 +152,9 @@ def completion( else: try: if len(completion_response["answer"]) > 0: - model_response["choices"][0]["message"][ - "content" - ] = completion_response["answer"] + model_response["choices"][0]["message"]["content"] = ( + completion_response["answer"] + ) except Exception as e: raise MaritalkError( message=response.text, status_code=response.status_code @@ -174,7 +174,7 @@ def completion( completion_tokens=completion_tokens, total_tokens=prompt_tokens + completion_tokens, ) - model_response.usage = usage + setattr(model_response, "usage", usage) return model_response diff --git a/litellm/llms/nlp_cloud.py b/litellm/llms/nlp_cloud.py index f827975ce..cd5f17a90 100644 --- a/litellm/llms/nlp_cloud.py +++ b/litellm/llms/nlp_cloud.py @@ -1,7 +1,7 @@ import os, types import json from enum import Enum -import requests +import requests # type: ignore import time from typing import Callable, Optional import litellm @@ -185,9 +185,9 @@ def completion( else: try: if len(completion_response["generated_text"]) > 0: - model_response["choices"][0]["message"][ - "content" - ] = completion_response["generated_text"] + model_response["choices"][0]["message"]["content"] = ( + completion_response["generated_text"] + ) except: raise NLPCloudError( message=json.dumps(completion_response), @@ -205,7 +205,7 @@ def completion( completion_tokens=completion_tokens, total_tokens=prompt_tokens + completion_tokens, ) - model_response.usage = usage + setattr(model_response, "usage", usage) return model_response diff --git a/litellm/llms/ollama.py b/litellm/llms/ollama.py index a14c3cb50..9c9b5e898 100644 --- a/litellm/llms/ollama.py +++ b/litellm/llms/ollama.py @@ -1,9 +1,10 @@ -import requests, types, time 
+from itertools import chain +import requests, types, time # type: ignore import json, uuid import traceback from typing import Optional import litellm -import httpx, aiohttp, asyncio +import httpx, aiohttp, asyncio # type: ignore from .prompt_templates.factory import prompt_factory, custom_prompt @@ -212,24 +213,31 @@ def get_ollama_response( ## RESPONSE OBJECT model_response["choices"][0]["finish_reason"] = "stop" - if optional_params.get("format", "") == "json": + if data.get("format", "") == "json": + function_call = json.loads(response_json["response"]) message = litellm.Message( content=None, tool_calls=[ { "id": f"call_{str(uuid.uuid4())}", - "function": {"arguments": response_json["response"], "name": ""}, + "function": { + "name": function_call["name"], + "arguments": json.dumps(function_call["arguments"]), + }, "type": "function", } ], ) model_response["choices"][0]["message"] = message + model_response["choices"][0]["finish_reason"] = "tool_calls" else: model_response["choices"][0]["message"]["content"] = response_json["response"] model_response["created"] = int(time.time()) model_response["model"] = "ollama/" + model - prompt_tokens = response_json.get("prompt_eval_count", len(encoding.encode(prompt))) # type: ignore - completion_tokens = response_json.get("eval_count", len(response_json.get("message",dict()).get("content", ""))) + prompt_tokens = response_json.get("prompt_eval_count", len(encoding.encode(prompt, disallowed_special=()))) # type: ignore + completion_tokens = response_json.get( + "eval_count", len(response_json.get("message", dict()).get("content", "")) + ) model_response["usage"] = litellm.Usage( prompt_tokens=prompt_tokens, completion_tokens=completion_tokens, @@ -254,8 +262,37 @@ def ollama_completion_stream(url, data, logging_obj): custom_llm_provider="ollama", logging_obj=logging_obj, ) - for transformed_chunk in streamwrapper: - yield transformed_chunk + # If format is JSON, this was a function call + # Gather all chunks and return 
the function call as one delta to simplify parsing + if data.get("format", "") == "json": + first_chunk = next(streamwrapper) + response_content = "".join( + chunk.choices[0].delta.content + for chunk in chain([first_chunk], streamwrapper) + if chunk.choices[0].delta.content + ) + + function_call = json.loads(response_content) + delta = litellm.utils.Delta( + content=None, + tool_calls=[ + { + "id": f"call_{str(uuid.uuid4())}", + "function": { + "name": function_call["name"], + "arguments": json.dumps(function_call["arguments"]), + }, + "type": "function", + } + ], + ) + model_response = first_chunk + model_response["choices"][0]["delta"] = delta + model_response["choices"][0]["finish_reason"] = "tool_calls" + yield model_response + else: + for transformed_chunk in streamwrapper: + yield transformed_chunk except Exception as e: raise e @@ -277,8 +314,40 @@ async def ollama_async_streaming(url, data, model_response, encoding, logging_ob custom_llm_provider="ollama", logging_obj=logging_obj, ) - async for transformed_chunk in streamwrapper: - yield transformed_chunk + + # If format is JSON, this was a function call + # Gather all chunks and return the function call as one delta to simplify parsing + if data.get("format", "") == "json": + first_chunk = await anext(streamwrapper) + first_chunk_content = first_chunk.choices[0].delta.content or "" + response_content = first_chunk_content + "".join( + [ + chunk.choices[0].delta.content + async for chunk in streamwrapper + if chunk.choices[0].delta.content + ] + ) + function_call = json.loads(response_content) + delta = litellm.utils.Delta( + content=None, + tool_calls=[ + { + "id": f"call_{str(uuid.uuid4())}", + "function": { + "name": function_call["name"], + "arguments": json.dumps(function_call["arguments"]), + }, + "type": "function", + } + ], + ) + model_response = first_chunk + model_response["choices"][0]["delta"] = delta + model_response["choices"][0]["finish_reason"] = "tool_calls" + yield model_response + else: 
+ async for transformed_chunk in streamwrapper: + yield transformed_chunk except Exception as e: traceback.print_exc() raise e @@ -310,28 +379,33 @@ async def ollama_acompletion(url, data, model_response, encoding, logging_obj): ## RESPONSE OBJECT model_response["choices"][0]["finish_reason"] = "stop" if data.get("format", "") == "json": + function_call = json.loads(response_json["response"]) message = litellm.Message( content=None, tool_calls=[ { "id": f"call_{str(uuid.uuid4())}", "function": { - "arguments": response_json["response"], - "name": "", + "name": function_call["name"], + "arguments": json.dumps(function_call["arguments"]), }, "type": "function", } ], ) model_response["choices"][0]["message"] = message + model_response["choices"][0]["finish_reason"] = "tool_calls" else: model_response["choices"][0]["message"]["content"] = response_json[ "response" ] model_response["created"] = int(time.time()) model_response["model"] = "ollama/" + data["model"] - prompt_tokens = response_json.get("prompt_eval_count", len(encoding.encode(data["prompt"]))) # type: ignore - completion_tokens = response_json.get("eval_count", len(response_json.get("message",dict()).get("content", ""))) + prompt_tokens = response_json.get("prompt_eval_count", len(encoding.encode(data["prompt"], disallowed_special=()))) # type: ignore + completion_tokens = response_json.get( + "eval_count", + len(response_json.get("message", dict()).get("content", "")), + ) model_response["usage"] = litellm.Usage( prompt_tokens=prompt_tokens, completion_tokens=completion_tokens, @@ -418,3 +492,25 @@ async def ollama_aembeddings( "total_tokens": total_input_tokens, } return model_response + + +def ollama_embeddings( + api_base: str, + model: str, + prompts: list, + optional_params=None, + logging_obj=None, + model_response=None, + encoding=None, +): + return asyncio.run( + ollama_aembeddings( + api_base, + model, + prompts, + optional_params, + logging_obj, + model_response, + encoding, + ) + ) diff --git 
a/litellm/llms/ollama_chat.py b/litellm/llms/ollama_chat.py index d442ba5aa..d1ff4953f 100644 --- a/litellm/llms/ollama_chat.py +++ b/litellm/llms/ollama_chat.py @@ -1,3 +1,4 @@ +from itertools import chain import requests, types, time import json, uuid import traceback @@ -148,7 +149,7 @@ class OllamaChatConfig: if param == "top_p": optional_params["top_p"] = value if param == "frequency_penalty": - optional_params["repeat_penalty"] = param + optional_params["repeat_penalty"] = value if param == "stop": optional_params["stop"] = value if param == "response_format" and value["type"] == "json_object": @@ -184,6 +185,7 @@ class OllamaChatConfig: # ollama implementation def get_ollama_response( api_base="http://localhost:11434", + api_key: Optional[str] = None, model="llama2", messages=None, optional_params=None, @@ -236,6 +238,7 @@ def get_ollama_response( if stream == True: response = ollama_async_streaming( url=url, + api_key=api_key, data=data, model_response=model_response, encoding=encoding, @@ -244,6 +247,7 @@ def get_ollama_response( else: response = ollama_acompletion( url=url, + api_key=api_key, data=data, model_response=model_response, encoding=encoding, @@ -252,12 +256,17 @@ def get_ollama_response( ) return response elif stream == True: - return ollama_completion_stream(url=url, data=data, logging_obj=logging_obj) + return ollama_completion_stream( + url=url, api_key=api_key, data=data, logging_obj=logging_obj + ) - response = requests.post( - url=f"{url}", - json=data, - ) + _request = { + "url": f"{url}", + "json": data, + } + if api_key is not None: + _request["headers"] = "Bearer {}".format(api_key) + response = requests.post(**_request) # type: ignore if response.status_code != 200: raise OllamaError(status_code=response.status_code, message=response.text) @@ -277,22 +286,21 @@ def get_ollama_response( ## RESPONSE OBJECT model_response["choices"][0]["finish_reason"] = "stop" if data.get("format", "") == "json": + function_call = 
json.loads(response_json["message"]["content"]) message = litellm.Message( content=None, tool_calls=[ { "id": f"call_{str(uuid.uuid4())}", - "function": { - "arguments": response_json["message"]["content"], - "name": "", - }, + "function": {"name": function_call["name"], "arguments": json.dumps(function_call["arguments"])}, "type": "function", } ], ) model_response["choices"][0]["message"] = message + model_response["choices"][0]["finish_reason"] = "tool_calls" else: - model_response["choices"][0]["message"] = response_json["message"] + model_response["choices"][0]["message"]["content"] = response_json["message"]["content"] model_response["created"] = int(time.time()) model_response["model"] = "ollama/" + model prompt_tokens = response_json.get("prompt_eval_count", litellm.token_counter(messages=messages)) # type: ignore @@ -307,10 +315,16 @@ def get_ollama_response( return model_response -def ollama_completion_stream(url, data, logging_obj): - with httpx.stream( - url=url, json=data, method="POST", timeout=litellm.request_timeout - ) as response: +def ollama_completion_stream(url, api_key, data, logging_obj): + _request = { + "url": f"{url}", + "json": data, + "method": "POST", + "timeout": litellm.request_timeout, + } + if api_key is not None: + _request["headers"] = "Bearer {}".format(api_key) + with httpx.stream(**_request) as response: try: if response.status_code != 200: raise OllamaError( @@ -323,18 +337,53 @@ def ollama_completion_stream(url, data, logging_obj): custom_llm_provider="ollama_chat", logging_obj=logging_obj, ) - for transformed_chunk in streamwrapper: - yield transformed_chunk + + # If format is JSON, this was a function call + # Gather all chunks and return the function call as one delta to simplify parsing + if data.get("format", "") == "json": + first_chunk = next(streamwrapper) + response_content = "".join( + chunk.choices[0].delta.content + for chunk in chain([first_chunk], streamwrapper) + if chunk.choices[0].delta.content + ) + + 
function_call = json.loads(response_content) + delta = litellm.utils.Delta( + content=None, + tool_calls=[ + { + "id": f"call_{str(uuid.uuid4())}", + "function": {"name": function_call["name"], "arguments": json.dumps(function_call["arguments"])}, + "type": "function", + } + ], + ) + model_response = first_chunk + model_response["choices"][0]["delta"] = delta + model_response["choices"][0]["finish_reason"] = "tool_calls" + yield model_response + else: + for transformed_chunk in streamwrapper: + yield transformed_chunk except Exception as e: raise e -async def ollama_async_streaming(url, data, model_response, encoding, logging_obj): +async def ollama_async_streaming( + url, api_key, data, model_response, encoding, logging_obj +): try: client = httpx.AsyncClient() - async with client.stream( - url=f"{url}", json=data, method="POST", timeout=litellm.request_timeout - ) as response: + _request = { + "url": f"{url}", + "json": data, + "method": "POST", + "timeout": litellm.request_timeout, + } + if api_key is not None: + _request["headers"] = "Bearer {}".format(api_key) + async with client.stream(**_request) as response: if response.status_code != 200: raise OllamaError( status_code=response.status_code, message=response.text @@ -346,20 +395,60 @@ async def ollama_async_streaming(url, data, model_response, encoding, logging_ob custom_llm_provider="ollama_chat", logging_obj=logging_obj, ) - async for transformed_chunk in streamwrapper: - yield transformed_chunk + + # If format is JSON, this was a function call + # Gather all chunks and return the function call as one delta to simplify parsing + if data.get("format", "") == "json": + first_chunk = await anext(streamwrapper) + first_chunk_content = first_chunk.choices[0].delta.content or "" + response_content = first_chunk_content + "".join( + [ + chunk.choices[0].delta.content + async for chunk in streamwrapper + if chunk.choices[0].delta.content] + ) + function_call = json.loads(response_content) + delta = 
litellm.utils.Delta( + content=None, + tool_calls=[ + { + "id": f"call_{str(uuid.uuid4())}", + "function": {"name": function_call["name"], "arguments": json.dumps(function_call["arguments"])}, + "type": "function", + } + ], + ) + model_response = first_chunk + model_response["choices"][0]["delta"] = delta + model_response["choices"][0]["finish_reason"] = "tool_calls" + yield model_response + else: + async for transformed_chunk in streamwrapper: + yield transformed_chunk except Exception as e: traceback.print_exc() async def ollama_acompletion( - url, data, model_response, encoding, logging_obj, function_name + url, + api_key: Optional[str], + data, + model_response, + encoding, + logging_obj, + function_name, ): data["stream"] = False try: timeout = aiohttp.ClientTimeout(total=litellm.request_timeout) # 10 minutes async with aiohttp.ClientSession(timeout=timeout) as session: - resp = await session.post(url, json=data) + _request = { + "url": f"{url}", + "json": data, + } + if api_key is not None: + _request["headers"] = "Bearer {}".format(api_key) + resp = await session.post(**_request) if resp.status != 200: text = await resp.text() @@ -381,22 +470,21 @@ async def ollama_acompletion( ## RESPONSE OBJECT model_response["choices"][0]["finish_reason"] = "stop" if data.get("format", "") == "json": + function_call = json.loads(response_json["message"]["content"]) message = litellm.Message( content=None, tool_calls=[ { "id": f"call_{str(uuid.uuid4())}", - "function": { - "arguments": response_json["message"]["content"], - "name": function_name or "", - }, + "function": {"name": function_call["name"], "arguments": json.dumps(function_call["arguments"])}, "type": "function", } ], ) model_response["choices"][0]["message"] = message + model_response["choices"][0]["finish_reason"] = "tool_calls" else: - model_response["choices"][0]["message"] = response_json["message"] + model_response["choices"][0]["message"]["content"] = response_json["message"]["content"] 
model_response["created"] = int(time.time()) model_response["model"] = "ollama_chat/" + data["model"] diff --git a/litellm/llms/oobabooga.py b/litellm/llms/oobabooga.py index 2a6e9c9ac..f8f32e0fe 100644 --- a/litellm/llms/oobabooga.py +++ b/litellm/llms/oobabooga.py @@ -1,7 +1,7 @@ import os import json from enum import Enum -import requests +import requests # type: ignore import time from typing import Callable, Optional from litellm.utils import ModelResponse, Usage @@ -99,9 +99,9 @@ def completion( ) else: try: - model_response["choices"][0]["message"][ - "content" - ] = completion_response["choices"][0]["message"]["content"] + model_response["choices"][0]["message"]["content"] = ( + completion_response["choices"][0]["message"]["content"] + ) except: raise OobaboogaError( message=json.dumps(completion_response), @@ -115,7 +115,7 @@ def completion( completion_tokens=completion_response["usage"]["completion_tokens"], total_tokens=completion_response["usage"]["total_tokens"], ) - model_response.usage = usage + setattr(model_response, "usage", usage) return model_response diff --git a/litellm/llms/openai.py b/litellm/llms/openai.py index 2065a724b..7acbdfae0 100644 --- a/litellm/llms/openai.py +++ b/litellm/llms/openai.py @@ -1,4 +1,13 @@ -from typing import Optional, Union, Any, BinaryIO +from typing import ( + Optional, + Union, + Any, + BinaryIO, + Literal, + Iterable, +) +from typing_extensions import override +from pydantic import BaseModel import types, time, json, traceback import httpx from .base import BaseLLM @@ -13,10 +22,10 @@ from litellm.utils import ( TextCompletionResponse, ) from typing import Callable, Optional -import aiohttp, requests import litellm from .prompt_templates.factory import prompt_factory, custom_prompt from openai import OpenAI, AsyncOpenAI +from ..types.llms.openai import * class OpenAIError(Exception): @@ -44,6 +53,113 @@ class OpenAIError(Exception): ) # Call the base class constructor with the parameters it needs +class 
class MistralConfig:
    """
    Reference: https://docs.mistral.ai/api/

    Configuration for Mistral's chat completions API. Parameters:

    - `temperature` (number or null): sampling temperature, between 0 and 2. API default - 0.7.

    - `top_p` (number or null): nucleus-sampling alternative to temperature. API default - 1.

    - `max_tokens` (integer or null): maximum number of tokens to generate in the chat completion. API default - null.

    - `tools` (list or null): available tools for the model (JSON function specs).

    - `tool_choice` (string - 'auto'/'any'/'none' or null): if/how functions are called. 'none' never calls a function, 'auto' lets the model decide, 'any' forces a call. Default - 'auto'.

    - `random_seed` (integer or null): seed for deterministic sampling across calls.

    - `safe_prompt` (boolean): inject a safety prompt before all conversations. API default - 'false'.

    - `response_format` (object or null): output format; {"type": "json_object"} enables JSON mode (you MUST also instruct the model to produce JSON via a system or user message).
    """

    temperature: Optional[int] = None
    top_p: Optional[int] = None
    max_tokens: Optional[int] = None
    tools: Optional[list] = None
    tool_choice: Optional[Literal["auto", "any", "none"]] = None
    random_seed: Optional[int] = None
    safe_prompt: Optional[bool] = None
    response_format: Optional[dict] = None

    def __init__(
        self,
        temperature: Optional[int] = None,
        top_p: Optional[int] = None,
        max_tokens: Optional[int] = None,
        tools: Optional[list] = None,
        tool_choice: Optional[Literal["auto", "any", "none"]] = None,
        random_seed: Optional[int] = None,
        safe_prompt: Optional[bool] = None,
        response_format: Optional[dict] = None,
    ) -> None:
        # Project-wide config pattern: non-None constructor args become
        # class-level defaults picked up by get_config().
        locals_ = locals()
        for key, value in locals_.items():
            if key != "self" and value is not None:
                setattr(self.__class__, key, value)

    @classmethod
    def get_config(cls):
        """Return the currently-set (non-None, non-callable) config values."""
        return {
            k: v
            for k, v in cls.__dict__.items()
            if not k.startswith("__")
            and not isinstance(
                v,
                (
                    types.FunctionType,
                    types.BuiltinFunctionType,
                    classmethod,
                    staticmethod,
                ),
            )
            and v is not None
        }

    def get_supported_openai_params(self):
        """OpenAI-side param names this config can translate for Mistral."""
        return [
            "stream",
            "temperature",
            "top_p",
            "max_tokens",
            "tools",
            "tool_choice",
            "seed",
            "response_format",
        ]

    def _map_tool_choice(self, tool_choice: str) -> str:
        if tool_choice == "auto" or tool_choice == "none":
            return tool_choice
        elif tool_choice == "required":
            return "any"  # Mistral's "force a function call" equivalent
        else:  # openai 'tool_choice' object param not supported by Mistral API
            return "any"

    def map_openai_params(self, non_default_params: dict, optional_params: dict):
        """Translate OpenAI params into Mistral params, writing into
        `optional_params` (returned for convenience)."""
        for param, value in non_default_params.items():
            if param == "max_tokens":
                optional_params["max_tokens"] = value
            if param == "tools":
                optional_params["tools"] = value
            if param == "stream" and value == True:
                optional_params["stream"] = value
            if param == "temperature":
                optional_params["temperature"] = value
            if param == "top_p":
                optional_params["top_p"] = value
            if param == "tool_choice" and isinstance(value, str):
                optional_params["tool_choice"] = self._map_tool_choice(
                    tool_choice=value
                )
            if param == "seed":
                optional_params["extra_body"] = {"random_seed": value}
            # BUG FIX: response_format is advertised in
            # get_supported_openai_params() but was previously dropped here,
            # so JSON mode was silently ignored for Mistral.
            if param == "response_format":
                optional_params["response_format"] = value
        return optional_params
api_key: Optional[str] = None, @@ -517,13 +637,14 @@ class OpenAIChatCompletion(BaseLLM): model=model, custom_llm_provider="openai", logging_obj=logging_obj, + stream_options=data.get("stream_options", None), ) return streamwrapper async def async_streaming( self, logging_obj, - timeout: float, + timeout: Union[float, httpx.Timeout], data: dict, model: str, api_key: Optional[str] = None, @@ -566,6 +687,7 @@ class OpenAIChatCompletion(BaseLLM): model=model, custom_llm_provider="openai", logging_obj=logging_obj, + stream_options=data.get("stream_options", None), ) return streamwrapper except ( @@ -1190,6 +1312,7 @@ class OpenAITextCompletion(BaseLLM): model=model, custom_llm_provider="text-completion-openai", logging_obj=logging_obj, + stream_options=data.get("stream_options", None), ) for chunk in streamwrapper: @@ -1228,7 +1351,228 @@ class OpenAITextCompletion(BaseLLM): model=model, custom_llm_provider="text-completion-openai", logging_obj=logging_obj, + stream_options=data.get("stream_options", None), ) async for transformed_chunk in streamwrapper: yield transformed_chunk + + +class OpenAIAssistantsAPI(BaseLLM): + def __init__(self) -> None: + super().__init__() + + def get_openai_client( + self, + api_key: Optional[str], + api_base: Optional[str], + timeout: Union[float, httpx.Timeout], + max_retries: Optional[int], + organization: Optional[str], + client: Optional[OpenAI] = None, + ) -> OpenAI: + received_args = locals() + if client is None: + data = {} + for k, v in received_args.items(): + if k == "self" or k == "client": + pass + elif k == "api_base" and v is not None: + data["base_url"] = v + elif v is not None: + data[k] = v + openai_client = OpenAI(**data) # type: ignore + else: + openai_client = client + + return openai_client + + ### ASSISTANTS ### + + def get_assistants( + self, + api_key: Optional[str], + api_base: Optional[str], + timeout: Union[float, httpx.Timeout], + max_retries: Optional[int], + organization: Optional[str], + client: 
Optional[OpenAI], + ) -> SyncCursorPage[Assistant]: + openai_client = self.get_openai_client( + api_key=api_key, + api_base=api_base, + timeout=timeout, + max_retries=max_retries, + organization=organization, + client=client, + ) + + response = openai_client.beta.assistants.list() + + return response + + ### MESSAGES ### + + def add_message( + self, + thread_id: str, + message_data: MessageData, + api_key: Optional[str], + api_base: Optional[str], + timeout: Union[float, httpx.Timeout], + max_retries: Optional[int], + organization: Optional[str], + client: Optional[OpenAI] = None, + ) -> OpenAIMessage: + + openai_client = self.get_openai_client( + api_key=api_key, + api_base=api_base, + timeout=timeout, + max_retries=max_retries, + organization=organization, + client=client, + ) + + thread_message: OpenAIMessage = openai_client.beta.threads.messages.create( # type: ignore + thread_id, **message_data # type: ignore + ) + + response_obj: Optional[OpenAIMessage] = None + if getattr(thread_message, "status", None) is None: + thread_message.status = "completed" + response_obj = OpenAIMessage(**thread_message.dict()) + else: + response_obj = OpenAIMessage(**thread_message.dict()) + return response_obj + + def get_messages( + self, + thread_id: str, + api_key: Optional[str], + api_base: Optional[str], + timeout: Union[float, httpx.Timeout], + max_retries: Optional[int], + organization: Optional[str], + client: Optional[OpenAI] = None, + ) -> SyncCursorPage[OpenAIMessage]: + openai_client = self.get_openai_client( + api_key=api_key, + api_base=api_base, + timeout=timeout, + max_retries=max_retries, + organization=organization, + client=client, + ) + + response = openai_client.beta.threads.messages.list(thread_id=thread_id) + + return response + + ### THREADS ### + + def create_thread( + self, + metadata: Optional[dict], + api_key: Optional[str], + api_base: Optional[str], + timeout: Union[float, httpx.Timeout], + max_retries: Optional[int], + organization: Optional[str], + 
client: Optional[OpenAI], + messages: Optional[Iterable[OpenAICreateThreadParamsMessage]], + ) -> Thread: + """ + Here's an example: + ``` + from litellm.llms.openai import OpenAIAssistantsAPI, MessageData + + # create thread + message: MessageData = {"role": "user", "content": "Hey, how's it going?"} + openai_api.create_thread(messages=[message]) + ``` + """ + openai_client = self.get_openai_client( + api_key=api_key, + api_base=api_base, + timeout=timeout, + max_retries=max_retries, + organization=organization, + client=client, + ) + + data = {} + if messages is not None: + data["messages"] = messages # type: ignore + if metadata is not None: + data["metadata"] = metadata # type: ignore + + message_thread = openai_client.beta.threads.create(**data) # type: ignore + + return Thread(**message_thread.dict()) + + def get_thread( + self, + thread_id: str, + api_key: Optional[str], + api_base: Optional[str], + timeout: Union[float, httpx.Timeout], + max_retries: Optional[int], + organization: Optional[str], + client: Optional[OpenAI], + ) -> Thread: + openai_client = self.get_openai_client( + api_key=api_key, + api_base=api_base, + timeout=timeout, + max_retries=max_retries, + organization=organization, + client=client, + ) + + response = openai_client.beta.threads.retrieve(thread_id=thread_id) + + return Thread(**response.dict()) + + def delete_thread(self): + pass + + ### RUNS ### + + def run_thread( + self, + thread_id: str, + assistant_id: str, + additional_instructions: Optional[str], + instructions: Optional[str], + metadata: Optional[object], + model: Optional[str], + stream: Optional[bool], + tools: Optional[Iterable[AssistantToolParam]], + api_key: Optional[str], + api_base: Optional[str], + timeout: Union[float, httpx.Timeout], + max_retries: Optional[int], + organization: Optional[str], + client: Optional[OpenAI], + ) -> Run: + openai_client = self.get_openai_client( + api_key=api_key, + api_base=api_base, + timeout=timeout, + max_retries=max_retries, + 
organization=organization, + client=client, + ) + + response = openai_client.beta.threads.runs.create_and_poll( # type: ignore + thread_id=thread_id, + assistant_id=assistant_id, + additional_instructions=additional_instructions, + instructions=instructions, + metadata=metadata, + model=model, + tools=tools, + ) + + return response diff --git a/litellm/llms/palm.py b/litellm/llms/palm.py index 3f0664b1d..f15be43db 100644 --- a/litellm/llms/palm.py +++ b/litellm/llms/palm.py @@ -191,7 +191,7 @@ def completion( completion_tokens=completion_tokens, total_tokens=prompt_tokens + completion_tokens, ) - model_response.usage = usage + setattr(model_response, "usage", usage) return model_response diff --git a/litellm/llms/petals.py b/litellm/llms/petals.py index bc30306a6..334b80d38 100644 --- a/litellm/llms/petals.py +++ b/litellm/llms/petals.py @@ -1,7 +1,7 @@ import os, types import json from enum import Enum -import requests +import requests # type: ignore import time from typing import Callable, Optional import litellm @@ -41,9 +41,9 @@ class PetalsConfig: """ max_length: Optional[int] = None - max_new_tokens: Optional[ - int - ] = litellm.max_tokens # petals requires max tokens to be set + max_new_tokens: Optional[int] = ( + litellm.max_tokens + ) # petals requires max tokens to be set do_sample: Optional[bool] = None temperature: Optional[float] = None top_k: Optional[int] = None @@ -203,7 +203,7 @@ def completion( completion_tokens=completion_tokens, total_tokens=prompt_tokens + completion_tokens, ) - model_response.usage = usage + setattr(model_response, "usage", usage) return model_response diff --git a/litellm/llms/predibase.py b/litellm/llms/predibase.py new file mode 100644 index 000000000..1e7e1d334 --- /dev/null +++ b/litellm/llms/predibase.py @@ -0,0 +1,518 @@ +# What is this? 
class PredibaseConfig:
    """
    Reference: https://docs.predibase.com/user-guide/inference/rest_api

    Holds the generation parameters sent to Predibase's REST inference API.
    Non-None values passed to the constructor are promoted to class-level
    defaults (the config pattern used throughout this file).
    """

    adapter_id: Optional[str] = None
    adapter_source: Optional[Literal["pbase", "hub", "s3"]] = None
    best_of: Optional[int] = None
    decoder_input_details: Optional[bool] = None
    details: bool = True  # enables returning logprobs + best of
    max_new_tokens: int = (
        256  # openai default - requests hang if max_new_tokens not given
    )
    repetition_penalty: Optional[float] = None
    return_full_text: Optional[bool] = (
        False  # by default don't return the input as part of the output
    )
    seed: Optional[int] = None
    stop: Optional[List[str]] = None
    temperature: Optional[float] = None
    top_k: Optional[int] = None
    top_p: Optional[int] = None
    truncate: Optional[int] = None
    typical_p: Optional[float] = None
    watermark: Optional[bool] = None

    def __init__(
        self,
        best_of: Optional[int] = None,
        decoder_input_details: Optional[bool] = None,
        details: Optional[bool] = None,
        max_new_tokens: Optional[int] = None,
        repetition_penalty: Optional[float] = None,
        return_full_text: Optional[bool] = None,
        seed: Optional[int] = None,
        stop: Optional[List[str]] = None,
        temperature: Optional[float] = None,
        top_k: Optional[int] = None,
        top_p: Optional[int] = None,
        truncate: Optional[int] = None,
        typical_p: Optional[float] = None,
        watermark: Optional[bool] = None,
    ) -> None:
        # Mirror every non-None argument onto the class so later calls to
        # get_config() pick it up as a default.
        for name, val in locals().items():
            if name == "self" or val is None:
                continue
            setattr(self.__class__, name, val)

    @classmethod
    def get_config(cls):
        """Return the non-None, non-callable class-level settings as a dict."""
        skip_types = (
            types.FunctionType,
            types.BuiltinFunctionType,
            classmethod,
            staticmethod,
        )
        config = {}
        for attr, val in cls.__dict__.items():
            if attr.startswith("__") or val is None or isinstance(val, skip_types):
                continue
            config[attr] = val
        return config

    def get_supported_openai_params(self):
        """OpenAI-side parameter names that map onto Predibase parameters."""
        supported = ("stream", "temperature", "max_tokens", "top_p", "stop", "n")
        return list(supported)
+ + Initial issue that prompted this - https://github.com/BerriAI/litellm/issues/763 + """ + chat_template_tokens = [ + "<|assistant|>", + "<|system|>", + "<|user|>", + "", + "", + ] + for token in chat_template_tokens: + if generated_text.strip().startswith(token): + generated_text = generated_text.replace(token, "", 1) + if generated_text.endswith(token): + generated_text = generated_text[::-1].replace(token[::-1], "", 1)[::-1] + return generated_text + + def process_response( + self, + model: str, + response: Union[requests.Response, httpx.Response], + model_response: ModelResponse, + stream: bool, + logging_obj: litellm.utils.Logging, + optional_params: dict, + api_key: str, + data: Union[dict, str], + messages: list, + print_verbose, + encoding, + ) -> ModelResponse: + ## LOGGING + logging_obj.post_call( + input=messages, + api_key=api_key, + original_response=response.text, + additional_args={"complete_input_dict": data}, + ) + print_verbose(f"raw model_response: {response.text}") + ## RESPONSE OBJECT + try: + completion_response = response.json() + except: + raise PredibaseError(message=response.text, status_code=422) + if "error" in completion_response: + raise PredibaseError( + message=str(completion_response["error"]), + status_code=response.status_code, + ) + else: + if ( + not isinstance(completion_response, dict) + or "generated_text" not in completion_response + ): + raise PredibaseError( + status_code=422, + message=f"response is not in expected format - {completion_response}", + ) + + if len(completion_response["generated_text"]) > 0: + model_response["choices"][0]["message"]["content"] = self.output_parser( + completion_response["generated_text"] + ) + ## GETTING LOGPROBS + FINISH REASON + if ( + "details" in completion_response + and "tokens" in completion_response["details"] + ): + model_response.choices[0].finish_reason = completion_response[ + "details" + ]["finish_reason"] + sum_logprob = 0 + for token in 
completion_response["details"]["tokens"]: + if token["logprob"] != None: + sum_logprob += token["logprob"] + model_response["choices"][0][ + "message" + ]._logprob = ( + sum_logprob # [TODO] move this to using the actual logprobs + ) + if "best_of" in optional_params and optional_params["best_of"] > 1: + if ( + "details" in completion_response + and "best_of_sequences" in completion_response["details"] + ): + choices_list = [] + for idx, item in enumerate( + completion_response["details"]["best_of_sequences"] + ): + sum_logprob = 0 + for token in item["tokens"]: + if token["logprob"] != None: + sum_logprob += token["logprob"] + if len(item["generated_text"]) > 0: + message_obj = Message( + content=self.output_parser(item["generated_text"]), + logprobs=sum_logprob, + ) + else: + message_obj = Message(content=None) + choice_obj = Choices( + finish_reason=item["finish_reason"], + index=idx + 1, + message=message_obj, + ) + choices_list.append(choice_obj) + model_response["choices"].extend(choices_list) + + ## CALCULATING USAGE + prompt_tokens = 0 + try: + prompt_tokens = len( + encoding.encode(model_response["choices"][0]["message"]["content"]) + ) ##[TODO] use a model-specific tokenizer here + except: + # this should remain non blocking we should not block a response returning if calculating usage fails + pass + output_text = model_response["choices"][0]["message"].get("content", "") + if output_text is not None and len(output_text) > 0: + completion_tokens = 0 + try: + completion_tokens = len( + encoding.encode( + model_response["choices"][0]["message"].get("content", "") + ) + ) ##[TODO] use a model-specific tokenizer + except: + # this should remain non blocking we should not block a response returning if calculating usage fails + pass + else: + completion_tokens = 0 + + total_tokens = prompt_tokens + completion_tokens + + model_response["created"] = int(time.time()) + model_response["model"] = model + usage = Usage( + prompt_tokens=prompt_tokens, + 
completion_tokens=completion_tokens, + total_tokens=total_tokens, + ) + model_response.usage = usage # type: ignore + return model_response + + def completion( + self, + model: str, + messages: list, + api_base: str, + custom_prompt_dict: dict, + model_response: ModelResponse, + print_verbose: Callable, + encoding, + api_key: str, + logging_obj, + optional_params: dict, + tenant_id: str, + acompletion=None, + litellm_params=None, + logger_fn=None, + headers: dict = {}, + ) -> Union[ModelResponse, CustomStreamWrapper]: + headers = self._validate_environment(api_key, headers) + completion_url = "" + input_text = "" + base_url = "https://serving.app.predibase.com" + if "https" in model: + completion_url = model + elif api_base: + base_url = api_base + elif "PREDIBASE_API_BASE" in os.environ: + base_url = os.getenv("PREDIBASE_API_BASE", "") + + completion_url = f"{base_url}/{tenant_id}/deployments/v2/llms/{model}" + + if optional_params.get("stream", False) == True: + completion_url += "/generate_stream" + else: + completion_url += "/generate" + + if model in custom_prompt_dict: + # check if the model has a registered custom prompt + model_prompt_details = custom_prompt_dict[model] + prompt = custom_prompt( + role_dict=model_prompt_details["roles"], + initial_prompt_value=model_prompt_details["initial_prompt_value"], + final_prompt_value=model_prompt_details["final_prompt_value"], + messages=messages, + ) + else: + prompt = prompt_factory(model=model, messages=messages) + + ## Load Config + config = litellm.PredibaseConfig.get_config() + for k, v in config.items(): + if ( + k not in optional_params + ): # completion(top_k=3) > anthropic_config(top_k=3) <- allows for dynamic variables to be passed in + optional_params[k] = v + + stream = optional_params.pop("stream", False) + + data = { + "inputs": prompt, + "parameters": optional_params, + } + input_text = prompt + ## LOGGING + logging_obj.pre_call( + input=input_text, + api_key=api_key, + additional_args={ + 
"complete_input_dict": data, + "headers": headers, + "api_base": completion_url, + "acompletion": acompletion, + }, + ) + ## COMPLETION CALL + if acompletion == True: + ### ASYNC STREAMING + if stream == True: + return self.async_streaming( + model=model, + messages=messages, + data=data, + api_base=completion_url, + model_response=model_response, + print_verbose=print_verbose, + encoding=encoding, + api_key=api_key, + logging_obj=logging_obj, + optional_params=optional_params, + litellm_params=litellm_params, + logger_fn=logger_fn, + headers=headers, + ) # type: ignore + else: + ### ASYNC COMPLETION + return self.async_completion( + model=model, + messages=messages, + data=data, + api_base=completion_url, + model_response=model_response, + print_verbose=print_verbose, + encoding=encoding, + api_key=api_key, + logging_obj=logging_obj, + optional_params=optional_params, + stream=False, + litellm_params=litellm_params, + logger_fn=logger_fn, + headers=headers, + ) # type: ignore + + ### SYNC STREAMING + if stream == True: + response = requests.post( + completion_url, + headers=headers, + data=json.dumps(data), + stream=stream, + ) + _response = CustomStreamWrapper( + response.iter_lines(), + model, + custom_llm_provider="predibase", + logging_obj=logging_obj, + ) + return _response + ### SYNC COMPLETION + else: + response = requests.post( + url=completion_url, + headers=headers, + data=json.dumps(data), + ) + + return self.process_response( + model=model, + response=response, + model_response=model_response, + stream=optional_params.get("stream", False), + logging_obj=logging_obj, # type: ignore + optional_params=optional_params, + api_key=api_key, + data=data, + messages=messages, + print_verbose=print_verbose, + encoding=encoding, + ) + + async def async_completion( + self, + model: str, + messages: list, + api_base: str, + model_response: ModelResponse, + print_verbose: Callable, + encoding, + api_key, + logging_obj, + stream, + data: dict, + optional_params: 
dict, + litellm_params=None, + logger_fn=None, + headers={}, + ) -> ModelResponse: + self.async_handler = AsyncHTTPHandler( + timeout=httpx.Timeout(timeout=600.0, connect=5.0) + ) + response = await self.async_handler.post( + api_base, headers=headers, data=json.dumps(data) + ) + return self.process_response( + model=model, + response=response, + model_response=model_response, + stream=stream, + logging_obj=logging_obj, + api_key=api_key, + data=data, + messages=messages, + print_verbose=print_verbose, + optional_params=optional_params, + encoding=encoding, + ) + + async def async_streaming( + self, + model: str, + messages: list, + api_base: str, + model_response: ModelResponse, + print_verbose: Callable, + encoding, + api_key, + logging_obj, + data: dict, + optional_params=None, + litellm_params=None, + logger_fn=None, + headers={}, + ) -> CustomStreamWrapper: + self.async_handler = AsyncHTTPHandler( + timeout=httpx.Timeout(timeout=600.0, connect=5.0) + ) + data["stream"] = True + response = await self.async_handler.post( + url=api_base, + headers=headers, + data=json.dumps(data), + stream=True, + ) + + if response.status_code != 200: + raise PredibaseError( + status_code=response.status_code, message=response.text + ) + + completion_stream = response.aiter_lines() + + streamwrapper = CustomStreamWrapper( + completion_stream=completion_stream, + model=model, + custom_llm_provider="predibase", + logging_obj=logging_obj, + ) + return streamwrapper + + def embedding(self, *args, **kwargs): + pass diff --git a/litellm/llms/prompt_templates/factory.py b/litellm/llms/prompt_templates/factory.py index eba8e27ea..cf593369c 100644 --- a/litellm/llms/prompt_templates/factory.py +++ b/litellm/llms/prompt_templates/factory.py @@ -3,9 +3,25 @@ import requests, traceback import json, re, xml.etree.ElementTree as ET from jinja2 import Template, exceptions, meta, BaseLoader from jinja2.sandbox import ImmutableSandboxedEnvironment -from typing import Optional, Any -from typing 
import List +from typing import ( + Any, + List, + Mapping, + MutableMapping, + Optional, + Sequence, +) import litellm +from litellm.types.completion import ( + ChatCompletionUserMessageParam, + ChatCompletionSystemMessageParam, + ChatCompletionMessageParam, + ChatCompletionFunctionMessageParam, + ChatCompletionMessageToolCallParam, + ChatCompletionToolMessageParam, +) +from litellm.types.llms.anthropic import * +import uuid def default_pt(messages): @@ -16,6 +32,41 @@ def prompt_injection_detection_default_pt(): return """Detect if a prompt is safe to run. Return 'UNSAFE' if not.""" +def map_system_message_pt(messages: list) -> list: + """ + Convert 'system' message to 'user' message if provider doesn't support 'system' role. + + Enabled via `completion(...,supports_system_message=False)` + + If next message is a user message or assistant message -> merge system prompt into it + + if next message is system -> append a user message instead of the system message + """ + + new_messages = [] + for i, m in enumerate(messages): + if m["role"] == "system": + if i < len(messages) - 1: # Not the last message + next_m = messages[i + 1] + next_role = next_m["role"] + if ( + next_role == "user" or next_role == "assistant" + ): # Next message is a user or assistant message + # Merge system prompt into the next message + next_m["content"] = m["content"] + " " + next_m["content"] + elif next_role == "system": # Next message is a system message + # Append a user message instead of the system message + new_message = {"role": "user", "content": m["content"]} + new_messages.append(new_message) + else: # Last message + new_message = {"role": "user", "content": m["content"]} + new_messages.append(new_message) + else: # Not a system message + new_messages.append(m) + + return new_messages + + # alpaca prompt template - for models like mythomax, etc. 
def alpaca_pt(messages): prompt = custom_prompt( @@ -145,6 +196,12 @@ def mistral_api_pt(messages): elif isinstance(m["content"], str): texts = m["content"] new_m = {"role": m["role"], "content": texts} + + if new_m["role"] == "tool" and m.get("name"): + new_m["name"] = m["name"] + if m.get("tool_calls"): + new_m["tool_calls"] = m["tool_calls"] + new_messages.append(new_m) return new_messages @@ -218,6 +275,26 @@ def phind_codellama_pt(messages): return prompt +known_tokenizer_config = { + "mistralai/Mistral-7B-Instruct-v0.1": { + "tokenizer": { + "chat_template": "{{ bos_token }}{% for message in messages %}{% if (message['role'] == 'user') != (loop.index0 % 2 == 0) %}{{ raise_exception('Conversation roles must alternate user/assistant/user/assistant/...') }}{% endif %}{% if message['role'] == 'user' %}{{ '[INST] ' + message['content'] + ' [/INST]' }}{% elif message['role'] == 'assistant' %}{{ message['content'] + eos_token + ' ' }}{% else %}{{ raise_exception('Only user and assistant roles are supported!') }}{% endif %}{% endfor %}", + "bos_token": "", + "eos_token": "", + }, + "status": "success", + }, + "meta-llama/Meta-Llama-3-8B-Instruct": { + "tokenizer": { + "chat_template": "{% set loop_messages = messages %}{% for message in loop_messages %}{% set content = '<|start_header_id|>' + message['role'] + '<|end_header_id|>\n\n'+ message['content'] | trim + '<|eot_id|>' %}{% if loop.index0 == 0 %}{% set content = bos_token + content %}{% endif %}{{ content }}{% endfor %}{{ '<|start_header_id|>assistant<|end_header_id|>\n\n' }}", + "bos_token": "<|begin_of_text|>", + "eos_token": "", + }, + "status": "success", + }, +} + + def hf_chat_template(model: str, messages: list, chat_template: Optional[Any] = None): # Define Jinja2 environment env = ImmutableSandboxedEnvironment() @@ -246,20 +323,23 @@ def hf_chat_template(model: str, messages: list, chat_template: Optional[Any] = else: return {"status": "failure"} - tokenizer_config = _get_tokenizer_config(model) + if 
model in known_tokenizer_config: + tokenizer_config = known_tokenizer_config[model] + else: + tokenizer_config = _get_tokenizer_config(model) if ( tokenizer_config["status"] == "failure" or "chat_template" not in tokenizer_config["tokenizer"] ): raise Exception("No chat template found") ## read the bos token, eos token and chat template from the json - tokenizer_config = tokenizer_config["tokenizer"] - bos_token = tokenizer_config["bos_token"] - eos_token = tokenizer_config["eos_token"] - chat_template = tokenizer_config["chat_template"] + tokenizer_config = tokenizer_config["tokenizer"] # type: ignore + bos_token = tokenizer_config["bos_token"] # type: ignore + eos_token = tokenizer_config["eos_token"] # type: ignore + chat_template = tokenizer_config["chat_template"] # type: ignore try: - template = env.from_string(chat_template) + template = env.from_string(chat_template) # type: ignore except Exception as e: raise e @@ -402,6 +482,35 @@ def format_prompt_togetherai(messages, prompt_format, chat_template): return prompt +### IBM Granite + + +def ibm_granite_pt(messages: list): + """ + IBM's Granite models uses the template: + <|system|> {system_message} <|user|> {user_message} <|assistant|> {assistant_message} + + See: https://www.ibm.com/docs/en/watsonx-as-a-service?topic=solutions-supported-foundation-models + """ + return custom_prompt( + messages=messages, + role_dict={ + "system": { + "pre_message": "<|system|>\n", + "post_message": "\n", + }, + "user": { + "pre_message": "<|user|>\n", + "post_message": "\n", + }, + "assistant": { + "pre_message": "<|assistant|>\n", + "post_message": "\n", + }, + }, + ).strip() + + ### ANTHROPIC ### @@ -466,10 +575,11 @@ def construct_tool_use_system_prompt( ): # from https://github.com/anthropics/anthropic-cookbook/blob/main/function_calling/function_calling.ipynb tool_str_list = [] for tool in tools: + tool_function = get_attribute_or_key(tool, "function") tool_str = construct_format_tool_for_claude_prompt( - 
tool["function"]["name"], - tool["function"].get("description", ""), - tool["function"].get("parameters", {}), + get_attribute_or_key(tool_function, "name"), + get_attribute_or_key(tool_function, "description", ""), + get_attribute_or_key(tool_function, "parameters", {}), ) tool_str_list.append(tool_str) tool_use_system_prompt = ( @@ -593,7 +703,8 @@ def convert_to_anthropic_tool_result_xml(message: dict) -> str: """ name = message.get("name") - content = message.get("content") + content = message.get("content", "") + content = content.replace("<", "<").replace(">", ">").replace("&", "&") # We can't determine from openai message format whether it's a successful or # error call result so default to the successful result template @@ -614,13 +725,15 @@ def convert_to_anthropic_tool_result_xml(message: dict) -> str: def convert_to_anthropic_tool_invoke_xml(tool_calls: list) -> str: invokes = "" for tool in tool_calls: - if tool["type"] != "function": + if get_attribute_or_key(tool, "type") != "function": continue - tool_name = tool["function"]["name"] + tool_function = get_attribute_or_key(tool, "function") + tool_name = get_attribute_or_key(tool_function, "name") + tool_arguments = get_attribute_or_key(tool_function, "arguments") parameters = "".join( f"<{param}>{val}\n" - for param, val in json.loads(tool["function"]["arguments"]).items() + for param, val in json.loads(tool_arguments).items() ) invokes += ( "\n" @@ -674,7 +787,7 @@ def anthropic_messages_pt_xml(messages: list): { "type": "text", "text": ( - convert_to_anthropic_tool_result(messages[msg_i]) + convert_to_anthropic_tool_result_xml(messages[msg_i]) if messages[msg_i]["role"] == "tool" else messages[msg_i]["content"] ), @@ -695,7 +808,7 @@ def anthropic_messages_pt_xml(messages: list): if messages[msg_i].get( "tool_calls", [] ): # support assistant tool invoke convertion - assistant_text += convert_to_anthropic_tool_invoke( # type: ignore + assistant_text += convert_to_anthropic_tool_invoke_xml( # type: 
ignore messages[msg_i]["tool_calls"] ) @@ -705,7 +818,7 @@ def anthropic_messages_pt_xml(messages: list): if assistant_content: new_messages.append({"role": "assistant", "content": assistant_content}) - if new_messages[0]["role"] != "user": + if not new_messages or new_messages[0]["role"] != "user": if litellm.modify_params: new_messages.insert( 0, {"role": "user", "content": [{"type": "text", "text": "."}]} @@ -737,6 +850,13 @@ def convert_to_anthropic_tool_result(message: dict) -> dict: "name": "get_current_weather", "content": "function result goes here", }, + + OpenAI message with a function call result looks like: + { + "role": "function", + "name": "get_current_weather", + "content": "function result goes here", + } """ """ @@ -753,18 +873,42 @@ def convert_to_anthropic_tool_result(message: dict) -> dict: ] } """ - tool_call_id = message.get("tool_call_id") - content = message.get("content") + if message["role"] == "tool": + tool_call_id = message.get("tool_call_id") + content = message.get("content") - # We can't determine from openai message format whether it's a successful or - # error call result so default to the successful result template - anthropic_tool_result = { - "type": "tool_result", - "tool_use_id": tool_call_id, - "content": content, - } + # We can't determine from openai message format whether it's a successful or + # error call result so default to the successful result template + anthropic_tool_result = { + "type": "tool_result", + "tool_use_id": tool_call_id, + "content": content, + } + return anthropic_tool_result + elif message["role"] == "function": + content = message.get("content") + anthropic_tool_result = { + "type": "tool_result", + "tool_use_id": str(uuid.uuid4()), + "content": content, + } + return anthropic_tool_result + return {} - return anthropic_tool_result + +def convert_function_to_anthropic_tool_invoke(function_call): + try: + anthropic_tool_invoke = [ + { + "type": "tool_use", + "id": str(uuid.uuid4()), + "name": 
get_attribute_or_key(function_call, "name"), + "input": json.loads(get_attribute_or_key(function_call, "arguments")), + } + ] + return anthropic_tool_invoke + except Exception as e: + raise e def convert_to_anthropic_tool_invoke(tool_calls: list) -> list: @@ -807,12 +951,18 @@ def convert_to_anthropic_tool_invoke(tool_calls: list) -> list: anthropic_tool_invoke = [ { "type": "tool_use", - "id": tool["id"], - "name": tool["function"]["name"], - "input": json.loads(tool["function"]["arguments"]), + "id": get_attribute_or_key(tool, "id"), + "name": get_attribute_or_key( + get_attribute_or_key(tool, "function"), "name" + ), + "input": json.loads( + get_attribute_or_key( + get_attribute_or_key(tool, "function"), "arguments" + ) + ), } for tool in tool_calls - if tool["type"] == "function" + if get_attribute_or_key(tool, "type") == "function" ] return anthropic_tool_invoke @@ -821,7 +971,7 @@ def convert_to_anthropic_tool_invoke(tool_calls: list) -> list: def anthropic_messages_pt(messages: list): """ format messages for anthropic - 1. Anthropic supports roles like "user" and "assistant", (here litellm translates system-> assistant) + 1. Anthropic supports roles like "user" and "assistant" (system prompt sent separately) 2. The first message always needs to be of role "user" 3. Each message must alternate between "user" and "assistant" (this is not addressed as now by litellm) 4. final assistant content cannot end with trailing whitespace (anthropic raises an error otherwise) @@ -829,12 +979,14 @@ def anthropic_messages_pt(messages: list): 6. Ensure we only accept role, content. (message.name is not supported) """ # add role=tool support to allow function call result/error submission - user_message_types = {"user", "tool"} + user_message_types = {"user", "tool", "function"} # reformat messages to ensure user/assistant are alternating, if there's either 2 consecutive 'user' messages or 2 consecutive 'assistant' message, merge them. 
- new_messages = [] + new_messages: list = [] msg_i = 0 + tool_use_param = False while msg_i < len(messages): user_content = [] + init_msg_i = msg_i ## MERGE CONSECUTIVE USER CONTENT ## while msg_i < len(messages) and messages[msg_i]["role"] in user_message_types: if isinstance(messages[msg_i]["content"], list): @@ -850,7 +1002,10 @@ def anthropic_messages_pt(messages: list): ) elif m.get("type", "") == "text": user_content.append({"type": "text", "text": m["text"]}) - elif messages[msg_i]["role"] == "tool": + elif ( + messages[msg_i]["role"] == "tool" + or messages[msg_i]["role"] == "function" + ): # OpenAI's tool message content will always be a string user_content.append(convert_to_anthropic_tool_result(messages[msg_i])) else: @@ -879,12 +1034,25 @@ def anthropic_messages_pt(messages: list): convert_to_anthropic_tool_invoke(messages[msg_i]["tool_calls"]) ) + if messages[msg_i].get("function_call"): + assistant_content.extend( + convert_function_to_anthropic_tool_invoke( + messages[msg_i]["function_call"] + ) + ) + msg_i += 1 if assistant_content: new_messages.append({"role": "assistant", "content": assistant_content}) - if new_messages[0]["role"] != "user": + if msg_i == init_msg_i: # prevent infinite loops + raise Exception( + "Invalid Message passed in - {}. 
File an issue https://github.com/BerriAI/litellm/issues".format( + messages[msg_i] + ) + ) + if not new_messages or new_messages[0]["role"] != "user": if litellm.modify_params: new_messages.insert( 0, {"role": "user", "content": [{"type": "text", "text": "."}]} @@ -895,11 +1063,14 @@ def anthropic_messages_pt(messages: list): ) if new_messages[-1]["role"] == "assistant": - for content in new_messages[-1]["content"]: - if isinstance(content, dict) and content["type"] == "text": - content["text"] = content[ - "text" - ].rstrip() # no trailing whitespace for final assistant message + if isinstance(new_messages[-1]["content"], str): + new_messages[-1]["content"] = new_messages[-1]["content"].rstrip() + elif isinstance(new_messages[-1]["content"], list): + for content in new_messages[-1]["content"]: + if isinstance(content, dict) and content["type"] == "text": + content["text"] = content[ + "text" + ].rstrip() # no trailing whitespace for final assistant message return new_messages @@ -978,6 +1149,30 @@ def get_system_prompt(messages): return system_prompt, messages +def convert_to_documents( + observations: Any, +) -> List[MutableMapping]: + """Converts observations into a 'document' dict""" + documents: List[MutableMapping] = [] + if isinstance(observations, str): + # strings are turned into a key/value pair and a key of 'output' is added. + observations = [{"output": observations}] + elif isinstance(observations, Mapping): + # single mappings are transformed into a list to simplify the rest of the code. + observations = [observations] + elif not isinstance(observations, Sequence): + # all other types are turned into a key/value pair within a list + observations = [{"output": observations}] + + for doc in observations: + if not isinstance(doc, Mapping): + # types that aren't Mapping are turned into a key/value pair. 
+ doc = {"output": doc} + documents.append(doc) + + return documents + + def convert_openai_message_to_cohere_tool_result(message): """ OpenAI message with a tool result looks like: @@ -1019,7 +1214,7 @@ def convert_openai_message_to_cohere_tool_result(message): "parameters": {"location": "San Francisco, CA"}, "generation_id": tool_call_id, }, - "outputs": [content], + "outputs": convert_to_documents(content), } return cohere_tool_result @@ -1032,8 +1227,9 @@ def cohere_message_pt(messages: list): if message["role"] == "tool": tool_result = convert_openai_message_to_cohere_tool_result(message) tool_results.append(tool_result) - else: - prompt += message["content"] + elif message.get("content"): + prompt += message["content"] + "\n\n" + prompt = prompt.rstrip() return prompt, tool_results @@ -1107,12 +1303,6 @@ def _gemini_vision_convert_messages(messages: list): Returns: tuple: A tuple containing the prompt (a string) and the processed images (a list of objects representing the images). """ - try: - from PIL import Image - except: - raise Exception( - "gemini image conversion failed please run `pip install Pillow`" - ) try: # given messages for gpt-4 vision, convert them for gemini @@ -1139,6 +1329,12 @@ def _gemini_vision_convert_messages(messages: list): image = _load_image_from_url(img) processed_images.append(image) else: + try: + from PIL import Image + except: + raise Exception( + "gemini image conversion failed please run `pip install Pillow`" + ) # Case 2: Image filepath (e.g. 
temp.jpeg) given image = Image.open(img) processed_images.append(image) @@ -1286,7 +1482,11 @@ def prompt_factory( messages=messages, prompt_format=prompt_format, chat_template=chat_template ) elif custom_llm_provider == "gemini": - if model == "gemini-pro-vision": + if ( + model == "gemini-pro-vision" + or litellm.supports_vision(model=model) + or litellm.supports_vision(model=custom_llm_provider + "/" + model) + ): return _gemini_vision_convert_messages(messages=messages) else: return gemini_text_image_pt(messages=messages) @@ -1302,15 +1502,62 @@ def prompt_factory( return anthropic_pt(messages=messages) elif "mistral." in model: return mistral_instruct_pt(messages=messages) + elif "llama2" in model and "chat" in model: + return llama_2_chat_pt(messages=messages) + elif "llama3" in model and "instruct" in model: + return hf_chat_template( + model="meta-llama/Meta-Llama-3-8B-Instruct", + messages=messages, + ) + + elif custom_llm_provider == "clarifai": + if "claude" in model: + return anthropic_pt(messages=messages) + elif custom_llm_provider == "perplexity": for message in messages: message.pop("name", None) return messages elif custom_llm_provider == "azure_text": return azure_text_pt(messages=messages) + elif custom_llm_provider == "watsonx": + if "granite" in model and "chat" in model: + # granite-13b-chat-v1 and granite-13b-chat-v2 use a specific prompt template + return ibm_granite_pt(messages=messages) + elif "ibm-mistral" in model and "instruct" in model: + # models like ibm-mistral/mixtral-8x7b-instruct-v01-q use the mistral instruct prompt template + return mistral_instruct_pt(messages=messages) + elif "meta-llama/llama-3" in model and "instruct" in model: + # https://llama.meta.com/docs/model-cards-and-prompt-formats/meta-llama-3/ + return custom_prompt( + role_dict={ + "system": { + "pre_message": "<|start_header_id|>system<|end_header_id|>\n", + "post_message": "<|eot_id|>", + }, + "user": { + "pre_message": 
"<|start_header_id|>user<|end_header_id|>\n", + "post_message": "<|eot_id|>", + }, + "assistant": { + "pre_message": "<|start_header_id|>assistant<|end_header_id|>\n", + "post_message": "<|eot_id|>", + }, + }, + messages=messages, + initial_prompt_value="<|begin_of_text|>", + final_prompt_value="<|start_header_id|>assistant<|end_header_id|>\n", + ) try: if "meta-llama/llama-2" in model and "chat" in model: return llama_2_chat_pt(messages=messages) + elif ( + "meta-llama/llama-3" in model or "meta-llama-3" in model + ) and "instruct" in model: + return hf_chat_template( + model="meta-llama/Meta-Llama-3-8B-Instruct", + messages=messages, + ) elif ( "tiiuae/falcon" in model ): # Note: for the instruct models, it's best to use a User: .., Assistant:.. approach in your prompt template. @@ -1351,3 +1598,9 @@ def prompt_factory( return default_pt( messages=messages ) # default that covers Bloom, T-5, any non-chat tuned model (e.g. base Llama2) + + +def get_attribute_or_key(tool_or_function, attribute, default=None): + if hasattr(tool_or_function, attribute): + return getattr(tool_or_function, attribute) + return tool_or_function.get(attribute, default) diff --git a/litellm/llms/replicate.py b/litellm/llms/replicate.py index 38ff9f572..386d24f59 100644 --- a/litellm/llms/replicate.py +++ b/litellm/llms/replicate.py @@ -1,12 +1,13 @@ import os, types import json -import requests +import requests # type: ignore import time -from typing import Callable, Optional -from litellm.utils import ModelResponse, Usage -import litellm -import httpx +from typing import Callable, Optional, Union, Tuple, Any +from litellm.utils import ModelResponse, Usage, CustomStreamWrapper +import litellm, asyncio +import httpx # type: ignore from .prompt_templates.factory import prompt_factory, custom_prompt +from litellm.llms.custom_httpx.http_handler import AsyncHTTPHandler class ReplicateError(Exception): @@ -112,10 +113,16 @@ def start_prediction( } initial_prediction_data = { - "version": 
version_id, "input": input_data, } + if ":" in version_id and len(version_id) > 64: + model_parts = version_id.split(":") + if ( + len(model_parts) > 1 and len(model_parts[1]) == 64 + ): ## checks if model name has a 64 digit code - e.g. "meta/llama-2-70b-chat:02e509c789964a7ea8736978a43525956ef40397be9033abf9fd2badfe68c9e3" + initial_prediction_data["version"] = model_parts[1] + ## LOGGING logging_obj.pre_call( input=input_data["prompt"], @@ -139,6 +146,65 @@ def start_prediction( ) +async def async_start_prediction( + version_id, + input_data, + api_token, + api_base, + logging_obj, + print_verbose, + http_handler: AsyncHTTPHandler, +) -> str: + base_url = api_base + if "deployments" in version_id: + print_verbose("\nLiteLLM: Request to custom replicate deployment") + version_id = version_id.replace("deployments/", "") + base_url = f"https://api.replicate.com/v1/deployments/{version_id}" + print_verbose(f"Deployment base URL: {base_url}\n") + else: # assume it's a model + base_url = f"https://api.replicate.com/v1/models/{version_id}" + headers = { + "Authorization": f"Token {api_token}", + "Content-Type": "application/json", + } + + initial_prediction_data = { + "input": input_data, + } + + if ":" in version_id and len(version_id) > 64: + model_parts = version_id.split(":") + if ( + len(model_parts) > 1 and len(model_parts[1]) == 64 + ): ## checks if model name has a 64 digit code - e.g. 
"meta/llama-2-70b-chat:02e509c789964a7ea8736978a43525956ef40397be9033abf9fd2badfe68c9e3" + initial_prediction_data["version"] = model_parts[1] + + ## LOGGING + logging_obj.pre_call( + input=input_data["prompt"], + api_key="", + additional_args={ + "complete_input_dict": initial_prediction_data, + "headers": headers, + "api_base": base_url, + }, + ) + + response = await http_handler.post( + url="{}/predictions".format(base_url), + data=json.dumps(initial_prediction_data), + headers=headers, + ) + + if response.status_code == 201: + response_data = response.json() + return response_data.get("urls", {}).get("get") + else: + raise ReplicateError( + response.status_code, f"Failed to start prediction {response.text}" + ) + + # Function to handle prediction response (non-streaming) def handle_prediction_response(prediction_url, api_token, print_verbose): output_string = "" @@ -172,6 +238,40 @@ def handle_prediction_response(prediction_url, api_token, print_verbose): return output_string, logs +async def async_handle_prediction_response( + prediction_url, api_token, print_verbose, http_handler: AsyncHTTPHandler +) -> Tuple[str, Any]: + output_string = "" + headers = { + "Authorization": f"Token {api_token}", + "Content-Type": "application/json", + } + + status = "" + logs = "" + while True and (status not in ["succeeded", "failed", "canceled"]): + print_verbose(f"replicate: polling endpoint: {prediction_url}") + await asyncio.sleep(0.5) + response = await http_handler.get(prediction_url, headers=headers) + if response.status_code == 200: + response_data = response.json() + if "output" in response_data: + output_string = "".join(response_data["output"]) + print_verbose(f"Non-streamed output:{output_string}") + status = response_data.get("status", None) + logs = response_data.get("logs", "") + if status == "failed": + replicate_error = response_data.get("error", "") + raise ReplicateError( + status_code=400, + message=f"Error: {replicate_error}, \nReplicate logs:{logs}", + ) 
+ else: + # this can fail temporarily but it does not mean the replicate request failed, replicate request fails when status=="failed" + print_verbose("Replicate: Failed to fetch prediction status and output.") + return output_string, logs + + # Function to handle prediction response (streaming) def handle_prediction_response_streaming(prediction_url, api_token, print_verbose): previous_output = "" @@ -208,6 +308,45 @@ def handle_prediction_response_streaming(prediction_url, api_token, print_verbos ) +# Function to handle prediction response (streaming) +async def async_handle_prediction_response_streaming( + prediction_url, api_token, print_verbose +): + http_handler = AsyncHTTPHandler(concurrent_limit=1) + previous_output = "" + output_string = "" + + headers = { + "Authorization": f"Token {api_token}", + "Content-Type": "application/json", + } + status = "" + while True and (status not in ["succeeded", "failed", "canceled"]): + await asyncio.sleep(0.5) # prevent being rate limited by replicate + print_verbose(f"replicate: polling endpoint: {prediction_url}") + response = await http_handler.get(prediction_url, headers=headers) + if response.status_code == 200: + response_data = response.json() + status = response_data["status"] + if "output" in response_data: + output_string = "".join(response_data["output"]) + new_output = output_string[len(previous_output) :] + print_verbose(f"New chunk: {new_output}") + yield {"output": new_output, "status": status} + previous_output = output_string + status = response_data["status"] + if status == "failed": + replicate_error = response_data.get("error", "") + raise ReplicateError( + status_code=400, message=f"Error: {replicate_error}" + ) + else: + # this can fail temporarily but it does not mean the replicate request failed, replicate request fails when status=="failed" + print_verbose( + f"Replicate: Failed to fetch prediction status and output.{response.status_code}{response.text}" + ) + + # Function to extract version ID 
from model string def model_to_version_id(model): if ":" in model: @@ -216,6 +355,39 @@ def model_to_version_id(model): return model +def process_response( + model_response: ModelResponse, + result: str, + model: str, + encoding: Any, + prompt: str, +) -> ModelResponse: + if len(result) == 0: # edge case, where result from replicate is empty + result = " " + + ## Building RESPONSE OBJECT + if len(result) > 1: + model_response["choices"][0]["message"]["content"] = result + + # Calculate usage + prompt_tokens = len(encoding.encode(prompt, disallowed_special=())) + completion_tokens = len( + encoding.encode( + model_response["choices"][0]["message"].get("content", ""), + disallowed_special=(), + ) + ) + model_response["model"] = "replicate/" + model + usage = Usage( + prompt_tokens=prompt_tokens, + completion_tokens=completion_tokens, + total_tokens=prompt_tokens + completion_tokens, + ) + setattr(model_response, "usage", usage) + + return model_response + + # Main function for prediction completion def completion( model: str, @@ -223,14 +395,15 @@ def completion( api_base: str, model_response: ModelResponse, print_verbose: Callable, + optional_params: dict, logging_obj, api_key, encoding, custom_prompt_dict={}, - optional_params=None, litellm_params=None, logger_fn=None, -): + acompletion=None, +) -> Union[ModelResponse, CustomStreamWrapper]: # Start a prediction and get the prediction URL version_id = model_to_version_id(model) ## Load Config @@ -268,6 +441,12 @@ def completion( else: prompt = prompt_factory(model=model, messages=messages) + if prompt is None or not isinstance(prompt, str): + raise ReplicateError( + status_code=400, + message="LiteLLM Error - prompt is not a string - {}".format(prompt), + ) + # If system prompt is supported, and a system prompt is provided, use it if system_prompt is not None: input_data = { @@ -279,6 +458,20 @@ def completion( else: input_data = {"prompt": prompt, **optional_params} + if acompletion is not None and acompletion == 
True: + return async_completion( + model_response=model_response, + model=model, + prompt=prompt, + encoding=encoding, + optional_params=optional_params, + version_id=version_id, + input_data=input_data, + api_key=api_key, + api_base=api_base, + logging_obj=logging_obj, + print_verbose=print_verbose, + ) # type: ignore ## COMPLETION CALL ## Replicate Compeltion calls have 2 steps ## Step1: Start Prediction: gets a prediction url @@ -287,6 +480,7 @@ def completion( model_response["created"] = int( time.time() ) # for pricing this must remain right before calling api + prediction_url = start_prediction( version_id, input_data, @@ -300,16 +494,15 @@ def completion( # Handle the prediction response (streaming or non-streaming) if "stream" in optional_params and optional_params["stream"] == True: print_verbose("streaming request") - return handle_prediction_response_streaming( + _response = handle_prediction_response_streaming( prediction_url, api_key, print_verbose ) + return CustomStreamWrapper(_response, model, logging_obj=logging_obj, custom_llm_provider="replicate") # type: ignore else: result, logs = handle_prediction_response( prediction_url, api_key, print_verbose ) - model_response["ended"] = ( - time.time() - ) # for pricing this must remain right after calling api + ## LOGGING logging_obj.post_call( input=prompt, @@ -324,29 +517,56 @@ def completion( print_verbose(f"raw model_response: {result}") - if len(result) == 0: # edge case, where result from replicate is empty - result = " " - - ## Building RESPONSE OBJECT - if len(result) > 1: - model_response["choices"][0]["message"]["content"] = result - - # Calculate usage - prompt_tokens = len(encoding.encode(prompt, disallowed_special=())) - completion_tokens = len( - encoding.encode( - model_response["choices"][0]["message"].get("content", ""), - disallowed_special=(), - ) + return process_response( + model_response=model_response, + result=result, + model=model, + encoding=encoding, + prompt=prompt, ) - 
model_response["model"] = "replicate/" + model - usage = Usage( - prompt_tokens=prompt_tokens, - completion_tokens=completion_tokens, - total_tokens=prompt_tokens + completion_tokens, + + +async def async_completion( + model_response: ModelResponse, + model: str, + prompt: str, + encoding, + optional_params: dict, + version_id, + input_data, + api_key, + api_base, + logging_obj, + print_verbose, +) -> Union[ModelResponse, CustomStreamWrapper]: + http_handler = AsyncHTTPHandler(concurrent_limit=1) + prediction_url = await async_start_prediction( + version_id, + input_data, + api_key, + api_base, + logging_obj=logging_obj, + print_verbose=print_verbose, + http_handler=http_handler, + ) + + if "stream" in optional_params and optional_params["stream"] == True: + _response = async_handle_prediction_response_streaming( + prediction_url, api_key, print_verbose ) - model_response.usage = usage - return model_response + return CustomStreamWrapper(_response, model, logging_obj=logging_obj, custom_llm_provider="replicate") # type: ignore + + result, logs = await async_handle_prediction_response( + prediction_url, api_key, print_verbose, http_handler=http_handler + ) + + return process_response( + model_response=model_response, + result=result, + model=model, + encoding=encoding, + prompt=prompt, + ) # # Example usage: diff --git a/litellm/llms/sagemaker.py b/litellm/llms/sagemaker.py index 94a940ddc..8e75428bb 100644 --- a/litellm/llms/sagemaker.py +++ b/litellm/llms/sagemaker.py @@ -1,14 +1,14 @@ import os, types, traceback from enum import Enum import json -import requests +import requests # type: ignore import time from typing import Callable, Optional, Any import litellm from litellm.utils import ModelResponse, EmbeddingResponse, get_secret, Usage import sys from copy import deepcopy -import httpx +import httpx # type: ignore from .prompt_templates.factory import prompt_factory, custom_prompt @@ -295,7 +295,7 @@ def completion( EndpointName={model}, 
InferenceComponentName={model_id}, ContentType="application/json", - Body={data}, + Body={data}, # type: ignore CustomAttributes="accept_eula=true", ) """ # type: ignore @@ -321,7 +321,7 @@ def completion( response = client.invoke_endpoint( EndpointName={model}, ContentType="application/json", - Body={data}, + Body={data}, # type: ignore CustomAttributes="accept_eula=true", ) """ # type: ignore @@ -399,7 +399,7 @@ def completion( completion_tokens=completion_tokens, total_tokens=prompt_tokens + completion_tokens, ) - model_response.usage = usage + setattr(model_response, "usage", usage) return model_response @@ -617,7 +617,7 @@ async def async_completion( completion_tokens=completion_tokens, total_tokens=prompt_tokens + completion_tokens, ) - model_response.usage = usage + setattr(model_response, "usage", usage) return model_response @@ -688,7 +688,7 @@ def embedding( response = client.invoke_endpoint( EndpointName={model}, ContentType="application/json", - Body={data}, + Body={data}, # type: ignore CustomAttributes="accept_eula=true", )""" # type: ignore logging_obj.pre_call( diff --git a/litellm/llms/together_ai.py b/litellm/llms/together_ai.py index 15ed29916..47453ca88 100644 --- a/litellm/llms/together_ai.py +++ b/litellm/llms/together_ai.py @@ -2,14 +2,15 @@ Deprecated. We now do together ai calls via the openai client. 
Reference: https://docs.together.ai/docs/openai-api-compatibility """ + import os, types import json from enum import Enum -import requests +import requests # type: ignore import time from typing import Callable, Optional import litellm -import httpx +import httpx # type: ignore from litellm.utils import ModelResponse, Usage from .prompt_templates.factory import prompt_factory, custom_prompt @@ -225,7 +226,7 @@ def completion( completion_tokens=completion_tokens, total_tokens=prompt_tokens + completion_tokens, ) - model_response.usage = usage + setattr(model_response, "usage", usage) return model_response diff --git a/litellm/llms/triton.py b/litellm/llms/triton.py new file mode 100644 index 000000000..711186b3f --- /dev/null +++ b/litellm/llms/triton.py @@ -0,0 +1,119 @@ +import os, types +import json +from enum import Enum +import requests, copy # type: ignore +import time +from typing import Callable, Optional, List +from litellm.utils import ModelResponse, Usage, map_finish_reason, CustomStreamWrapper +import litellm +from .prompt_templates.factory import prompt_factory, custom_prompt +from litellm.llms.custom_httpx.http_handler import AsyncHTTPHandler +from .base import BaseLLM +import httpx # type: ignore + + +class TritonError(Exception): + def __init__(self, status_code, message): + self.status_code = status_code + self.message = message + self.request = httpx.Request( + method="POST", + url="https://api.anthropic.com/v1/messages", # using anthropic api base since httpx requires a url + ) + self.response = httpx.Response(status_code=status_code, request=self.request) + super().__init__( + self.message + ) # Call the base class constructor with the parameters it needs + + +class TritonChatCompletion(BaseLLM): + def __init__(self) -> None: + super().__init__() + + async def aembedding( + self, + data: dict, + model_response: litellm.utils.EmbeddingResponse, + api_base: str, + logging_obj=None, + api_key: Optional[str] = None, + ): + + async_handler = 
AsyncHTTPHandler( + timeout=httpx.Timeout(timeout=600.0, connect=5.0) + ) + + response = await async_handler.post(url=api_base, data=json.dumps(data)) + + if response.status_code != 200: + raise TritonError(status_code=response.status_code, message=response.text) + + _text_response = response.text + + logging_obj.post_call(original_response=_text_response) + + _json_response = response.json() + + _outputs = _json_response["outputs"] + _output_data = _outputs[0]["data"] + _embedding_output = { + "object": "embedding", + "index": 0, + "embedding": _output_data, + } + + model_response.model = _json_response.get("model_name", "None") + model_response.data = [_embedding_output] + + return model_response + + def embedding( + self, + model: str, + input: list, + timeout: float, + api_base: str, + model_response: litellm.utils.EmbeddingResponse, + api_key: Optional[str] = None, + logging_obj=None, + optional_params=None, + client=None, + aembedding=None, + ): + data_for_triton = { + "inputs": [ + { + "name": "input_text", + "shape": [1], + "datatype": "BYTES", + "data": input, + } + ] + } + + ## LOGGING + + curl_string = f"curl {api_base} -X POST -H 'Content-Type: application/json' -d '{data_for_triton}'" + + logging_obj.pre_call( + input="", + api_key=None, + additional_args={ + "complete_input_dict": optional_params, + "request_str": curl_string, + }, + ) + + if aembedding == True: + response = self.aembedding( + data=data_for_triton, + model_response=model_response, + logging_obj=logging_obj, + api_base=api_base, + api_key=api_key, + ) + return response + else: + raise Exception( + "Only async embedding supported for triton, please use litellm.aembedding() for now" + ) diff --git a/litellm/llms/vertex_ai.py b/litellm/llms/vertex_ai.py index aded41148..84fec734f 100644 --- a/litellm/llms/vertex_ai.py +++ b/litellm/llms/vertex_ai.py @@ -1,12 +1,12 @@ import os, types import json from enum import Enum -import requests +import requests # type: ignore import time from typing 
import Callable, Optional, Union, List from litellm.utils import ModelResponse, Usage, CustomStreamWrapper, map_finish_reason import litellm, uuid -import httpx, inspect +import httpx, inspect # type: ignore class VertexAIError(Exception): @@ -22,6 +22,35 @@ class VertexAIError(Exception): ) # Call the base class constructor with the parameters it needs +class ExtendedGenerationConfig(dict): + """Extended parameters for the generation.""" + + def __init__( + self, + *, + temperature: Optional[float] = None, + top_p: Optional[float] = None, + top_k: Optional[int] = None, + candidate_count: Optional[int] = None, + max_output_tokens: Optional[int] = None, + stop_sequences: Optional[List[str]] = None, + response_mime_type: Optional[str] = None, + frequency_penalty: Optional[float] = None, + presence_penalty: Optional[float] = None, + ): + super().__init__( + temperature=temperature, + top_p=top_p, + top_k=top_k, + candidate_count=candidate_count, + max_output_tokens=max_output_tokens, + stop_sequences=stop_sequences, + response_mime_type=response_mime_type, + frequency_penalty=frequency_penalty, + presence_penalty=presence_penalty, + ) + + class VertexAIConfig: """ Reference: https://cloud.google.com/vertex-ai/docs/generative-ai/chat/test-chat-prompts @@ -43,6 +72,10 @@ class VertexAIConfig: - `stop_sequences` (List[str]): The set of character sequences (up to 5) that will stop output generation. If specified, the API will stop at the first appearance of a stop sequence. The stop sequence will not be included as part of the response. + - `frequency_penalty` (float): This parameter is used to penalize the model from repeating the same output. The default value is 0.0. + + - `presence_penalty` (float): This parameter is used to penalize the model from generating the same output as the input. The default value is 0.0. + Note: Please make sure to modify the default parameters as required for your use case. 
""" @@ -53,6 +86,8 @@ class VertexAIConfig: response_mime_type: Optional[str] = None candidate_count: Optional[int] = None stop_sequences: Optional[list] = None + frequency_penalty: Optional[float] = None + presence_penalty: Optional[float] = None def __init__( self, @@ -63,6 +98,8 @@ class VertexAIConfig: response_mime_type: Optional[str] = None, candidate_count: Optional[int] = None, stop_sequences: Optional[list] = None, + frequency_penalty: Optional[float] = None, + presence_penalty: Optional[float] = None, ) -> None: locals_ = locals() for key, value in locals_.items(): @@ -87,6 +124,97 @@ class VertexAIConfig: and v is not None } + def get_supported_openai_params(self): + return [ + "temperature", + "top_p", + "max_tokens", + "stream", + "tools", + "tool_choice", + "response_format", + "n", + "stop", + ] + + def map_openai_params(self, non_default_params: dict, optional_params: dict): + for param, value in non_default_params.items(): + if param == "temperature": + optional_params["temperature"] = value + if param == "top_p": + optional_params["top_p"] = value + if ( + param == "stream" and value == True + ): # sending stream = False, can cause it to get passed unchecked and raise issues + optional_params["stream"] = value + if param == "n": + optional_params["candidate_count"] = value + if param == "stop": + if isinstance(value, str): + optional_params["stop_sequences"] = [value] + elif isinstance(value, list): + optional_params["stop_sequences"] = value + if param == "max_tokens": + optional_params["max_output_tokens"] = value + if param == "response_format" and value["type"] == "json_object": + optional_params["response_mime_type"] = "application/json" + if param == "frequency_penalty": + optional_params["frequency_penalty"] = value + if param == "presence_penalty": + optional_params["presence_penalty"] = value + if param == "tools" and isinstance(value, list): + from vertexai.preview import generative_models + + gtool_func_declarations = [] + for tool in 
value: + gtool_func_declaration = generative_models.FunctionDeclaration( + name=tool["function"]["name"], + description=tool["function"].get("description", ""), + parameters=tool["function"].get("parameters", {}), + ) + gtool_func_declarations.append(gtool_func_declaration) + optional_params["tools"] = [ + generative_models.Tool( + function_declarations=gtool_func_declarations + ) + ] + if param == "tool_choice" and ( + isinstance(value, str) or isinstance(value, dict) + ): + pass + return optional_params + + def get_mapped_special_auth_params(self) -> dict: + """ + Common auth params across bedrock/vertex_ai/azure/watsonx + """ + return {"project": "vertex_project", "region_name": "vertex_location"} + + def map_special_auth_params(self, non_default_params: dict, optional_params: dict): + mapped_params = self.get_mapped_special_auth_params() + + for param, value in non_default_params.items(): + if param in mapped_params: + optional_params[mapped_params[param]] = value + return optional_params + + def get_eu_regions(self) -> List[str]: + """ + Source: https://cloud.google.com/vertex-ai/generative-ai/docs/learn/locations#available-regions + """ + return [ + "europe-central2", + "europe-north1", + "europe-southwest1", + "europe-west1", + "europe-west2", + "europe-west3", + "europe-west4", + "europe-west6", + "europe-west8", + "europe-west9", + ] + import asyncio @@ -130,8 +258,7 @@ def _get_image_bytes_from_url(image_url: str) -> bytes: image_bytes = response.content return image_bytes except requests.exceptions.RequestException as e: - # Handle any request exceptions (e.g., connection error, timeout) - return b"" # Return an empty bytes object or handle the error as needed + raise Exception(f"An exception occurs with this image - {str(e)}") def _load_image_from_url(image_url: str): @@ -152,7 +279,8 @@ def _load_image_from_url(image_url: str): ) image_bytes = _get_image_bytes_from_url(image_url) - return Image.from_bytes(image_bytes) + + return 
Image.from_bytes(data=image_bytes) def _gemini_vision_convert_messages(messages: list): @@ -270,6 +398,7 @@ def completion( logging_obj, vertex_project=None, vertex_location=None, + vertex_credentials=None, optional_params=None, litellm_params=None, logger_fn=None, @@ -307,48 +436,23 @@ def completion( from google.protobuf.struct_pb2 import Value # type: ignore from google.cloud.aiplatform_v1beta1.types import content as gapic_content_types # type: ignore import google.auth # type: ignore - - class ExtendedGenerationConfig(GenerationConfig): - """Extended parameters for the generation.""" - - def __init__( - self, - *, - temperature: Optional[float] = None, - top_p: Optional[float] = None, - top_k: Optional[int] = None, - candidate_count: Optional[int] = None, - max_output_tokens: Optional[int] = None, - stop_sequences: Optional[List[str]] = None, - response_mime_type: Optional[str] = None, - ): - args_spec = inspect.getfullargspec(gapic_content_types.GenerationConfig) - - if "response_mime_type" in args_spec.args: - self._raw_generation_config = gapic_content_types.GenerationConfig( - temperature=temperature, - top_p=top_p, - top_k=top_k, - candidate_count=candidate_count, - max_output_tokens=max_output_tokens, - stop_sequences=stop_sequences, - response_mime_type=response_mime_type, - ) - else: - self._raw_generation_config = gapic_content_types.GenerationConfig( - temperature=temperature, - top_p=top_p, - top_k=top_k, - candidate_count=candidate_count, - max_output_tokens=max_output_tokens, - stop_sequences=stop_sequences, - ) + import proto # type: ignore ## Load credentials with the correct quota project ref: https://github.com/googleapis/python-aiplatform/issues/2557#issuecomment-1709284744 print_verbose( f"VERTEX AI: vertex_project={vertex_project}; vertex_location={vertex_location}" ) - creds, _ = google.auth.default(quota_project_id=vertex_project) + if vertex_credentials is not None and isinstance(vertex_credentials, str): + import 
google.oauth2.service_account + + json_obj = json.loads(vertex_credentials) + + creds = google.oauth2.service_account.Credentials.from_service_account_info( + json_obj, + scopes=["https://www.googleapis.com/auth/cloud-platform"], + ) + else: + creds, _ = google.auth.default(quota_project_id=vertex_project) print_verbose( f"VERTEX AI: creds={creds}; google application credentials: {os.getenv('GOOGLE_APPLICATION_CREDENTIALS')}" ) @@ -457,6 +561,7 @@ def completion( "instances": instances, "vertex_location": vertex_location, "vertex_project": vertex_project, + "safety_settings": safety_settings, **optional_params, } if optional_params.get("stream", False) is True: @@ -471,8 +576,9 @@ def completion( tools = optional_params.pop("tools", None) prompt, images = _gemini_vision_convert_messages(messages=messages) content = [prompt] + images - if "stream" in optional_params and optional_params["stream"] == True: - stream = optional_params.pop("stream") + stream = optional_params.pop("stream", False) + if stream == True: + request_str += f"response = llm_model.generate_content({content}, generation_config=GenerationConfig(**{optional_params}), safety_settings={safety_settings}, stream={stream})\n" logging_obj.pre_call( input=prompt, @@ -485,12 +591,12 @@ def completion( model_response = llm_model.generate_content( contents=content, - generation_config=ExtendedGenerationConfig(**optional_params), + generation_config=optional_params, safety_settings=safety_settings, stream=True, tools=tools, ) - optional_params["stream"] = True + return model_response request_str += f"response = llm_model.generate_content({content})\n" @@ -507,7 +613,7 @@ def completion( ## LLM Call response = llm_model.generate_content( contents=content, - generation_config=ExtendedGenerationConfig(**optional_params), + generation_config=optional_params, safety_settings=safety_settings, tools=tools, ) @@ -517,9 +623,21 @@ def completion( ): function_call = response.candidates[0].content.parts[0].function_call 
args_dict = {} - for k, v in function_call.args.items(): - args_dict[k] = v - args_str = json.dumps(args_dict) + + # Check if it's a RepeatedComposite instance + for key, val in function_call.args.items(): + if isinstance( + val, proto.marshal.collections.repeated.RepeatedComposite + ): + # If so, convert to list + args_dict[key] = [v for v in val] + else: + args_dict[key] = val + + try: + args_str = json.dumps(args_dict) + except Exception as e: + raise VertexAIError(status_code=422, message=str(e)) message = litellm.Message( content=None, tool_calls=[ @@ -562,7 +680,7 @@ def completion( }, ) model_response = chat.send_message_streaming(prompt, **optional_params) - optional_params["stream"] = True + return model_response request_str += f"chat.send_message({prompt}, **{optional_params}).text\n" @@ -594,7 +712,7 @@ def completion( }, ) model_response = llm_model.predict_streaming(prompt, **optional_params) - optional_params["stream"] = True + return model_response request_str += f"llm_model.predict({prompt}, **{optional_params}).text\n" @@ -719,9 +837,11 @@ def completion( completion_tokens=completion_tokens, total_tokens=prompt_tokens + completion_tokens, ) - model_response.usage = usage + setattr(model_response, "usage", usage) return model_response except Exception as e: + if isinstance(e, VertexAIError): + raise e raise VertexAIError(status_code=500, message=str(e)) @@ -740,55 +860,20 @@ async def async_completion( instances=None, vertex_project=None, vertex_location=None, + safety_settings=None, **optional_params, ): """ Add support for acompletion calls for gemini-pro """ try: - from vertexai.preview.generative_models import GenerationConfig - from google.cloud.aiplatform_v1beta1.types import content as gapic_content_types # type: ignore - - class ExtendedGenerationConfig(GenerationConfig): - """Extended parameters for the generation.""" - - def __init__( - self, - *, - temperature: Optional[float] = None, - top_p: Optional[float] = None, - top_k: 
Optional[int] = None, - candidate_count: Optional[int] = None, - max_output_tokens: Optional[int] = None, - stop_sequences: Optional[List[str]] = None, - response_mime_type: Optional[str] = None, - ): - args_spec = inspect.getfullargspec(gapic_content_types.GenerationConfig) - - if "response_mime_type" in args_spec.args: - self._raw_generation_config = gapic_content_types.GenerationConfig( - temperature=temperature, - top_p=top_p, - top_k=top_k, - candidate_count=candidate_count, - max_output_tokens=max_output_tokens, - stop_sequences=stop_sequences, - response_mime_type=response_mime_type, - ) - else: - self._raw_generation_config = gapic_content_types.GenerationConfig( - temperature=temperature, - top_p=top_p, - top_k=top_k, - candidate_count=candidate_count, - max_output_tokens=max_output_tokens, - stop_sequences=stop_sequences, - ) + import proto # type: ignore if mode == "vision": - print_verbose("\nMaking VertexAI Gemini Pro Vision Call") + print_verbose("\nMaking VertexAI Gemini Pro/Vision Call") print_verbose(f"\nProcessing input messages = {messages}") tools = optional_params.pop("tools", None) + stream = optional_params.pop("stream", False) prompt, images = _gemini_vision_convert_messages(messages=messages) content = [prompt] + images @@ -805,20 +890,34 @@ async def async_completion( ) ## LLM Call + # print(f"final content: {content}") response = await llm_model._generate_content_async( contents=content, - generation_config=ExtendedGenerationConfig(**optional_params), + generation_config=optional_params, + safety_settings=safety_settings, tools=tools, ) - if tools is not None and hasattr( - response.candidates[0].content.parts[0], "function_call" + if tools is not None and bool( + getattr(response.candidates[0].content.parts[0], "function_call", None) ): function_call = response.candidates[0].content.parts[0].function_call args_dict = {} - for k, v in function_call.args.items(): - args_dict[k] = v - args_str = json.dumps(args_dict) + + # Check if it's a 
RepeatedComposite instance + for key, val in function_call.args.items(): + if isinstance( + val, proto.marshal.collections.repeated.RepeatedComposite + ): + # If so, convert to list + args_dict[key] = [v for v in val] + else: + args_dict[key] = val + + try: + args_str = json.dumps(args_dict) + except Exception as e: + raise VertexAIError(status_code=422, message=str(e)) message = litellm.Message( content=None, tool_calls=[ @@ -965,7 +1064,7 @@ async def async_completion( completion_tokens=completion_tokens, total_tokens=prompt_tokens + completion_tokens, ) - model_response.usage = usage + setattr(model_response, "usage", usage) return model_response except Exception as e: raise VertexAIError(status_code=500, message=str(e)) @@ -986,50 +1085,12 @@ async def async_streaming( instances=None, vertex_project=None, vertex_location=None, + safety_settings=None, **optional_params, ): """ Add support for async streaming calls for gemini-pro """ - from vertexai.preview.generative_models import GenerationConfig - from google.cloud.aiplatform_v1beta1.types import content as gapic_content_types # type: ignore - - class ExtendedGenerationConfig(GenerationConfig): - """Extended parameters for the generation.""" - - def __init__( - self, - *, - temperature: Optional[float] = None, - top_p: Optional[float] = None, - top_k: Optional[int] = None, - candidate_count: Optional[int] = None, - max_output_tokens: Optional[int] = None, - stop_sequences: Optional[List[str]] = None, - response_mime_type: Optional[str] = None, - ): - args_spec = inspect.getfullargspec(gapic_content_types.GenerationConfig) - - if "response_mime_type" in args_spec.args: - self._raw_generation_config = gapic_content_types.GenerationConfig( - temperature=temperature, - top_p=top_p, - top_k=top_k, - candidate_count=candidate_count, - max_output_tokens=max_output_tokens, - stop_sequences=stop_sequences, - response_mime_type=response_mime_type, - ) - else: - self._raw_generation_config = 
gapic_content_types.GenerationConfig( - temperature=temperature, - top_p=top_p, - top_k=top_k, - candidate_count=candidate_count, - max_output_tokens=max_output_tokens, - stop_sequences=stop_sequences, - ) - if mode == "vision": stream = optional_params.pop("stream") tools = optional_params.pop("tools", None) @@ -1050,11 +1111,11 @@ async def async_streaming( response = await llm_model._generate_content_streaming_async( contents=content, - generation_config=ExtendedGenerationConfig(**optional_params), + generation_config=optional_params, + safety_settings=safety_settings, tools=tools, ) - optional_params["stream"] = True - optional_params["tools"] = tools + elif mode == "chat": chat = llm_model.start_chat() optional_params.pop( @@ -1073,7 +1134,7 @@ async def async_streaming( }, ) response = chat.send_message_streaming_async(prompt, **optional_params) - optional_params["stream"] = True + elif mode == "text": optional_params.pop( "stream", None @@ -1169,6 +1230,7 @@ def embedding( encoding=None, vertex_project=None, vertex_location=None, + vertex_credentials=None, aembedding=False, print_verbose=None, ): @@ -1189,7 +1251,17 @@ def embedding( print_verbose( f"VERTEX AI: vertex_project={vertex_project}; vertex_location={vertex_location}" ) - creds, _ = google.auth.default(quota_project_id=vertex_project) + if vertex_credentials is not None and isinstance(vertex_credentials, str): + import google.oauth2.service_account + + json_obj = json.loads(vertex_credentials) + + creds = google.oauth2.service_account.Credentials.from_service_account_info( + json_obj, + scopes=["https://www.googleapis.com/auth/cloud-platform"], + ) + else: + creds, _ = google.auth.default(quota_project_id=vertex_project) print_verbose( f"VERTEX AI: creds={creds}; google application credentials: {os.getenv('GOOGLE_APPLICATION_CREDENTIALS')}" ) diff --git a/litellm/llms/vertex_ai_anthropic.py b/litellm/llms/vertex_ai_anthropic.py index eb4cc864f..3bdcf4fd6 100644 --- 
a/litellm/llms/vertex_ai_anthropic.py +++ b/litellm/llms/vertex_ai_anthropic.py @@ -3,7 +3,7 @@ import os, types import json from enum import Enum -import requests, copy +import requests, copy # type: ignore import time, uuid from typing import Callable, Optional, List from litellm.utils import ModelResponse, Usage, map_finish_reason, CustomStreamWrapper @@ -17,7 +17,7 @@ from .prompt_templates.factory import ( extract_between_tags, parse_xml_params, ) -import httpx +import httpx # type: ignore class VertexAIError(Exception): @@ -129,6 +129,18 @@ class VertexAIAnthropicConfig: # makes headers for API call +def refresh_auth( + credentials, +) -> str: # used when user passes in credentials as json string + from google.auth.transport.requests import Request # type: ignore[import-untyped] + + if credentials.token is None: + credentials.refresh(Request()) + + if not credentials.token: + raise RuntimeError("Could not resolve API token from the credentials") + + return credentials.token def completion( @@ -140,6 +152,7 @@ def completion( logging_obj, vertex_project=None, vertex_location=None, + vertex_credentials=None, optional_params=None, litellm_params=None, logger_fn=None, @@ -217,11 +230,28 @@ def completion( ## Completion Call print_verbose( - f"VERTEX AI: vertex_project={vertex_project}; vertex_location={vertex_location}" + f"VERTEX AI: vertex_project={vertex_project}; vertex_location={vertex_location}; vertex_credentials={vertex_credentials}" ) + access_token = None if client is None: + if vertex_credentials is not None and isinstance(vertex_credentials, str): + import google.oauth2.service_account + + json_obj = json.loads(vertex_credentials) + + creds = ( + google.oauth2.service_account.Credentials.from_service_account_info( + json_obj, + scopes=["https://www.googleapis.com/auth/cloud-platform"], + ) + ) + ### CHECK IF ACCESS + access_token = refresh_auth(credentials=creds) + vertex_ai_client = AnthropicVertex( - project_id=vertex_project, region=vertex_location 
+ project_id=vertex_project, + region=vertex_location, + access_token=access_token, ) else: vertex_ai_client = client @@ -243,6 +273,7 @@ def completion( vertex_location=vertex_location, optional_params=optional_params, client=client, + access_token=access_token, ) else: return async_completion( @@ -256,6 +287,7 @@ def completion( vertex_location=vertex_location, optional_params=optional_params, client=client, + access_token=access_token, ) if stream is not None and stream == True: ## LOGGING @@ -317,7 +349,7 @@ def completion( completion_tokens=completion_tokens, total_tokens=prompt_tokens + completion_tokens, ) - model_response.usage = usage + setattr(model_response, "usage", usage) return model_response except Exception as e: raise VertexAIError(status_code=500, message=str(e)) @@ -334,12 +366,13 @@ async def async_completion( vertex_location=None, optional_params=None, client=None, + access_token=None, ): from anthropic import AsyncAnthropicVertex if client is None: vertex_ai_client = AsyncAnthropicVertex( - project_id=vertex_project, region=vertex_location + project_id=vertex_project, region=vertex_location, access_token=access_token ) else: vertex_ai_client = client @@ -389,7 +422,7 @@ async def async_completion( completion_tokens=completion_tokens, total_tokens=prompt_tokens + completion_tokens, ) - model_response.usage = usage + setattr(model_response, "usage", usage) return model_response @@ -404,12 +437,13 @@ async def async_streaming( vertex_location=None, optional_params=None, client=None, + access_token=None, ): from anthropic import AsyncAnthropicVertex if client is None: vertex_ai_client = AsyncAnthropicVertex( - project_id=vertex_project, region=vertex_location + project_id=vertex_project, region=vertex_location, access_token=access_token ) else: vertex_ai_client = client diff --git a/litellm/llms/vllm.py b/litellm/llms/vllm.py index 2b130765b..b2a9dd54d 100644 --- a/litellm/llms/vllm.py +++ b/litellm/llms/vllm.py @@ -1,8 +1,8 @@ import os import 
json from enum import Enum -import requests -import time, httpx +import requests # type: ignore +import time, httpx # type: ignore from typing import Callable, Any from litellm.utils import ModelResponse, Usage from .prompt_templates.factory import prompt_factory, custom_prompt @@ -104,7 +104,7 @@ def completion( completion_tokens=completion_tokens, total_tokens=prompt_tokens + completion_tokens, ) - model_response.usage = usage + setattr(model_response, "usage", usage) return model_response @@ -186,7 +186,7 @@ def batch_completions( completion_tokens=completion_tokens, total_tokens=prompt_tokens + completion_tokens, ) - model_response.usage = usage + setattr(model_response, "usage", usage) final_outputs.append(model_response) return final_outputs diff --git a/litellm/llms/watsonx.py b/litellm/llms/watsonx.py new file mode 100644 index 000000000..34176a23a --- /dev/null +++ b/litellm/llms/watsonx.py @@ -0,0 +1,790 @@ +from enum import Enum +import json, types, time # noqa: E401 +from contextlib import asynccontextmanager, contextmanager +from typing import ( + Callable, + Dict, + Generator, + AsyncGenerator, + Iterator, + AsyncIterator, + Optional, + Any, + Union, + List, + ContextManager, + AsyncContextManager, +) + +import httpx # type: ignore +import requests # type: ignore +import litellm +from litellm.utils import ModelResponse, Usage, get_secret +from litellm.llms.custom_httpx.http_handler import AsyncHTTPHandler + +from .base import BaseLLM +from .prompt_templates import factory as ptf + + +class WatsonXAIError(Exception): + def __init__(self, status_code, message, url: Optional[str] = None): + self.status_code = status_code + self.message = message + url = url or "https://https://us-south.ml.cloud.ibm.com" + self.request = httpx.Request(method="POST", url=url) + self.response = httpx.Response(status_code=status_code, request=self.request) + super().__init__( + self.message + ) # Call the base class constructor with the parameters it needs + + +class 
IBMWatsonXAIConfig: + """ + Reference: https://cloud.ibm.com/apidocs/watsonx-ai#text-generation + (See ibm_watsonx_ai.metanames.GenTextParamsMetaNames for a list of all available params) + + Supported params for all available watsonx.ai foundational models. + + - `decoding_method` (str): One of "greedy" or "sample" + + - `temperature` (float): Sets the model temperature for sampling - not available when decoding_method='greedy'. + + - `max_new_tokens` (integer): Maximum length of the generated tokens. + + - `min_new_tokens` (integer): Maximum length of input tokens. Any more than this will be truncated. + + - `length_penalty` (dict): A dictionary with keys "decay_factor" and "start_index". + + - `stop_sequences` (string[]): list of strings to use as stop sequences. + + - `top_k` (integer): top k for sampling - not available when decoding_method='greedy'. + + - `top_p` (integer): top p for sampling - not available when decoding_method='greedy'. + + - `repetition_penalty` (float): token repetition penalty during text generation. + + - `truncate_input_tokens` (integer): Truncate input tokens to this length. + + - `include_stop_sequences` (bool): If True, the stop sequence will be included at the end of the generated text in the case of a match. + + - `return_options` (dict): A dictionary of options to return. Options include "input_text", "generated_tokens", "input_tokens", "token_ranks". Values are boolean. + + - `random_seed` (integer): Random seed for text generation. + + - `moderations` (dict): Dictionary of properties that control the moderations, for usages such as Hate and profanity (HAP) and PII filtering. + + - `stream` (bool): If True, the model will return a stream of responses. 
+ """ + + decoding_method: Optional[str] = "sample" + temperature: Optional[float] = None + max_new_tokens: Optional[int] = None # litellm.max_tokens + min_new_tokens: Optional[int] = None + length_penalty: Optional[dict] = None # e.g {"decay_factor": 2.5, "start_index": 5} + stop_sequences: Optional[List[str]] = None # e.g ["}", ")", "."] + top_k: Optional[int] = None + top_p: Optional[float] = None + repetition_penalty: Optional[float] = None + truncate_input_tokens: Optional[int] = None + include_stop_sequences: Optional[bool] = False + return_options: Optional[Dict[str, bool]] = None + random_seed: Optional[int] = None # e.g 42 + moderations: Optional[dict] = None + stream: Optional[bool] = False + + def __init__( + self, + decoding_method: Optional[str] = None, + temperature: Optional[float] = None, + max_new_tokens: Optional[int] = None, + min_new_tokens: Optional[int] = None, + length_penalty: Optional[dict] = None, + stop_sequences: Optional[List[str]] = None, + top_k: Optional[int] = None, + top_p: Optional[float] = None, + repetition_penalty: Optional[float] = None, + truncate_input_tokens: Optional[int] = None, + include_stop_sequences: Optional[bool] = None, + return_options: Optional[dict] = None, + random_seed: Optional[int] = None, + moderations: Optional[dict] = None, + stream: Optional[bool] = None, + **kwargs, + ) -> None: + locals_ = locals() + for key, value in locals_.items(): + if key != "self" and value is not None: + setattr(self.__class__, key, value) + + @classmethod + def get_config(cls): + return { + k: v + for k, v in cls.__dict__.items() + if not k.startswith("__") + and not isinstance( + v, + ( + types.FunctionType, + types.BuiltinFunctionType, + classmethod, + staticmethod, + ), + ) + and v is not None + } + + def get_supported_openai_params(self): + return [ + "temperature", # equivalent to temperature + "max_tokens", # equivalent to max_new_tokens + "top_p", # equivalent to top_p + "frequency_penalty", # equivalent to 
repetition_penalty + "stop", # equivalent to stop_sequences + "seed", # equivalent to random_seed + "stream", # equivalent to stream + ] + + def get_mapped_special_auth_params(self) -> dict: + """ + Common auth params across bedrock/vertex_ai/azure/watsonx + """ + return { + "project": "watsonx_project", + "region_name": "watsonx_region_name", + "token": "watsonx_token", + } + + def map_special_auth_params(self, non_default_params: dict, optional_params: dict): + mapped_params = self.get_mapped_special_auth_params() + + for param, value in non_default_params.items(): + if param in mapped_params: + optional_params[mapped_params[param]] = value + return optional_params + + def get_eu_regions(self) -> List[str]: + """ + Source: https://www.ibm.com/docs/en/watsonx/saas?topic=integrations-regional-availability + """ + return [ + "eu-de", + "eu-gb", + ] + + +def convert_messages_to_prompt(model, messages, provider, custom_prompt_dict): + # handle anthropic prompts and amazon titan prompts + if model in custom_prompt_dict: + # check if the model has a registered custom prompt + model_prompt_dict = custom_prompt_dict[model] + prompt = ptf.custom_prompt( + messages=messages, + role_dict=model_prompt_dict.get( + "role_dict", model_prompt_dict.get("roles") + ), + initial_prompt_value=model_prompt_dict.get("initial_prompt_value", ""), + final_prompt_value=model_prompt_dict.get("final_prompt_value", ""), + bos_token=model_prompt_dict.get("bos_token", ""), + eos_token=model_prompt_dict.get("eos_token", ""), + ) + return prompt + elif provider == "ibm": + prompt = ptf.prompt_factory( + model=model, messages=messages, custom_llm_provider="watsonx" + ) + elif provider == "ibm-mistralai": + prompt = ptf.mistral_instruct_pt(messages=messages) + else: + prompt = ptf.prompt_factory( + model=model, messages=messages, custom_llm_provider="watsonx" + ) + return prompt + + +class WatsonXAIEndpoint(str, Enum): + TEXT_GENERATION = "/ml/v1/text/generation" + TEXT_GENERATION_STREAM = 
"/ml/v1/text/generation_stream" + DEPLOYMENT_TEXT_GENERATION = "/ml/v1/deployments/{deployment_id}/text/generation" + DEPLOYMENT_TEXT_GENERATION_STREAM = ( + "/ml/v1/deployments/{deployment_id}/text/generation_stream" + ) + EMBEDDINGS = "/ml/v1/text/embeddings" + PROMPTS = "/ml/v1/prompts" + AVAILABLE_MODELS = "/ml/v1/foundation_model_specs" + + +class IBMWatsonXAI(BaseLLM): + """ + Class to interface with IBM watsonx.ai API for text generation and embeddings. + + Reference: https://cloud.ibm.com/apidocs/watsonx-ai + """ + + api_version = "2024-03-13" + + def __init__(self) -> None: + super().__init__() + + def _prepare_text_generation_req( + self, + model_id: str, + prompt: str, + stream: bool, + optional_params: dict, + print_verbose: Optional[Callable] = None, + ) -> dict: + """ + Get the request parameters for text generation. + """ + api_params = self._get_api_params(optional_params, print_verbose=print_verbose) + # build auth headers + api_token = api_params.get("token") + + headers = { + "Authorization": f"Bearer {api_token}", + "Content-Type": "application/json", + "Accept": "application/json", + } + extra_body_params = optional_params.pop("extra_body", {}) + optional_params.update(extra_body_params) + # init the payload to the text generation call + payload = { + "input": prompt, + "moderations": optional_params.pop("moderations", {}), + "parameters": optional_params, + } + request_params = dict(version=api_params["api_version"]) + # text generation endpoint deployment or model / stream or not + if model_id.startswith("deployment/"): + # deployment models are passed in as 'deployment/' + if api_params.get("space_id") is None: + raise WatsonXAIError( + status_code=401, + url=api_params["url"], + message="Error: space_id is required for models called using the 'deployment/' endpoint. 
Pass in the space_id as a parameter or set it in the WX_SPACE_ID environment variable.", + ) + deployment_id = "/".join(model_id.split("/")[1:]) + endpoint = ( + WatsonXAIEndpoint.DEPLOYMENT_TEXT_GENERATION_STREAM.value + if stream + else WatsonXAIEndpoint.DEPLOYMENT_TEXT_GENERATION.value + ) + endpoint = endpoint.format(deployment_id=deployment_id) + else: + payload["model_id"] = model_id + payload["project_id"] = api_params["project_id"] + endpoint = ( + WatsonXAIEndpoint.TEXT_GENERATION_STREAM + if stream + else WatsonXAIEndpoint.TEXT_GENERATION + ) + url = api_params["url"].rstrip("/") + endpoint + return dict( + method="POST", url=url, headers=headers, json=payload, params=request_params + ) + + def _get_api_params( + self, params: dict, print_verbose: Optional[Callable] = None + ) -> dict: + """ + Find watsonx.ai credentials in the params or environment variables and return the headers for authentication. + """ + # Load auth variables from params + url = params.pop("url", params.pop("api_base", params.pop("base_url", None))) + api_key = params.pop("apikey", None) + token = params.pop("token", None) + project_id = params.pop( + "project_id", params.pop("watsonx_project", None) + ) # watsonx.ai project_id - allow 'watsonx_project' to be consistent with how vertex project implementation works -> reduce provider-specific params + space_id = params.pop("space_id", None) # watsonx.ai deployment space_id + region_name = params.pop("region_name", params.pop("region", None)) + if region_name is None: + region_name = params.pop( + "watsonx_region_name", params.pop("watsonx_region", None) + ) # consistent with how vertex ai + aws regions are accepted + wx_credentials = params.pop( + "wx_credentials", + params.pop( + "watsonx_credentials", None + ), # follow {provider}_credentials, same as vertex ai + ) + api_version = params.pop("api_version", IBMWatsonXAI.api_version) + # Load auth variables from environment variables + if url is None: + url = ( + 
get_secret("WATSONX_API_BASE") # consistent with 'AZURE_API_BASE' + or get_secret("WATSONX_URL") + or get_secret("WX_URL") + or get_secret("WML_URL") + ) + if api_key is None: + api_key = ( + get_secret("WATSONX_APIKEY") + or get_secret("WATSONX_API_KEY") + or get_secret("WX_API_KEY") + ) + if token is None: + token = get_secret("WATSONX_TOKEN") or get_secret("WX_TOKEN") + if project_id is None: + project_id = ( + get_secret("WATSONX_PROJECT_ID") + or get_secret("WX_PROJECT_ID") + or get_secret("PROJECT_ID") + ) + if region_name is None: + region_name = ( + get_secret("WATSONX_REGION") + or get_secret("WX_REGION") + or get_secret("REGION") + ) + if space_id is None: + space_id = ( + get_secret("WATSONX_DEPLOYMENT_SPACE_ID") + or get_secret("WATSONX_SPACE_ID") + or get_secret("WX_SPACE_ID") + or get_secret("SPACE_ID") + ) + + # credentials parsing + if wx_credentials is not None: + url = wx_credentials.get("url", url) + api_key = wx_credentials.get( + "apikey", wx_credentials.get("api_key", api_key) + ) + token = wx_credentials.get( + "token", + wx_credentials.get( + "watsonx_token", token + ), # follow format of {provider}_token, same as azure - e.g. 'azure_ad_token=..' + ) + + # verify that all required credentials are present + if url is None: + raise WatsonXAIError( + status_code=401, + message="Error: Watsonx URL not set. Set WX_URL in environment variables or pass in as a parameter.", + ) + if token is None and api_key is not None: + # generate the auth token + if print_verbose is not None: + print_verbose("Generating IAM token for Watsonx.ai") + token = self.generate_iam_token(api_key) + elif token is None and api_key is None: + raise WatsonXAIError( + status_code=401, + url=url, + message="Error: API key or token not found. Set WX_API_KEY or WX_TOKEN in environment variables or pass in as a parameter.", + ) + if project_id is None: + raise WatsonXAIError( + status_code=401, + url=url, + message="Error: Watsonx project_id not set. 
Set WX_PROJECT_ID in environment variables or pass in as a parameter.", + ) + + return { + "url": url, + "api_key": api_key, + "token": token, + "project_id": project_id, + "space_id": space_id, + "region_name": region_name, + "api_version": api_version, + } + + def completion( + self, + model: str, + messages: list, + custom_prompt_dict: dict, + model_response: ModelResponse, + print_verbose: Callable, + encoding, + logging_obj, + optional_params=None, + acompletion=None, + litellm_params=None, + logger_fn=None, + timeout=None, + ): + """ + Send a text generation request to the IBM Watsonx.ai API. + Reference: https://cloud.ibm.com/apidocs/watsonx-ai#text-generation + """ + stream = optional_params.pop("stream", False) + + # Load default configs + config = IBMWatsonXAIConfig.get_config() + for k, v in config.items(): + if k not in optional_params: + optional_params[k] = v + + # Make prompt to send to model + provider = model.split("/")[0] + # model_name = "/".join(model.split("/")[1:]) + prompt = convert_messages_to_prompt( + model, messages, provider, custom_prompt_dict + ) + + def process_text_gen_response(json_resp: dict) -> ModelResponse: + if "results" not in json_resp: + raise WatsonXAIError( + status_code=500, + message=f"Error: Invalid response from Watsonx.ai API: {json_resp}", + ) + generated_text = json_resp["results"][0]["generated_text"] + prompt_tokens = json_resp["results"][0]["input_token_count"] + completion_tokens = json_resp["results"][0]["generated_token_count"] + model_response["choices"][0]["message"]["content"] = generated_text + model_response["finish_reason"] = json_resp["results"][0]["stop_reason"] + model_response["created"] = int(time.time()) + model_response["model"] = model + usage = Usage( + prompt_tokens=prompt_tokens, + completion_tokens=completion_tokens, + total_tokens=prompt_tokens + completion_tokens, + ) + setattr(model_response, "usage", usage) + return model_response + + def process_stream_response( + stream_resp: 
Union[Iterator[str], AsyncIterator], + ) -> litellm.CustomStreamWrapper: + streamwrapper = litellm.CustomStreamWrapper( + stream_resp, + model=model, + custom_llm_provider="watsonx", + logging_obj=logging_obj, + ) + return streamwrapper + + # create the function to manage the request to watsonx.ai + self.request_manager = RequestManager(logging_obj) + + def handle_text_request(request_params: dict) -> ModelResponse: + with self.request_manager.request( + request_params, + input=prompt, + timeout=timeout, + ) as resp: + json_resp = resp.json() + + return process_text_gen_response(json_resp) + + async def handle_text_request_async(request_params: dict) -> ModelResponse: + async with self.request_manager.async_request( + request_params, + input=prompt, + timeout=timeout, + ) as resp: + json_resp = resp.json() + return process_text_gen_response(json_resp) + + def handle_stream_request(request_params: dict) -> litellm.CustomStreamWrapper: + # stream the response - generated chunks will be handled + # by litellm.utils.CustomStreamWrapper.handle_watsonx_stream + with self.request_manager.request( + request_params, + stream=True, + input=prompt, + timeout=timeout, + ) as resp: + streamwrapper = process_stream_response(resp.iter_lines()) + return streamwrapper + + async def handle_stream_request_async(request_params: dict) -> litellm.CustomStreamWrapper: + # stream the response - generated chunks will be handled + # by litellm.utils.CustomStreamWrapper.handle_watsonx_stream + async with self.request_manager.async_request( + request_params, + stream=True, + input=prompt, + timeout=timeout, + ) as resp: + streamwrapper = process_stream_response(resp.aiter_lines()) + return streamwrapper + + try: + ## Get the response from the model + req_params = self._prepare_text_generation_req( + model_id=model, + prompt=prompt, + stream=stream, + optional_params=optional_params, + print_verbose=print_verbose, + ) + if stream and (acompletion is True): + # stream and async text generation 
+ return handle_stream_request_async(req_params) + elif stream: + # streaming text generation + return handle_stream_request(req_params) + elif (acompletion is True): + # async text generation + return handle_text_request_async(req_params) + else: + # regular text generation + return handle_text_request(req_params) + except WatsonXAIError as e: + raise e + except Exception as e: + raise WatsonXAIError(status_code=500, message=str(e)) + + def embedding( + self, + model: str, + input: Union[list, str], + api_key: Optional[str] = None, + logging_obj=None, + model_response=None, + optional_params=None, + encoding=None, + aembedding=None, + ): + """ + Send a text embedding request to the IBM Watsonx.ai API. + """ + if optional_params is None: + optional_params = {} + # Load default configs + config = IBMWatsonXAIConfig.get_config() + for k, v in config.items(): + if k not in optional_params: + optional_params[k] = v + + # Load auth variables from environment variables + if isinstance(input, str): + input = [input] + if api_key is not None: + optional_params["api_key"] = api_key + api_params = self._get_api_params(optional_params) + # build auth headers + api_token = api_params.get("token") + headers = { + "Authorization": f"Bearer {api_token}", + "Content-Type": "application/json", + "Accept": "application/json", + } + # init the payload to the text generation call + payload = { + "inputs": input, + "model_id": model, + "project_id": api_params["project_id"], + "parameters": optional_params, + } + request_params = dict(version=api_params["api_version"]) + url = api_params["url"].rstrip("/") + WatsonXAIEndpoint.EMBEDDINGS + req_params = { + "method": "POST", + "url": url, + "headers": headers, + "json": payload, + "params": request_params, + } + request_manager = RequestManager(logging_obj) + + def process_embedding_response(json_resp: dict) -> ModelResponse: + results = json_resp.get("results", []) + embedding_response = [] + for idx, result in enumerate(results): + 
embedding_response.append(
+                    {
+                        "object": "embedding",
+                        "index": idx,
+                        "embedding": result["embedding"],
+                    }
+                )
+            model_response["object"] = "list"
+            model_response["data"] = embedding_response
+            model_response["model"] = model
+            input_tokens = json_resp.get("input_token_count", 0)
+            model_response.usage = Usage(
+                prompt_tokens=input_tokens,
+                completion_tokens=0,
+                total_tokens=input_tokens,
+            )
+            return model_response
+
+        def handle_embedding(request_params: dict) -> ModelResponse:
+            with request_manager.request(request_params, input=input) as resp:
+                json_resp = resp.json()
+            return process_embedding_response(json_resp)
+
+        async def handle_aembedding(request_params: dict) -> ModelResponse:
+            async with request_manager.async_request(request_params, input=input) as resp:
+                json_resp = resp.json()
+            return process_embedding_response(json_resp)
+
+        try:
+            if aembedding is True:
+                return handle_aembedding(req_params)
+            else:
+                return handle_embedding(req_params)
+        except WatsonXAIError as e:
+            raise e
+        except Exception as e:
+            raise WatsonXAIError(status_code=500, message=str(e))
+
+    def generate_iam_token(self, api_key=None, **params):
+        headers = {}
+        headers["Content-Type"] = "application/x-www-form-urlencoded"
+        if api_key is None:
+            api_key = get_secret("WX_API_KEY") or get_secret("WATSONX_API_KEY")
+        if api_key is None:
+            raise ValueError("API key is required")
+        headers["Accept"] = "application/json"
+        data = {
+            "grant_type": "urn:ibm:params:oauth:grant-type:apikey",
+            "apikey": api_key,
+        }
+        response = httpx.post(
+            "https://iam.cloud.ibm.com/identity/token", data=data, headers=headers
+        )
+        response.raise_for_status()
+        json_data = response.json()
+        iam_access_token = json_data["access_token"]
+        self.token = iam_access_token
+        return iam_access_token
+
+    def get_available_models(self, *, ids_only: bool = True, **params):
+        api_params = self._get_api_params(params)
+        headers = {
+            "Authorization": f"Bearer {api_params['token']}",
+            "Content-Type": 
"application/json", + "Accept": "application/json", + } + request_params = dict(version=api_params["api_version"]) + url = api_params["url"].rstrip("/") + WatsonXAIEndpoint.AVAILABLE_MODELS + req_params = dict(method="GET", url=url, headers=headers, params=request_params) + with RequestManager(logging_obj=None).request(req_params) as resp: + json_resp = resp.json() + if not ids_only: + return json_resp + return [res["model_id"] for res in json_resp["resources"]] + +class RequestManager: + """ + Returns a context manager that manages the response from the request. + if async_ is True, returns an async context manager, otherwise returns a regular context manager. + + Usage: + ```python + request_params = dict(method="POST", url="https://api.example.com", headers={"Authorization" : "Bearer token"}, json={"key": "value"}) + request_manager = RequestManager(logging_obj=logging_obj) + async with request_manager.request(request_params) as resp: + ... + # or + with request_manager.async_request(request_params) as resp: + ... 
+    ```
+    """
+
+    def __init__(self, logging_obj=None):
+        self.logging_obj = logging_obj
+
+    def pre_call(
+        self,
+        request_params: dict,
+        input: Optional[Any] = None,
+    ):
+        if self.logging_obj is None:
+            return
+        request_str = (
+            f"response = {request_params['method']}(\n"
+            f"\turl={request_params['url']},\n"
+            f"\tjson={request_params.get('json')},\n"
+            f")"
+        )
+        self.logging_obj.pre_call(
+            input=input,
+            api_key=request_params["headers"].get("Authorization"),
+            additional_args={
+                "complete_input_dict": request_params.get("json"),
+                "request_str": request_str,
+            },
+        )
+
+    def post_call(self, resp, request_params, input: Optional[Any] = None):
+        if self.logging_obj is None:
+            return
+        self.logging_obj.post_call(
+            input=input,
+            api_key=request_params["headers"].get("Authorization"),
+            original_response=json.dumps(resp.json()),
+            additional_args={
+                "status_code": resp.status_code,
+                "complete_input_dict": request_params.get(
+                    "data", request_params.get("json")
+                ),
+            },
+        )
+
+    @contextmanager
+    def request(
+        self,
+        request_params: dict,
+        stream: bool = False,
+        input: Optional[Any] = None,
+        timeout=None,
+    ) -> Generator[requests.Response, None, None]:
+        """
+        Returns a context manager that yields the response from the request. 
+ """ + self.pre_call(request_params, input) + if timeout: + request_params["timeout"] = timeout + if stream: + request_params["stream"] = stream + try: + resp = requests.request(**request_params) + if not resp.ok: + raise WatsonXAIError( + status_code=resp.status_code, + message=f"Error {resp.status_code} ({resp.reason}): {resp.text}", + ) + yield resp + except Exception as e: + raise WatsonXAIError(status_code=500, message=str(e)) + if not stream: + self.post_call(resp, request_params) + + @asynccontextmanager + async def async_request( + self, + request_params: dict, + stream: bool = False, + input: Optional[Any] = None, + timeout=None, + ) -> AsyncGenerator[httpx.Response, None]: + self.pre_call(request_params, input) + if timeout: + request_params["timeout"] = timeout + if stream: + request_params["stream"] = stream + try: + # async with AsyncHTTPHandler(timeout=timeout) as client: + self.async_handler = AsyncHTTPHandler( + timeout=httpx.Timeout( + timeout=request_params.pop("timeout", 600.0), connect=5.0 + ), + ) + # async_handler.client.verify = False + if "json" in request_params: + request_params["data"] = json.dumps(request_params.pop("json", {})) + method = request_params.pop("method") + if method.upper() == "POST": + resp = await self.async_handler.post(**request_params) + else: + resp = await self.async_handler.get(**request_params) + if resp.status_code not in [200, 201]: + raise WatsonXAIError( + status_code=resp.status_code, + message=f"Error {resp.status_code} ({resp.reason}): {resp.text}", + ) + yield resp + # await async_handler.close() + except Exception as e: + raise WatsonXAIError(status_code=500, message=str(e)) + if not stream: + self.post_call(resp, request_params) \ No newline at end of file diff --git a/litellm/main.py b/litellm/main.py index 903fd76d5..2e4132a42 100644 --- a/litellm/main.py +++ b/litellm/main.py @@ -9,11 +9,12 @@ import os, openai, sys, json, inspect, uuid, datetime, threading from typing import Any, Literal, Union, 
BinaryIO +from typing_extensions import overload from functools import partial import dotenv, traceback, random, asyncio, time, contextvars from copy import deepcopy - import httpx + import litellm from ._logging import verbose_logger from litellm import ( # type: ignore @@ -34,9 +35,12 @@ from litellm.utils import ( async_mock_completion_streaming_obj, convert_to_model_response_object, token_counter, + create_pretrained_tokenizer, + create_tokenizer, Usage, get_optional_params_embeddings, get_optional_params_image_gen, + supports_httpx_timeout, ) from .llms import ( anthropic_text, @@ -44,6 +48,7 @@ from .llms import ( ai21, sagemaker, bedrock, + triton, huggingface_restapi, replicate, aleph_alpha, @@ -53,6 +58,7 @@ from .llms import ( ollama, ollama_chat, cloudflare, + clarifai, cohere, cohere_chat, petals, @@ -63,16 +69,22 @@ from .llms import ( vertex_ai, vertex_ai_anthropic, maritalk, + watsonx, ) from .llms.openai import OpenAIChatCompletion, OpenAITextCompletion from .llms.azure import AzureChatCompletion from .llms.azure_text import AzureTextCompletion from .llms.anthropic import AnthropicChatCompletion +from .llms.anthropic_text import AnthropicTextCompletion from .llms.huggingface_restapi import Huggingface +from .llms.predibase import PredibaseChatCompletion +from .llms.bedrock_httpx import BedrockLLM +from .llms.triton import TritonChatCompletion from .llms.prompt_templates.factory import ( prompt_factory, custom_prompt, function_call_prompt, + map_system_message_pt, ) import tiktoken from concurrent.futures import ThreadPoolExecutor @@ -96,13 +108,16 @@ from litellm.utils import ( ) ####### ENVIRONMENT VARIABLES ################### -dotenv.load_dotenv() # Loading env variables using dotenv openai_chat_completions = OpenAIChatCompletion() openai_text_completions = OpenAITextCompletion() anthropic_chat_completions = AnthropicChatCompletion() +anthropic_text_completions = AnthropicTextCompletion() azure_chat_completions = AzureChatCompletion() 
azure_text_completions = AzureTextCompletion() huggingface = Huggingface() +predibase_chat_completions = PredibaseChatCompletion() +triton_chat_completions = TritonChatCompletion() +bedrock_chat_completion = BedrockLLM() ####### COMPLETION ENDPOINTS ################ @@ -181,6 +196,7 @@ async def acompletion( top_p: Optional[float] = None, n: Optional[int] = None, stream: Optional[bool] = None, + stream_options: Optional[dict] = None, stop=None, max_tokens: Optional[int] = None, presence_penalty: Optional[float] = None, @@ -200,6 +216,7 @@ async def acompletion( api_version: Optional[str] = None, api_key: Optional[str] = None, model_list: Optional[list] = None, # pass in a list of api_base,keys, etc. + extra_headers: Optional[dict] = None, # Optional liteLLM function params **kwargs, ): @@ -217,6 +234,7 @@ async def acompletion( top_p (float, optional): The top-p parameter for nucleus sampling (default is 1.0). n (int, optional): The number of completions to generate (default is 1). stream (bool, optional): If True, return a streaming response (default is False). + stream_options (dict, optional): A dictionary containing options for the streaming response. Only use this if stream is True. stop(string/list, optional): - Up to 4 sequences where the LLM API will stop generating further tokens. max_tokens (integer, optional): The maximum number of tokens in the generated completion (default is infinity). presence_penalty (float, optional): It is used to penalize new tokens based on their existence in the text so far. @@ -242,7 +260,7 @@ async def acompletion( - If `stream` is True, the function returns an async generator that yields completion lines. 
""" loop = asyncio.get_event_loop() - custom_llm_provider = None + custom_llm_provider = kwargs.get("custom_llm_provider", None) # Adjusted to use explicit arguments instead of *args and **kwargs completion_kwargs = { "model": model, @@ -254,6 +272,7 @@ async def acompletion( "top_p": top_p, "n": n, "stream": stream, + "stream_options": stream_options, "stop": stop, "max_tokens": max_tokens, "presence_penalty": presence_penalty, @@ -273,9 +292,10 @@ async def acompletion( "model_list": model_list, "acompletion": True, # assuming this is a required parameter } - _, custom_llm_provider, _, _ = get_llm_provider( - model=model, api_base=completion_kwargs.get("base_url", None) - ) + if custom_llm_provider is None: + _, custom_llm_provider, _, _ = get_llm_provider( + model=model, api_base=completion_kwargs.get("base_url", None) + ) try: # Use a partial function to pass your keyword arguments func = partial(completion, **completion_kwargs, **kwargs) @@ -284,9 +304,6 @@ async def acompletion( ctx = contextvars.copy_context() func_with_context = partial(ctx.run, func) - _, custom_llm_provider, _, _ = get_llm_provider( - model=model, api_base=kwargs.get("api_base", None) - ) if ( custom_llm_provider == "openai" or custom_llm_provider == "azure" @@ -298,14 +315,18 @@ async def acompletion( or custom_llm_provider == "deepinfra" or custom_llm_provider == "perplexity" or custom_llm_provider == "groq" + or custom_llm_provider == "deepseek" or custom_llm_provider == "text-completion-openai" or custom_llm_provider == "huggingface" or custom_llm_provider == "ollama" or custom_llm_provider == "ollama_chat" + or custom_llm_provider == "replicate" or custom_llm_provider == "vertex_ai" or custom_llm_provider == "gemini" or custom_llm_provider == "sagemaker" or custom_llm_provider == "anthropic" + or custom_llm_provider == "predibase" + or (custom_llm_provider == "bedrock" and "cohere" in model) or custom_llm_provider in litellm.openai_compatible_providers ): # currently implemented 
aiohttp calls for just azure, openai, hf, ollama, vertex ai soon all. init_response = await loop.run_in_executor(None, func_with_context) @@ -340,6 +361,7 @@ async def acompletion( custom_llm_provider=custom_llm_provider, original_exception=e, completion_kwargs=completion_kwargs, + extra_kwargs=kwargs, ) @@ -357,7 +379,7 @@ def mock_completion( model: str, messages: List, stream: Optional[bool] = False, - mock_response: str = "This is a mock request", + mock_response: Union[str, Exception] = "This is a mock request", logging=None, **kwargs, ): @@ -384,6 +406,20 @@ def mock_completion( - If 'stream' is True, it returns a response that mimics the behavior of a streaming completion. """ try: + ## LOGGING + if logging is not None: + logging.pre_call( + input=messages, + api_key="mock-key", + ) + if isinstance(mock_response, Exception): + raise litellm.APIError( + status_code=500, # type: ignore + message=str(mock_response), + llm_provider="openai", # type: ignore + model=model, # type: ignore + request=httpx.Request(method="POST", url="https://api.openai.com/v1/"), + ) model_response = ModelResponse(stream=stream) if stream is True: # don't try to access stream object, @@ -405,8 +441,10 @@ def mock_completion( model_response["created"] = int(time.time()) model_response["model"] = model - model_response.usage = Usage( - prompt_tokens=10, completion_tokens=20, total_tokens=30 + setattr( + model_response, + "usage", + Usage(prompt_tokens=10, completion_tokens=20, total_tokens=30), ) try: @@ -428,11 +466,12 @@ def completion( model: str, # Optional OpenAI params: see https://platform.openai.com/docs/api-reference/chat/create messages: List = [], - timeout: Optional[Union[float, int]] = None, + timeout: Optional[Union[float, str, httpx.Timeout]] = None, temperature: Optional[float] = None, top_p: Optional[float] = None, n: Optional[int] = None, stream: Optional[bool] = None, + stream_options: Optional[dict] = None, stop=None, max_tokens: Optional[int] = None, 
presence_penalty: Optional[float] = None, @@ -472,6 +511,7 @@ def completion( top_p (float, optional): The top-p parameter for nucleus sampling (default is 1.0). n (int, optional): The number of completions to generate (default is 1). stream (bool, optional): If True, return a streaming response (default is False). + stream_options (dict, optional): A dictionary containing options for the streaming response. Only set this when you set stream: true. stop(string/list, optional): - Up to 4 sequences where the LLM API will stop generating further tokens. max_tokens (integer, optional): The maximum number of tokens in the generated completion (default is infinity). presence_penalty (float, optional): It is used to penalize new tokens based on their existence in the text so far. @@ -513,7 +553,7 @@ def completion( model_info = kwargs.get("model_info", None) proxy_server_request = kwargs.get("proxy_server_request", None) fallbacks = kwargs.get("fallbacks", None) - headers = kwargs.get("headers", None) + headers = kwargs.get("headers", None) or extra_headers num_retries = kwargs.get("num_retries", None) ## deprecated max_retries = kwargs.get("max_retries", None) context_window_fallback_dict = kwargs.get("context_window_fallback_dict", None) @@ -531,6 +571,7 @@ def completion( eos_token = kwargs.get("eos_token", None) preset_cache_key = kwargs.get("preset_cache_key", None) hf_model_name = kwargs.get("hf_model_name", None) + supports_system_message = kwargs.get("supports_system_message", None) ### TEXT COMPLETION CALLS ### text_completion = kwargs.get("text_completion", False) atext_completion = kwargs.get("atext_completion", False) @@ -548,6 +589,7 @@ def completion( "top_p", "n", "stream", + "stream_options", "stop", "max_tokens", "presence_penalty", @@ -596,6 +638,7 @@ def completion( "model_list", "num_retries", "context_window_fallback_dict", + "retry_policy", "roles", "final_prompt_value", "bos_token", @@ -606,6 +649,7 @@ def completion( "client", "rpm", "tpm", + 
"max_parallel_requests", "input_cost_per_token", "output_cost_per_token", "input_cost_per_second", @@ -620,16 +664,17 @@ def completion( "no-log", "base_model", "stream_timeout", + "supports_system_message", + "region_name", + "allowed_model_region", + "model_config", ] + default_params = openai_params + litellm_params non_default_params = { k: v for k, v in kwargs.items() if k not in default_params } # model-specific params - pass them straight to the model/provider - if timeout is None: - timeout = ( - kwargs.get("request_timeout", None) or 600 - ) # set timeout for 10 minutes by default - timeout = float(timeout) + try: if base_url is not None: api_base = base_url @@ -649,6 +694,7 @@ def completion( model ] # update the model to the actual value if an alias has been passed in model_response = ModelResponse() + setattr(model_response, "usage", litellm.Usage()) if ( kwargs.get("azure", False) == True ): # don't remove flag check, to remain backwards compatible for repos like Codium @@ -668,9 +714,18 @@ def completion( "aws_region_name", None ) # support region-based pricing for bedrock + ### TIMEOUT LOGIC ### + timeout = timeout or kwargs.get("request_timeout", 600) or 600 + # set timeout for 10 minutes by default + if isinstance(timeout, httpx.Timeout) and not supports_httpx_timeout( + custom_llm_provider + ): + timeout = timeout.read or 600 # default 10 min timeout + elif not isinstance(timeout, httpx.Timeout): + timeout = float(timeout) # type: ignore + ### REGISTER CUSTOM MODEL PRICING -- IF GIVEN ### if input_cost_per_token is not None and output_cost_per_token is not None: - print_verbose(f"Registering model={model} in model cost map") litellm.register_model( { f"{custom_llm_provider}/{model}": { @@ -723,6 +778,13 @@ def completion( custom_prompt_dict[model]["bos_token"] = bos_token if eos_token: custom_prompt_dict[model]["eos_token"] = eos_token + + if ( + supports_system_message is not None + and isinstance(supports_system_message, bool) + and 
supports_system_message == False + ): + messages = map_system_message_pt(messages=messages) model_api_key = get_api_key( llm_provider=custom_llm_provider, dynamic_api_key=api_key ) # get the api key from the environment if required for the model @@ -737,6 +799,7 @@ def completion( top_p=top_p, n=n, stream=stream, + stream_options=stream_options, stop=stop, max_tokens=max_tokens, presence_penalty=presence_penalty, @@ -784,6 +847,10 @@ def completion( proxy_server_request=proxy_server_request, preset_cache_key=preset_cache_key, no_log=no_log, + input_cost_per_second=input_cost_per_second, + input_cost_per_token=input_cost_per_token, + output_cost_per_second=output_cost_per_second, + output_cost_per_token=output_cost_per_token, ) logging.update_environment_variables( model=model, @@ -849,7 +916,7 @@ def completion( logger_fn=logger_fn, logging_obj=logging, acompletion=acompletion, - timeout=timeout, + timeout=timeout, # type: ignore client=client, # pass AsyncAzureOpenAI, AzureOpenAI client ) @@ -936,6 +1003,7 @@ def completion( or custom_llm_provider == "deepinfra" or custom_llm_provider == "perplexity" or custom_llm_provider == "groq" + or custom_llm_provider == "deepseek" or custom_llm_provider == "anyscale" or custom_llm_provider == "mistral" or custom_llm_provider == "openai" @@ -990,7 +1058,7 @@ def completion( optional_params=optional_params, litellm_params=litellm_params, logger_fn=logger_fn, - timeout=timeout, + timeout=timeout, # type: ignore custom_prompt_dict=custom_prompt_dict, client=client, # pass AsyncOpenAI, OpenAI client organization=organization, @@ -1075,7 +1143,7 @@ def completion( optional_params=optional_params, litellm_params=litellm_params, logger_fn=logger_fn, - timeout=timeout, + timeout=timeout, # type: ignore ) if ( @@ -1121,7 +1189,7 @@ def completion( custom_prompt_dict = custom_prompt_dict or litellm.custom_prompt_dict - model_response = replicate.completion( + model_response = replicate.completion( # type: ignore model=model, 
messages=messages, api_base=api_base, @@ -1134,12 +1202,10 @@ def completion( api_key=replicate_key, logging_obj=logging, custom_prompt_dict=custom_prompt_dict, + acompletion=acompletion, ) - if "stream" in optional_params and optional_params["stream"] == True: - # don't try to access stream object, - model_response = CustomStreamWrapper(model_response, model, logging_obj=logging, custom_llm_provider="replicate") # type: ignore - if optional_params.get("stream", False) or acompletion == True: + if optional_params.get("stream", False) == True: ## LOGGING logging.post_call( input=messages, @@ -1148,6 +1214,61 @@ def completion( ) response = model_response + elif ( + "clarifai" in model + or custom_llm_provider == "clarifai" + or model in litellm.clarifai_models + ): + clarifai_key = None + clarifai_key = ( + api_key + or litellm.clarifai_key + or litellm.api_key + or get_secret("CLARIFAI_API_KEY") + or get_secret("CLARIFAI_API_TOKEN") + ) + + api_base = ( + api_base + or litellm.api_base + or get_secret("CLARIFAI_API_BASE") + or "https://api.clarifai.com/v2" + ) + + custom_prompt_dict = custom_prompt_dict or litellm.custom_prompt_dict + model_response = clarifai.completion( + model=model, + messages=messages, + api_base=api_base, + model_response=model_response, + print_verbose=print_verbose, + optional_params=optional_params, + litellm_params=litellm_params, + acompletion=acompletion, + logger_fn=logger_fn, + encoding=encoding, # for calculating input/output tokens + api_key=clarifai_key, + logging_obj=logging, + custom_prompt_dict=custom_prompt_dict, + ) + + if "stream" in optional_params and optional_params["stream"] == True: + # don't try to access stream object, + ## LOGGING + logging.post_call( + input=messages, + api_key=api_key, + original_response=model_response, + ) + + if optional_params.get("stream", False) or acompletion == True: + ## LOGGING + logging.post_call( + input=messages, + api_key=clarifai_key, + original_response=model_response, + ) + response 
= model_response elif custom_llm_provider == "anthropic": api_key = ( @@ -1166,10 +1287,11 @@ def completion( or get_secret("ANTHROPIC_API_BASE") or "https://api.anthropic.com/v1/complete" ) - response = anthropic_text.completion( + response = anthropic_text_completions.completion( model=model, messages=messages, api_base=api_base, + acompletion=acompletion, custom_prompt_dict=litellm.custom_prompt_dict, model_response=model_response, print_verbose=print_verbose, @@ -1448,7 +1570,7 @@ def completion( acompletion=acompletion, logging_obj=logging, custom_prompt_dict=custom_prompt_dict, - timeout=timeout, + timeout=timeout, # type: ignore ) if ( "stream" in optional_params @@ -1541,7 +1663,7 @@ def completion( logger_fn=logger_fn, logging_obj=logging, acompletion=acompletion, - timeout=timeout, + timeout=timeout, # type: ignore ) ## LOGGING logging.post_call( @@ -1674,19 +1796,25 @@ def completion( or litellm.vertex_location or get_secret("VERTEXAI_LOCATION") ) - + vertex_credentials = ( + optional_params.pop("vertex_credentials", None) + or optional_params.pop("vertex_ai_credentials", None) + or get_secret("VERTEXAI_CREDENTIALS") + ) + new_params = deepcopy(optional_params) if "claude-3" in model: model_response = vertex_ai_anthropic.completion( model=model, messages=messages, model_response=model_response, print_verbose=print_verbose, - optional_params=optional_params, + optional_params=new_params, litellm_params=litellm_params, logger_fn=logger_fn, encoding=encoding, vertex_location=vertex_ai_location, vertex_project=vertex_ai_project, + vertex_credentials=vertex_credentials, logging_obj=logging, acompletion=acompletion, ) @@ -1696,12 +1824,13 @@ def completion( messages=messages, model_response=model_response, print_verbose=print_verbose, - optional_params=optional_params, + optional_params=new_params, litellm_params=litellm_params, logger_fn=logger_fn, encoding=encoding, vertex_location=vertex_ai_location, vertex_project=vertex_ai_project, + 
vertex_credentials=vertex_credentials, logging_obj=logging, acompletion=acompletion, ) @@ -1719,6 +1848,52 @@ def completion( ) return response response = model_response + elif custom_llm_provider == "predibase": + tenant_id = ( + optional_params.pop("tenant_id", None) + or optional_params.pop("predibase_tenant_id", None) + or litellm.predibase_tenant_id + or get_secret("PREDIBASE_TENANT_ID") + ) + + api_base = ( + optional_params.pop("api_base", None) + or optional_params.pop("base_url", None) + or litellm.api_base + or get_secret("PREDIBASE_API_BASE") + ) + + api_key = ( + api_key + or litellm.api_key + or litellm.predibase_key + or get_secret("PREDIBASE_API_KEY") + ) + + _model_response = predibase_chat_completions.completion( + model=model, + messages=messages, + model_response=model_response, + print_verbose=print_verbose, + optional_params=optional_params, + litellm_params=litellm_params, + logger_fn=logger_fn, + encoding=encoding, + logging_obj=logging, + acompletion=acompletion, + api_base=api_base, + custom_prompt_dict=custom_prompt_dict, + api_key=api_key, + tenant_id=tenant_id, + ) + + if ( + "stream" in optional_params + and optional_params["stream"] == True + and acompletion == False + ): + return _model_response + response = _model_response elif custom_llm_provider == "ai21": custom_llm_provider = "ai21" ai21_key = ( @@ -1803,40 +1978,59 @@ def completion( elif custom_llm_provider == "bedrock": # boto3 reads keys from .env custom_prompt_dict = custom_prompt_dict or litellm.custom_prompt_dict - response = bedrock.completion( - model=model, - messages=messages, - custom_prompt_dict=litellm.custom_prompt_dict, - model_response=model_response, - print_verbose=print_verbose, - optional_params=optional_params, - litellm_params=litellm_params, - logger_fn=logger_fn, - encoding=encoding, - logging_obj=logging, - timeout=timeout, - ) - if ( - "stream" in optional_params - and optional_params["stream"] == True - and not isinstance(response, CustomStreamWrapper) 
- ): - # don't try to access stream object, - if "ai21" in model: - response = CustomStreamWrapper( - response, - model, - custom_llm_provider="bedrock", - logging_obj=logging, - ) - else: - response = CustomStreamWrapper( - iter(response), - model, - custom_llm_provider="bedrock", - logging_obj=logging, - ) + if "cohere" in model: + response = bedrock_chat_completion.completion( + model=model, + messages=messages, + custom_prompt_dict=litellm.custom_prompt_dict, + model_response=model_response, + print_verbose=print_verbose, + optional_params=optional_params, + litellm_params=litellm_params, + logger_fn=logger_fn, + encoding=encoding, + logging_obj=logging, + extra_headers=extra_headers, + timeout=timeout, + acompletion=acompletion, + ) + else: + response = bedrock.completion( + model=model, + messages=messages, + custom_prompt_dict=litellm.custom_prompt_dict, + model_response=model_response, + print_verbose=print_verbose, + optional_params=optional_params, + litellm_params=litellm_params, + logger_fn=logger_fn, + encoding=encoding, + logging_obj=logging, + extra_headers=extra_headers, + timeout=timeout, + ) + + if ( + "stream" in optional_params + and optional_params["stream"] == True + and not isinstance(response, CustomStreamWrapper) + ): + # don't try to access stream object, + if "ai21" in model: + response = CustomStreamWrapper( + response, + model, + custom_llm_provider="bedrock", + logging_obj=logging, + ) + else: + response = CustomStreamWrapper( + iter(response), + model, + custom_llm_provider="bedrock", + logging_obj=logging, + ) if optional_params.get("stream", False): ## LOGGING @@ -1848,6 +2042,43 @@ def completion( ## RESPONSE OBJECT response = response + elif custom_llm_provider == "watsonx": + custom_prompt_dict = custom_prompt_dict or litellm.custom_prompt_dict + response = watsonx.IBMWatsonXAI().completion( + model=model, + messages=messages, + custom_prompt_dict=custom_prompt_dict, + model_response=model_response, + print_verbose=print_verbose, 
+ optional_params=optional_params, + litellm_params=litellm_params, # type: ignore + logger_fn=logger_fn, + encoding=encoding, + logging_obj=logging, + timeout=timeout, # type: ignore + ) + if ( + "stream" in optional_params + and optional_params["stream"] == True + and not isinstance(response, CustomStreamWrapper) + ): + # don't try to access stream object, + response = CustomStreamWrapper( + iter(response), + model, + custom_llm_provider="watsonx", + logging_obj=logging, + ) + + if optional_params.get("stream", False): + ## LOGGING + logging.post_call( + input=messages, + api_key=None, + original_response=response, + ) + ## RESPONSE OBJECT + response = response elif custom_llm_provider == "vllm": custom_prompt_dict = custom_prompt_dict or litellm.custom_prompt_dict model_response = vllm.completion( @@ -1931,9 +2162,16 @@ def completion( or "http://localhost:11434" ) + api_key = ( + api_key + or litellm.ollama_key + or os.environ.get("OLLAMA_API_KEY") + or litellm.api_key + ) ## LOGGING generator = ollama_chat.get_ollama_response( api_base, + api_key, model, messages, optional_params, @@ -2069,7 +2307,7 @@ def completion( """ assume input to custom LLM api bases follow this format: resp = requests.post( - api_base, + api_base, json={ 'model': 'meta-llama/Llama-2-13b-hf', # model name 'params': { @@ -2129,6 +2367,7 @@ def completion( custom_llm_provider=custom_llm_provider, original_exception=e, completion_kwargs=args, + extra_kwargs=kwargs, ) @@ -2197,7 +2436,7 @@ def batch_completion( n: Optional[int] = None, stream: Optional[bool] = None, stop=None, - max_tokens: Optional[float] = None, + max_tokens: Optional[int] = None, presence_penalty: Optional[float] = None, frequency_penalty: Optional[float] = None, logit_bias: Optional[dict] = None, @@ -2460,11 +2699,13 @@ async def aembedding(*args, **kwargs): or custom_llm_provider == "voyage" or custom_llm_provider == "mistral" or custom_llm_provider == "custom_openai" + or custom_llm_provider == "triton" or 
custom_llm_provider == "anyscale" or custom_llm_provider == "openrouter" or custom_llm_provider == "deepinfra" or custom_llm_provider == "perplexity" or custom_llm_provider == "groq" + or custom_llm_provider == "deepseek" or custom_llm_provider == "fireworks_ai" or custom_llm_provider == "ollama" or custom_llm_provider == "vertex_ai" @@ -2490,6 +2731,7 @@ async def aembedding(*args, **kwargs): custom_llm_provider=custom_llm_provider, original_exception=e, completion_kwargs=args, + extra_kwargs=kwargs, ) @@ -2541,6 +2783,7 @@ def embedding( client = kwargs.pop("client", None) rpm = kwargs.pop("rpm", None) tpm = kwargs.pop("tpm", None) + max_parallel_requests = kwargs.pop("max_parallel_requests", None) model_info = kwargs.get("model_info", None) metadata = kwargs.get("metadata", None) encoding_format = kwargs.get("encoding_format", None) @@ -2588,6 +2831,7 @@ def embedding( "model_list", "num_retries", "context_window_fallback_dict", + "retry_policy", "roles", "final_prompt_value", "bos_token", @@ -2598,6 +2842,7 @@ def embedding( "client", "rpm", "tpm", + "max_parallel_requests", "input_cost_per_token", "output_cost_per_token", "input_cost_per_second", @@ -2610,6 +2855,9 @@ def embedding( "ttl", "cache", "no-log", + "region_name", + "allowed_model_region", + "model_config", ] default_params = openai_params + litellm_params non_default_params = { @@ -2786,18 +3034,43 @@ def embedding( optional_params=optional_params, model_response=EmbeddingResponse(), ) + elif custom_llm_provider == "triton": + if api_base is None: + raise ValueError( + "api_base is required for triton. 
Please pass `api_base`" + ) + response = triton_chat_completions.embedding( + model=model, + input=input, + api_base=api_base, + api_key=api_key, + logging_obj=logging, + timeout=timeout, + model_response=EmbeddingResponse(), + optional_params=optional_params, + client=client, + aembedding=aembedding, + ) elif custom_llm_provider == "vertex_ai": vertex_ai_project = ( optional_params.pop("vertex_project", None) or optional_params.pop("vertex_ai_project", None) or litellm.vertex_project or get_secret("VERTEXAI_PROJECT") + or get_secret("VERTEX_PROJECT") ) vertex_ai_location = ( optional_params.pop("vertex_location", None) or optional_params.pop("vertex_ai_location", None) or litellm.vertex_location or get_secret("VERTEXAI_LOCATION") + or get_secret("VERTEX_LOCATION") + ) + vertex_credentials = ( + optional_params.pop("vertex_credentials", None) + or optional_params.pop("vertex_ai_credentials", None) + or get_secret("VERTEXAI_CREDENTIALS") + or get_secret("VERTEX_CREDENTIALS") ) response = vertex_ai.embedding( @@ -2809,6 +3082,7 @@ def embedding( model_response=EmbeddingResponse(), vertex_project=vertex_ai_project, vertex_location=vertex_ai_location, + vertex_credentials=vertex_credentials, aembedding=aembedding, print_verbose=print_verbose, ) @@ -2837,16 +3111,18 @@ def embedding( model=model, # type: ignore llm_provider="ollama", # type: ignore ) - if aembedding: - response = ollama.ollama_aembeddings( - api_base=api_base, - model=model, - prompts=input, - encoding=encoding, - logging_obj=logging, - optional_params=optional_params, - model_response=EmbeddingResponse(), - ) + ollama_embeddings_fn = ( + ollama.ollama_aembeddings if aembedding else ollama.ollama_embeddings + ) + response = ollama_embeddings_fn( + api_base=api_base, + model=model, + prompts=input, + encoding=encoding, + logging_obj=logging, + optional_params=optional_params, + model_response=EmbeddingResponse(), + ) elif custom_llm_provider == "sagemaker": response = sagemaker.embedding( model=model, @@ 
-2910,6 +3186,15 @@ def embedding( client=client, aembedding=aembedding, ) + elif custom_llm_provider == "watsonx": + response = watsonx.IBMWatsonXAI().embedding( + model=model, + input=input, + encoding=encoding, + logging_obj=logging, + optional_params=optional_params, + model_response=EmbeddingResponse(), + ) else: args = locals() raise ValueError(f"No valid embedding model args passed in - {args}") @@ -2925,7 +3210,10 @@ def embedding( ) ## Map to OpenAI Exception raise exception_type( - model=model, original_exception=e, custom_llm_provider=custom_llm_provider + model=model, + original_exception=e, + custom_llm_provider=custom_llm_provider, + extra_kwargs=kwargs, ) @@ -2963,11 +3251,13 @@ async def atext_completion(*args, **kwargs): or custom_llm_provider == "deepinfra" or custom_llm_provider == "perplexity" or custom_llm_provider == "groq" + or custom_llm_provider == "deepseek" or custom_llm_provider == "fireworks_ai" or custom_llm_provider == "text-completion-openai" or custom_llm_provider == "huggingface" or custom_llm_provider == "ollama" or custom_llm_provider == "vertex_ai" + or custom_llm_provider in litellm.openai_compatible_providers ): # currently implemented aiohttp calls for just azure and openai, soon all. # Await normally response = await loop.run_in_executor(None, func_with_context) @@ -2998,6 +3288,8 @@ async def atext_completion(*args, **kwargs): ## TRANSLATE CHAT TO TEXT FORMAT ## if isinstance(response, TextCompletionResponse): return response + elif asyncio.iscoroutine(response): + response = await response text_completion_response = TextCompletionResponse() text_completion_response["id"] = response.get("id", None) @@ -3019,6 +3311,7 @@ async def atext_completion(*args, **kwargs): custom_llm_provider=custom_llm_provider, original_exception=e, completion_kwargs=args, + extra_kwargs=kwargs, ) @@ -3056,6 +3349,7 @@ def text_completion( Union[str, List[str]] ] = None, # Optional: Sequences where the API will stop generating further tokens. 
stream: Optional[bool] = None, # Optional: Whether to stream back partial progress. + stream_options: Optional[dict] = None, suffix: Optional[ str ] = None, # Optional: The suffix that comes after a completion of inserted text. @@ -3133,6 +3427,8 @@ def text_completion( optional_params["stop"] = stop if stream is not None: optional_params["stream"] = stream + if stream_options is not None: + optional_params["stream_options"] = stream_options if suffix is not None: optional_params["suffix"] = suffix if temperature is not None: @@ -3243,7 +3539,9 @@ def text_completion( if kwargs.get("acompletion", False) == True: return response if stream == True or kwargs.get("stream", False) == True: - response = TextCompletionStreamWrapper(completion_stream=response, model=model) + response = TextCompletionStreamWrapper( + completion_stream=response, model=model, stream_options=stream_options + ) return response transformed_logprobs = None # only supported for TGI models @@ -3356,6 +3654,7 @@ async def aimage_generation(*args, **kwargs): custom_llm_provider=custom_llm_provider, original_exception=e, completion_kwargs=args, + extra_kwargs=kwargs, ) @@ -3436,6 +3735,7 @@ def image_generation( "model_list", "num_retries", "context_window_fallback_dict", + "retry_policy", "roles", "final_prompt_value", "bos_token", @@ -3446,6 +3746,7 @@ def image_generation( "client", "rpm", "tpm", + "max_parallel_requests", "input_cost_per_token", "output_cost_per_token", "hf_model_name", @@ -3455,6 +3756,9 @@ def image_generation( "caching_groups", "ttl", "cache", + "region_name", + "allowed_model_region", + "model_config", ] default_params = openai_params + litellm_params non_default_params = { @@ -3555,6 +3859,7 @@ def image_generation( custom_llm_provider=custom_llm_provider, original_exception=e, completion_kwargs=locals(), + extra_kwargs=kwargs, ) @@ -3604,6 +3909,7 @@ async def atranscription(*args, **kwargs): custom_llm_provider=custom_llm_provider, original_exception=e, 
completion_kwargs=args, + extra_kwargs=kwargs, ) @@ -3784,6 +4090,9 @@ async def ahealth_check( api_base = model_params.get("api_base") or get_secret("OPENAI_API_BASE") + if custom_llm_provider == "text-completion-openai": + mode = "completion" + response = await openai_chat_completions.ahealth_check( model=model, messages=model_params.get( diff --git a/litellm/model_prices_and_context_window_backup.json b/litellm/model_prices_and_context_window_backup.json index f063e8f81..f3db33c60 100644 --- a/litellm/model_prices_and_context_window_backup.json +++ b/litellm/model_prices_and_context_window_backup.json @@ -9,10 +9,34 @@ "mode": "chat", "supports_function_calling": true }, + "gpt-4o": { + "max_tokens": 4096, + "max_input_tokens": 128000, + "max_output_tokens": 4096, + "input_cost_per_token": 0.000005, + "output_cost_per_token": 0.000015, + "litellm_provider": "openai", + "mode": "chat", + "supports_function_calling": true, + "supports_parallel_function_calling": true, + "supports_vision": true + }, + "gpt-4o-2024-05-13": { + "max_tokens": 4096, + "max_input_tokens": 128000, + "max_output_tokens": 4096, + "input_cost_per_token": 0.000005, + "output_cost_per_token": 0.000015, + "litellm_provider": "openai", + "mode": "chat", + "supports_function_calling": true, + "supports_parallel_function_calling": true, + "supports_vision": true + }, "gpt-4-turbo-preview": { - "max_tokens": 4096, - "max_input_tokens": 8192, - "max_output_tokens": 4096, + "max_tokens": 4096, + "max_input_tokens": 128000, + "max_output_tokens": 4096, "input_cost_per_token": 0.00001, "output_cost_per_token": 0.00003, "litellm_provider": "openai", @@ -75,7 +99,8 @@ "litellm_provider": "openai", "mode": "chat", "supports_function_calling": true, - "supports_parallel_function_calling": true + "supports_parallel_function_calling": true, + "supports_vision": true }, "gpt-4-turbo-2024-04-09": { "max_tokens": 4096, @@ -86,7 +111,8 @@ "litellm_provider": "openai", "mode": "chat", 
"supports_function_calling": true, - "supports_parallel_function_calling": true + "supports_parallel_function_calling": true, + "supports_vision": true }, "gpt-4-1106-preview": { "max_tokens": 4096, @@ -117,7 +143,8 @@ "input_cost_per_token": 0.00001, "output_cost_per_token": 0.00003, "litellm_provider": "openai", - "mode": "chat" + "mode": "chat", + "supports_vision": true }, "gpt-4-1106-vision-preview": { "max_tokens": 4096, @@ -126,7 +153,8 @@ "input_cost_per_token": 0.00001, "output_cost_per_token": 0.00003, "litellm_provider": "openai", - "mode": "chat" + "mode": "chat", + "supports_vision": true }, "gpt-3.5-turbo": { "max_tokens": 4097, @@ -206,9 +234,28 @@ "litellm_provider": "openai", "mode": "chat" }, + "ft:davinci-002": { + "max_tokens": 16384, + "max_input_tokens": 16384, + "max_output_tokens": 4096, + "input_cost_per_token": 0.000002, + "output_cost_per_token": 0.000002, + "litellm_provider": "text-completion-openai", + "mode": "completion" + }, + "ft:babbage-002": { + "max_tokens": 16384, + "max_input_tokens": 16384, + "max_output_tokens": 4096, + "input_cost_per_token": 0.0000004, + "output_cost_per_token": 0.0000004, + "litellm_provider": "text-completion-openai", + "mode": "completion" + }, "text-embedding-3-large": { "max_tokens": 8191, "max_input_tokens": 8191, + "output_vector_size": 3072, "input_cost_per_token": 0.00000013, "output_cost_per_token": 0.000000, "litellm_provider": "openai", @@ -217,6 +264,7 @@ "text-embedding-3-small": { "max_tokens": 8191, "max_input_tokens": 8191, + "output_vector_size": 1536, "input_cost_per_token": 0.00000002, "output_cost_per_token": 0.000000, "litellm_provider": "openai", @@ -225,6 +273,7 @@ "text-embedding-ada-002": { "max_tokens": 8191, "max_input_tokens": 8191, + "output_vector_size": 1536, "input_cost_per_token": 0.0000001, "output_cost_per_token": 0.000000, "litellm_provider": "openai", @@ -331,6 +380,18 @@ "output_cost_per_second": 0.0001, "litellm_provider": "azure" }, + "azure/gpt-4-turbo-2024-04-09": 
{ + "max_tokens": 4096, + "max_input_tokens": 128000, + "max_output_tokens": 4096, + "input_cost_per_token": 0.00001, + "output_cost_per_token": 0.00003, + "litellm_provider": "azure", + "mode": "chat", + "supports_function_calling": true, + "supports_parallel_function_calling": true, + "supports_vision": true + }, "azure/gpt-4-0125-preview": { "max_tokens": 4096, "max_input_tokens": 128000, @@ -409,7 +470,8 @@ "input_cost_per_token": 0.00001, "output_cost_per_token": 0.00003, "litellm_provider": "azure", - "mode": "chat" + "mode": "chat", + "supports_vision": true }, "azure/gpt-35-turbo-16k-0613": { "max_tokens": 4096, @@ -642,6 +704,7 @@ "input_cost_per_token": 0.000002, "output_cost_per_token": 0.000006, "litellm_provider": "mistral", + "supports_function_calling": true, "mode": "chat" }, "mistral/mistral-small-latest": { @@ -651,6 +714,7 @@ "input_cost_per_token": 0.000002, "output_cost_per_token": 0.000006, "litellm_provider": "mistral", + "supports_function_calling": true, "mode": "chat" }, "mistral/mistral-medium": { @@ -700,6 +764,16 @@ "mode": "chat", "supports_function_calling": true }, + "mistral/open-mixtral-8x7b": { + "max_tokens": 8191, + "max_input_tokens": 32000, + "max_output_tokens": 8191, + "input_cost_per_token": 0.000002, + "output_cost_per_token": 0.000006, + "litellm_provider": "mistral", + "mode": "chat", + "supports_function_calling": true + }, "mistral/mistral-embed": { "max_tokens": 8192, "max_input_tokens": 8192, @@ -707,6 +781,24 @@ "litellm_provider": "mistral", "mode": "embedding" }, + "deepseek-chat": { + "max_tokens": 4096, + "max_input_tokens": 32000, + "max_output_tokens": 4096, + "input_cost_per_token": 0.00000014, + "output_cost_per_token": 0.00000028, + "litellm_provider": "deepseek", + "mode": "chat" + }, + "deepseek-coder": { + "max_tokens": 4096, + "max_input_tokens": 16000, + "max_output_tokens": 4096, + "input_cost_per_token": 0.00000014, + "output_cost_per_token": 0.00000028, + "litellm_provider": "deepseek", + "mode": 
"chat" + }, "groq/llama2-70b-4096": { "max_tokens": 4096, "max_input_tokens": 4096, @@ -714,7 +806,28 @@ "input_cost_per_token": 0.00000070, "output_cost_per_token": 0.00000080, "litellm_provider": "groq", - "mode": "chat" + "mode": "chat", + "supports_function_calling": true + }, + "groq/llama3-8b-8192": { + "max_tokens": 8192, + "max_input_tokens": 8192, + "max_output_tokens": 8192, + "input_cost_per_token": 0.00000010, + "output_cost_per_token": 0.00000010, + "litellm_provider": "groq", + "mode": "chat", + "supports_function_calling": true + }, + "groq/llama3-70b-8192": { + "max_tokens": 8192, + "max_input_tokens": 8192, + "max_output_tokens": 8192, + "input_cost_per_token": 0.00000064, + "output_cost_per_token": 0.00000080, + "litellm_provider": "groq", + "mode": "chat", + "supports_function_calling": true }, "groq/mixtral-8x7b-32768": { "max_tokens": 32768, @@ -723,7 +836,8 @@ "input_cost_per_token": 0.00000027, "output_cost_per_token": 0.00000027, "litellm_provider": "groq", - "mode": "chat" + "mode": "chat", + "supports_function_calling": true }, "groq/gemma-7b-it": { "max_tokens": 8192, @@ -732,7 +846,8 @@ "input_cost_per_token": 0.00000010, "output_cost_per_token": 0.00000010, "litellm_provider": "groq", - "mode": "chat" + "mode": "chat", + "supports_function_calling": true }, "claude-instant-1.2": { "max_tokens": 8191, @@ -768,7 +883,10 @@ "input_cost_per_token": 0.00000025, "output_cost_per_token": 0.00000125, "litellm_provider": "anthropic", - "mode": "chat" + "mode": "chat", + "supports_function_calling": true, + "supports_vision": true, + "tool_use_system_prompt_tokens": 264 }, "claude-3-opus-20240229": { "max_tokens": 4096, @@ -777,7 +895,10 @@ "input_cost_per_token": 0.000015, "output_cost_per_token": 0.000075, "litellm_provider": "anthropic", - "mode": "chat" + "mode": "chat", + "supports_function_calling": true, + "supports_vision": true, + "tool_use_system_prompt_tokens": 395 }, "claude-3-sonnet-20240229": { "max_tokens": 4096, @@ -786,7 +907,10 
@@ "input_cost_per_token": 0.000003, "output_cost_per_token": 0.000015, "litellm_provider": "anthropic", - "mode": "chat" + "mode": "chat", + "supports_function_calling": true, + "supports_vision": true, + "tool_use_system_prompt_tokens": 159 }, "text-bison": { "max_tokens": 1024, @@ -996,33 +1120,66 @@ "max_tokens": 8192, "max_input_tokens": 1000000, "max_output_tokens": 8192, + "input_cost_per_token": 0.000000625, + "output_cost_per_token": 0.000001875, + "litellm_provider": "vertex_ai-language-models", + "mode": "chat", + "supports_function_calling": true, + "supports_tool_choice": true, + "source": "https://cloud.google.com/vertex-ai/generative-ai/docs/learn/models#foundation_models" + }, + "gemini-1.5-flash-preview-0514": { + "max_tokens": 8192, + "max_input_tokens": 1000000, + "max_output_tokens": 8192, + "max_images_per_prompt": 3000, + "max_videos_per_prompt": 10, + "max_video_length": 1, + "max_audio_length_hours": 8.4, + "max_audio_per_prompt": 1, + "max_pdf_size_mb": 30, "input_cost_per_token": 0, "output_cost_per_token": 0, "litellm_provider": "vertex_ai-language-models", "mode": "chat", "supports_function_calling": true, + "supports_vision": true, + "source": "https://cloud.google.com/vertex-ai/generative-ai/docs/learn/models#foundation_models" + }, + "gemini-1.5-pro-preview-0514": { + "max_tokens": 8192, + "max_input_tokens": 1000000, + "max_output_tokens": 8192, + "input_cost_per_token": 0.000000625, + "output_cost_per_token": 0.000001875, + "litellm_provider": "vertex_ai-language-models", + "mode": "chat", + "supports_function_calling": true, + "supports_tool_choice": true, "source": "https://cloud.google.com/vertex-ai/generative-ai/docs/learn/models#foundation_models" }, "gemini-1.5-pro-preview-0215": { "max_tokens": 8192, "max_input_tokens": 1000000, "max_output_tokens": 8192, - "input_cost_per_token": 0, - "output_cost_per_token": 0, + "input_cost_per_token": 0.000000625, + "output_cost_per_token": 0.000001875, "litellm_provider": 
"vertex_ai-language-models", "mode": "chat", "supports_function_calling": true, + "supports_tool_choice": true, "source": "https://cloud.google.com/vertex-ai/generative-ai/docs/learn/models#foundation_models" }, "gemini-1.5-pro-preview-0409": { "max_tokens": 8192, "max_input_tokens": 1000000, "max_output_tokens": 8192, - "input_cost_per_token": 0, - "output_cost_per_token": 0, + "input_cost_per_token": 0.000000625, + "output_cost_per_token": 0.000001875, "litellm_provider": "vertex_ai-language-models", "mode": "chat", "supports_function_calling": true, + "supports_tool_choice": true, "source": "https://cloud.google.com/vertex-ai/generative-ai/docs/learn/models#foundation_models" }, "gemini-experimental": { @@ -1034,6 +1191,7 @@ "litellm_provider": "vertex_ai-language-models", "mode": "chat", "supports_function_calling": false, + "supports_tool_choice": true, "source": "https://cloud.google.com/vertex-ai/generative-ai/docs/learn/models#foundation_models" }, "gemini-pro-vision": { @@ -1048,6 +1206,7 @@ "litellm_provider": "vertex_ai-vision-models", "mode": "chat", "supports_function_calling": true, + "supports_vision": true, "source": "https://cloud.google.com/vertex-ai/generative-ai/docs/learn/models#foundation_models" }, "gemini-1.0-pro-vision": { @@ -1062,6 +1221,7 @@ "litellm_provider": "vertex_ai-vision-models", "mode": "chat", "supports_function_calling": true, + "supports_vision": true, "source": "https://cloud.google.com/vertex-ai/generative-ai/docs/learn/models#foundation_models" }, "gemini-1.0-pro-vision-001": { @@ -1076,23 +1236,41 @@ "litellm_provider": "vertex_ai-vision-models", "mode": "chat", "supports_function_calling": true, + "supports_vision": true, "source": "https://cloud.google.com/vertex-ai/generative-ai/docs/learn/models#foundation_models" }, "vertex_ai/claude-3-sonnet@20240229": { - "max_tokens": 200000, + "max_tokens": 4096, + "max_input_tokens": 200000, "max_output_tokens": 4096, "input_cost_per_token": 0.000003, "output_cost_per_token": 
0.000015, "litellm_provider": "vertex_ai-anthropic_models", - "mode": "chat" + "mode": "chat", + "supports_function_calling": true, + "supports_vision": true }, "vertex_ai/claude-3-haiku@20240307": { - "max_tokens": 200000, + "max_tokens": 4096, + "max_input_tokens": 200000, "max_output_tokens": 4096, "input_cost_per_token": 0.00000025, "output_cost_per_token": 0.00000125, "litellm_provider": "vertex_ai-anthropic_models", - "mode": "chat" + "mode": "chat", + "supports_function_calling": true, + "supports_vision": true + }, + "vertex_ai/claude-3-opus@20240229": { + "max_tokens": 4096, + "max_input_tokens": 200000, + "max_output_tokens": 4096, + "input_cost_per_token": 0.000015, + "output_cost_per_token": 0.000075, + "litellm_provider": "vertex_ai-anthropic_models", + "mode": "chat", + "supports_function_calling": true, + "supports_vision": true }, "textembedding-gecko": { "max_tokens": 3072, @@ -1144,6 +1322,27 @@ "mode": "embedding", "source": "https://cloud.google.com/vertex-ai/generative-ai/docs/learn/models#foundation_models" }, + "text-embedding-preview-0409": { + "max_tokens": 3072, + "max_input_tokens": 3072, + "output_vector_size": 768, + "input_cost_per_token": 0.00000000625, + "input_cost_per_token_batch_requests": 0.000000005, + "output_cost_per_token": 0, + "litellm_provider": "vertex_ai-embedding-models", + "mode": "embedding", + "source": "https://cloud.google.com/vertex-ai/generative-ai/pricing" + }, + "text-multilingual-embedding-preview-0409":{ + "max_tokens": 3072, + "max_input_tokens": 3072, + "output_vector_size": 768, + "input_cost_per_token": 0.00000000625, + "output_cost_per_token": 0, + "litellm_provider": "vertex_ai-embedding-models", + "mode": "embedding", + "source": "https://cloud.google.com/vertex-ai/generative-ai/docs/learn/models#foundation_models" + }, "palm/chat-bison": { "max_tokens": 4096, "max_input_tokens": 8192, @@ -1204,6 +1403,24 @@ "mode": "completion", "source": 
"https://cloud.google.com/vertex-ai/generative-ai/docs/learn/models#foundation_models" }, + "gemini/gemini-1.5-flash-latest": { + "max_tokens": 8192, + "max_input_tokens": 1000000, + "max_output_tokens": 8192, + "max_images_per_prompt": 3000, + "max_videos_per_prompt": 10, + "max_video_length": 1, + "max_audio_length_hours": 8.4, + "max_audio_per_prompt": 1, + "max_pdf_size_mb": 30, + "input_cost_per_token": 0, + "output_cost_per_token": 0, + "litellm_provider": "gemini", + "mode": "chat", + "supports_function_calling": true, + "supports_vision": true, + "source": "https://cloud.google.com/vertex-ai/generative-ai/docs/learn/models#foundation_models" + }, "gemini/gemini-pro": { "max_tokens": 8192, "max_input_tokens": 32760, @@ -1224,8 +1441,23 @@ "litellm_provider": "gemini", "mode": "chat", "supports_function_calling": true, + "supports_vision": true, + "supports_tool_choice": true, "source": "https://cloud.google.com/vertex-ai/generative-ai/docs/learn/models#foundation_models" }, + "gemini/gemini-1.5-pro-latest": { + "max_tokens": 8192, + "max_input_tokens": 1048576, + "max_output_tokens": 8192, + "input_cost_per_token": 0, + "output_cost_per_token": 0, + "litellm_provider": "gemini", + "mode": "chat", + "supports_function_calling": true, + "supports_vision": true, + "supports_tool_choice": true, + "source": "https://ai.google.dev/models/gemini" + }, "gemini/gemini-pro-vision": { "max_tokens": 2048, "max_input_tokens": 30720, @@ -1235,6 +1467,7 @@ "litellm_provider": "gemini", "mode": "chat", "supports_function_calling": true, + "supports_vision": true, "source": "https://cloud.google.com/vertex-ai/generative-ai/docs/learn/models#foundation_models" }, "command-r": { @@ -1311,6 +1544,276 @@ "litellm_provider": "replicate", "mode": "chat" }, + "replicate/meta/llama-2-13b": { + "max_tokens": 4096, + "max_input_tokens": 4096, + "max_output_tokens": 4096, + "input_cost_per_token": 0.0000001, + "output_cost_per_token": 0.0000005, + "litellm_provider": 
"replicate", + "mode": "chat" + }, + "replicate/meta/llama-2-13b-chat": { + "max_tokens": 4096, + "max_input_tokens": 4096, + "max_output_tokens": 4096, + "input_cost_per_token": 0.0000001, + "output_cost_per_token": 0.0000005, + "litellm_provider": "replicate", + "mode": "chat" + }, + "replicate/meta/llama-2-70b": { + "max_tokens": 4096, + "max_input_tokens": 4096, + "max_output_tokens": 4096, + "input_cost_per_token": 0.00000065, + "output_cost_per_token": 0.00000275, + "litellm_provider": "replicate", + "mode": "chat" + }, + "replicate/meta/llama-2-70b-chat": { + "max_tokens": 4096, + "max_input_tokens": 4096, + "max_output_tokens": 4096, + "input_cost_per_token": 0.00000065, + "output_cost_per_token": 0.00000275, + "litellm_provider": "replicate", + "mode": "chat" + }, + "replicate/meta/llama-2-7b": { + "max_tokens": 4096, + "max_input_tokens": 4096, + "max_output_tokens": 4096, + "input_cost_per_token": 0.00000005, + "output_cost_per_token": 0.00000025, + "litellm_provider": "replicate", + "mode": "chat" + }, + "replicate/meta/llama-2-7b-chat": { + "max_tokens": 4096, + "max_input_tokens": 4096, + "max_output_tokens": 4096, + "input_cost_per_token": 0.00000005, + "output_cost_per_token": 0.00000025, + "litellm_provider": "replicate", + "mode": "chat" + }, + "replicate/meta/llama-3-70b": { + "max_tokens": 4096, + "max_input_tokens": 4096, + "max_output_tokens": 4096, + "input_cost_per_token": 0.00000065, + "output_cost_per_token": 0.00000275, + "litellm_provider": "replicate", + "mode": "chat" + }, + "replicate/meta/llama-3-70b-instruct": { + "max_tokens": 4096, + "max_input_tokens": 4096, + "max_output_tokens": 4096, + "input_cost_per_token": 0.00000065, + "output_cost_per_token": 0.00000275, + "litellm_provider": "replicate", + "mode": "chat" + }, + "replicate/meta/llama-3-8b": { + "max_tokens": 4096, + "max_input_tokens": 4096, + "max_output_tokens": 4096, + "input_cost_per_token": 0.00000005, + "output_cost_per_token": 0.00000025, + "litellm_provider": 
"replicate", + "mode": "chat" + }, + "replicate/meta/llama-3-8b-instruct": { + "max_tokens": 4096, + "max_input_tokens": 4096, + "max_output_tokens": 4096, + "input_cost_per_token": 0.00000005, + "output_cost_per_token": 0.00000025, + "litellm_provider": "replicate", + "mode": "chat" + }, + "replicate/mistralai/mistral-7b-v0.1": { + "max_tokens": 4096, + "max_input_tokens": 4096, + "max_output_tokens": 4096, + "input_cost_per_token": 0.00000005, + "output_cost_per_token": 0.00000025, + "litellm_provider": "replicate", + "mode": "chat" + }, + "replicate/mistralai/mistral-7b-instruct-v0.2": { + "max_tokens": 4096, + "max_input_tokens": 4096, + "max_output_tokens": 4096, + "input_cost_per_token": 0.00000005, + "output_cost_per_token": 0.00000025, + "litellm_provider": "replicate", + "mode": "chat" + }, + "replicate/mistralai/mixtral-8x7b-instruct-v0.1": { + "max_tokens": 4096, + "max_input_tokens": 4096, + "max_output_tokens": 4096, + "input_cost_per_token": 0.0000003, + "output_cost_per_token": 0.000001, + "litellm_provider": "replicate", + "mode": "chat" + }, + "openrouter/microsoft/wizardlm-2-8x22b:nitro": { + "max_tokens": 65536, + "input_cost_per_token": 0.000001, + "output_cost_per_token": 0.000001, + "litellm_provider": "openrouter", + "mode": "chat" + }, + "openrouter/google/gemini-pro-1.5": { + "max_tokens": 8192, + "max_input_tokens": 1000000, + "max_output_tokens": 8192, + "input_cost_per_token": 0.0000025, + "output_cost_per_token": 0.0000075, + "input_cost_per_image": 0.00265, + "litellm_provider": "openrouter", + "mode": "chat", + "supports_function_calling": true, + "supports_vision": true + }, + "openrouter/mistralai/mixtral-8x22b-instruct": { + "max_tokens": 65536, + "input_cost_per_token": 0.00000065, + "output_cost_per_token": 0.00000065, + "litellm_provider": "openrouter", + "mode": "chat" + }, + "openrouter/cohere/command-r-plus": { + "max_tokens": 128000, + "input_cost_per_token": 0.000003, + "output_cost_per_token": 0.000015, + 
"litellm_provider": "openrouter", + "mode": "chat" + }, + "openrouter/databricks/dbrx-instruct": { + "max_tokens": 32768, + "input_cost_per_token": 0.0000006, + "output_cost_per_token": 0.0000006, + "litellm_provider": "openrouter", + "mode": "chat" + }, + "openrouter/anthropic/claude-3-haiku": { + "max_tokens": 200000, + "input_cost_per_token": 0.00000025, + "output_cost_per_token": 0.00000125, + "input_cost_per_image": 0.0004, + "litellm_provider": "openrouter", + "mode": "chat", + "supports_function_calling": true, + "supports_vision": true + }, + "openrouter/anthropic/claude-3-sonnet": { + "max_tokens": 200000, + "input_cost_per_token": 0.000003, + "output_cost_per_token": 0.000015, + "input_cost_per_image": 0.0048, + "litellm_provider": "openrouter", + "mode": "chat", + "supports_function_calling": true, + "supports_vision": true + }, + "openrouter/mistralai/mistral-large": { + "max_tokens": 32000, + "input_cost_per_token": 0.000008, + "output_cost_per_token": 0.000024, + "litellm_provider": "openrouter", + "mode": "chat" + }, + "openrouter/cognitivecomputations/dolphin-mixtral-8x7b": { + "max_tokens": 32769, + "input_cost_per_token": 0.0000005, + "output_cost_per_token": 0.0000005, + "litellm_provider": "openrouter", + "mode": "chat" + }, + "openrouter/google/gemini-pro-vision": { + "max_tokens": 45875, + "input_cost_per_token": 0.000000125, + "output_cost_per_token": 0.000000375, + "input_cost_per_image": 0.0025, + "litellm_provider": "openrouter", + "mode": "chat", + "supports_function_calling": true, + "supports_vision": true + }, + "openrouter/fireworks/firellava-13b": { + "max_tokens": 4096, + "input_cost_per_token": 0.0000002, + "output_cost_per_token": 0.0000002, + "litellm_provider": "openrouter", + "mode": "chat" + }, + "openrouter/meta-llama/llama-3-8b-instruct:free": { + "max_tokens": 8192, + "input_cost_per_token": 0.0, + "output_cost_per_token": 0.0, + "litellm_provider": "openrouter", + "mode": "chat" + }, + 
"openrouter/meta-llama/llama-3-8b-instruct:extended": { + "max_tokens": 16384, + "input_cost_per_token": 0.000000225, + "output_cost_per_token": 0.00000225, + "litellm_provider": "openrouter", + "mode": "chat" + }, + "openrouter/meta-llama/llama-3-70b-instruct:nitro": { + "max_tokens": 8192, + "input_cost_per_token": 0.0000009, + "output_cost_per_token": 0.0000009, + "litellm_provider": "openrouter", + "mode": "chat" + }, + "openrouter/meta-llama/llama-3-70b-instruct": { + "max_tokens": 8192, + "input_cost_per_token": 0.00000059, + "output_cost_per_token": 0.00000079, + "litellm_provider": "openrouter", + "mode": "chat" + }, + "openrouter/openai/gpt-4o": { + "max_tokens": 4096, + "max_input_tokens": 128000, + "max_output_tokens": 4096, + "input_cost_per_token": 0.000005, + "output_cost_per_token": 0.000015, + "litellm_provider": "openrouter", + "mode": "chat", + "supports_function_calling": true, + "supports_parallel_function_calling": true, + "supports_vision": true + }, + "openrouter/openai/gpt-4o-2024-05-13": { + "max_tokens": 4096, + "max_input_tokens": 128000, + "max_output_tokens": 4096, + "input_cost_per_token": 0.000005, + "output_cost_per_token": 0.000015, + "litellm_provider": "openrouter", + "mode": "chat", + "supports_function_calling": true, + "supports_parallel_function_calling": true, + "supports_vision": true + }, + "openrouter/openai/gpt-4-vision-preview": { + "max_tokens": 130000, + "input_cost_per_token": 0.00001, + "output_cost_per_token": 0.00003, + "input_cost_per_image": 0.01445, + "litellm_provider": "openrouter", + "mode": "chat", + "supports_function_calling": true, + "supports_vision": true + }, "openrouter/openai/gpt-3.5-turbo": { "max_tokens": 4095, "input_cost_per_token": 0.0000015, @@ -1348,15 +1851,27 @@ "litellm_provider": "openrouter", "mode": "chat" }, + "openrouter/anthropic/claude-3-opus": { + "max_tokens": 4096, + "max_input_tokens": 200000, + "max_output_tokens": 4096, + "input_cost_per_token": 0.000015, + 
"output_cost_per_token": 0.000075, + "litellm_provider": "openrouter", + "mode": "chat", + "supports_function_calling": true, + "supports_vision": true, + "tool_use_system_prompt_tokens": 395 + }, "openrouter/google/palm-2-chat-bison": { - "max_tokens": 8000, + "max_tokens": 25804, "input_cost_per_token": 0.0000005, "output_cost_per_token": 0.0000005, "litellm_provider": "openrouter", "mode": "chat" }, "openrouter/google/palm-2-codechat-bison": { - "max_tokens": 8000, + "max_tokens": 20070, "input_cost_per_token": 0.0000005, "output_cost_per_token": 0.0000005, "litellm_provider": "openrouter", @@ -1571,6 +2086,15 @@ "litellm_provider": "bedrock", "mode": "embedding" }, + "amazon.titan-embed-text-v2:0": { + "max_tokens": 8192, + "max_input_tokens": 8192, + "output_vector_size": 1024, + "input_cost_per_token": 0.0000002, + "output_cost_per_token": 0.0, + "litellm_provider": "bedrock", + "mode": "embedding" + }, "mistral.mistral-7b-instruct-v0:2": { "max_tokens": 8191, "max_input_tokens": 32000, @@ -1686,7 +2210,9 @@ "input_cost_per_token": 0.000003, "output_cost_per_token": 0.000015, "litellm_provider": "bedrock", - "mode": "chat" + "mode": "chat", + "supports_function_calling": true, + "supports_vision": true }, "anthropic.claude-3-haiku-20240307-v1:0": { "max_tokens": 4096, @@ -1695,7 +2221,20 @@ "input_cost_per_token": 0.00000025, "output_cost_per_token": 0.00000125, "litellm_provider": "bedrock", - "mode": "chat" + "mode": "chat", + "supports_function_calling": true, + "supports_vision": true + }, + "anthropic.claude-3-opus-20240229-v1:0": { + "max_tokens": 4096, + "max_input_tokens": 200000, + "max_output_tokens": 4096, + "input_cost_per_token": 0.000015, + "output_cost_per_token": 0.000075, + "litellm_provider": "bedrock", + "mode": "chat", + "supports_function_calling": true, + "supports_vision": true }, "anthropic.claude-v1": { "max_tokens": 8191, @@ -2219,6 +2758,24 @@ "litellm_provider": "bedrock", "mode": "chat" }, + "cohere.command-r-plus-v1:0": { + 
"max_tokens": 4096, + "max_input_tokens": 128000, + "max_output_tokens": 4096, + "input_cost_per_token": 0.0000030, + "output_cost_per_token": 0.000015, + "litellm_provider": "bedrock", + "mode": "chat" + }, + "cohere.command-r-v1:0": { + "max_tokens": 4096, + "max_input_tokens": 128000, + "max_output_tokens": 4096, + "input_cost_per_token": 0.0000005, + "output_cost_per_token": 0.0000015, + "litellm_provider": "bedrock", + "mode": "chat" + }, "cohere.embed-english-v3": { "max_tokens": 512, "max_input_tokens": 512, @@ -2253,6 +2810,24 @@ "litellm_provider": "bedrock", "mode": "chat" }, + "meta.llama3-8b-instruct-v1:0": { + "max_tokens": 8192, + "max_input_tokens": 8192, + "max_output_tokens": 8192, + "input_cost_per_token": 0.0000004, + "output_cost_per_token": 0.0000006, + "litellm_provider": "bedrock", + "mode": "chat" + }, + "meta.llama3-70b-instruct-v1:0": { + "max_tokens": 8192, + "max_input_tokens": 8192, + "max_output_tokens": 8192, + "input_cost_per_token": 0.00000265, + "output_cost_per_token": 0.0000035, + "litellm_provider": "bedrock", + "mode": "chat" + }, "512-x-512/50-steps/stability.stable-diffusion-xl-v0": { "max_tokens": 77, "max_input_tokens": 77, @@ -2428,6 +3003,24 @@ "litellm_provider": "ollama", "mode": "completion" }, + "ollama/llama3": { + "max_tokens": 8192, + "max_input_tokens": 8192, + "max_output_tokens": 8192, + "input_cost_per_token": 0.0, + "output_cost_per_token": 0.0, + "litellm_provider": "ollama", + "mode": "chat" + }, + "ollama/llama3:70b": { + "max_tokens": 8192, + "max_input_tokens": 8192, + "max_output_tokens": 8192, + "input_cost_per_token": 0.0, + "output_cost_per_token": 0.0, + "litellm_provider": "ollama", + "mode": "chat" + }, "ollama/mistral": { "max_tokens": 8192, "max_input_tokens": 8192, @@ -2437,6 +3030,42 @@ "litellm_provider": "ollama", "mode": "completion" }, + "ollama/mistral-7B-Instruct-v0.1": { + "max_tokens": 8192, + "max_input_tokens": 8192, + "max_output_tokens": 8192, + "input_cost_per_token": 0.0, + 
"output_cost_per_token": 0.0, + "litellm_provider": "ollama", + "mode": "chat" + }, + "ollama/mistral-7B-Instruct-v0.2": { + "max_tokens": 32768, + "max_input_tokens": 32768, + "max_output_tokens": 32768, + "input_cost_per_token": 0.0, + "output_cost_per_token": 0.0, + "litellm_provider": "ollama", + "mode": "chat" + }, + "ollama/mixtral-8x7B-Instruct-v0.1": { + "max_tokens": 32768, + "max_input_tokens": 32768, + "max_output_tokens": 32768, + "input_cost_per_token": 0.0, + "output_cost_per_token": 0.0, + "litellm_provider": "ollama", + "mode": "chat" + }, + "ollama/mixtral-8x22B-Instruct-v0.1": { + "max_tokens": 65536, + "max_input_tokens": 65536, + "max_output_tokens": 65536, + "input_cost_per_token": 0.0, + "output_cost_per_token": 0.0, + "litellm_provider": "ollama", + "mode": "chat" + }, "ollama/codellama": { "max_tokens": 4096, "max_input_tokens": 4096, diff --git a/litellm/proxy/_experimental/out/404.html b/litellm/proxy/_experimental/out/404.html index a68dc5548..3e58fe524 100644 --- a/litellm/proxy/_experimental/out/404.html +++ b/litellm/proxy/_experimental/out/404.html @@ -1 +1 @@ -404: This page could not be found.LiteLLM Dashboard

404

This page could not be found.

\ No newline at end of file +404: This page could not be found.LiteLLM Dashboard

404

This page could not be found.

\ No newline at end of file diff --git a/litellm/proxy/_experimental/out/_next/static/chunks/115-fbad91e754a20933.js b/litellm/proxy/_experimental/out/_next/static/chunks/115-fbad91e754a20933.js deleted file mode 100644 index bdf14bb4e..000000000 --- a/litellm/proxy/_experimental/out/_next/static/chunks/115-fbad91e754a20933.js +++ /dev/null @@ -1,32 +0,0 @@ -"use strict";(self.webpackChunk_N_E=self.webpackChunk_N_E||[]).push([[115],{12215:function(e,t,n){n.d(t,{iN:function(){return h},R_:function(){return d},EV:function(){return g}});var r=n(41785),o=n(76991),a=[{index:7,opacity:.15},{index:6,opacity:.25},{index:5,opacity:.3},{index:5,opacity:.45},{index:5,opacity:.65},{index:5,opacity:.85},{index:4,opacity:.9},{index:3,opacity:.95},{index:2,opacity:.97},{index:1,opacity:.98}];function i(e){var t=e.r,n=e.g,o=e.b,a=(0,r.py)(t,n,o);return{h:360*a.h,s:a.s,v:a.v}}function l(e){var t=e.r,n=e.g,o=e.b;return"#".concat((0,r.vq)(t,n,o,!1))}function s(e,t,n){var r;return(r=Math.round(e.h)>=60&&240>=Math.round(e.h)?n?Math.round(e.h)-2*t:Math.round(e.h)+2*t:n?Math.round(e.h)+2*t:Math.round(e.h)-2*t)<0?r+=360:r>=360&&(r-=360),r}function c(e,t,n){var r;return 0===e.h&&0===e.s?e.s:((r=n?e.s-.16*t:4===t?e.s+.16:e.s+.05*t)>1&&(r=1),n&&5===t&&r>.1&&(r=.1),r<.06&&(r=.06),Number(r.toFixed(2)))}function u(e,t,n){var r;return(r=n?e.v+.05*t:e.v-.15*t)>1&&(r=1),Number(r.toFixed(2))}function d(e){for(var t=arguments.length>1&&void 0!==arguments[1]?arguments[1]:{},n=[],r=(0,o.uA)(e),d=5;d>0;d-=1){var p=i(r),f=l((0,o.uA)({h:s(p,d,!0),s:c(p,d,!0),v:u(p,d,!0)}));n.push(f)}n.push(l(r));for(var m=1;m<=4;m+=1){var g=i(r),h=l((0,o.uA)({h:s(g,m),s:c(g,m),v:u(g,m)}));n.push(h)}return"dark"===t.theme?a.map(function(e){var r,a,i,s=e.index,c=e.opacity;return l((r=(0,o.uA)(t.backgroundColor||"#141414"),a=(0,o.uA)(n[s]),i=100*c/100,{r:(a.r-r.r)*i+r.r,g:(a.g-r.g)*i+r.g,b:(a.b-r.b)*i+r.b}))}):n}var 
p={red:"#F5222D",volcano:"#FA541C",orange:"#FA8C16",gold:"#FAAD14",yellow:"#FADB14",lime:"#A0D911",green:"#52C41A",cyan:"#13C2C2",blue:"#1677FF",geekblue:"#2F54EB",purple:"#722ED1",magenta:"#EB2F96",grey:"#666666"},f={},m={};Object.keys(p).forEach(function(e){f[e]=d(p[e]),f[e].primary=f[e][5],m[e]=d(p[e],{theme:"dark",backgroundColor:"#141414"}),m[e].primary=m[e][5]}),f.red,f.volcano;var g=f.gold;f.orange,f.yellow,f.lime,f.green,f.cyan;var h=f.blue;f.geekblue,f.purple,f.magenta,f.grey,f.grey},8985:function(e,t,n){n.d(t,{E4:function(){return ej},jG:function(){return A},ks:function(){return U},bf:function(){return F},CI:function(){return eD},fp:function(){return X},xy:function(){return eM}});var r,o,a=n(50833),i=n(80406),l=n(63787),s=n(5239),c=function(e){for(var t,n=0,r=0,o=e.length;o>=4;++r,o-=4)t=(65535&(t=255&e.charCodeAt(r)|(255&e.charCodeAt(++r))<<8|(255&e.charCodeAt(++r))<<16|(255&e.charCodeAt(++r))<<24))*1540483477+((t>>>16)*59797<<16),t^=t>>>24,n=(65535&t)*1540483477+((t>>>16)*59797<<16)^(65535&n)*1540483477+((n>>>16)*59797<<16);switch(o){case 3:n^=(255&e.charCodeAt(r+2))<<16;case 2:n^=(255&e.charCodeAt(r+1))<<8;case 1:n^=255&e.charCodeAt(r),n=(65535&n)*1540483477+((n>>>16)*59797<<16)}return n^=n>>>13,(((n=(65535&n)*1540483477+((n>>>16)*59797<<16))^n>>>15)>>>0).toString(36)},u=n(24050),d=n(64090),p=n.t(d,2);n(61475),n(92536);var f=n(47365),m=n(65127);function g(e){return e.join("%")}var h=function(){function e(t){(0,f.Z)(this,e),(0,a.Z)(this,"instanceId",void 0),(0,a.Z)(this,"cache",new Map),this.instanceId=t}return(0,m.Z)(e,[{key:"get",value:function(e){return this.opGet(g(e))}},{key:"opGet",value:function(e){return this.cache.get(e)||null}},{key:"update",value:function(e,t){return this.opUpdate(g(e),t)}},{key:"opUpdate",value:function(e,t){var n=t(this.cache.get(e));null===n?this.cache.delete(e):this.cache.set(e,n)}}]),e}(),b="data-token-hash",v="data-css-hash",y="__cssinjs_instance__",E=d.createContext({hashPriority:"low",cache:function(){var 
e=Math.random().toString(12).slice(2);if("undefined"!=typeof document&&document.head&&document.body){var t=document.body.querySelectorAll("style[".concat(v,"]"))||[],n=document.head.firstChild;Array.from(t).forEach(function(t){t[y]=t[y]||e,t[y]===e&&document.head.insertBefore(t,n)});var r={};Array.from(document.querySelectorAll("style[".concat(v,"]"))).forEach(function(t){var n,o=t.getAttribute(v);r[o]?t[y]===e&&(null===(n=t.parentNode)||void 0===n||n.removeChild(t)):r[o]=!0})}return new h(e)}(),defaultCache:!0}),S=n(6976),w=n(22127),x=function(){function e(){(0,f.Z)(this,e),(0,a.Z)(this,"cache",void 0),(0,a.Z)(this,"keys",void 0),(0,a.Z)(this,"cacheCallTimes",void 0),this.cache=new Map,this.keys=[],this.cacheCallTimes=0}return(0,m.Z)(e,[{key:"size",value:function(){return this.keys.length}},{key:"internalGet",value:function(e){var t,n,r=arguments.length>1&&void 0!==arguments[1]&&arguments[1],o={map:this.cache};return e.forEach(function(e){if(o){var t;o=null===(t=o)||void 0===t||null===(t=t.map)||void 0===t?void 0:t.get(e)}else o=void 0}),null!==(t=o)&&void 0!==t&&t.value&&r&&(o.value[1]=this.cacheCallTimes++),null===(n=o)||void 0===n?void 0:n.value}},{key:"get",value:function(e){var t;return null===(t=this.internalGet(e,!0))||void 0===t?void 0:t[0]}},{key:"has",value:function(e){return!!this.internalGet(e)}},{key:"set",value:function(t,n){var r=this;if(!this.has(t)){if(this.size()+1>e.MAX_CACHE_SIZE+e.MAX_CACHE_OFFSET){var o=this.keys.reduce(function(e,t){var n=(0,i.Z)(e,2)[1];return r.internalGet(t)[1]0,"[Ant Design CSS-in-JS] Theme should have at least one derivative function."),C+=1}return(0,m.Z)(e,[{key:"getDerivativeToken",value:function(e){return this.derivatives.reduce(function(t,n){return n(e,t)},void 0)}}]),e}(),k=new x;function A(e){var t=Array.isArray(e)?e:[e];return k.has(t)||k.set(t,new T(t)),k.get(t)}var I=new WeakMap,R={},N=new WeakMap;function _(e){var t=N.get(e)||"";return t||(Object.keys(e).forEach(function(n){var r=e[n];t+=n,r instanceof 
T?t+=r.id:r&&"object"===(0,S.Z)(r)?t+=_(r):t+=r}),N.set(e,t)),t}function P(e,t){return c("".concat(t,"_").concat(_(e)))}var M="random-".concat(Date.now(),"-").concat(Math.random()).replace(/\./g,""),L="_bAmBoO_",D=void 0,j=(0,w.Z)();function F(e){return"number"==typeof e?"".concat(e,"px"):e}function B(e,t,n){var r,o=arguments.length>3&&void 0!==arguments[3]?arguments[3]:{},i=arguments.length>4&&void 0!==arguments[4]&&arguments[4];if(i)return e;var l=(0,s.Z)((0,s.Z)({},o),{},(r={},(0,a.Z)(r,b,t),(0,a.Z)(r,v,n),r)),c=Object.keys(l).map(function(e){var t=l[e];return t?"".concat(e,'="').concat(t,'"'):null}).filter(function(e){return e}).join(" ");return"")}var U=function(e){var t=arguments.length>1&&void 0!==arguments[1]?arguments[1]:"";return"--".concat(t?"".concat(t,"-"):"").concat(e).replace(/([a-z0-9])([A-Z])/g,"$1-$2").replace(/([A-Z]+)([A-Z][a-z0-9]+)/g,"$1-$2").replace(/([a-z])([A-Z0-9])/g,"$1-$2").toLowerCase()},Z=function(e,t,n){var r,o={},a={};return Object.entries(e).forEach(function(e){var t=(0,i.Z)(e,2),r=t[0],l=t[1];if(null!=n&&null!==(s=n.preserve)&&void 0!==s&&s[r])a[r]=l;else if(("string"==typeof l||"number"==typeof l)&&!(null!=n&&null!==(c=n.ignore)&&void 0!==c&&c[r])){var s,c,u,d=U(r,null==n?void 0:n.prefix);o[d]="number"!=typeof l||null!=n&&null!==(u=n.unitless)&&void 0!==u&&u[r]?String(l):"".concat(l,"px"),a[r]="var(".concat(d,")")}}),[a,(r={scope:null==n?void 0:n.scope},Object.keys(o).length?".".concat(t).concat(null!=r&&r.scope?".".concat(r.scope):"","{").concat(Object.entries(o).map(function(e){var t=(0,i.Z)(e,2),n=t[0],r=t[1];return"".concat(n,":").concat(r,";")}).join(""),"}"):"")]},z=n(24800),H=(0,s.Z)({},p).useInsertionEffect,G=H?function(e,t,n){return H(function(){return e(),t()},n)}:function(e,t,n){d.useMemo(e,n),(0,z.Z)(function(){return t(!0)},n)},$=void 0!==(0,s.Z)({},p).useInsertionEffect?function(e){var t=[],n=!1;return d.useEffect(function(){return n=!1,function(){n=!0,t.length&&t.forEach(function(e){return 
e()})}},e),function(e){n||t.push(e)}}:function(){return function(e){e()}};function W(e,t,n,r,o){var a=d.useContext(E).cache,s=g([e].concat((0,l.Z)(t))),c=$([s]),u=function(e){a.opUpdate(s,function(t){var r=(0,i.Z)(t||[void 0,void 0],2),o=r[0],a=[void 0===o?0:o,r[1]||n()];return e?e(a):a})};d.useMemo(function(){u()},[s]);var p=a.opGet(s)[1];return G(function(){null==o||o(p)},function(e){return u(function(t){var n=(0,i.Z)(t,2),r=n[0],a=n[1];return e&&0===r&&(null==o||o(p)),[r+1,a]}),function(){a.opUpdate(s,function(t){var n=(0,i.Z)(t||[],2),o=n[0],l=void 0===o?0:o,u=n[1];return 0==l-1?(c(function(){(e||!a.opGet(s))&&(null==r||r(u,!1))}),null):[l-1,u]})}},[s]),p}var V={},q=new Map,Y=function(e,t,n,r){var o=n.getDerivativeToken(e),a=(0,s.Z)((0,s.Z)({},o),t);return r&&(a=r(a)),a},K="token";function X(e,t){var n=arguments.length>2&&void 0!==arguments[2]?arguments[2]:{},r=(0,d.useContext)(E),o=r.cache.instanceId,a=r.container,p=n.salt,f=void 0===p?"":p,m=n.override,g=void 0===m?V:m,h=n.formatToken,S=n.getComputedToken,w=n.cssVar,x=function(e,t){for(var n=I,r=0;r=(q.get(e)||0)}),n.length-r.length>0&&r.forEach(function(e){"undefined"!=typeof document&&document.querySelectorAll("style[".concat(b,'="').concat(e,'"]')).forEach(function(e){if(e[y]===o){var t;null===(t=e.parentNode)||void 0===t||t.removeChild(e)}}),q.delete(e)})},function(e){var t=(0,i.Z)(e,4),n=t[0],r=t[3];if(w&&r){var l=(0,u.hq)(r,c("css-variables-".concat(n._themeKey)),{mark:v,prepend:"queue",attachTo:a,priority:-999});l[y]=o,l.setAttribute(b,n._themeKey)}})}var 
Q=n(14749),J={animationIterationCount:1,borderImageOutset:1,borderImageSlice:1,borderImageWidth:1,boxFlex:1,boxFlexGroup:1,boxOrdinalGroup:1,columnCount:1,columns:1,flex:1,flexGrow:1,flexPositive:1,flexShrink:1,flexNegative:1,flexOrder:1,gridRow:1,gridRowEnd:1,gridRowSpan:1,gridRowStart:1,gridColumn:1,gridColumnEnd:1,gridColumnSpan:1,gridColumnStart:1,msGridRow:1,msGridRowSpan:1,msGridColumn:1,msGridColumnSpan:1,fontWeight:1,lineHeight:1,opacity:1,order:1,orphans:1,tabSize:1,widows:1,zIndex:1,zoom:1,WebkitLineClamp:1,fillOpacity:1,floodOpacity:1,stopOpacity:1,strokeDasharray:1,strokeDashoffset:1,strokeMiterlimit:1,strokeOpacity:1,strokeWidth:1},ee="comm",et="rule",en="decl",er=Math.abs,eo=String.fromCharCode;function ea(e,t,n){return e.replace(t,n)}function ei(e,t){return 0|e.charCodeAt(t)}function el(e,t,n){return e.slice(t,n)}function es(e){return e.length}function ec(e,t){return t.push(e),e}function eu(e,t){for(var n="",r=0;r0?f[v]+" "+y:ea(y,/&\f/g,f[v])).trim())&&(s[b++]=E);return ev(e,t,n,0===o?et:l,s,c,u,d)}function eO(e,t,n,r,o){return ev(e,t,n,en,el(e,0,r),el(e,r+1,-1),r,o)}var eC="data-ant-cssinjs-cache-path",eT="_FILE_STYLE__",ek=!0,eA="_multi_value_";function eI(e){var t,n,r;return eu((r=function e(t,n,r,o,a,i,l,s,c){for(var u,d,p,f=0,m=0,g=l,h=0,b=0,v=0,y=1,E=1,S=1,w=0,x="",O=a,C=i,T=o,k=x;E;)switch(v=w,w=ey()){case 40:if(108!=v&&58==ei(k,g-1)){-1!=(d=k+=ea(ew(w),"&","&\f"),p=er(f?s[f-1]:0),d.indexOf("&\f",p))&&(S=-1);break}case 34:case 39:case 91:k+=ew(w);break;case 9:case 10:case 13:case 32:k+=function(e){for(;eh=eE();)if(eh<33)ey();else break;return eS(e)>2||eS(eh)>3?"":" "}(v);break;case 92:k+=function(e,t){for(var n;--t&&ey()&&!(eh<48)&&!(eh>102)&&(!(eh>57)||!(eh<65))&&(!(eh>70)||!(eh<97)););return n=eg+(t<6&&32==eE()&&32==ey()),el(eb,e,n)}(eg-1,7);continue;case 47:switch(eE()){case 42:case 47:ec(ev(u=function(e,t){for(;ey();)if(e+eh===57)break;else 
if(e+eh===84&&47===eE())break;return"/*"+el(eb,t,eg-1)+"*"+eo(47===e?e:ey())}(ey(),eg),n,r,ee,eo(eh),el(u,2,-2),0,c),c);break;default:k+="/"}break;case 123*y:s[f++]=es(k)*S;case 125*y:case 59:case 0:switch(w){case 0:case 125:E=0;case 59+m:-1==S&&(k=ea(k,/\f/g,"")),b>0&&es(k)-g&&ec(b>32?eO(k+";",o,r,g-1,c):eO(ea(k," ","")+";",o,r,g-2,c),c);break;case 59:k+=";";default:if(ec(T=ex(k,n,r,f,m,a,s,x,O=[],C=[],g,i),i),123===w){if(0===m)e(k,n,T,T,O,i,g,s,C);else switch(99===h&&110===ei(k,3)?100:h){case 100:case 108:case 109:case 115:e(t,T,T,o&&ec(ex(t,T,T,0,0,a,s,x,a,O=[],g,C),C),a,C,g,s,o?O:C);break;default:e(k,T,T,T,[""],C,0,s,C)}}}f=m=b=0,y=S=1,x=k="",g=l;break;case 58:g=1+es(k),b=v;default:if(y<1){if(123==w)--y;else if(125==w&&0==y++&&125==(eh=eg>0?ei(eb,--eg):0,ef--,10===eh&&(ef=1,ep--),eh))continue}switch(k+=eo(w),w*y){case 38:S=m>0?1:(k+="\f",-1);break;case 44:s[f++]=(es(k)-1)*S,S=1;break;case 64:45===eE()&&(k+=ew(ey())),h=eE(),m=g=es(x=k+=function(e){for(;!eS(eE());)ey();return el(eb,e,eg)}(eg)),w++;break;case 45:45===v&&2==es(k)&&(y=0)}}return i}("",null,null,null,[""],(n=t=e,ep=ef=1,em=es(eb=n),eg=0,t=[]),0,[0],t),eb="",r),ed).replace(/\{%%%\:[^;];}/g,";")}var eR=function e(t){var n=arguments.length>1&&void 0!==arguments[1]?arguments[1]:{},r=arguments.length>2&&void 0!==arguments[2]?arguments[2]:{root:!0,parentSelectors:[]},o=r.root,a=r.injectHash,c=r.parentSelectors,d=n.hashId,p=n.layer,f=(n.path,n.hashPriority),m=n.transformers,g=void 0===m?[]:m;n.linters;var h="",b={};function v(t){var r=t.getName(d);if(!b[r]){var o=e(t.style,n,{root:!1,parentSelectors:c}),a=(0,i.Z)(o,1)[0];b[r]="@keyframes ".concat(t.getName(d)).concat(a)}}if((function e(t){var n=arguments.length>1&&void 0!==arguments[1]?arguments[1]:[];return t.forEach(function(t){Array.isArray(t)?e(t,n):t&&n.push(t)}),n})(Array.isArray(t)?t:[t]).forEach(function(t){var r="string"!=typeof t||o?t:{};if("string"==typeof r)h+="".concat(r,"\n");else if(r._keyframe)v(r);else{var u=g.reduce(function(e,t){var 
n;return(null==t||null===(n=t.visit)||void 0===n?void 0:n.call(t,e))||e},r);Object.keys(u).forEach(function(t){var r=u[t];if("object"!==(0,S.Z)(r)||!r||"animationName"===t&&r._keyframe||"object"===(0,S.Z)(r)&&r&&("_skip_check_"in r||eA in r)){function p(e,t){var n=e.replace(/[A-Z]/g,function(e){return"-".concat(e.toLowerCase())}),r=t;J[e]||"number"!=typeof r||0===r||(r="".concat(r,"px")),"animationName"===e&&null!=t&&t._keyframe&&(v(t),r=t.getName(d)),h+="".concat(n,":").concat(r,";")}var m,g=null!==(m=null==r?void 0:r.value)&&void 0!==m?m:r;"object"===(0,S.Z)(r)&&null!=r&&r[eA]&&Array.isArray(g)?g.forEach(function(e){p(t,e)}):p(t,g)}else{var y=!1,E=t.trim(),w=!1;(o||a)&&d?E.startsWith("@")?y=!0:E=function(e,t,n){if(!t)return e;var r=".".concat(t),o="low"===n?":where(".concat(r,")"):r;return e.split(",").map(function(e){var t,n=e.trim().split(/\s+/),r=n[0]||"",a=(null===(t=r.match(/^\w+/))||void 0===t?void 0:t[0])||"";return[r="".concat(a).concat(o).concat(r.slice(a.length))].concat((0,l.Z)(n.slice(1))).join(" ")}).join(",")}(t,d,f):o&&!d&&("&"===E||""===E)&&(E="",w=!0);var x=e(r,n,{root:w,injectHash:y,parentSelectors:[].concat((0,l.Z)(c),[E])}),O=(0,i.Z)(x,2),C=O[0],T=O[1];b=(0,s.Z)((0,s.Z)({},b),T),h+="".concat(E).concat(C)}})}}),o){if(p&&(void 0===D&&(D=function(e,t,n){if((0,w.Z)()){(0,u.hq)(e,M);var r,o,a=document.createElement("div");a.style.position="fixed",a.style.left="0",a.style.top="0",null==t||t(a),document.body.appendChild(a);var i=n?n(a):null===(r=getComputedStyle(a).content)||void 0===r?void 0:r.includes(L);return null===(o=a.parentNode)||void 0===o||o.removeChild(a),(0,u.jL)(M),i}return!1}("@layer ".concat(M," { .").concat(M,' { content: "').concat(L,'"!important; } }'),function(e){e.className=M})),D)){var y=p.split(","),E=y[y.length-1].trim();h="@layer ".concat(E," {").concat(h,"}"),y.length>1&&(h="@layer ".concat(p,"{%%%:%}").concat(h))}}else h="{".concat(h,"}");return[h,b]};function eN(e,t){return c("".concat(e.join("%")).concat(t))}function 
e_(){return null}var eP="style";function eM(e,t){var n=e.token,o=e.path,s=e.hashId,c=e.layer,p=e.nonce,f=e.clientOnly,m=e.order,g=void 0===m?0:m,h=d.useContext(E),S=h.autoClear,x=(h.mock,h.defaultCache),O=h.hashPriority,C=h.container,T=h.ssrInline,k=h.transformers,A=h.linters,I=h.cache,R=n._tokenKey,N=[R].concat((0,l.Z)(o)),_=W(eP,N,function(){var e=N.join("|");if(!function(){if(!r&&(r={},(0,w.Z)())){var e,t=document.createElement("div");t.className=eC,t.style.position="fixed",t.style.visibility="hidden",t.style.top="-9999px",document.body.appendChild(t);var n=getComputedStyle(t).content||"";(n=n.replace(/^"/,"").replace(/"$/,"")).split(";").forEach(function(e){var t=e.split(":"),n=(0,i.Z)(t,2),o=n[0],a=n[1];r[o]=a});var o=document.querySelector("style[".concat(eC,"]"));o&&(ek=!1,null===(e=o.parentNode)||void 0===e||e.removeChild(o)),document.body.removeChild(t)}}(),r[e]){var n=function(e){var t=r[e],n=null;if(t&&(0,w.Z)()){if(ek)n=eT;else{var o=document.querySelector("style[".concat(v,'="').concat(r[e],'"]'));o?n=o.innerHTML:delete r[e]}}return[n,t]}(e),a=(0,i.Z)(n,2),l=a[0],u=a[1];if(l)return[l,R,u,{},f,g]}var d=eR(t(),{hashId:s,hashPriority:O,layer:c,path:o.join("-"),transformers:k,linters:A}),p=(0,i.Z)(d,2),m=p[0],h=p[1],b=eI(m),y=eN(N,b);return[b,R,y,h,f,g]},function(e,t){var n=(0,i.Z)(e,3)[2];(t||S)&&j&&(0,u.jL)(n,{mark:v})},function(e){var t=(0,i.Z)(e,4),n=t[0],r=(t[1],t[2]),o=t[3];if(j&&n!==eT){var a={mark:v,prepend:"queue",attachTo:C,priority:g},l="function"==typeof p?p():p;l&&(a.csp={nonce:l});var s=(0,u.hq)(n,r,a);s[y]=I.instanceId,s.setAttribute(b,R),Object.keys(o).forEach(function(e){(0,u.hq)(eI(o[e]),"_effect-".concat(e),a)})}}),P=(0,i.Z)(_,3),M=P[0],L=P[1],D=P[2];return function(e){var t,n;return t=T&&!j&&x?d.createElement("style",(0,Q.Z)({},(n={},(0,a.Z)(n,b,L),(0,a.Z)(n,v,D),n),{dangerouslySetInnerHTML:{__html:M}})):d.createElement(e_,null),d.createElement(d.Fragment,null,t,e)}}var eL="cssVar",eD=function(e,t){var 
n=e.key,r=e.prefix,o=e.unitless,a=e.ignore,s=e.token,c=e.scope,p=void 0===c?"":c,f=(0,d.useContext)(E),m=f.cache.instanceId,g=f.container,h=s._tokenKey,S=[].concat((0,l.Z)(e.path),[n,p,h]);return W(eL,S,function(){var e=Z(t(),n,{prefix:r,unitless:o,ignore:a,scope:p}),l=(0,i.Z)(e,2),s=l[0],c=l[1],u=eN(S,c);return[s,c,u,n]},function(e){var t=(0,i.Z)(e,3)[2];j&&(0,u.jL)(t,{mark:v})},function(e){var t=(0,i.Z)(e,3),r=t[1],o=t[2];if(r){var a=(0,u.hq)(r,o,{mark:v,prepend:"queue",attachTo:g,priority:-999});a[y]=m,a.setAttribute(b,n)}})};o={},(0,a.Z)(o,eP,function(e,t,n){var r=(0,i.Z)(e,6),o=r[0],a=r[1],l=r[2],s=r[3],c=r[4],u=r[5],d=(n||{}).plain;if(c)return null;var p=o,f={"data-rc-order":"prependQueue","data-rc-priority":"".concat(u)};return p=B(o,a,l,f,d),s&&Object.keys(s).forEach(function(e){if(!t[e]){t[e]=!0;var n=eI(s[e]);p+=B(n,a,"_effect-".concat(e),f,d)}}),[u,l,p]}),(0,a.Z)(o,K,function(e,t,n){var r=(0,i.Z)(e,5),o=r[2],a=r[3],l=r[4],s=(n||{}).plain;if(!a)return null;var c=o._tokenKey,u=B(a,l,c,{"data-rc-order":"prependQueue","data-rc-priority":"".concat(-999)},s);return[-999,c,u]}),(0,a.Z)(o,eL,function(e,t,n){var r=(0,i.Z)(e,4),o=r[1],a=r[2],l=r[3],s=(n||{}).plain;if(!o)return null;var c=B(o,l,a,{"data-rc-order":"prependQueue","data-rc-priority":"".concat(-999)},s);return[-999,a,c]});var ej=function(){function e(t,n){(0,f.Z)(this,e),(0,a.Z)(this,"name",void 0),(0,a.Z)(this,"style",void 0),(0,a.Z)(this,"_keyframe",!0),this.name=t,this.style=n}return(0,m.Z)(e,[{key:"getName",value:function(){var e=arguments.length>0&&void 0!==arguments[0]?arguments[0]:"";return e?"".concat(e,"-").concat(this.name):this.name}}]),e}();function eF(e){return e.notSplit=!0,e}eF(["borderTop","borderBottom"]),eF(["borderTop"]),eF(["borderBottom"]),eF(["borderLeft","borderRight"]),eF(["borderLeft"]),eF(["borderRight"])},60688:function(e,t,n){n.d(t,{Z:function(){return A}});var 
r=n(14749),o=n(80406),a=n(50833),i=n(6787),l=n(64090),s=n(16480),c=n.n(s),u=n(12215),d=n(67689),p=n(5239),f=n(6976),m=n(24050),g=n(74687),h=n(53850);function b(e){return"object"===(0,f.Z)(e)&&"string"==typeof e.name&&"string"==typeof e.theme&&("object"===(0,f.Z)(e.icon)||"function"==typeof e.icon)}function v(){var e=arguments.length>0&&void 0!==arguments[0]?arguments[0]:{};return Object.keys(e).reduce(function(t,n){var r=e[n];return"class"===n?(t.className=r,delete t.class):(delete t[n],t[n.replace(/-(.)/g,function(e,t){return t.toUpperCase()})]=r),t},{})}function y(e){return(0,u.R_)(e)[0]}function E(e){return e?Array.isArray(e)?e:[e]:[]}var S=function(e){var t=(0,l.useContext)(d.Z),n=t.csp,r=t.prefixCls,o="\n.anticon {\n display: inline-block;\n color: inherit;\n font-style: normal;\n line-height: 0;\n text-align: center;\n text-transform: none;\n vertical-align: -0.125em;\n text-rendering: optimizeLegibility;\n -webkit-font-smoothing: antialiased;\n -moz-osx-font-smoothing: grayscale;\n}\n\n.anticon > * {\n line-height: 1;\n}\n\n.anticon svg {\n display: inline-block;\n}\n\n.anticon::before {\n display: none;\n}\n\n.anticon .anticon-icon {\n display: block;\n}\n\n.anticon[tabindex] {\n cursor: pointer;\n}\n\n.anticon-spin::before,\n.anticon-spin {\n display: inline-block;\n -webkit-animation: loadingCircle 1s infinite linear;\n animation: loadingCircle 1s infinite linear;\n}\n\n@-webkit-keyframes loadingCircle {\n 100% {\n -webkit-transform: rotate(360deg);\n transform: rotate(360deg);\n }\n}\n\n@keyframes loadingCircle {\n 100% {\n -webkit-transform: rotate(360deg);\n transform: rotate(360deg);\n }\n}\n";r&&(o=o.replace(/anticon/g,r)),(0,l.useEffect)(function(){var t=e.current,r=(0,g.A)(t);(0,m.hq)(o,"@ant-design-icons",{prepend:!0,csp:n,attachTo:r})},[])},w=["icon","className","onClick","style","primaryColor","secondaryColor"],x={primaryColor:"#333",secondaryColor:"#E6E6E6",calculated:!1},O=function(e){var 
t,n,r=e.icon,o=e.className,a=e.onClick,s=e.style,c=e.primaryColor,u=e.secondaryColor,d=(0,i.Z)(e,w),f=l.useRef(),m=x;if(c&&(m={primaryColor:c,secondaryColor:u||y(c)}),S(f),t=b(r),n="icon should be icon definiton, but got ".concat(r),(0,h.ZP)(t,"[@ant-design/icons] ".concat(n)),!b(r))return null;var g=r;return g&&"function"==typeof g.icon&&(g=(0,p.Z)((0,p.Z)({},g),{},{icon:g.icon(m.primaryColor,m.secondaryColor)})),function e(t,n,r){return r?l.createElement(t.tag,(0,p.Z)((0,p.Z)({key:n},v(t.attrs)),r),(t.children||[]).map(function(r,o){return e(r,"".concat(n,"-").concat(t.tag,"-").concat(o))})):l.createElement(t.tag,(0,p.Z)({key:n},v(t.attrs)),(t.children||[]).map(function(r,o){return e(r,"".concat(n,"-").concat(t.tag,"-").concat(o))}))}(g.icon,"svg-".concat(g.name),(0,p.Z)((0,p.Z)({className:o,onClick:a,style:s,"data-icon":g.name,width:"1em",height:"1em",fill:"currentColor","aria-hidden":"true"},d),{},{ref:f}))};function C(e){var t=E(e),n=(0,o.Z)(t,2),r=n[0],a=n[1];return O.setTwoToneColors({primaryColor:r,secondaryColor:a})}O.displayName="IconReact",O.getTwoToneColors=function(){return(0,p.Z)({},x)},O.setTwoToneColors=function(e){var t=e.primaryColor,n=e.secondaryColor;x.primaryColor=t,x.secondaryColor=n||y(t),x.calculated=!!n};var T=["className","icon","spin","rotate","tabIndex","onClick","twoToneColor"];C(u.iN.primary);var k=l.forwardRef(function(e,t){var n,s=e.className,u=e.icon,p=e.spin,f=e.rotate,m=e.tabIndex,g=e.onClick,h=e.twoToneColor,b=(0,i.Z)(e,T),v=l.useContext(d.Z),y=v.prefixCls,S=void 0===y?"anticon":y,w=v.rootClassName,x=c()(w,S,(n={},(0,a.Z)(n,"".concat(S,"-").concat(u.name),!!u.name),(0,a.Z)(n,"".concat(S,"-spin"),!!p||"loading"===u.name),n),s),C=m;void 0===C&&g&&(C=-1);var k=E(h),A=(0,o.Z)(k,2),I=A[0],R=A[1];return 
l.createElement("span",(0,r.Z)({role:"img","aria-label":u.name},b,{ref:t,tabIndex:C,onClick:g,className:x}),l.createElement(O,{icon:u,primaryColor:I,secondaryColor:R,style:f?{msTransform:"rotate(".concat(f,"deg)"),transform:"rotate(".concat(f,"deg)")}:void 0}))});k.displayName="AntdIcon",k.getTwoToneColor=function(){var e=O.getTwoToneColors();return e.calculated?[e.primaryColor,e.secondaryColor]:e.primaryColor},k.setTwoToneColor=C;var A=k},67689:function(e,t,n){var r=(0,n(64090).createContext)({});t.Z=r},99537:function(e,t,n){n.d(t,{Z:function(){return l}});var r=n(14749),o=n(64090),a={icon:{tag:"svg",attrs:{viewBox:"64 64 896 896",focusable:"false"},children:[{tag:"path",attrs:{d:"M512 64C264.6 64 64 264.6 64 512s200.6 448 448 448 448-200.6 448-448S759.4 64 512 64zm193.5 301.7l-210.6 292a31.8 31.8 0 01-51.7 0L318.5 484.9c-3.8-5.3 0-12.7 6.5-12.7h46.9c10.2 0 19.9 4.9 25.9 13.3l71.2 98.8 157.2-218c6-8.3 15.6-13.3 25.9-13.3H699c6.5 0 10.3 7.4 6.5 12.7z"}}]},name:"check-circle",theme:"filled"},i=n(60688),l=o.forwardRef(function(e,t){return o.createElement(i.Z,(0,r.Z)({},e,{ref:t,icon:a}))})},90507:function(e,t,n){n.d(t,{Z:function(){return l}});var r=n(14749),o=n(64090),a={icon:{tag:"svg",attrs:{viewBox:"64 64 896 896",focusable:"false"},children:[{tag:"path",attrs:{d:"M912 190h-69.9c-9.8 0-19.1 4.5-25.1 12.2L404.7 724.5 207 474a32 32 0 00-25.1-12.2H112c-6.7 0-10.4 7.7-6.3 12.9l273.9 347c12.8 16.2 37.4 16.2 50.3 0l488.4-618.9c4.1-5.1.4-12.8-6.3-12.8z"}}]},name:"check",theme:"outlined"},i=n(60688),l=o.forwardRef(function(e,t){return o.createElement(i.Z,(0,r.Z)({},e,{ref:t,icon:a}))})},77136:function(e,t,n){n.d(t,{Z:function(){return l}});var r=n(14749),o=n(64090),a={icon:{tag:"svg",attrs:{"fill-rule":"evenodd",viewBox:"64 64 896 896",focusable:"false"},children:[{tag:"path",attrs:{d:"M512 64c247.4 0 448 200.6 448 448S759.4 960 512 960 64 759.4 64 512 264.6 64 512 64zm127.98 274.82h-.04l-.08.06L512 466.75 384.14 338.88c-.04-.05-.06-.06-.08-.06a.12.12 0 00-.07 0c-.03 
0-.05.01-.09.05l-45.02 45.02a.2.2 0 00-.05.09.12.12 0 000 .07v.02a.27.27 0 00.06.06L466.75 512 338.88 639.86c-.05.04-.06.06-.06.08a.12.12 0 000 .07c0 .03.01.05.05.09l45.02 45.02a.2.2 0 00.09.05.12.12 0 00.07 0c.02 0 .04-.01.08-.05L512 557.25l127.86 127.87c.04.04.06.05.08.05a.12.12 0 00.07 0c.03 0 .05-.01.09-.05l45.02-45.02a.2.2 0 00.05-.09.12.12 0 000-.07v-.02a.27.27 0 00-.05-.06L557.25 512l127.87-127.86c.04-.04.05-.06.05-.08a.12.12 0 000-.07c0-.03-.01-.05-.05-.09l-45.02-45.02a.2.2 0 00-.09-.05.12.12 0 00-.07 0z"}}]},name:"close-circle",theme:"filled"},i=n(60688),l=o.forwardRef(function(e,t){return o.createElement(i.Z,(0,r.Z)({},e,{ref:t,icon:a}))})},81303:function(e,t,n){n.d(t,{Z:function(){return l}});var r=n(14749),o=n(64090),a={icon:{tag:"svg",attrs:{"fill-rule":"evenodd",viewBox:"64 64 896 896",focusable:"false"},children:[{tag:"path",attrs:{d:"M799.86 166.31c.02 0 .04.02.08.06l57.69 57.7c.04.03.05.05.06.08a.12.12 0 010 .06c0 .03-.02.05-.06.09L569.93 512l287.7 287.7c.04.04.05.06.06.09a.12.12 0 010 .07c0 .02-.02.04-.06.08l-57.7 57.69c-.03.04-.05.05-.07.06a.12.12 0 01-.07 0c-.03 0-.05-.02-.09-.06L512 569.93l-287.7 287.7c-.04.04-.06.05-.09.06a.12.12 0 01-.07 0c-.02 0-.04-.02-.08-.06l-57.69-57.7c-.04-.03-.05-.05-.06-.07a.12.12 0 010-.07c0-.03.02-.05.06-.09L454.07 512l-287.7-287.7c-.04-.04-.05-.06-.06-.09a.12.12 0 010-.07c0-.02.02-.04.06-.08l57.7-57.69c.03-.04.05-.05.07-.06a.12.12 0 01.07 0c.03 0 .05.02.09.06L512 454.07l287.7-287.7c.04-.04.06-.05.09-.06a.12.12 0 01.07 0z"}}]},name:"close",theme:"outlined"},i=n(60688),l=o.forwardRef(function(e,t){return o.createElement(i.Z,(0,r.Z)({},e,{ref:t,icon:a}))})},20383:function(e,t,n){n.d(t,{Z:function(){return l}});var r=n(14749),o=n(64090),a={icon:{tag:"svg",attrs:{viewBox:"64 64 896 896",focusable:"false"},children:[{tag:"path",attrs:{d:"M884 256h-75c-5.1 0-9.9 2.5-12.9 6.6L512 654.2 227.9 262.6c-3-4.1-7.8-6.6-12.9-6.6h-75c-6.5 0-10.3 7.4-6.5 12.7l352.6 486.1c12.8 17.6 39 17.6 51.7 
0l352.6-486.1c3.9-5.3.1-12.7-6.4-12.7z"}}]},name:"down",theme:"outlined"},i=n(60688),l=o.forwardRef(function(e,t){return o.createElement(i.Z,(0,r.Z)({},e,{ref:t,icon:a}))})},31413:function(e,t,n){n.d(t,{Z:function(){return l}});var r=n(14749),o=n(64090),a={icon:{tag:"svg",attrs:{viewBox:"64 64 896 896",focusable:"false"},children:[{tag:"path",attrs:{d:"M176 511a56 56 0 10112 0 56 56 0 10-112 0zm280 0a56 56 0 10112 0 56 56 0 10-112 0zm280 0a56 56 0 10112 0 56 56 0 10-112 0z"}}]},name:"ellipsis",theme:"outlined"},i=n(60688),l=o.forwardRef(function(e,t){return o.createElement(i.Z,(0,r.Z)({},e,{ref:t,icon:a}))})},20653:function(e,t,n){n.d(t,{Z:function(){return l}});var r=n(14749),o=n(64090),a={icon:{tag:"svg",attrs:{viewBox:"64 64 896 896",focusable:"false"},children:[{tag:"path",attrs:{d:"M512 64C264.6 64 64 264.6 64 512s200.6 448 448 448 448-200.6 448-448S759.4 64 512 64zm-32 232c0-4.4 3.6-8 8-8h48c4.4 0 8 3.6 8 8v272c0 4.4-3.6 8-8 8h-48c-4.4 0-8-3.6-8-8V296zm32 440a48.01 48.01 0 010-96 48.01 48.01 0 010 96z"}}]},name:"exclamation-circle",theme:"filled"},i=n(60688),l=o.forwardRef(function(e,t){return o.createElement(i.Z,(0,r.Z)({},e,{ref:t,icon:a}))})},40388:function(e,t,n){n.d(t,{Z:function(){return l}});var r=n(14749),o=n(64090),a={icon:{tag:"svg",attrs:{viewBox:"64 64 896 896",focusable:"false"},children:[{tag:"path",attrs:{d:"M512 64C264.6 64 64 264.6 64 512s200.6 448 448 448 448-200.6 448-448S759.4 64 512 64zm32 664c0 4.4-3.6 8-8 8h-48c-4.4 0-8-3.6-8-8V456c0-4.4 3.6-8 8-8h48c4.4 0 8 3.6 8 8v272zm-32-344a48.01 48.01 0 010-96 48.01 48.01 0 010 96z"}}]},name:"info-circle",theme:"filled"},i=n(60688),l=o.forwardRef(function(e,t){return o.createElement(i.Z,(0,r.Z)({},e,{ref:t,icon:a}))})},66155:function(e,t,n){n.d(t,{Z:function(){return l}});var r=n(14749),o=n(64090),a={icon:{tag:"svg",attrs:{viewBox:"0 0 1024 1024",focusable:"false"},children:[{tag:"path",attrs:{d:"M988 548c-19.9 0-36-16.1-36-36 0-59.4-11.6-117-34.6-171.3a440.45 440.45 0 00-94.3-139.9 437.71 437.71 
0 00-139.9-94.3C629 83.6 571.4 72 512 72c-19.9 0-36-16.1-36-36s16.1-36 36-36c69.1 0 136.2 13.5 199.3 40.3C772.3 66 827 103 874 150c47 47 83.9 101.8 109.7 162.7 26.7 63.1 40.2 130.2 40.2 199.3.1 19.9-16 36-35.9 36z"}}]},name:"loading",theme:"outlined"},i=n(60688),l=o.forwardRef(function(e,t){return o.createElement(i.Z,(0,r.Z)({},e,{ref:t,icon:a}))})},50459:function(e,t,n){n.d(t,{Z:function(){return l}});var r=n(14749),o=n(64090),a={icon:{tag:"svg",attrs:{viewBox:"64 64 896 896",focusable:"false"},children:[{tag:"path",attrs:{d:"M765.7 486.8L314.9 134.7A7.97 7.97 0 00302 141v77.3c0 4.9 2.3 9.6 6.1 12.6l360 281.1-360 281.1c-3.9 3-6.1 7.7-6.1 12.6V883c0 6.7 7.7 10.4 12.9 6.3l450.8-352.1a31.96 31.96 0 000-50.4z"}}]},name:"right",theme:"outlined"},i=n(60688),l=o.forwardRef(function(e,t){return o.createElement(i.Z,(0,r.Z)({},e,{ref:t,icon:a}))})},96871:function(e,t,n){n.d(t,{Z:function(){return l}});var r=n(14749),o=n(64090),a={icon:{tag:"svg",attrs:{viewBox:"64 64 896 896",focusable:"false"},children:[{tag:"path",attrs:{d:"M909.6 854.5L649.9 594.8C690.2 542.7 712 479 712 412c0-80.2-31.3-155.4-87.9-212.1-56.6-56.7-132-87.9-212.1-87.9s-155.5 31.3-212.1 87.9C143.2 256.5 112 331.8 112 412c0 80.1 31.3 155.5 87.9 212.1C256.5 680.8 331.8 712 412 712c67 0 130.6-21.8 182.7-62l259.7 259.6a8.2 8.2 0 0011.6 0l43.6-43.5a8.2 8.2 0 000-11.6zM570.4 570.4C528 612.7 471.8 636 412 636s-116-23.3-158.4-65.6C211.3 528 188 471.8 188 412s23.3-116.1 65.6-158.4C296 211.3 352.2 188 412 188s116.1 23.2 158.4 65.6S636 352.2 636 412s-23.3 116.1-65.6 158.4z"}}]},name:"search",theme:"outlined"},i=n(60688),l=o.forwardRef(function(e,t){return o.createElement(i.Z,(0,r.Z)({},e,{ref:t,icon:a}))})},41785:function(e,t,n){n.d(t,{T6:function(){return p},VD:function(){return f},WE:function(){return c},Yt:function(){return m},lC:function(){return a},py:function(){return s},rW:function(){return o},s:function(){return d},ve:function(){return l},vq:function(){return u}});var r=n(27974);function 
o(e,t,n){return{r:255*(0,r.sh)(e,255),g:255*(0,r.sh)(t,255),b:255*(0,r.sh)(n,255)}}function a(e,t,n){var o=Math.max(e=(0,r.sh)(e,255),t=(0,r.sh)(t,255),n=(0,r.sh)(n,255)),a=Math.min(e,t,n),i=0,l=0,s=(o+a)/2;if(o===a)l=0,i=0;else{var c=o-a;switch(l=s>.5?c/(2-o-a):c/(o+a),o){case e:i=(t-n)/c+(t1&&(n-=1),n<1/6)?e+6*n*(t-e):n<.5?t:n<2/3?e+(t-e)*(2/3-n)*6:e}function l(e,t,n){if(e=(0,r.sh)(e,360),t=(0,r.sh)(t,100),n=(0,r.sh)(n,100),0===t)a=n,l=n,o=n;else{var o,a,l,s=n<.5?n*(1+t):n+t-n*t,c=2*n-s;o=i(c,s,e+1/3),a=i(c,s,e),l=i(c,s,e-1/3)}return{r:255*o,g:255*a,b:255*l}}function s(e,t,n){var o=Math.max(e=(0,r.sh)(e,255),t=(0,r.sh)(t,255),n=(0,r.sh)(n,255)),a=Math.min(e,t,n),i=0,l=o-a;if(o===a)i=0;else{switch(o){case e:i=(t-n)/l+(t>16,g:(65280&e)>>8,b:255&e}}},6564:function(e,t,n){n.d(t,{R:function(){return r}});var r={aliceblue:"#f0f8ff",antiquewhite:"#faebd7",aqua:"#00ffff",aquamarine:"#7fffd4",azure:"#f0ffff",beige:"#f5f5dc",bisque:"#ffe4c4",black:"#000000",blanchedalmond:"#ffebcd",blue:"#0000ff",blueviolet:"#8a2be2",brown:"#a52a2a",burlywood:"#deb887",cadetblue:"#5f9ea0",chartreuse:"#7fff00",chocolate:"#d2691e",coral:"#ff7f50",cornflowerblue:"#6495ed",cornsilk:"#fff8dc",crimson:"#dc143c",cyan:"#00ffff",darkblue:"#00008b",darkcyan:"#008b8b",darkgoldenrod:"#b8860b",darkgray:"#a9a9a9",darkgreen:"#006400",darkgrey:"#a9a9a9",darkkhaki:"#bdb76b",darkmagenta:"#8b008b",darkolivegreen:"#556b2f",darkorange:"#ff8c00",darkorchid:"#9932cc",darkred:"#8b0000",darksalmon:"#e9967a",darkseagreen:"#8fbc8f",darkslateblue:"#483d8b",darkslategray:"#2f4f4f",darkslategrey:"#2f4f4f",darkturquoise:"#00ced1",darkviolet:"#9400d3",deeppink:"#ff1493",deepskyblue:"#00bfff",dimgray:"#696969",dimgrey:"#696969",dodgerblue:"#1e90ff",firebrick:"#b22222",floralwhite:"#fffaf0",forestgreen:"#228b22",fuchsia:"#ff00ff",gainsboro:"#dcdcdc",ghostwhite:"#f8f8ff",goldenrod:"#daa520",gold:"#ffd700",gray:"#808080",green:"#008000",greenyellow:"#adff2f",grey:"#808080",honeydew:"#f0fff0",hotpink:"#ff69b4",indianred:"#cd5c
5c",indigo:"#4b0082",ivory:"#fffff0",khaki:"#f0e68c",lavenderblush:"#fff0f5",lavender:"#e6e6fa",lawngreen:"#7cfc00",lemonchiffon:"#fffacd",lightblue:"#add8e6",lightcoral:"#f08080",lightcyan:"#e0ffff",lightgoldenrodyellow:"#fafad2",lightgray:"#d3d3d3",lightgreen:"#90ee90",lightgrey:"#d3d3d3",lightpink:"#ffb6c1",lightsalmon:"#ffa07a",lightseagreen:"#20b2aa",lightskyblue:"#87cefa",lightslategray:"#778899",lightslategrey:"#778899",lightsteelblue:"#b0c4de",lightyellow:"#ffffe0",lime:"#00ff00",limegreen:"#32cd32",linen:"#faf0e6",magenta:"#ff00ff",maroon:"#800000",mediumaquamarine:"#66cdaa",mediumblue:"#0000cd",mediumorchid:"#ba55d3",mediumpurple:"#9370db",mediumseagreen:"#3cb371",mediumslateblue:"#7b68ee",mediumspringgreen:"#00fa9a",mediumturquoise:"#48d1cc",mediumvioletred:"#c71585",midnightblue:"#191970",mintcream:"#f5fffa",mistyrose:"#ffe4e1",moccasin:"#ffe4b5",navajowhite:"#ffdead",navy:"#000080",oldlace:"#fdf5e6",olive:"#808000",olivedrab:"#6b8e23",orange:"#ffa500",orangered:"#ff4500",orchid:"#da70d6",palegoldenrod:"#eee8aa",palegreen:"#98fb98",paleturquoise:"#afeeee",palevioletred:"#db7093",papayawhip:"#ffefd5",peachpuff:"#ffdab9",peru:"#cd853f",pink:"#ffc0cb",plum:"#dda0dd",powderblue:"#b0e0e6",purple:"#800080",rebeccapurple:"#663399",red:"#ff0000",rosybrown:"#bc8f8f",royalblue:"#4169e1",saddlebrown:"#8b4513",salmon:"#fa8072",sandybrown:"#f4a460",seagreen:"#2e8b57",seashell:"#fff5ee",sienna:"#a0522d",silver:"#c0c0c0",skyblue:"#87ceeb",slateblue:"#6a5acd",slategray:"#708090",slategrey:"#708090",snow:"#fffafa",springgreen:"#00ff7f",steelblue:"#4682b4",tan:"#d2b48c",teal:"#008080",thistle:"#d8bfd8",tomato:"#ff6347",turquoise:"#40e0d0",violet:"#ee82ee",wheat:"#f5deb3",white:"#ffffff",whitesmoke:"#f5f5f5",yellow:"#ffff00",yellowgreen:"#9acd32"}},76991:function(e,t,n){n.d(t,{uA:function(){return i}});var r=n(41785),o=n(6564),a=n(27974);function i(e){var t={r:0,g:0,b:0},n=1,i=null,l=null,s=null,c=!1,p=!1;return"string"==typeof 
e&&(e=function(e){if(0===(e=e.trim().toLowerCase()).length)return!1;var t=!1;if(o.R[e])e=o.R[e],t=!0;else if("transparent"===e)return{r:0,g:0,b:0,a:0,format:"name"};var n=u.rgb.exec(e);return n?{r:n[1],g:n[2],b:n[3]}:(n=u.rgba.exec(e))?{r:n[1],g:n[2],b:n[3],a:n[4]}:(n=u.hsl.exec(e))?{h:n[1],s:n[2],l:n[3]}:(n=u.hsla.exec(e))?{h:n[1],s:n[2],l:n[3],a:n[4]}:(n=u.hsv.exec(e))?{h:n[1],s:n[2],v:n[3]}:(n=u.hsva.exec(e))?{h:n[1],s:n[2],v:n[3],a:n[4]}:(n=u.hex8.exec(e))?{r:(0,r.VD)(n[1]),g:(0,r.VD)(n[2]),b:(0,r.VD)(n[3]),a:(0,r.T6)(n[4]),format:t?"name":"hex8"}:(n=u.hex6.exec(e))?{r:(0,r.VD)(n[1]),g:(0,r.VD)(n[2]),b:(0,r.VD)(n[3]),format:t?"name":"hex"}:(n=u.hex4.exec(e))?{r:(0,r.VD)(n[1]+n[1]),g:(0,r.VD)(n[2]+n[2]),b:(0,r.VD)(n[3]+n[3]),a:(0,r.T6)(n[4]+n[4]),format:t?"name":"hex8"}:!!(n=u.hex3.exec(e))&&{r:(0,r.VD)(n[1]+n[1]),g:(0,r.VD)(n[2]+n[2]),b:(0,r.VD)(n[3]+n[3]),format:t?"name":"hex"}}(e)),"object"==typeof e&&(d(e.r)&&d(e.g)&&d(e.b)?(t=(0,r.rW)(e.r,e.g,e.b),c=!0,p="%"===String(e.r).substr(-1)?"prgb":"rgb"):d(e.h)&&d(e.s)&&d(e.v)?(i=(0,a.JX)(e.s),l=(0,a.JX)(e.v),t=(0,r.WE)(e.h,i,l),c=!0,p="hsv"):d(e.h)&&d(e.s)&&d(e.l)&&(i=(0,a.JX)(e.s),s=(0,a.JX)(e.l),t=(0,r.ve)(e.h,i,s),c=!0,p="hsl"),Object.prototype.hasOwnProperty.call(e,"a")&&(n=e.a)),n=(0,a.Yq)(n),{ok:c,format:e.format||p,r:Math.min(255,Math.max(t.r,0)),g:Math.min(255,Math.max(t.g,0)),b:Math.min(255,Math.max(t.b,0)),a:n}}var l="(?:".concat("[-\\+]?\\d*\\.\\d+%?",")|(?:").concat("[-\\+]?\\d+%?",")"),s="[\\s|\\(]+(".concat(l,")[,|\\s]+(").concat(l,")[,|\\s]+(").concat(l,")\\s*\\)?"),c="[\\s|\\(]+(".concat(l,")[,|\\s]+(").concat(l,")[,|\\s]+(").concat(l,")[,|\\s]+(").concat(l,")\\s*\\)?"),u={CSS_UNIT:new 
RegExp(l),rgb:RegExp("rgb"+s),rgba:RegExp("rgba"+c),hsl:RegExp("hsl"+s),hsla:RegExp("hsla"+c),hsv:RegExp("hsv"+s),hsva:RegExp("hsva"+c),hex3:/^#?([0-9a-fA-F]{1})([0-9a-fA-F]{1})([0-9a-fA-F]{1})$/,hex6:/^#?([0-9a-fA-F]{2})([0-9a-fA-F]{2})([0-9a-fA-F]{2})$/,hex4:/^#?([0-9a-fA-F]{1})([0-9a-fA-F]{1})([0-9a-fA-F]{1})([0-9a-fA-F]{1})$/,hex8:/^#?([0-9a-fA-F]{2})([0-9a-fA-F]{2})([0-9a-fA-F]{2})([0-9a-fA-F]{2})$/};function d(e){return!!u.CSS_UNIT.exec(String(e))}},6336:function(e,t,n){n.d(t,{C:function(){return l}});var r=n(41785),o=n(6564),a=n(76991),i=n(27974),l=function(){function e(t,n){if(void 0===t&&(t=""),void 0===n&&(n={}),t instanceof e)return t;"number"==typeof t&&(t=(0,r.Yt)(t)),this.originalInput=t;var o,i=(0,a.uA)(t);this.originalInput=t,this.r=i.r,this.g=i.g,this.b=i.b,this.a=i.a,this.roundA=Math.round(100*this.a)/100,this.format=null!==(o=n.format)&&void 0!==o?o:i.format,this.gradientType=n.gradientType,this.r<1&&(this.r=Math.round(this.r)),this.g<1&&(this.g=Math.round(this.g)),this.b<1&&(this.b=Math.round(this.b)),this.isValid=i.ok}return e.prototype.isDark=function(){return 128>this.getBrightness()},e.prototype.isLight=function(){return!this.isDark()},e.prototype.getBrightness=function(){var e=this.toRgb();return(299*e.r+587*e.g+114*e.b)/1e3},e.prototype.getLuminance=function(){var e=this.toRgb(),t=e.r/255,n=e.g/255,r=e.b/255;return .2126*(t<=.03928?t/12.92:Math.pow((t+.055)/1.055,2.4))+.7152*(n<=.03928?n/12.92:Math.pow((n+.055)/1.055,2.4))+.0722*(r<=.03928?r/12.92:Math.pow((r+.055)/1.055,2.4))},e.prototype.getAlpha=function(){return this.a},e.prototype.setAlpha=function(e){return this.a=(0,i.Yq)(e),this.roundA=Math.round(100*this.a)/100,this},e.prototype.isMonochrome=function(){return 0===this.toHsl().s},e.prototype.toHsv=function(){var e=(0,r.py)(this.r,this.g,this.b);return{h:360*e.h,s:e.s,v:e.v,a:this.a}},e.prototype.toHsvString=function(){var e=(0,r.py)(this.r,this.g,this.b),t=Math.round(360*e.h),n=Math.round(100*e.s),o=Math.round(100*e.v);return 
1===this.a?"hsv(".concat(t,", ").concat(n,"%, ").concat(o,"%)"):"hsva(".concat(t,", ").concat(n,"%, ").concat(o,"%, ").concat(this.roundA,")")},e.prototype.toHsl=function(){var e=(0,r.lC)(this.r,this.g,this.b);return{h:360*e.h,s:e.s,l:e.l,a:this.a}},e.prototype.toHslString=function(){var e=(0,r.lC)(this.r,this.g,this.b),t=Math.round(360*e.h),n=Math.round(100*e.s),o=Math.round(100*e.l);return 1===this.a?"hsl(".concat(t,", ").concat(n,"%, ").concat(o,"%)"):"hsla(".concat(t,", ").concat(n,"%, ").concat(o,"%, ").concat(this.roundA,")")},e.prototype.toHex=function(e){return void 0===e&&(e=!1),(0,r.vq)(this.r,this.g,this.b,e)},e.prototype.toHexString=function(e){return void 0===e&&(e=!1),"#"+this.toHex(e)},e.prototype.toHex8=function(e){return void 0===e&&(e=!1),(0,r.s)(this.r,this.g,this.b,this.a,e)},e.prototype.toHex8String=function(e){return void 0===e&&(e=!1),"#"+this.toHex8(e)},e.prototype.toHexShortString=function(e){return void 0===e&&(e=!1),1===this.a?this.toHexString(e):this.toHex8String(e)},e.prototype.toRgb=function(){return{r:Math.round(this.r),g:Math.round(this.g),b:Math.round(this.b),a:this.a}},e.prototype.toRgbString=function(){var e=Math.round(this.r),t=Math.round(this.g),n=Math.round(this.b);return 1===this.a?"rgb(".concat(e,", ").concat(t,", ").concat(n,")"):"rgba(".concat(e,", ").concat(t,", ").concat(n,", ").concat(this.roundA,")")},e.prototype.toPercentageRgb=function(){var e=function(e){return"".concat(Math.round(100*(0,i.sh)(e,255)),"%")};return{r:e(this.r),g:e(this.g),b:e(this.b),a:this.a}},e.prototype.toPercentageRgbString=function(){var e=function(e){return Math.round(100*(0,i.sh)(e,255))};return 1===this.a?"rgb(".concat(e(this.r),"%, ").concat(e(this.g),"%, ").concat(e(this.b),"%)"):"rgba(".concat(e(this.r),"%, ").concat(e(this.g),"%, ").concat(e(this.b),"%, ").concat(this.roundA,")")},e.prototype.toName=function(){if(0===this.a)return"transparent";if(this.a<1)return!1;for(var 
e="#"+(0,r.vq)(this.r,this.g,this.b,!1),t=0,n=Object.entries(o.R);t=0;return!t&&r&&(e.startsWith("hex")||"name"===e)?"name"===e&&0===this.a?this.toName():this.toRgbString():("rgb"===e&&(n=this.toRgbString()),"prgb"===e&&(n=this.toPercentageRgbString()),("hex"===e||"hex6"===e)&&(n=this.toHexString()),"hex3"===e&&(n=this.toHexString(!0)),"hex4"===e&&(n=this.toHex8String(!0)),"hex8"===e&&(n=this.toHex8String()),"name"===e&&(n=this.toName()),"hsl"===e&&(n=this.toHslString()),"hsv"===e&&(n=this.toHsvString()),n||this.toHexString())},e.prototype.toNumber=function(){return(Math.round(this.r)<<16)+(Math.round(this.g)<<8)+Math.round(this.b)},e.prototype.clone=function(){return new e(this.toString())},e.prototype.lighten=function(t){void 0===t&&(t=10);var n=this.toHsl();return n.l+=t/100,n.l=(0,i.V2)(n.l),new e(n)},e.prototype.brighten=function(t){void 0===t&&(t=10);var n=this.toRgb();return n.r=Math.max(0,Math.min(255,n.r-Math.round(-(t/100*255)))),n.g=Math.max(0,Math.min(255,n.g-Math.round(-(t/100*255)))),n.b=Math.max(0,Math.min(255,n.b-Math.round(-(t/100*255)))),new e(n)},e.prototype.darken=function(t){void 0===t&&(t=10);var n=this.toHsl();return n.l-=t/100,n.l=(0,i.V2)(n.l),new e(n)},e.prototype.tint=function(e){return void 0===e&&(e=10),this.mix("white",e)},e.prototype.shade=function(e){return void 0===e&&(e=10),this.mix("black",e)},e.prototype.desaturate=function(t){void 0===t&&(t=10);var n=this.toHsl();return n.s-=t/100,n.s=(0,i.V2)(n.s),new e(n)},e.prototype.saturate=function(t){void 0===t&&(t=10);var n=this.toHsl();return n.s+=t/100,n.s=(0,i.V2)(n.s),new e(n)},e.prototype.greyscale=function(){return this.desaturate(100)},e.prototype.spin=function(t){var n=this.toHsl(),r=(n.h+t)%360;return n.h=r<0?360+r:r,new e(n)},e.prototype.mix=function(t,n){void 0===n&&(n=50);var r=this.toRgb(),o=new e(t).toRgb(),a=n/100;return new e({r:(o.r-r.r)*a+r.r,g:(o.g-r.g)*a+r.g,b:(o.b-r.b)*a+r.b,a:(o.a-r.a)*a+r.a})},e.prototype.analogous=function(t,n){void 0===t&&(t=6),void 
0===n&&(n=30);var r=this.toHsl(),o=360/n,a=[this];for(r.h=(r.h-(o*t>>1)+720)%360;--t;)r.h=(r.h+o)%360,a.push(new e(r));return a},e.prototype.complement=function(){var t=this.toHsl();return t.h=(t.h+180)%360,new e(t)},e.prototype.monochromatic=function(t){void 0===t&&(t=6);for(var n=this.toHsv(),r=n.h,o=n.s,a=n.v,i=[],l=1/t;t--;)i.push(new e({h:r,s:o,v:a})),a=(a+l)%1;return i},e.prototype.splitcomplement=function(){var t=this.toHsl(),n=t.h;return[this,new e({h:(n+72)%360,s:t.s,l:t.l}),new e({h:(n+216)%360,s:t.s,l:t.l})]},e.prototype.onBackground=function(t){var n=this.toRgb(),r=new e(t).toRgb(),o=n.a+r.a*(1-n.a);return new e({r:(n.r*n.a+r.r*r.a*(1-n.a))/o,g:(n.g*n.a+r.g*r.a*(1-n.a))/o,b:(n.b*n.a+r.b*r.a*(1-n.a))/o,a:o})},e.prototype.triad=function(){return this.polyad(3)},e.prototype.tetrad=function(){return this.polyad(4)},e.prototype.polyad=function(t){for(var n=this.toHsl(),r=n.h,o=[this],a=360/t,i=1;iMath.abs(e-t))?1:e=360===t?(e<0?e%t+t:e%t)/parseFloat(String(t)):e%t/parseFloat(String(t))}function o(e){return Math.min(1,Math.max(0,e))}function a(e){return(isNaN(e=parseFloat(e))||e<0||e>1)&&(e=1),e}function i(e){return e<=1?"".concat(100*Number(e),"%"):e}function l(e){return 1===e.length?"0"+e:String(e)}n.d(t,{FZ:function(){return l},JX:function(){return i},V2:function(){return o},Yq:function(){return a},sh:function(){return r}})},88804:function(e,t,n){n.d(t,{Z:function(){return y}});var r,o=n(80406),a=n(64090),i=n(89542),l=n(22127);n(53850);var s=n(74084),c=a.createContext(null),u=n(63787),d=n(24800),p=[],f=n(24050);function m(e){var t=e.match(/^(.*)px$/),n=Number(null==t?void 0:t[1]);return Number.isNaN(n)?function(e){if("undefined"==typeof document)return 0;if(void 0===r){var t=document.createElement("div");t.style.width="100%",t.style.height="200px";var 
n=document.createElement("div"),o=n.style;o.position="absolute",o.top="0",o.left="0",o.pointerEvents="none",o.visibility="hidden",o.width="200px",o.height="150px",o.overflow="hidden",n.appendChild(t),document.body.appendChild(n);var a=t.offsetWidth;n.style.overflow="scroll";var i=t.offsetWidth;a===i&&(i=n.clientWidth),document.body.removeChild(n),r=a-i}return r}():n}var g="rc-util-locker-".concat(Date.now()),h=0,b=!1,v=function(e){return!1!==e&&((0,l.Z)()&&e?"string"==typeof e?document.querySelector(e):"function"==typeof e?e():e:null)},y=a.forwardRef(function(e,t){var n,r,y,E,S=e.open,w=e.autoLock,x=e.getContainer,O=(e.debug,e.autoDestroy),C=void 0===O||O,T=e.children,k=a.useState(S),A=(0,o.Z)(k,2),I=A[0],R=A[1],N=I||S;a.useEffect(function(){(C||S)&&R(S)},[S,C]);var _=a.useState(function(){return v(x)}),P=(0,o.Z)(_,2),M=P[0],L=P[1];a.useEffect(function(){var e=v(x);L(null!=e?e:null)});var D=function(e,t){var n=a.useState(function(){return(0,l.Z)()?document.createElement("div"):null}),r=(0,o.Z)(n,1)[0],i=a.useRef(!1),s=a.useContext(c),f=a.useState(p),m=(0,o.Z)(f,2),g=m[0],h=m[1],b=s||(i.current?void 0:function(e){h(function(t){return[e].concat((0,u.Z)(t))})});function v(){r.parentElement||document.body.appendChild(r),i.current=!0}function y(){var e;null===(e=r.parentElement)||void 0===e||e.removeChild(r),i.current=!1}return(0,d.Z)(function(){return e?s?s(v):v():y(),y},[e]),(0,d.Z)(function(){g.length&&(g.forEach(function(e){return e()}),h(p))},[g]),[r,b]}(N&&!M,0),j=(0,o.Z)(D,2),F=j[0],B=j[1],U=null!=M?M:F;n=!!(w&&S&&(0,l.Z)()&&(U===F||U===document.body)),r=a.useState(function(){return h+=1,"".concat(g,"_").concat(h)}),y=(0,o.Z)(r,1)[0],(0,d.Z)(function(){if(n){var e=function(e){if("undefined"==typeof document||!e||!(e instanceof Element))return{width:0,height:0};var 
t=getComputedStyle(e,"::-webkit-scrollbar"),n=t.width,r=t.height;return{width:m(n),height:m(r)}}(document.body).width,t=document.body.scrollHeight>(window.innerHeight||document.documentElement.clientHeight)&&window.innerWidth>document.body.offsetWidth;(0,f.hq)("\nhtml body {\n overflow-y: hidden;\n ".concat(t?"width: calc(100% - ".concat(e,"px);"):"","\n}"),y)}else(0,f.jL)(y);return function(){(0,f.jL)(y)}},[n,y]);var Z=null;T&&(0,s.Yr)(T)&&t&&(Z=T.ref);var z=(0,s.x1)(Z,t);if(!N||!(0,l.Z)()||void 0===M)return null;var H=!1===U||("boolean"==typeof E&&(b=E),b),G=T;return t&&(G=a.cloneElement(T,{ref:z})),a.createElement(c.Provider,{value:B},H?G:(0,i.createPortal)(G,U))})},44101:function(e,t,n){n.d(t,{Z:function(){return z}});var r=n(5239),o=n(80406),a=n(6787),i=n(88804),l=n(16480),s=n.n(l),c=n(46505),u=n(97472),d=n(74687),p=n(54811),f=n(91010),m=n(24800),g=n(76158),h=n(64090),b=n(14749),v=n(49367),y=n(74084);function E(e){var t=e.prefixCls,n=e.align,r=e.arrow,o=e.arrowPos,a=r||{},i=a.className,l=a.content,c=o.x,u=o.y,d=h.useRef();if(!n||!n.points)return null;var p={position:"absolute"};if(!1!==n.autoArrow){var f=n.points[0],m=n.points[1],g=f[0],b=f[1],v=m[0],y=m[1];g!==v&&["t","b"].includes(g)?"t"===g?p.top=0:p.bottom=0:p.top=void 0===u?0:u,b!==y&&["l","r"].includes(b)?"l"===b?p.left=0:p.right=0:p.left=void 0===c?0:c}return h.createElement("div",{ref:d,className:s()("".concat(t,"-arrow"),i),style:p},l)}function S(e){var t=e.prefixCls,n=e.open,r=e.zIndex,o=e.mask,a=e.motion;return o?h.createElement(v.ZP,(0,b.Z)({},a,{motionAppear:!0,visible:n,removeOnLeave:!0}),function(e){var n=e.className;return h.createElement("div",{style:{zIndex:r},className:s()("".concat(t,"-mask"),n)})}):null}var w=h.memo(function(e){return e.children},function(e,t){return t.cache}),x=h.forwardRef(function(e,t){var 
n=e.popup,a=e.className,i=e.prefixCls,l=e.style,u=e.target,d=e.onVisibleChanged,p=e.open,f=e.keepDom,g=e.fresh,x=e.onClick,O=e.mask,C=e.arrow,T=e.arrowPos,k=e.align,A=e.motion,I=e.maskMotion,R=e.forceRender,N=e.getPopupContainer,_=e.autoDestroy,P=e.portal,M=e.zIndex,L=e.onMouseEnter,D=e.onMouseLeave,j=e.onPointerEnter,F=e.ready,B=e.offsetX,U=e.offsetY,Z=e.offsetR,z=e.offsetB,H=e.onAlign,G=e.onPrepare,$=e.stretch,W=e.targetWidth,V=e.targetHeight,q="function"==typeof n?n():n,Y=p||f,K=(null==N?void 0:N.length)>0,X=h.useState(!N||!K),Q=(0,o.Z)(X,2),J=Q[0],ee=Q[1];if((0,m.Z)(function(){!J&&K&&u&&ee(!0)},[J,K,u]),!J)return null;var et="auto",en={left:"-1000vw",top:"-1000vh",right:et,bottom:et};if(F||!p){var er,eo=k.points,ea=k.dynamicInset||(null===(er=k._experimental)||void 0===er?void 0:er.dynamicInset),ei=ea&&"r"===eo[0][1],el=ea&&"b"===eo[0][0];ei?(en.right=Z,en.left=et):(en.left=B,en.right=et),el?(en.bottom=z,en.top=et):(en.top=U,en.bottom=et)}var es={};return $&&($.includes("height")&&V?es.height=V:$.includes("minHeight")&&V&&(es.minHeight=V),$.includes("width")&&W?es.width=W:$.includes("minWidth")&&W&&(es.minWidth=W)),p||(es.pointerEvents="none"),h.createElement(P,{open:R||Y,getContainer:N&&function(){return N(u)},autoDestroy:_},h.createElement(S,{prefixCls:i,open:p,zIndex:M,mask:O,motion:I}),h.createElement(c.Z,{onResize:H,disabled:!p},function(e){return h.createElement(v.ZP,(0,b.Z)({motionAppear:!0,motionEnter:!0,motionLeave:!0,removeOnLeave:!1,forceRender:R,leavedClassName:"".concat(i,"-hidden")},A,{onAppearPrepare:G,onEnterPrepare:G,visible:p,onVisibleChanged:function(e){var t;null==A||null===(t=A.onVisibleChanged)||void 0===t||t.call(A,e),d(e)}}),function(n,o){var c=n.className,u=n.style,d=s()(i,c,a);return 
h.createElement("div",{ref:(0,y.sQ)(e,t,o),className:d,style:(0,r.Z)((0,r.Z)((0,r.Z)((0,r.Z)({"--arrow-x":"".concat(T.x||0,"px"),"--arrow-y":"".concat(T.y||0,"px")},en),es),u),{},{boxSizing:"border-box",zIndex:M},l),onMouseEnter:L,onMouseLeave:D,onPointerEnter:j,onClick:x},C&&h.createElement(E,{prefixCls:i,arrow:C,arrowPos:T,align:k}),h.createElement(w,{cache:!p&&!g},q))})}))}),O=h.forwardRef(function(e,t){var n=e.children,r=e.getTriggerDOMNode,o=(0,y.Yr)(n),a=h.useCallback(function(e){(0,y.mH)(t,r?r(e):e)},[r]),i=(0,y.x1)(a,n.ref);return o?h.cloneElement(n,{ref:i}):n}),C=h.createContext(null);function T(e){return e?Array.isArray(e)?e:[e]:[]}var k=n(73193);function A(e,t,n,r){return t||(n?{motionName:"".concat(e,"-").concat(n)}:r?{motionName:r}:null)}function I(e){return e.ownerDocument.defaultView}function R(e){for(var t=[],n=null==e?void 0:e.parentElement,r=["hidden","scroll","clip","auto"];n;){var o=I(n).getComputedStyle(n);[o.overflowX,o.overflowY,o.overflow].some(function(e){return r.includes(e)})&&t.push(n),n=n.parentElement}return t}function N(e){var t=arguments.length>1&&void 0!==arguments[1]?arguments[1]:1;return Number.isNaN(e)?t:e}function _(e){return N(parseFloat(e),0)}function P(e,t){var n=(0,r.Z)({},e);return(t||[]).forEach(function(e){if(!(e instanceof HTMLBodyElement||e instanceof HTMLHtmlElement)){var t=I(e).getComputedStyle(e),r=t.overflow,o=t.overflowClipMargin,a=t.borderTopWidth,i=t.borderBottomWidth,l=t.borderLeftWidth,s=t.borderRightWidth,c=e.getBoundingClientRect(),u=e.offsetHeight,d=e.clientHeight,p=e.offsetWidth,f=e.clientWidth,m=_(a),g=_(i),h=_(l),b=_(s),v=N(Math.round(c.width/p*1e3)/1e3),y=N(Math.round(c.height/u*1e3)/1e3),E=m*y,S=h*v,w=0,x=0;if("clip"===r){var O=_(o);w=O*v,x=O*y}var C=c.x+S-w,T=c.y+E-x,k=C+c.width+2*w-S-b*v-(p-f-h-b)*v,A=T+c.height+2*x-E-g*y-(u-d-m-g)*y;n.left=Math.max(n.left,C),n.top=Math.max(n.top,T),n.right=Math.min(n.right,k),n.bottom=Math.min(n.bottom,A)}}),n}function M(e){var t=arguments.length>1&&void 
0!==arguments[1]?arguments[1]:0,n="".concat(t),r=n.match(/^(.*)\%$/);return r?parseFloat(r[1])/100*e:parseFloat(n)}function L(e,t){var n=(0,o.Z)(t||[],2),r=n[0],a=n[1];return[M(e.width,r),M(e.height,a)]}function D(){var e=arguments.length>0&&void 0!==arguments[0]?arguments[0]:"";return[e[0],e[1]]}function j(e,t){var n,r=t[0],o=t[1];return n="t"===r?e.y:"b"===r?e.y+e.height:e.y+e.height/2,{x:"l"===o?e.x:"r"===o?e.x+e.width:e.x+e.width/2,y:n}}function F(e,t){var n={t:"b",b:"t",l:"r",r:"l"};return e.map(function(e,r){return r===t?n[e]||"c":e}).join("")}var B=n(63787);n(53850);var U=n(19223),Z=["prefixCls","children","action","showAction","hideAction","popupVisible","defaultPopupVisible","onPopupVisibleChange","afterPopupVisibleChange","mouseEnterDelay","mouseLeaveDelay","focusDelay","blurDelay","mask","maskClosable","getPopupContainer","forceRender","autoDestroy","destroyPopupOnHide","popup","popupClassName","popupStyle","popupPlacement","builtinPlacements","popupAlign","zIndex","stretch","getPopupClassNameFromAlign","fresh","alignPoint","onPopupClick","onPopupAlign","arrow","popupMotion","maskMotion","popupTransitionName","popupAnimation","maskTransitionName","maskAnimation","className","getTriggerDOMNode"],z=function(){var e=arguments.length>0&&void 0!==arguments[0]?arguments[0]:i.Z;return h.forwardRef(function(t,n){var i,l,b,v,y,E,S,w,_,M,z,H,G,$,W,V,q,Y=t.prefixCls,K=void 0===Y?"rc-trigger-popup":Y,X=t.children,Q=t.action,J=t.showAction,ee=t.hideAction,et=t.popupVisible,en=t.defaultPopupVisible,er=t.onPopupVisibleChange,eo=t.afterPopupVisibleChange,ea=t.mouseEnterDelay,ei=t.mouseLeaveDelay,el=void 0===ei?.1:ei,es=t.focusDelay,ec=t.blurDelay,eu=t.mask,ed=t.maskClosable,ep=t.getPopupContainer,ef=t.forceRender,em=t.autoDestroy,eg=t.destroyPopupOnHide,eh=t.popup,eb=t.popupClassName,ev=t.popupStyle,ey=t.popupPlacement,eE=t.builtinPlacements,eS=void 
0===eE?{}:eE,ew=t.popupAlign,ex=t.zIndex,eO=t.stretch,eC=t.getPopupClassNameFromAlign,eT=t.fresh,ek=t.alignPoint,eA=t.onPopupClick,eI=t.onPopupAlign,eR=t.arrow,eN=t.popupMotion,e_=t.maskMotion,eP=t.popupTransitionName,eM=t.popupAnimation,eL=t.maskTransitionName,eD=t.maskAnimation,ej=t.className,eF=t.getTriggerDOMNode,eB=(0,a.Z)(t,Z),eU=h.useState(!1),eZ=(0,o.Z)(eU,2),ez=eZ[0],eH=eZ[1];(0,m.Z)(function(){eH((0,g.Z)())},[]);var eG=h.useRef({}),e$=h.useContext(C),eW=h.useMemo(function(){return{registerSubPopup:function(e,t){eG.current[e]=t,null==e$||e$.registerSubPopup(e,t)}}},[e$]),eV=(0,f.Z)(),eq=h.useState(null),eY=(0,o.Z)(eq,2),eK=eY[0],eX=eY[1],eQ=(0,p.Z)(function(e){(0,u.S)(e)&&eK!==e&&eX(e),null==e$||e$.registerSubPopup(eV,e)}),eJ=h.useState(null),e0=(0,o.Z)(eJ,2),e1=e0[0],e2=e0[1],e4=h.useRef(null),e3=(0,p.Z)(function(e){(0,u.S)(e)&&e1!==e&&(e2(e),e4.current=e)}),e6=h.Children.only(X),e5=(null==e6?void 0:e6.props)||{},e8={},e9=(0,p.Z)(function(e){var t,n;return(null==e1?void 0:e1.contains(e))||(null===(t=(0,d.A)(e1))||void 0===t?void 0:t.host)===e||e===e1||(null==eK?void 0:eK.contains(e))||(null===(n=(0,d.A)(eK))||void 0===n?void 0:n.host)===e||e===eK||Object.values(eG.current).some(function(t){return(null==t?void 0:t.contains(e))||e===t})}),e7=A(K,eN,eM,eP),te=A(K,e_,eD,eL),tt=h.useState(en||!1),tn=(0,o.Z)(tt,2),tr=tn[0],to=tn[1],ta=null!=et?et:tr,ti=(0,p.Z)(function(e){void 0===et&&to(e)});(0,m.Z)(function(){to(et||!1)},[et]);var tl=h.useRef(ta);tl.current=ta;var ts=h.useRef([]);ts.current=[];var tc=(0,p.Z)(function(e){var t;ti(e),(null!==(t=ts.current[ts.current.length-1])&&void 0!==t?t:ta)!==e&&(ts.current.push(e),null==er||er(e))}),tu=h.useRef(),td=function(){clearTimeout(tu.current)},tp=function(e){var t=arguments.length>1&&void 0!==arguments[1]?arguments[1]:0;td(),0===t?tc(e):tu.current=setTimeout(function(){tc(e)},1e3*t)};h.useEffect(function(){return td},[]);var 
tf=h.useState(!1),tm=(0,o.Z)(tf,2),tg=tm[0],th=tm[1];(0,m.Z)(function(e){(!e||ta)&&th(!0)},[ta]);var tb=h.useState(null),tv=(0,o.Z)(tb,2),ty=tv[0],tE=tv[1],tS=h.useState([0,0]),tw=(0,o.Z)(tS,2),tx=tw[0],tO=tw[1],tC=function(e){tO([e.clientX,e.clientY])},tT=(i=ek?tx:e1,l=h.useState({ready:!1,offsetX:0,offsetY:0,offsetR:0,offsetB:0,arrowX:0,arrowY:0,scaleX:1,scaleY:1,align:eS[ey]||{}}),v=(b=(0,o.Z)(l,2))[0],y=b[1],E=h.useRef(0),S=h.useMemo(function(){return eK?R(eK):[]},[eK]),w=h.useRef({}),ta||(w.current={}),_=(0,p.Z)(function(){if(eK&&i&&ta){var e,t,n,a,l,s,c,d=eK.ownerDocument,p=I(eK).getComputedStyle(eK),f=p.width,m=p.height,g=p.position,h=eK.style.left,b=eK.style.top,v=eK.style.right,E=eK.style.bottom,x=eK.style.overflow,O=(0,r.Z)((0,r.Z)({},eS[ey]),ew),C=d.createElement("div");if(null===(e=eK.parentElement)||void 0===e||e.appendChild(C),C.style.left="".concat(eK.offsetLeft,"px"),C.style.top="".concat(eK.offsetTop,"px"),C.style.position=g,C.style.height="".concat(eK.offsetHeight,"px"),C.style.width="".concat(eK.offsetWidth,"px"),eK.style.left="0",eK.style.top="0",eK.style.right="auto",eK.style.bottom="auto",eK.style.overflow="hidden",Array.isArray(i))n={x:i[0],y:i[1],width:0,height:0};else{var T=i.getBoundingClientRect();n={x:T.x,y:T.y,width:T.width,height:T.height}}var A=eK.getBoundingClientRect(),R=d.documentElement,_=R.clientWidth,M=R.clientHeight,B=R.scrollWidth,U=R.scrollHeight,Z=R.scrollTop,z=R.scrollLeft,H=A.height,G=A.width,$=n.height,W=n.width,V=O.htmlRegion,q="visible",Y="visibleFirst";"scroll"!==V&&V!==Y&&(V=q);var K=V===Y,X=P({left:-z,top:-Z,right:B-z,bottom:U-Z},S),Q=P({left:0,top:0,right:_,bottom:M},S),J=V===q?Q:X,ee=K?Q:J;eK.style.left="auto",eK.style.top="auto",eK.style.right="0",eK.style.bottom="0";var et=eK.getBoundingClientRect();eK.style.left=h,eK.style.top=b,eK.style.right=v,eK.style.bottom=E,eK.style.overflow=x,null===(t=eK.parentElement)||void 0===t||t.removeChild(C);var 
en=N(Math.round(G/parseFloat(f)*1e3)/1e3),er=N(Math.round(H/parseFloat(m)*1e3)/1e3);if(!(0===en||0===er||(0,u.S)(i)&&!(0,k.Z)(i))){var eo=O.offset,ea=O.targetOffset,ei=L(A,eo),el=(0,o.Z)(ei,2),es=el[0],ec=el[1],eu=L(n,ea),ed=(0,o.Z)(eu,2),ep=ed[0],ef=ed[1];n.x-=ep,n.y-=ef;var em=O.points||[],eg=(0,o.Z)(em,2),eh=eg[0],eb=D(eg[1]),ev=D(eh),eE=j(n,eb),ex=j(A,ev),eO=(0,r.Z)({},O),eC=eE.x-ex.x+es,eT=eE.y-ex.y+ec,ek=tt(eC,eT),eA=tt(eC,eT,Q),eR=j(n,["t","l"]),eN=j(A,["t","l"]),e_=j(n,["b","r"]),eP=j(A,["b","r"]),eM=O.overflow||{},eL=eM.adjustX,eD=eM.adjustY,ej=eM.shiftX,eF=eM.shiftY,eB=function(e){return"boolean"==typeof e?e:e>=0};tn();var eU=eB(eD),eZ=ev[0]===eb[0];if(eU&&"t"===ev[0]&&(l>ee.bottom||w.current.bt)){var ez=eT;eZ?ez-=H-$:ez=eR.y-eP.y-ec;var eH=tt(eC,ez),eG=tt(eC,ez,Q);eH>ek||eH===ek&&(!K||eG>=eA)?(w.current.bt=!0,eT=ez,ec=-ec,eO.points=[F(ev,0),F(eb,0)]):w.current.bt=!1}if(eU&&"b"===ev[0]&&(aek||eW===ek&&(!K||eV>=eA)?(w.current.tb=!0,eT=e$,ec=-ec,eO.points=[F(ev,0),F(eb,0)]):w.current.tb=!1}var eq=eB(eL),eY=ev[1]===eb[1];if(eq&&"l"===ev[1]&&(c>ee.right||w.current.rl)){var eX=eC;eY?eX-=G-W:eX=eR.x-eP.x-es;var eQ=tt(eX,eT),eJ=tt(eX,eT,Q);eQ>ek||eQ===ek&&(!K||eJ>=eA)?(w.current.rl=!0,eC=eX,es=-es,eO.points=[F(ev,1),F(eb,1)]):w.current.rl=!1}if(eq&&"r"===ev[1]&&(sek||e1===ek&&(!K||e2>=eA)?(w.current.lr=!0,eC=e0,es=-es,eO.points=[F(ev,1),F(eb,1)]):w.current.lr=!1}tn();var e4=!0===ej?0:ej;"number"==typeof e4&&(sQ.right&&(eC-=c-Q.right-es,n.x>Q.right-e4&&(eC+=n.x-Q.right+e4)));var e3=!0===eF?0:eF;"number"==typeof e3&&(aQ.bottom&&(eT-=l-Q.bottom-ec,n.y>Q.bottom-e3&&(eT+=n.y-Q.bottom+e3)));var e6=A.x+eC,e5=A.y+eT,e8=n.x,e9=n.y;null==eI||eI(eK,eO);var e7=et.right-A.x-(eC+A.width),te=et.bottom-A.y-(eT+A.height);y({ready:!0,offsetX:eC/en,offsetY:eT/er,offsetR:e7/en,offsetB:te/er,arrowX:((Math.max(e6,e8)+Math.min(e6+G,e8+W))/2-e6)/en,arrowY:((Math.max(e5,e9)+Math.min(e5+H,e9+$))/2-e5)/er,scaleX:en,scaleY:er,align:eO})}function tt(e,t){var n=arguments.length>2&&void 
0!==arguments[2]?arguments[2]:J,r=A.x+e,o=A.y+t,a=Math.max(r,n.left),i=Math.max(o,n.top);return Math.max(0,(Math.min(r+G,n.right)-a)*(Math.min(o+H,n.bottom)-i))}function tn(){l=(a=A.y+eT)+H,c=(s=A.x+eC)+G}}}),M=function(){y(function(e){return(0,r.Z)((0,r.Z)({},e),{},{ready:!1})})},(0,m.Z)(M,[ey]),(0,m.Z)(function(){ta||M()},[ta]),[v.ready,v.offsetX,v.offsetY,v.offsetR,v.offsetB,v.arrowX,v.arrowY,v.scaleX,v.scaleY,v.align,function(){E.current+=1;var e=E.current;Promise.resolve().then(function(){E.current===e&&_()})}]),tk=(0,o.Z)(tT,11),tA=tk[0],tI=tk[1],tR=tk[2],tN=tk[3],t_=tk[4],tP=tk[5],tM=tk[6],tL=tk[7],tD=tk[8],tj=tk[9],tF=tk[10],tB=(z=void 0===Q?"hover":Q,h.useMemo(function(){var e=T(null!=J?J:z),t=T(null!=ee?ee:z),n=new Set(e),r=new Set(t);return ez&&(n.has("hover")&&(n.delete("hover"),n.add("click")),r.has("hover")&&(r.delete("hover"),r.add("click"))),[n,r]},[ez,z,J,ee])),tU=(0,o.Z)(tB,2),tZ=tU[0],tz=tU[1],tH=tZ.has("click"),tG=tz.has("click")||tz.has("contextMenu"),t$=(0,p.Z)(function(){tg||tF()});H=function(){tl.current&&ek&&tG&&tp(!1)},(0,m.Z)(function(){if(ta&&e1&&eK){var e=R(e1),t=R(eK),n=I(eK),r=new Set([n].concat((0,B.Z)(e),(0,B.Z)(t)));function o(){t$(),H()}return r.forEach(function(e){e.addEventListener("scroll",o,{passive:!0})}),n.addEventListener("resize",o,{passive:!0}),t$(),function(){r.forEach(function(e){e.removeEventListener("scroll",o),n.removeEventListener("resize",o)})}}},[ta,e1,eK]),(0,m.Z)(function(){t$()},[tx,ey]),(0,m.Z)(function(){ta&&!(null!=eS&&eS[ey])&&t$()},[JSON.stringify(ew)]);var tW=h.useMemo(function(){var e=function(e,t,n,r){for(var o=n.points,a=Object.keys(e),i=0;i0&&void 0!==arguments[0]?arguments[0]:[],t=arguments.length>1&&void 0!==arguments[1]?arguments[1]:[],n=arguments.length>2?arguments[2]:void 0;return n?e[0]===t[0]:e[0]===t[0]&&e[1]===t[1]}(null===(l=e[s])||void 0===l?void 0:l.points,o,r))return"".concat(t,"-placement-").concat(s)}return""}(eS,K,tj,ek);return s()(e,null==eC?void 
0:eC(tj))},[tj,eC,eS,K,ek]);h.useImperativeHandle(n,function(){return{nativeElement:e4.current,forceAlign:t$}});var tV=h.useState(0),tq=(0,o.Z)(tV,2),tY=tq[0],tK=tq[1],tX=h.useState(0),tQ=(0,o.Z)(tX,2),tJ=tQ[0],t0=tQ[1],t1=function(){if(eO&&e1){var e=e1.getBoundingClientRect();tK(e.width),t0(e.height)}};function t2(e,t,n,r){e8[e]=function(o){var a;null==r||r(o),tp(t,n);for(var i=arguments.length,l=Array(i>1?i-1:0),s=1;s1?n-1:0),o=1;o1?n-1:0),o=1;oaG(t,e()).base(t.base()),or.apply(t,arguments),t}},scaleOrdinal:function(){return oc},scalePoint:function(){return od},scalePow:function(){return ip},scaleQuantile:function(){return function e(){var t,n=[],r=[],o=[];function a(){var e=0,t=Math.max(1,r.length);for(o=Array(t-1);++e2&&void 0!==arguments[2]?arguments[2]:o4;if(!(!(r=e.length)||isNaN(t=+t))){if(t<=0||r<2)return+n(e[0],0,e);if(t>=1)return+n(e[r-1],r-1,e);var r,o=(r-1)*t,a=Math.floor(o),i=+n(e[a],a,e);return i+(+n(e[a+1],a+1,e)-i)*(o-a)}}(n,e/t);return i}function i(e){return null==e||isNaN(e=+e)?t:r[o6(o,e)]}return i.invertExtent=function(e){var t=r.indexOf(e);return t<0?[NaN,NaN]:[t>0?o[t-1]:n[0],t=o?[a[o-1],r]:[a[t-1],a[t]]},l.unknown=function(e){return arguments.length&&(t=e),l},l.thresholds=function(){return a.slice()},l.copy=function(){return e().domain([n,r]).range(i).unknown(t)},or.apply(a3(l),arguments)}},scaleRadial:function(){return function e(){var t,n=aW(),r=[0,1],o=!1;function a(e){var r,a=Math.sign(r=n(e))*Math.sqrt(Math.abs(r));return isNaN(a)?t:o?Math.round(a):a}return a.invert=function(e){return n.invert(ig(e))},a.domain=function(e){return arguments.length?(n.domain(e),a):n.domain()},a.range=function(e){return arguments.length?(n.range((r=Array.from(e,aF)).map(ig)),a):r.slice()},a.rangeRound=function(e){return a.range(e).round(!0)},a.round=function(e){return arguments.length?(o=!!e,a):o},a.clamp=function(e){return arguments.length?(n.clamp(e),a):n.clamp()},a.unknown=function(e){return arguments.length?(t=e,a):t},a.copy=function(){return 
e(n.domain(),r).round(o).clamp(n.clamp()).unknown(t)},or.apply(a,arguments),a3(a)}},scaleSequential:function(){return function e(){var t=a3(ss()(aU));return t.copy=function(){return sc(t,e())},oo.apply(t,arguments)}},scaleSequentialLog:function(){return function e(){var t=io(ss()).domain([1,10]);return t.copy=function(){return sc(t,e()).base(t.base())},oo.apply(t,arguments)}},scaleSequentialPow:function(){return su},scaleSequentialQuantile:function(){return function e(){var t=[],n=aU;function r(e){if(null!=e&&!isNaN(e=+e))return n((o6(t,e,1)-1)/(t.length-1))}return r.domain=function(e){if(!arguments.length)return t.slice();for(let n of(t=[],e))null==n||isNaN(n=+n)||t.push(n);return t.sort(oJ),r},r.interpolator=function(e){return arguments.length?(n=e,r):n},r.range=function(){return t.map((e,r)=>n(r/(t.length-1)))},r.quantiles=function(e){return Array.from({length:e+1},(n,r)=>(function(e,t,n){if(!(!(r=(e=Float64Array.from(function*(e,t){if(void 0===t)for(let t of e)null!=t&&(t=+t)>=t&&(yield t);else{let n=-1;for(let r of e)null!=(r=t(r,++n,e))&&(r=+r)>=r&&(yield r)}}(e,void 0))).length)||isNaN(t=+t))){if(t<=0||r<2)return ib(e);if(t>=1)return ih(e);var r,o=(r-1)*t,a=Math.floor(o),i=ih((function e(t,n){let r=arguments.length>2&&void 0!==arguments[2]?arguments[2]:0,o=arguments.length>3&&void 0!==arguments[3]?arguments[3]:1/0,a=arguments.length>4?arguments[4]:void 0;if(n=Math.floor(n),r=Math.floor(Math.max(0,r)),o=Math.floor(Math.min(t.length-1,o)),!(r<=n&&n<=o))return t;for(a=void 0===a?iv:function(){let e=arguments.length>0&&void 0!==arguments[0]?arguments[0]:oJ;if(e===oJ)return iv;if("function"!=typeof e)throw TypeError("compare is not a function");return(t,n)=>{let r=e(t,n);return r||0===r?r:(0===e(n,n))-(0===e(t,t))}}(a);o>r;){if(o-r>600){let i=o-r+1,l=n-r+1,s=Math.log(i),c=.5*Math.exp(2*s/3),u=.5*Math.sqrt(s*c*(i-c)/i)*(l-i/2<0?-1:1),d=Math.max(r,Math.floor(n-l*c/i+u)),p=Math.min(o,Math.floor(n+(i-l)*c/i+u));e(t,n,d,p,a)}let 
i=t[n],l=r,s=o;for(iy(t,r,n),a(t[o],i)>0&&iy(t,r,o);la(t[l],i);)++l;for(;a(t[s],i)>0;)--s}0===a(t[r],i)?iy(t,r,s):iy(t,++s,o),s<=n&&(r=s+1),n<=s&&(o=s-1)}return t})(e,a).subarray(0,a+1));return i+(ib(e.subarray(a+1))-i)*(o-a)}})(t,r/e))},r.copy=function(){return e(n).domain(t)},oo.apply(r,arguments)}},scaleSequentialSqrt:function(){return sd},scaleSequentialSymlog:function(){return function e(){var t=il(ss());return t.copy=function(){return sc(t,e()).constant(t.constant())},oo.apply(t,arguments)}},scaleSqrt:function(){return im},scaleSymlog:function(){return function e(){var t=il(a$());return t.copy=function(){return aG(t,e()).constant(t.constant())},or.apply(t,arguments)}},scaleThreshold:function(){return function e(){var t,n=[.5],r=[0,1],o=1;function a(e){return null!=e&&e<=e?r[o6(n,e,0,o)]:t}return a.domain=function(e){return arguments.length?(o=Math.min((n=Array.from(e)).length,r.length-1),a):n.slice()},a.range=function(e){return arguments.length?(r=Array.from(e),o=Math.min(n.length,r.length-1),a):r.slice()},a.invertExtent=function(e){var t=r.indexOf(e);return[n[t-1],n[t]]},a.unknown=function(e){return arguments.length?(t=e,a):t},a.copy=function(){return e().domain(n).range(r).unknown(t)},or.apply(a,arguments)}},scaleTime:function(){return si},scaleUtc:function(){return sl},tickFormat:function(){return a4}});var C=n(69703),T=n(54942),k=n(2898),A=n(99250),I=n(65492),R=n(64090),N=function(){for(var e,t,n=0,r="",o=arguments.length;n0?1:-1},G=function(e){return D()(e)&&e.indexOf("%")===e.length-1},$=function(e){return z()(e)&&!F()(e)},W=function(e){return $(e)||D()(e)},V=0,q=function(e){var t=++V;return"".concat(e||"").concat(t)},Y=function(e,t){var n,r=arguments.length>2&&void 0!==arguments[2]?arguments[2]:0,o=arguments.length>3&&void 0!==arguments[3]&&arguments[3];if(!$(e)&&!D()(e))return r;if(G(e)){var a=e.indexOf("%");n=t*parseFloat(e.slice(0,a))/100}else n=+e;return F()(n)&&(n=r),o&&n>t&&(n=t),n},K=function(e){if(!e)return null;var t=Object.keys(e);return 
t&&t.length?e[t[0]]:null},X=function(e){if(!Array.isArray(e))return!1;for(var t=e.length,n={},r=0;r2?n-2:0),o=2;o=0||(o[n]=e[n]);return o}(e,t);if(Object.getOwnPropertySymbols){var a=Object.getOwnPropertySymbols(e);for(r=0;r=0)&&Object.prototype.propertyIsEnumerable.call(e,n)&&(o[n]=e[n])}return o}var ev={click:"onClick",mousedown:"onMouseDown",mouseup:"onMouseUp",mouseover:"onMouseOver",mousemove:"onMouseMove",mouseout:"onMouseOut",mouseenter:"onMouseEnter",mouseleave:"onMouseLeave",touchcancel:"onTouchCancel",touchend:"onTouchEnd",touchmove:"onTouchMove",touchstart:"onTouchStart"},ey=function(e){return"string"==typeof e?e:e?e.displayName||e.name||"Component":""},eE=null,eS=null,ew=function e(t){if(t===eE&&Array.isArray(eS))return eS;var n=[];return R.Children.forEach(t,function(t){en()(t)||((0,M.isFragment)(t)?n=n.concat(e(t.props.children)):n.push(t))}),eS=n,eE=t,n};function ex(e,t){var n=[],r=[];return r=Array.isArray(t)?t.map(function(e){return ey(e)}):[ey(t)],ew(e).forEach(function(e){var t=U()(e,"type.displayName")||U()(e,"type.name");-1!==r.indexOf(t)&&n.push(e)}),n}function eO(e,t){var n=ex(e,t);return n&&n[0]}var eC=function(e){if(!e||!e.props)return!1;var 
t=e.props,n=t.width,r=t.height;return!!$(n)&&!(n<=0)&&!!$(r)&&!(r<=0)},eT=["a","altGlyph","altGlyphDef","altGlyphItem","animate","animateColor","animateMotion","animateTransform","circle","clipPath","color-profile","cursor","defs","desc","ellipse","feBlend","feColormatrix","feComponentTransfer","feComposite","feConvolveMatrix","feDiffuseLighting","feDisplacementMap","feDistantLight","feFlood","feFuncA","feFuncB","feFuncG","feFuncR","feGaussianBlur","feImage","feMerge","feMergeNode","feMorphology","feOffset","fePointLight","feSpecularLighting","feSpotLight","feTile","feTurbulence","filter","font","font-face","font-face-format","font-face-name","font-face-url","foreignObject","g","glyph","glyphRef","hkern","image","line","lineGradient","marker","mask","metadata","missing-glyph","mpath","path","pattern","polygon","polyline","radialGradient","rect","script","set","stop","style","svg","switch","symbol","text","textPath","title","tref","tspan","use","view","vkern"],ek=function(e,t,n,r){var o,a=null!==(o=null==ed?void 0:ed[r])&&void 0!==o?o:[];return!eo()(e)&&(r&&a.includes(t)||ec.includes(t))||n&&ep.includes(t)},eA=function(e,t,n){if(!e||"function"==typeof e||"boolean"==typeof e)return null;var r=e;if((0,R.isValidElement)(e)&&(r=e.props),!ei()(r))return null;var o={};return Object.keys(r).forEach(function(e){var a;ek(null===(a=r)||void 0===a?void 0:a[e],e,t,n)&&(o[e]=r[e])}),o},eI=function e(t,n){if(t===n)return!0;var r=R.Children.count(t);if(r!==R.Children.count(n))return!1;if(0===r)return!0;if(1===r)return eR(Array.isArray(t)?t[0]:t,Array.isArray(n)?n[0]:n);for(var o=0;o=0)n.push(e);else if(e){var a=ey(e.type),i=t[a]||{},l=i.handler,s=i.once;if(l&&(!s||!r[a])){var c=l(e,a,o);n.push(c),r[a]=!0}}}),n},e_=function(e){var t=e&&e.type;return t&&ev[t]?ev[t]:null};function eP(e){return(eP="function"==typeof Symbol&&"symbol"==typeof Symbol.iterator?function(e){return typeof e}:function(e){return e&&"function"==typeof 
Symbol&&e.constructor===Symbol&&e!==Symbol.prototype?"symbol":typeof e})(e)}function eM(e,t){var n=Object.keys(e);if(Object.getOwnPropertySymbols){var r=Object.getOwnPropertySymbols(e);t&&(r=r.filter(function(t){return Object.getOwnPropertyDescriptor(e,t).enumerable})),n.push.apply(n,r)}return n}function eL(e){for(var t=1;te.length)&&(t=e.length);for(var n=0,r=Array(t);n0&&(e=P()(e,h,{trailing:!0,leading:!1}));var t=new ResizeObserver(e),n=S.current.getBoundingClientRect();return T(n.width,n.height),t.observe(S.current),function(){t.disconnect()}},[T,h]);var k=(0,R.useMemo)(function(){var e=O.containerWidth,t=O.containerHeight;if(e<0||t<0)return null;ee(G(l)||G(c),"The width(%s) and height(%s) are both fixed numbers,\n maybe you don't need to use a ResponsiveContainer.",l,c),ee(!r||r>0,"The aspect(%s) must be greater than zero.",r);var n=G(l)?e:l,o=G(c)?t:c;r&&r>0&&(n?o=n/r:o&&(n=o*r),f&&o>f&&(o=f)),ee(n>0||o>0,"The width(%s) and height(%s) of chart should be greater than 0,\n please check the style of container, or the props width(%s) and height(%s),\n or add a minWidth(%s) or minHeight(%s) or use aspect(%s) to control the\n height and width.",n,o,l,c,d,p,r);var a=!Array.isArray(m)&&(0,M.isElement)(m)&&ey(m.type).endsWith("Chart");return R.Children.map(m,function(e){return(0,M.isElement)(e)?(0,R.cloneElement)(e,eL({width:n,height:o},a?{style:eL({height:"100%",width:"100%",maxHeight:o,maxWidth:n},e.props.style)}:{})):e})},[r,m,c,f,p,d,O,l]);return R.createElement("div",{id:b?"".concat(b):void 0,className:N("recharts-responsive-container",v),style:eL(eL({},void 0===E?{}:E),{},{width:l,height:c,minWidth:d,minHeight:p,maxHeight:f}),ref:S},k)}),eF=n(1646),eB=n.n(eF),eU=n(97572),eZ=n.n(eU),ez=n(209),eH=n.n(ez),eG=n(72986),e$=n.n(eG);function eW(e,t){if(!e)throw Error("Invariant failed")}var eV=["children","width","height","viewBox","className","style","title","desc"];function eq(){return(eq=Object.assign?Object.assign.bind():function(e){for(var 
t=1;t=0||(o[n]=e[n]);return o}(e,t);if(Object.getOwnPropertySymbols){var a=Object.getOwnPropertySymbols(e);for(r=0;r=0)&&Object.prototype.propertyIsEnumerable.call(e,n)&&(o[n]=e[n])}return o}(e,eV),u=o||{width:n,height:r,x:0,y:0},d=N("recharts-surface",a);return R.createElement("svg",eq({},eA(c,!0,"svg"),{className:d,width:n,height:r,style:i,viewBox:"".concat(u.x," ").concat(u.y," ").concat(u.width," ").concat(u.height)}),R.createElement("title",null,l),R.createElement("desc",null,s),t)}var eK=["children","className"];function eX(){return(eX=Object.assign?Object.assign.bind():function(e){for(var t=1;t=0||(o[n]=e[n]);return o}(e,t);if(Object.getOwnPropertySymbols){var a=Object.getOwnPropertySymbols(e);for(r=0;r=0)&&Object.prototype.propertyIsEnumerable.call(e,n)&&(o[n]=e[n])}return o}(e,eK),a=N("recharts-layer",r);return R.createElement("g",eX({className:a},eA(o,!0),{ref:t}),n)});function eJ(e){return(eJ="function"==typeof Symbol&&"symbol"==typeof Symbol.iterator?function(e){return typeof e}:function(e){return e&&"function"==typeof Symbol&&e.constructor===Symbol&&e!==Symbol.prototype?"symbol":typeof e})(e)}function e0(e,t){(null==t||t>e.length)&&(t=e.length);for(var n=0,r=Array(t);n0;)if(!n.equals(e[r],t[r],r,r,e,t,n))return!1;return!0}function tc(e,t){return to(e.getTime(),t.getTime())}function tu(e,t,n){if(e.size!==t.size)return!1;for(var r,o,a={},i=e.entries(),l=0;(r=i.next())&&!r.done;){for(var s=t.entries(),c=!1,u=0;(o=s.next())&&!o.done;){var d=r.value,p=d[0],f=d[1],m=o.value,g=m[0],h=m[1];!c&&!a[u]&&(c=n.equals(p,g,l,u,e,t,n)&&n.equals(f,h,p,g,e,t,n))&&(a[u]=!0),u++}if(!c)return!1;l++}return!0}function td(e,t,n){var r,o=tl(e),a=o.length;if(tl(t).length!==a)return!1;for(;a-- >0;)if((r=o[a])===ta&&(e.$$typeof||t.$$typeof)&&e.$$typeof!==t.$$typeof||!tr(t,r)||!n.equals(e[r],t[r],r,r,e,t,n))return!1;return!0}function tp(e,t,n){var r,o,a,i=tn(e),l=i.length;if(tn(t).length!==l)return!1;for(;l-- 
>0;)if((r=i[l])===ta&&(e.$$typeof||t.$$typeof)&&e.$$typeof!==t.$$typeof||!tr(t,r)||!n.equals(e[r],t[r],r,r,e,t,n)||(o=ti(e,r),a=ti(t,r),(o||a)&&(!o||!a||o.configurable!==a.configurable||o.enumerable!==a.enumerable||o.writable!==a.writable)))return!1;return!0}function tf(e,t){return to(e.valueOf(),t.valueOf())}function tm(e,t){return e.source===t.source&&e.flags===t.flags}function tg(e,t,n){if(e.size!==t.size)return!1;for(var r,o,a={},i=e.values();(r=i.next())&&!r.done;){for(var l=t.values(),s=!1,c=0;(o=l.next())&&!o.done;)!s&&!a[c]&&(s=n.equals(r.value,o.value,r.value,o.value,e,t,n))&&(a[c]=!0),c++;if(!s)return!1}return!0}function th(e,t){var n=e.length;if(t.length!==n)return!1;for(;n-- >0;)if(e[n]!==t[n])return!1;return!0}var tb=Array.isArray,tv="function"==typeof ArrayBuffer&&ArrayBuffer.isView?ArrayBuffer.isView:null,ty=Object.assign,tE=Object.prototype.toString.call.bind(Object.prototype.toString),tS=tw();function tw(e){void 0===e&&(e={});var t,n,r,o,a,i,l,s,c,u=e.circular,d=e.createInternalComparator,p=e.createState,f=e.strict,m=(n=(t=function(e){var t=e.circular,n=e.createCustomConfig,r=e.strict,o={areArraysEqual:r?tp:ts,areDatesEqual:tc,areMapsEqual:r?te(tu,tp):tu,areObjectsEqual:r?tp:td,arePrimitiveWrappersEqual:tf,areRegExpsEqual:tm,areSetsEqual:r?te(tg,tp):tg,areTypedArraysEqual:r?tp:th};if(n&&(o=ty({},o,n(o))),t){var a=tt(o.areArraysEqual),i=tt(o.areMapsEqual),l=tt(o.areObjectsEqual),s=tt(o.areSetsEqual);o=ty({},o,{areArraysEqual:a,areMapsEqual:i,areObjectsEqual:l,areSetsEqual:s})}return o}(e)).areArraysEqual,r=t.areDatesEqual,o=t.areMapsEqual,a=t.areObjectsEqual,i=t.arePrimitiveWrappersEqual,l=t.areRegExpsEqual,s=t.areSetsEqual,c=t.areTypedArraysEqual,function(e,t,u){if(e===t)return!0;if(null==e||null==t||"object"!=typeof e||"object"!=typeof t)return e!=e&&t!=t;var d=e.constructor;if(d!==t.constructor)return!1;if(d===Object)return a(e,t,u);if(tb(e))return n(e,t,u);if(null!=tv&&tv(e))return c(e,t,u);if(d===Date)return r(e,t,u);if(d===RegExp)return 
l(e,t,u);if(d===Map)return o(e,t,u);if(d===Set)return s(e,t,u);var p=tE(e);return"[object Date]"===p?r(e,t,u):"[object RegExp]"===p?l(e,t,u):"[object Map]"===p?o(e,t,u):"[object Set]"===p?s(e,t,u):"[object Object]"===p?"function"!=typeof e.then&&"function"!=typeof t.then&&a(e,t,u):"[object Arguments]"===p?a(e,t,u):("[object Boolean]"===p||"[object Number]"===p||"[object String]"===p)&&i(e,t,u)}),g=d?d(m):function(e,t,n,r,o,a,i){return m(e,t,i)};return function(e){var t=e.circular,n=e.comparator,r=e.createState,o=e.equals,a=e.strict;if(r)return function(e,i){var l=r(),s=l.cache;return n(e,i,{cache:void 0===s?t?new WeakMap:void 0:s,equals:o,meta:l.meta,strict:a})};if(t)return function(e,t){return n(e,t,{cache:new WeakMap,equals:o,meta:void 0,strict:a})};var i={cache:void 0,equals:o,meta:void 0,strict:a};return function(e,t){return n(e,t,i)}}({circular:void 0!==u&&u,comparator:m,createState:p,equals:g,strict:void 0!==f&&f})}function tx(e){var t=arguments.length>1&&void 0!==arguments[1]?arguments[1]:0,n=-1;requestAnimationFrame(function r(o){if(n<0&&(n=o),o-n>t)e(o),n=-1;else{var a;a=r,"undefined"!=typeof requestAnimationFrame&&requestAnimationFrame(a)}})}function tO(e){return(tO="function"==typeof Symbol&&"symbol"==typeof Symbol.iterator?function(e){return typeof e}:function(e){return e&&"function"==typeof Symbol&&e.constructor===Symbol&&e!==Symbol.prototype?"symbol":typeof e})(e)}function tC(e,t){(null==t||t>e.length)&&(t=e.length);for(var n=0,r=Array(t);ne.length)&&(t=e.length);for(var n=0,r=Array(t);n=0&&e<=1}),"[configBezier]: arguments should be x1, y1, x2, y2 of [0, 1] instead received %s",r);var p=tH(a,l),f=tH(i,s),m=(e=a,t=l,function(n){var r;return tz([].concat(function(e){if(Array.isArray(e))return tU(e)}(r=tZ(e,t).map(function(e,t){return e*t}).slice(1))||function(e){if("undefined"!=typeof Symbol&&null!=e[Symbol.iterator]||null!=e["@@iterator"])return Array.from(e)}(r)||tB(r)||function(){throw TypeError("Invalid attempt to spread non-iterable instance.\nIn 
order to be iterable, non-array objects must have a [Symbol.iterator]() method.")}(),[0]),n)}),g=function(e){for(var t=e>1?1:e,n=t,r=0;r<8;++r){var o,a=p(n)-t,i=m(n);if(1e-4>Math.abs(a-t)||i<1e-4)break;n=(o=n-a/i)>1?1:o<0?0:o}return f(n)};return g.isStepper=!1,g},t$=function(){var e=arguments.length>0&&void 0!==arguments[0]?arguments[0]:{},t=e.stiff,n=void 0===t?100:t,r=e.damping,o=void 0===r?8:r,a=e.dt,i=void 0===a?17:a,l=function(e,t,r){var a=r+(-(e-t)*n-r*o)*i/1e3,l=r*i/1e3+e;return 1e-4>Math.abs(l-t)&&1e-4>Math.abs(a)?[t,0]:[l,a]};return l.isStepper=!0,l.dt=i,l},tW=function(){for(var e=arguments.length,t=Array(e),n=0;ne.length)&&(t=e.length);for(var n=0,r=Array(t);ne.length)&&(t=e.length);for(var n=0,r=Array(t);n0?n[o-1]:r,p=c||Object.keys(s);if("function"==typeof l||"spring"===l)return[].concat(t6(e),[t.runJSAnimation.bind(t,{from:d.style,to:s,duration:a,easing:l}),a]);var f=tj(p,a,l),m=t9(t9(t9({},d.style),s),{},{transition:f});return[].concat(t6(e),[m,a,u]).filter(tP)},[i,Math.max(void 0===l?0:l,r)])),[e.onAnimationEnd]))}},{key:"runAnimation",value:function(e){if(!this.manager){var t,n,r;this.manager=(t=function(){return null},n=!1,r=function e(r){if(!n){if(Array.isArray(r)){if(!r.length)return;var o=function(e){if(Array.isArray(e))return e}(r)||function(e){if("undefined"!=typeof Symbol&&null!=e[Symbol.iterator]||null!=e["@@iterator"])return Array.from(e)}(r)||function(e,t){if(e){if("string"==typeof e)return tC(e,t);var n=Object.prototype.toString.call(e).slice(8,-1);if("Object"===n&&e.constructor&&(n=e.constructor.name),"Map"===n||"Set"===n)return Array.from(e);if("Arguments"===n||/^(?:Ui|I)nt(?:8|16|32)(?:Clamped)?Array$/.test(n))return tC(e,t)}}(r)||function(){throw TypeError("Invalid attempt to destructure non-iterable instance.\nIn order to be iterable, non-array objects must have a [Symbol.iterator]() method.")}(),a=o[0],i=o.slice(1);if("number"==typeof 
a){tx(e.bind(null,i),a);return}e(a),tx(e.bind(null,i));return}"object"===tO(r)&&t(r),"function"==typeof r&&r()}},{stop:function(){n=!0},start:function(e){n=!1,r(e)},subscribe:function(e){return t=e,function(){t=function(){return null}}}})}var o=e.begin,a=e.duration,i=e.attributeName,l=e.to,s=e.easing,c=e.onAnimationStart,u=e.onAnimationEnd,d=e.steps,p=e.children,f=this.manager;if(this.unSubscribe=f.subscribe(this.handleStyleChange),"function"==typeof s||"function"==typeof p||"spring"===s){this.runJSAnimation(e);return}if(d.length>1){this.runStepAnimation(e);return}var m=i?t7({},i,l):l,g=tj(Object.keys(m),a,s);f.start([c,o,t9(t9({},m),{},{transition:g}),a,u])}},{key:"render",value:function(){var e=this.props,t=e.children,n=(e.begin,e.duration),r=(e.attributeName,e.easing,e.isActive),o=(e.steps,e.from,e.to,e.canBegin,e.onAnimationEnd,e.shouldReAnimate,e.onAnimationReStart,function(e,t){if(null==e)return{};var n,r,o=function(e,t){if(null==e)return{};var n,r,o={},a=Object.keys(e);for(r=0;r=0||(o[n]=e[n]);return o}(e,t);if(Object.getOwnPropertySymbols){var a=Object.getOwnPropertySymbols(e);for(r=0;r=0)&&Object.prototype.propertyIsEnumerable.call(e,n)&&(o[n]=e[n])}return o}(e,t3)),a=R.Children.count(t),i=tD(this.state.style);if("function"==typeof t)return t(i);if(!r||0===a||n<=0)return t;var l=function(e){var t=e.props,n=t.style,r=t.className;return(0,R.cloneElement)(e,t9(t9({},o),{},{style:t9(t9({},void 0===n?{}:n),i),className:r}))};return 1===a?l(R.Children.only(t)):R.createElement("div",null,R.Children.map(t,function(e){return 
l(e)}))}}],ne(a.prototype,n),r&&ne(a,r),Object.defineProperty(a,"prototype",{writable:!1}),a}(R.PureComponent);ni.displayName="Animate",ni.defaultProps={begin:0,duration:1e3,from:"",to:"",attributeName:"",easing:"ease",isActive:!0,canBegin:!0,steps:[],onAnimationEnd:function(){},onAnimationStart:function(){}},ni.propTypes={from:e5().oneOfType([e5().object,e5().string]),to:e5().oneOfType([e5().object,e5().string]),attributeName:e5().string,duration:e5().number,begin:e5().number,easing:e5().oneOfType([e5().string,e5().func]),steps:e5().arrayOf(e5().shape({duration:e5().number.isRequired,style:e5().object.isRequired,easing:e5().oneOfType([e5().oneOf(["ease","ease-in","ease-out","ease-in-out","linear"]),e5().func]),properties:e5().arrayOf("string"),onAnimationEnd:e5().func})),children:e5().oneOfType([e5().node,e5().func]),isActive:e5().bool,canBegin:e5().bool,onAnimationEnd:e5().func,shouldReAnimate:e5().bool,onAnimationStart:e5().func,onAnimationReStart:e5().func};var nl=n(42859),ns=["children","appearOptions","enterOptions","leaveOptions"];function nc(e){return(nc="function"==typeof Symbol&&"symbol"==typeof Symbol.iterator?function(e){return typeof e}:function(e){return e&&"function"==typeof Symbol&&e.constructor===Symbol&&e!==Symbol.prototype?"symbol":typeof e})(e)}function nu(){return(nu=Object.assign?Object.assign.bind():function(e){for(var t=1;t0&&void 0!==arguments[0]?arguments[0]:{},t=e.steps,n=e.duration;return t&&t.length?t.reduce(function(e,t){return e+(Number.isFinite(t.duration)&&t.duration>0?t.duration:0)},0):Number.isFinite(n)?n:0},nE=function(e){!function(e,t){if("function"!=typeof t&&null!==t)throw TypeError("Super expression must either be null or a function");e.prototype=Object.create(t&&t.prototype,{constructor:{value:e,writable:!0,configurable:!0}}),Object.defineProperty(e,"prototype",{writable:!1}),t&&nm(e,t)}(a,e);var t,n,r,o=(t=function(){if("undefined"==typeof Reflect||!Reflect.construct||Reflect.construct.sham)return!1;if("function"==typeof 
Proxy)return!0;try{return Boolean.prototype.valueOf.call(Reflect.construct(Boolean,[],function(){})),!0}catch(e){return!1}}(),function(){var e,n=nh(a);if(t){var r=nh(this).constructor;e=Reflect.construct(n,arguments,r)}else e=n.apply(this,arguments);return function(e,t){if(t&&("object"===nc(t)||"function"==typeof t))return t;if(void 0!==t)throw TypeError("Derived constructors may only return object or undefined");return ng(e)}(this,e)});function a(){var e;return!function(e,t){if(!(e instanceof t))throw TypeError("Cannot call a class as a function")}(this,a),nb(ng(e=o.call(this)),"handleEnter",function(t,n){var r=e.props,o=r.appearOptions,a=r.enterOptions;e.handleStyleActive(n?o:a)}),nb(ng(e),"handleExit",function(){var t=e.props.leaveOptions;e.handleStyleActive(t)}),e.state={isActive:!1},e}return n=[{key:"handleStyleActive",value:function(e){if(e){var t=e.onAnimationEnd?function(){e.onAnimationEnd()}:null;this.setState(np(np({},e),{},{onAnimationEnd:t,isActive:!0}))}}},{key:"parseTimeout",value:function(){var e=this.props,t=e.appearOptions,n=e.enterOptions,r=e.leaveOptions;return ny(t)+ny(n)+ny(r)}},{key:"render",value:function(){var e=this,t=this.props,n=t.children,r=(t.appearOptions,t.enterOptions,t.leaveOptions,function(e,t){if(null==e)return{};var n,r,o=function(e,t){if(null==e)return{};var n,r,o={},a=Object.keys(e);for(r=0;r=0||(o[n]=e[n]);return o}(e,t);if(Object.getOwnPropertySymbols){var a=Object.getOwnPropertySymbols(e);for(r=0;r=0)&&Object.prototype.propertyIsEnumerable.call(e,n)&&(o[n]=e[n])}return o}(t,ns));return R.createElement(nl.Transition,nu({},r,{onEnter:this.handleEnter,onExit:this.handleExit,timeout:this.parseTimeout()}),function(){return R.createElement(ni,e.state,R.Children.only(n))})}}],nf(a.prototype,n),r&&nf(a,r),Object.defineProperty(a,"prototype",{writable:!1}),a}(R.Component);function nS(e){var t=e.component,n=e.children,r=e.appear,o=e.enter,a=e.leave;return 
R.createElement(nl.TransitionGroup,{component:t},R.Children.map(n,function(e,t){return R.createElement(nE,{appearOptions:r,enterOptions:o,leaveOptions:a,key:"child-".concat(t)},e)}))}function nw(e){return(nw="function"==typeof Symbol&&"symbol"==typeof Symbol.iterator?function(e){return typeof e}:function(e){return e&&"function"==typeof Symbol&&e.constructor===Symbol&&e!==Symbol.prototype?"symbol":typeof e})(e)}function nx(e,t,n){var r;return(r=function(e,t){if("object"!==nw(e)||null===e)return e;var n=e[Symbol.toPrimitive];if(void 0!==n){var r=n.call(e,t||"default");if("object"!==nw(r))return r;throw TypeError("@@toPrimitive must return a primitive value.")}return("string"===t?String:Number)(e)}(t,"string"),(t="symbol"===nw(r)?r:String(r))in e)?Object.defineProperty(e,t,{value:n,enumerable:!0,configurable:!0,writable:!0}):e[t]=n,e}nE.propTypes={appearOptions:e5().object,enterOptions:e5().object,leaveOptions:e5().object,children:e5().element},nS.propTypes={appear:e5().object,enter:e5().object,leave:e5().object,children:e5().oneOfType([e5().array,e5().element]),component:e5().any},nS.defaultProps={component:"span"};var nO="recharts-tooltip-wrapper",nC={visibility:"hidden"};function nT(e){var t=e.allowEscapeViewBox,n=e.coordinate,r=e.key,o=e.offsetTopLeft,a=e.position,i=e.reverseDirection,l=e.tooltipDimension,s=e.viewBox,c=e.viewBoxDimension;if(a&&$(a[r]))return a[r];var u=n[r]-l-o,d=n[r]+o;return t[r]?i[r]?u:d:i[r]?us[r]+c?Math.max(u,s[r]):Math.max(d,s[r])}function nk(e){return(nk="function"==typeof Symbol&&"symbol"==typeof Symbol.iterator?function(e){return typeof e}:function(e){return e&&"function"==typeof Symbol&&e.constructor===Symbol&&e!==Symbol.prototype?"symbol":typeof e})(e)}function nA(e,t){var n=Object.keys(e);if(Object.getOwnPropertySymbols){var r=Object.getOwnPropertySymbols(e);t&&(r=r.filter(function(t){return Object.getOwnPropertyDescriptor(e,t).enumerable})),n.push.apply(n,r)}return n}function nI(e){for(var 
t=1;t1||Math.abs(e.height-this.lastBoundingBox.height)>1)&&(this.lastBoundingBox.width=e.width,this.lastBoundingBox.height=e.height)}else(-1!==this.lastBoundingBox.width||-1!==this.lastBoundingBox.height)&&(this.lastBoundingBox.width=-1,this.lastBoundingBox.height=-1)}},{key:"componentDidMount",value:function(){document.addEventListener("keydown",this.handleKeyDown),this.updateBBox()}},{key:"componentWillUnmount",value:function(){document.removeEventListener("keydown",this.handleKeyDown)}},{key:"componentDidUpdate",value:function(){var e,t;this.props.active&&this.updateBBox(),this.state.dismissed&&((null===(e=this.props.coordinate)||void 0===e?void 0:e.x)!==this.state.dismissedAtCoordinate.x||(null===(t=this.props.coordinate)||void 0===t?void 0:t.y)!==this.state.dismissedAtCoordinate.y)&&(this.state.dismissed=!1)}},{key:"render",value:function(){var e,t,n,r,o,a,i,l,s,c,u,d,p,f,m,g,h,b,v,y,E=this,S=this.props,w=S.active,x=S.allowEscapeViewBox,O=S.animationDuration,C=S.animationEasing,T=S.children,k=S.coordinate,A=S.hasPayload,I=S.isAnimationActive,_=S.offset,P=S.position,M=S.reverseDirection,L=S.useTranslate3d,D=S.viewBox,j=S.wrapperStyle,F=(p=(e={allowEscapeViewBox:x,coordinate:k,offsetTopLeft:_,position:P,reverseDirection:M,tooltipBox:{height:this.lastBoundingBox.height,width:this.lastBoundingBox.width},useTranslate3d:L,viewBox:D}).allowEscapeViewBox,f=e.coordinate,m=e.offsetTopLeft,g=e.position,h=e.reverseDirection,b=e.tooltipBox,v=e.useTranslate3d,y=e.viewBox,b.height>0&&b.width>0&&f?(n=(t={translateX:u=nT({allowEscapeViewBox:p,coordinate:f,key:"x",offsetTopLeft:m,position:g,reverseDirection:h,tooltipDimension:b.width,viewBox:y,viewBoxDimension:y.width}),translateY:d=nT({allowEscapeViewBox:p,coordinate:f,key:"y",offsetTopLeft:m,position:g,reverseDirection:h,tooltipDimension:b.height,viewBox:y,viewBoxDimension:y.height}),useTranslate3d:v}).translateX,r=t.translateY,c=tD({transform:t.useTranslate3d?"translate3d(".concat(n,"px, ").concat(r,"px, 
0)"):"translate(".concat(n,"px, ").concat(r,"px)")})):c=nC,{cssProperties:c,cssClasses:(i=(o={translateX:u,translateY:d,coordinate:f}).coordinate,l=o.translateX,s=o.translateY,N(nO,(nx(a={},"".concat(nO,"-right"),$(l)&&i&&$(i.x)&&l>=i.x),nx(a,"".concat(nO,"-left"),$(l)&&i&&$(i.x)&&l=i.y),nx(a,"".concat(nO,"-top"),$(s)&&i&&$(i.y)&&s0;return R.createElement(nD,{allowEscapeViewBox:o,animationDuration:a,animationEasing:i,isAnimationActive:u,active:r,coordinate:s,hasPayload:E,offset:d,position:m,reverseDirection:g,useTranslate3d:h,viewBox:b,wrapperStyle:v},(e=nH(nH({},this.props),{},{payload:y}),R.isValidElement(l)?R.cloneElement(l,e):"function"==typeof l?R.createElement(l,e):R.createElement(e3,e)))}}],nG(a.prototype,n),r&&nG(a,r),Object.defineProperty(a,"prototype",{writable:!1}),a}(R.PureComponent);nV(nK,"displayName","Tooltip"),nV(nK,"defaultProps",{allowEscapeViewBox:{x:!1,y:!1},animationDuration:400,animationEasing:"ease",contentStyle:{},coordinate:{x:0,y:0},cursor:!0,cursorStyle:{},filterNull:!0,isAnimationActive:!nj.isSsr,itemStyle:{},labelStyle:{},offset:10,reverseDirection:{x:!1,y:!1},separator:" : ",trigger:"hover",useTranslate3d:!1,viewBox:{x:0,y:0,height:0,width:0},wrapperStyle:{}});var nX=n(9332),nQ=n.n(nX);let nJ=Math.cos,n0=Math.sin,n1=Math.sqrt,n2=Math.PI,n4=2*n2;var n3={draw(e,t){let n=n1(t/n2);e.moveTo(n,0),e.arc(0,0,n,0,n4)}};let n6=n1(1/3),n5=2*n6,n8=n0(n2/10)/n0(7*n2/10),n9=n0(n4/10)*n8,n7=-nJ(n4/10)*n8,re=n1(3),rt=n1(3)/2,rn=1/n1(12),rr=(rn/2+1)*3;function ro(e){return function(){return e}}function ra(e,t){return t||(t=e.slice(0)),Object.freeze(Object.defineProperties(e,{raw:{value:Object.freeze(t)}}))}function ri(){let e=ra(["M",",",""]);return ri=function(){return e},e}function rl(){let e=ra(["Z"]);return rl=function(){return e},e}function rs(){let e=ra(["L",",",""]);return rs=function(){return e},e}function rc(){let e=ra(["Q",",",",",",",""]);return rc=function(){return e},e}function ru(){let e=ra(["C",",",",",",",",",",",""]);return 
ru=function(){return e},e}function rd(){let e=ra(["M",",",""]);return rd=function(){return e},e}function rp(){let e=ra(["L",",",""]);return rp=function(){return e},e}function rf(){let e=ra(["L",",",""]);return rf=function(){return e},e}function rm(){let e=ra(["A",",",",0,0,",",",",",""]);return rm=function(){return e},e}function rg(){let e=ra(["M",",",""]);return rg=function(){return e},e}function rh(){let e=ra(["L",",",""]);return rh=function(){return e},e}function rb(){let e=ra(["A",",",",0,1,",",",",","A",",",",0,1,",",",",",""]);return rb=function(){return e},e}function rv(){let e=ra(["A",",",",0,",",",",",",",""]);return rv=function(){return e},e}function ry(){let e=ra(["M",",","h","v","h","Z"]);return ry=function(){return e},e}let rE=Math.PI,rS=2*rE,rw=rS-1e-6;function rx(e){this._+=e[0];for(let t=1,n=e.length;t1e-6){if(Math.abs(u*l-s*c)>1e-6&&o){let p=n-a,f=r-i,m=l*l+s*s,g=Math.sqrt(m),h=Math.sqrt(d),b=o*Math.tan((rE-Math.acos((m+d-(p*p+f*f))/(2*g*h)))/2),v=b/h,y=b/g;Math.abs(v-1)>1e-6&&this._append(rf(),e+v*c,t+v*u),this._append(rm(),o,o,+(u*p>c*f),this._x1=e+y*l,this._y1=t+y*s)}else this._append(rp(),this._x1=e,this._y1=t)}}arc(e,t,n,r,o,a){if(e=+e,t=+t,a=!!a,(n=+n)<0)throw Error("negative radius: ".concat(n));let i=n*Math.cos(r),l=n*Math.sin(r),s=e+i,c=t+l,u=1^a,d=a?r-o:o-r;null===this._x1?this._append(rg(),s,c):(Math.abs(this._x1-s)>1e-6||Math.abs(this._y1-c)>1e-6)&&this._append(rh(),s,c),n&&(d<0&&(d=d%rS+rS),d>rw?this._append(rb(),n,n,u,e-i,t-l,n,n,u,this._x1=s,this._y1=c):d>1e-6&&this._append(rv(),n,n,+(d>=rE),u,this._x1=e+n*Math.cos(o),this._y1=t+n*Math.sin(o)))}rect(e,t,n,r){this._append(ry(),this._x0=this._x1=+e,this._y0=this._y1=+t,n=+n,+r,-n)}toString(){return this._}constructor(e){this._x0=this._y0=this._x1=this._y1=null,this._="",this._append=null==e?rx:function(e){let t=Math.floor(e);if(!(t>=0))throw Error("invalid digits: ".concat(e));if(t>15)return rx;let n=10**t;return function(e){this._+=e[0];for(let t=1,r=e.length;t=0))throw 
RangeError("invalid digits: ".concat(n));t=e}return e},()=>new rO(t)}function rT(e){return(rT="function"==typeof Symbol&&"symbol"==typeof Symbol.iterator?function(e){return typeof e}:function(e){return e&&"function"==typeof Symbol&&e.constructor===Symbol&&e!==Symbol.prototype?"symbol":typeof e})(e)}rO.prototype,n1(3),n1(3);var rk=["type","size","sizeType"];function rA(){return(rA=Object.assign?Object.assign.bind():function(e){for(var t=1;t=0||(o[n]=e[n]);return o}(e,t);if(Object.getOwnPropertySymbols){var a=Object.getOwnPropertySymbols(e);for(r=0;r=0)&&Object.prototype.propertyIsEnumerable.call(e,n)&&(o[n]=e[n])}return o}(e,rk)),{},{type:r,size:a,sizeType:l}),c=s.className,u=s.cx,d=s.cy,p=eA(s,!0);return u===+u&&d===+d&&a===+a?R.createElement("path",rA({},p,{className:N("recharts-symbols",c),transform:"translate(".concat(u,", ").concat(d,")"),d:(t=rN["symbol".concat(nQ()(r))]||n3,(function(e,t){let n=null,r=rC(o);function o(){let o;if(n||(n=o=r()),e.apply(this,arguments).draw(n,+t.apply(this,arguments)),o)return n=null,o+""||null}return e="function"==typeof e?e:ro(e||n3),t="function"==typeof t?t:ro(void 0===t?64:+t),o.type=function(t){return arguments.length?(e="function"==typeof t?t:ro(t),o):e},o.size=function(e){return arguments.length?(t="function"==typeof e?e:ro(+e),o):t},o.context=function(e){return arguments.length?(n=null==e?null:e,o):n},o})().type(t).size(rP(a,l,r))())})):null};function rL(e){return(rL="function"==typeof Symbol&&"symbol"==typeof Symbol.iterator?function(e){return typeof e}:function(e){return e&&"function"==typeof Symbol&&e.constructor===Symbol&&e!==Symbol.prototype?"symbol":typeof e})(e)}function rD(){return(rD=Object.assign?Object.assign.bind():function(e){for(var t=1;t');var f=t.inactive?i:t.color;return 
R.createElement("li",rD({className:d,style:s,key:"legend-item-".concat(n)},em(e.props,t,n)),R.createElement(eY,{width:r,height:r,viewBox:l,style:c},e.renderIcon(t)),R.createElement("span",{className:"recharts-legend-item-text",style:{color:f}},u?u(p,t,n):p))})}},{key:"render",value:function(){var e=this.props,t=e.payload,n=e.layout,r=e.align;return t&&t.length?R.createElement("ul",{className:"recharts-default-legend",style:{padding:0,margin:0,textAlign:"horizontal"===n?r:"left"}},this.renderItems()):null}}],rF(a.prototype,n),r&&rF(a,r),Object.defineProperty(a,"prototype",{writable:!1}),a}(R.PureComponent);function rG(e){return(rG="function"==typeof Symbol&&"symbol"==typeof Symbol.iterator?function(e){return typeof e}:function(e){return e&&"function"==typeof Symbol&&e.constructor===Symbol&&e!==Symbol.prototype?"symbol":typeof e})(e)}rZ(rH,"displayName","Legend"),rZ(rH,"defaultProps",{iconSize:14,layout:"horizontal",align:"center",verticalAlign:"middle",inactiveColor:"#ccc"});var r$=["ref"];function rW(e,t){var n=Object.keys(e);if(Object.getOwnPropertySymbols){var r=Object.getOwnPropertySymbols(e);t&&(r=r.filter(function(t){return Object.getOwnPropertyDescriptor(e,t).enumerable})),n.push.apply(n,r)}return n}function rV(e){for(var t=1;t1||Math.abs(t.height-this.lastBoundingBox.height)>1)&&(this.lastBoundingBox.width=t.width,this.lastBoundingBox.height=t.height,e&&e(t))}else(-1!==this.lastBoundingBox.width||-1!==this.lastBoundingBox.height)&&(this.lastBoundingBox.width=-1,this.lastBoundingBox.height=-1,e&&e(null))}},{key:"getBBoxSnapshot",value:function(){return this.lastBoundingBox.width>=0&&this.lastBoundingBox.height>=0?rV({},this.lastBoundingBox):{width:0,height:0}}},{key:"getDefaultPosition",value:function(e){var t,n,r=this.props,o=r.layout,a=r.align,i=r.verticalAlign,l=r.margin,s=r.chartWidth,c=r.chartHeight;return e&&(void 0!==e.left&&null!==e.left||void 
0!==e.right&&null!==e.right)||(t="center"===a&&"vertical"===o?{left:((s||0)-this.getBBoxSnapshot().width)/2}:"right"===a?{right:l&&l.right||0}:{left:l&&l.left||0}),e&&(void 0!==e.top&&null!==e.top||void 0!==e.bottom&&null!==e.bottom)||(n="middle"===i?{top:((c||0)-this.getBBoxSnapshot().height)/2}:"bottom"===i?{bottom:l&&l.bottom||0}:{top:l&&l.top||0}),rV(rV({},t),n)}},{key:"render",value:function(){var e=this,t=this.props,n=t.content,r=t.width,o=t.height,a=t.wrapperStyle,i=t.payloadUniqBy,l=t.payload,s=rV(rV({position:"absolute",width:r||"auto",height:o||"auto"},this.getDefaultPosition(a)),a);return R.createElement("div",{className:"recharts-legend-wrapper",style:s,ref:function(t){e.wrapperNode=t}},function(e,t){if(R.isValidElement(e))return R.cloneElement(e,t);if("function"==typeof e)return R.createElement(e,t);t.ref;var n=function(e,t){if(null==e)return{};var n,r,o=function(e,t){if(null==e)return{};var n,r,o={},a=Object.keys(e);for(r=0;r=0||(o[n]=e[n]);return o}(e,t);if(Object.getOwnPropertySymbols){var a=Object.getOwnPropertySymbols(e);for(r=0;r=0)&&Object.prototype.propertyIsEnumerable.call(e,n)&&(o[n]=e[n])}return o}(t,r$);return R.createElement(rH,n)}(n,rV(rV({},this.props),{},{payload:nU(l,i,r0)})))}}],r=[{key:"getWithHeight",value:function(e,t){var n=e.props.layout;return"vertical"===n&&$(e.props.height)?{height:e.props.height}:"horizontal"===n?{width:e.props.width||t}:null}}],n&&rq(a.prototype,n),r&&rq(a,r),Object.defineProperty(a,"prototype",{writable:!1}),a}(R.PureComponent);function r2(){return(r2=Object.assign?Object.assign.bind():function(e){for(var t=1;te.length)&&(t=e.length);for(var n=0,r=Array(t);n=0?1:-1,s=n>=0?1:-1,c=r>=0&&n>=0||r<0&&n<0?1:0;if(i>0&&o instanceof Array){for(var u=[0,0,0,0],d=0;d<4;d++)u[d]=o[d]>i?i:o[d];a="M".concat(e,",").concat(t+l*u[0]),u[0]>0&&(a+="A ".concat(u[0],",").concat(u[0],",0,0,").concat(c,",").concat(e+s*u[0],",").concat(t)),a+="L ".concat(e+n-s*u[1],",").concat(t),u[1]>0&&(a+="A 
".concat(u[1],",").concat(u[1],",0,0,").concat(c,",\n ").concat(e+n,",").concat(t+l*u[1])),a+="L ".concat(e+n,",").concat(t+r-l*u[2]),u[2]>0&&(a+="A ".concat(u[2],",").concat(u[2],",0,0,").concat(c,",\n ").concat(e+n-s*u[2],",").concat(t+r)),a+="L ".concat(e+s*u[3],",").concat(t+r),u[3]>0&&(a+="A ".concat(u[3],",").concat(u[3],",0,0,").concat(c,",\n ").concat(e,",").concat(t+r-l*u[3])),a+="Z"}else if(i>0&&o===+o&&o>0){var p=Math.min(i,o);a="M ".concat(e,",").concat(t+l*p,"\n A ").concat(p,",").concat(p,",0,0,").concat(c,",").concat(e+s*p,",").concat(t,"\n L ").concat(e+n-s*p,",").concat(t,"\n A ").concat(p,",").concat(p,",0,0,").concat(c,",").concat(e+n,",").concat(t+l*p,"\n L ").concat(e+n,",").concat(t+r-l*p,"\n A ").concat(p,",").concat(p,",0,0,").concat(c,",").concat(e+n-s*p,",").concat(t+r,"\n L ").concat(e+s*p,",").concat(t+r,"\n A ").concat(p,",").concat(p,",0,0,").concat(c,",").concat(e,",").concat(t+r-l*p," Z")}else a="M ".concat(e,",").concat(t," h ").concat(n," v ").concat(r," h ").concat(-n," Z");return a},oe=function(e,t){if(!e||!t)return!1;var n=e.x,r=e.y,o=t.x,a=t.y,i=t.width,l=t.height;return!!(Math.abs(i)>0&&Math.abs(l)>0)&&n>=Math.min(o,o+i)&&n<=Math.max(o,o+i)&&r>=Math.min(a,a+l)&&r<=Math.max(a,a+l)},ot={x:0,y:0,width:0,height:0,radius:0,isAnimationActive:!1,isUpdateAnimationActive:!1,animationBegin:0,animationDuration:1500,animationEasing:"ease"},on=function(e){var t,n=r9(r9({},ot),e),r=(0,R.useRef)(),o=function(e){if(Array.isArray(e))return e}(t=(0,R.useState)(-1))||function(e,t){var n=null==e?null:"undefined"!=typeof Symbol&&e[Symbol.iterator]||e["@@iterator"];if(null!=n){var r,o,a,i,l=[],s=!0,c=!1;try{if(a=(n=n.call(e)).next,0===t){if(Object(n)!==n)return;s=!1}else for(;!(s=(r=a.call(n)).done)&&(l.push(r.value),l.length!==t);s=!0);}catch(e){c=!0,o=e}finally{try{if(!s&&null!=n.return&&(i=n.return(),Object(i)!==i))return}finally{if(c)throw o}}return l}}(t,2)||function(e,t){if(e){if("string"==typeof e)return r5(e,t);var 
n=Object.prototype.toString.call(e).slice(8,-1);if("Object"===n&&e.constructor&&(n=e.constructor.name),"Map"===n||"Set"===n)return Array.from(e);if("Arguments"===n||/^(?:Ui|I)nt(?:8|16|32)(?:Clamped)?Array$/.test(n))return r5(e,t)}}(t,2)||function(){throw TypeError("Invalid attempt to destructure non-iterable instance.\nIn order to be iterable, non-array objects must have a [Symbol.iterator]() method.")}(),a=o[0],i=o[1];(0,R.useEffect)(function(){if(r.current&&r.current.getTotalLength)try{var e=r.current.getTotalLength();e&&i(e)}catch(e){}},[]);var l=n.x,s=n.y,c=n.width,u=n.height,d=n.radius,p=n.className,f=n.animationEasing,m=n.animationDuration,g=n.animationBegin,h=n.isAnimationActive,b=n.isUpdateAnimationActive;if(l!==+l||s!==+s||c!==+c||u!==+u||0===c||0===u)return null;var v=N("recharts-rectangle",p);return b?R.createElement(ni,{canBegin:a>0,from:{width:c,height:u,x:l,y:s},to:{width:c,height:u,x:l,y:s},duration:m,animationEasing:f,isActive:b},function(e){var t=e.width,o=e.height,i=e.x,l=e.y;return R.createElement(ni,{canBegin:a>0,from:"0px ".concat(-1===a?1:a,"px"),to:"".concat(a,"px 0px"),attributeName:"strokeDasharray",begin:g,duration:m,isActive:h,easing:f},R.createElement("path",r6({},eA(n,!0),{className:v,d:r7(i,l,t,o,d),ref:r})))}):R.createElement("path",r6({},eA(n,!0),{className:v,d:r7(l,s,c,u,d)}))};function or(e,t){switch(arguments.length){case 0:break;case 1:this.range(e);break;default:this.range(t).domain(e)}return this}function oo(e,t){switch(arguments.length){case 0:break;case 1:"function"==typeof e?this.interpolator(e):this.range(e);break;default:this.domain(e),"function"==typeof t?this.interpolator(t):this.range(t)}return this}class oa extends Map{get(e){return super.get(oi(this,e))}has(e){return super.has(oi(this,e))}set(e,t){return super.set(function(e,t){let{_intern:n,_key:r}=e,o=r(t);return n.has(o)?n.get(o):(n.set(o,t),t)}(this,e),t)}delete(e){return super.delete(function(e,t){let{_intern:n,_key:r}=e,o=r(t);return 
n.has(o)&&(t=n.get(o),n.delete(o)),t}(this,e))}constructor(e,t=ol){if(super(),Object.defineProperties(this,{_intern:{value:new Map},_key:{value:t}}),null!=e)for(let[t,n]of e)this.set(t,n)}}function oi(e,t){let{_intern:n,_key:r}=e,o=r(t);return n.has(o)?n.get(o):t}function ol(e){return null!==e&&"object"==typeof e?e.valueOf():e}let os=Symbol("implicit");function oc(){var e=new oa,t=[],n=[],r=os;function o(o){let a=e.get(o);if(void 0===a){if(r!==os)return r;e.set(o,a=t.push(o)-1)}return n[a%n.length]}return o.domain=function(n){if(!arguments.length)return t.slice();for(let r of(t=[],e=new oa,n))e.has(r)||e.set(r,t.push(r)-1);return o},o.range=function(e){return arguments.length?(n=Array.from(e),o):n.slice()},o.unknown=function(e){return arguments.length?(r=e,o):r},o.copy=function(){return oc(t,n).unknown(r)},or.apply(o,arguments),o}function ou(){var e,t,n=oc().unknown(void 0),r=n.domain,o=n.range,a=0,i=1,l=!1,s=0,c=0,u=.5;function d(){var n=r().length,d=i1&&void 0!==arguments[1]?arguments[1]:{};if(null==e||nj.isSsr)return{width:0,height:0};var r=(Object.keys(t=om({},n)).forEach(function(e){t[e]||delete t[e]}),t),o=JSON.stringify({text:e,copyStyle:r});if(og.widthCache[o])return og.widthCache[o];try{var a=document.getElementById(ob);a||((a=document.createElement("span")).setAttribute("id",ob),a.setAttribute("aria-hidden","true"),document.body.appendChild(a));var i=om(om({},oh),r);Object.assign(a.style,i),a.textContent="".concat(e);var l=a.getBoundingClientRect(),s={width:l.width,height:l.height};return og.widthCache[o]=s,++og.cacheCount>2e3&&(og.cacheCount=0,og.widthCache={}),s}catch(e){return{width:0,height:0}}};function oy(e){return(oy="function"==typeof Symbol&&"symbol"==typeof Symbol.iterator?function(e){return typeof e}:function(e){return e&&"function"==typeof Symbol&&e.constructor===Symbol&&e!==Symbol.prototype?"symbol":typeof e})(e)}function oE(e,t){return function(e){if(Array.isArray(e))return e}(e)||function(e,t){var n=null==e?null:"undefined"!=typeof 
Symbol&&e[Symbol.iterator]||e["@@iterator"];if(null!=n){var r,o,a,i,l=[],s=!0,c=!1;try{if(a=(n=n.call(e)).next,0===t){if(Object(n)!==n)return;s=!1}else for(;!(s=(r=a.call(n)).done)&&(l.push(r.value),l.length!==t);s=!0);}catch(e){c=!0,o=e}finally{try{if(!s&&null!=n.return&&(i=n.return(),Object(i)!==i))return}finally{if(c)throw o}}return l}}(e,t)||function(e,t){if(e){if("string"==typeof e)return oS(e,t);var n=Object.prototype.toString.call(e).slice(8,-1);if("Object"===n&&e.constructor&&(n=e.constructor.name),"Map"===n||"Set"===n)return Array.from(e);if("Arguments"===n||/^(?:Ui|I)nt(?:8|16|32)(?:Clamped)?Array$/.test(n))return oS(e,t)}}(e,t)||function(){throw TypeError("Invalid attempt to destructure non-iterable instance.\nIn order to be iterable, non-array objects must have a [Symbol.iterator]() method.")}()}function oS(e,t){(null==t||t>e.length)&&(t=e.length);for(var n=0,r=Array(t);n=0||(o[n]=e[n]);return o}(e,t);if(Object.getOwnPropertySymbols){var a=Object.getOwnPropertySymbols(e);for(r=0;r=0)&&Object.prototype.propertyIsEnumerable.call(e,n)&&(o[n]=e[n])}return o}function oj(e,t){return function(e){if(Array.isArray(e))return e}(e)||function(e,t){var n=null==e?null:"undefined"!=typeof Symbol&&e[Symbol.iterator]||e["@@iterator"];if(null!=n){var r,o,a,i,l=[],s=!0,c=!1;try{if(a=(n=n.call(e)).next,0===t){if(Object(n)!==n)return;s=!1}else for(;!(s=(r=a.call(n)).done)&&(l.push(r.value),l.length!==t);s=!0);}catch(e){c=!0,o=e}finally{try{if(!s&&null!=n.return&&(i=n.return(),Object(i)!==i))return}finally{if(c)throw o}}return l}}(e,t)||function(e,t){if(e){if("string"==typeof e)return oF(e,t);var n=Object.prototype.toString.call(e).slice(8,-1);if("Object"===n&&e.constructor&&(n=e.constructor.name),"Map"===n||"Set"===n)return Array.from(e);if("Arguments"===n||/^(?:Ui|I)nt(?:8|16|32)(?:Clamped)?Array$/.test(n))return oF(e,t)}}(e,t)||function(){throw TypeError("Invalid attempt to destructure non-iterable instance.\nIn order to be iterable, non-array objects must have a 
[Symbol.iterator]() method.")}()}function oF(e,t){(null==t||t>e.length)&&(t=e.length);for(var n=0,r=Array(t);n0&&void 0!==arguments[0]?arguments[0]:[];return e.reduce(function(e,t){var a=t.word,i=t.width,l=e[e.length-1];return l&&(null==r||o||l.width+i+ni||t.reduce(function(e,t){return e.width>t.width?e:t}).width>Number(r),t]},m=0,g=l.length-1,h=0;m<=g&&h<=l.length-1;){var b=Math.floor((m+g)/2),v=oj(f(b-1),2),y=v[0],E=v[1],S=oj(f(b),1)[0];if(y||S||(m=b+1),y&&S&&(g=b-1),!y&&S){a=E;break}h++}return a||p},oz=function(e){return[{words:en()(e)?[]:e.toString().split(oB)}]},oH=function(e){var t=e.width,n=e.scaleToFit,r=e.children,o=e.style,a=e.breakAll,i=e.maxLines;if((t||n)&&!nj.isSsr){var l=oU({breakAll:a,children:r,style:o});return l?oZ({breakAll:a,children:r,maxLines:i,style:o},l.wordsWithComputedWidth,l.spaceWidth,t,n):oz(r)}return oz(r)},oG="#808080",o$=function(e){var t,n=e.x,r=void 0===n?0:n,o=e.y,a=void 0===o?0:o,i=e.lineHeight,l=void 0===i?"1em":i,s=e.capHeight,c=void 0===s?"0.71em":s,u=e.scaleToFit,d=void 0!==u&&u,p=e.textAnchor,f=e.verticalAnchor,m=e.fill,g=void 0===m?oG:m,h=oD(e,oP),b=(0,R.useMemo)(function(){return oH({breakAll:h.breakAll,children:h.children,maxLines:h.maxLines,scaleToFit:d,style:h.style,width:h.width})},[h.breakAll,h.children,h.maxLines,d,h.style,h.width]),v=h.dx,y=h.dy,E=h.angle,S=h.className,w=h.breakAll,x=oD(h,oM);if(!W(r)||!W(a))return null;var O=r+($(v)?v:0),C=a+($(y)?y:0);switch(void 0===f?"end":f){case"start":t=o_("calc(".concat(c,")"));break;case"middle":t=o_("calc(".concat((b.length-1)/2," * -").concat(l," + (").concat(c," / 2))"));break;default:t=o_("calc(".concat(b.length-1," * -").concat(l,")"))}var T=[];if(d){var k=b[0].width,A=h.width;T.push("scale(".concat(($(A)?A/k:1)/k,")"))}return E&&T.push("rotate(".concat(E,", ").concat(O,", ").concat(C,")")),T.length&&(x.transform=T.join(" ")),R.createElement("text",oL({},eA(x,!0),{x:O,y:C,className:N("recharts-text",S),textAnchor:void 
0===p?"start":p,fill:g.includes("url")?oG:g}),b.map(function(e,n){var r=e.words.join(w?"":" ");return R.createElement("tspan",{x:O,dy:0===n?t:l,key:r},r)}))};let oW=Math.sqrt(50),oV=Math.sqrt(10),oq=Math.sqrt(2);function oY(e,t,n){let r,o,a;let i=(t-e)/Math.max(0,n),l=Math.floor(Math.log10(i)),s=i/Math.pow(10,l),c=s>=oW?10:s>=oV?5:s>=oq?2:1;return(l<0?(r=Math.round(e*(a=Math.pow(10,-l)/c)),o=Math.round(t*a),r/at&&--o,a=-a):(r=Math.round(e/(a=Math.pow(10,l)*c)),o=Math.round(t/a),r*at&&--o),o0))return[];if(e===t)return[e];let r=t=o))return[];let l=a-o+1,s=Array(l);if(r){if(i<0)for(let e=0;et?1:e>=t?0:NaN}function o0(e,t){return null==e||null==t?NaN:te?1:t>=e?0:NaN}function o1(e){let t,n,r;function o(e,r){let o=arguments.length>2&&void 0!==arguments[2]?arguments[2]:0,a=arguments.length>3&&void 0!==arguments[3]?arguments[3]:e.length;if(o>>1;0>n(e[t],r)?o=t+1:a=t}while(ooJ(e(t),n),r=(t,n)=>e(t)-n):(t=e===oJ||e===o0?e:o2,n=e,r=e),{left:o,center:function(e,t){let n=arguments.length>2&&void 0!==arguments[2]?arguments[2]:0,a=arguments.length>3&&void 0!==arguments[3]?arguments[3]:e.length,i=o(e,t,n,a-1);return i>n&&r(e[i-1],t)>-r(e[i],t)?i-1:i},right:function(e,r){let o=arguments.length>2&&void 0!==arguments[2]?arguments[2]:0,a=arguments.length>3&&void 0!==arguments[3]?arguments[3]:e.length;if(o>>1;0>=n(e[t],r)?o=t+1:a=t}while(o>8&15|t>>4&240,t>>4&15|240&t,(15&t)<<4|15&t,1):8===n?am(t>>24&255,t>>16&255,t>>8&255,(255&t)/255):4===n?am(t>>12&15|t>>8&240,t>>8&15|t>>4&240,t>>4&15|240&t,((15&t)<<4|15&t)/255):null):(t=ar.exec(e))?new ah(t[1],t[2],t[3],1):(t=ao.exec(e))?new ah(255*t[1]/100,255*t[2]/100,255*t[3]/100,1):(t=aa.exec(e))?am(t[1],t[2],t[3],t[4]):(t=ai.exec(e))?am(255*t[1]/100,255*t[2]/100,255*t[3]/100,t[4]):(t=al.exec(e))?aw(t[1],t[2]/100,t[3]/100,1):(t=as.exec(e))?aw(t[1],t[2]/100,t[3]/100,t[4]):ac.hasOwnProperty(e)?af(ac[e]):"transparent"===e?new ah(NaN,NaN,NaN,0):null}function af(e){return new ah(e>>16&255,e>>8&255,255&e,1)}function am(e,t,n,r){return 
r<=0&&(e=t=n=NaN),new ah(e,t,n,r)}function ag(e,t,n,r){var o;return 1==arguments.length?((o=e)instanceof o9||(o=ap(o)),o)?new ah((o=o.rgb()).r,o.g,o.b,o.opacity):new ah:new ah(e,t,n,null==r?1:r)}function ah(e,t,n,r){this.r=+e,this.g=+t,this.b=+n,this.opacity=+r}function ab(){return"#".concat(aS(this.r)).concat(aS(this.g)).concat(aS(this.b))}function av(){let e=ay(this.opacity);return"".concat(1===e?"rgb(":"rgba(").concat(aE(this.r),", ").concat(aE(this.g),", ").concat(aE(this.b)).concat(1===e?")":", ".concat(e,")"))}function ay(e){return isNaN(e)?1:Math.max(0,Math.min(1,e))}function aE(e){return Math.max(0,Math.min(255,Math.round(e)||0))}function aS(e){return((e=aE(e))<16?"0":"")+e.toString(16)}function aw(e,t,n,r){return r<=0?e=t=n=NaN:n<=0||n>=1?e=t=NaN:t<=0&&(e=NaN),new aO(e,t,n,r)}function ax(e){if(e instanceof aO)return new aO(e.h,e.s,e.l,e.opacity);if(e instanceof o9||(e=ap(e)),!e)return new aO;if(e instanceof aO)return e;var t=(e=e.rgb()).r/255,n=e.g/255,r=e.b/255,o=Math.min(t,n,r),a=Math.max(t,n,r),i=NaN,l=a-o,s=(a+o)/2;return l?(i=t===a?(n-r)/l+(n0&&s<1?0:i,new aO(i,l,s,e.opacity)}function aO(e,t,n,r){this.h=+e,this.s=+t,this.l=+n,this.opacity=+r}function aC(e){return(e=(e||0)%360)<0?e+360:e}function aT(e){return Math.max(0,Math.min(1,e||0))}function ak(e,t,n){return(e<60?t+(n-t)*e/60:e<180?n:e<240?t+(n-t)*(240-e)/60:t)*255}function aA(e,t,n,r,o){var a=e*e,i=a*e;return((1-3*e+3*a-i)*t+(4-6*a+3*i)*n+(1+3*e+3*a-3*i)*r+i*o)/6}o5(o9,ap,{copy(e){return Object.assign(new this.constructor,this,e)},displayable(){return this.rgb().displayable()},hex:au,formatHex:au,formatHex8:function(){return this.rgb().formatHex8()},formatHsl:function(){return ax(this).formatHsl()},formatRgb:ad,toString:ad}),o5(ah,ag,o8(o9,{brighter(e){return e=null==e?1.4285714285714286:Math.pow(1.4285714285714286,e),new ah(this.r*e,this.g*e,this.b*e,this.opacity)},darker(e){return e=null==e?.7:Math.pow(.7,e),new ah(this.r*e,this.g*e,this.b*e,this.opacity)},rgb(){return this},clamp(){return new 
ah(aE(this.r),aE(this.g),aE(this.b),ay(this.opacity))},displayable(){return -.5<=this.r&&this.r<255.5&&-.5<=this.g&&this.g<255.5&&-.5<=this.b&&this.b<255.5&&0<=this.opacity&&this.opacity<=1},hex:ab,formatHex:ab,formatHex8:function(){return"#".concat(aS(this.r)).concat(aS(this.g)).concat(aS(this.b)).concat(aS((isNaN(this.opacity)?1:this.opacity)*255))},formatRgb:av,toString:av})),o5(aO,function(e,t,n,r){return 1==arguments.length?ax(e):new aO(e,t,n,null==r?1:r)},o8(o9,{brighter(e){return e=null==e?1.4285714285714286:Math.pow(1.4285714285714286,e),new aO(this.h,this.s,this.l*e,this.opacity)},darker(e){return e=null==e?.7:Math.pow(.7,e),new aO(this.h,this.s,this.l*e,this.opacity)},rgb(){var e=this.h%360+(this.h<0)*360,t=isNaN(e)||isNaN(this.s)?0:this.s,n=this.l,r=n+(n<.5?n:1-n)*t,o=2*n-r;return new ah(ak(e>=240?e-240:e+120,o,r),ak(e,o,r),ak(e<120?e+240:e-120,o,r),this.opacity)},clamp(){return new aO(aC(this.h),aT(this.s),aT(this.l),ay(this.opacity))},displayable(){return(0<=this.s&&this.s<=1||isNaN(this.s))&&0<=this.l&&this.l<=1&&0<=this.opacity&&this.opacity<=1},formatHsl(){let e=ay(this.opacity);return"".concat(1===e?"hsl(":"hsla(").concat(aC(this.h),", ").concat(100*aT(this.s),"%, ").concat(100*aT(this.l),"%").concat(1===e?")":", ".concat(e,")"))}}));var aI=e=>()=>e;function aR(e,t){var n=t-e;return n?function(t){return e+t*n}:aI(isNaN(e)?t:e)}var aN=function e(t){var n,r=1==(n=+(n=t))?aR:function(e,t){var r,o,a;return t-e?(r=e,o=t,r=Math.pow(r,a=n),o=Math.pow(o,a)-r,a=1/a,function(e){return Math.pow(r+e*o,a)}):aI(isNaN(e)?t:e)};function o(e,t){var n=r((e=ag(e)).r,(t=ag(t)).r),o=r(e.g,t.g),a=r(e.b,t.b),i=aR(e.opacity,t.opacity);return function(t){return e.r=n(t),e.g=o(t),e.b=a(t),e.opacity=i(t),e+""}}return o.gamma=e,o}(1);function a_(e){return function(t){var 
n,r,o=t.length,a=Array(o),i=Array(o),l=Array(o);for(n=0;n=1?(n=1,t-1):Math.floor(n*t),o=e[r],a=e[r+1],i=r>0?e[r-1]:2*o-a,l=rl&&(i=t.slice(l,i),c[s]?c[s]+=i:c[++s]=i),(o=o[0])===(a=a[0])?c[s]?c[s]+=a:c[++s]=a:(c[++s]=null,u.push({i:s,x:aP(o,a)})),l=aL.lastIndex;return lt&&(n=e,e=t,t=n),c=function(n){return Math.max(e,Math.min(t,n))}),r=s>2?aH:az,o=a=null,d}function d(t){return null==t||isNaN(t=+t)?n:(o||(o=r(i.map(e),l,s)))(e(c(t)))}return d.invert=function(n){return c(t((a||(a=r(l,i.map(e),aP)))(n)))},d.domain=function(e){return arguments.length?(i=Array.from(e,aF),u()):i.slice()},d.range=function(e){return arguments.length?(l=Array.from(e),u()):l.slice()},d.rangeRound=function(e){return l=Array.from(e),s=aj,u()},d.clamp=function(e){return arguments.length?(c=!!e||aU,u()):c!==aU},d.interpolate=function(e){return arguments.length?(s=e,u()):s},d.unknown=function(e){return arguments.length?(n=e,d):n},function(n,r){return e=n,t=r,u()}}function aW(){return a$()(aU,aU)}var aV=/^(?:(.)?([<>=^]))?([+\-( ])?([$#])?(0)?(\d+)?(,)?(\.\d+)?(~)?([a-z%])?$/i;function aq(e){var t;if(!(t=aV.exec(e)))throw Error("invalid format: "+e);return new aY({fill:t[1],align:t[2],sign:t[3],symbol:t[4],zero:t[5],width:t[6],comma:t[7],precision:t[8]&&t[8].slice(1),trim:t[9],type:t[10]})}function aY(e){this.fill=void 0===e.fill?" 
":e.fill+"",this.align=void 0===e.align?">":e.align+"",this.sign=void 0===e.sign?"-":e.sign+"",this.symbol=void 0===e.symbol?"":e.symbol+"",this.zero=!!e.zero,this.width=void 0===e.width?void 0:+e.width,this.comma=!!e.comma,this.precision=void 0===e.precision?void 0:+e.precision,this.trim=!!e.trim,this.type=void 0===e.type?"":e.type+""}function aK(e,t){if((n=(e=t?e.toExponential(t-1):e.toExponential()).indexOf("e"))<0)return null;var n,r=e.slice(0,n);return[r.length>1?r[0]+r.slice(2):r,+e.slice(n+1)]}function aX(e){return(e=aK(Math.abs(e)))?e[1]:NaN}function aQ(e,t){var n=aK(e,t);if(!n)return e+"";var r=n[0],o=n[1];return o<0?"0."+Array(-o).join("0")+r:r.length>o+1?r.slice(0,o+1)+"."+r.slice(o+1):r+Array(o-r.length+2).join("0")}aq.prototype=aY.prototype,aY.prototype.toString=function(){return this.fill+this.align+this.sign+this.symbol+(this.zero?"0":"")+(void 0===this.width?"":Math.max(1,0|this.width))+(this.comma?",":"")+(void 0===this.precision?"":"."+Math.max(0,0|this.precision))+(this.trim?"~":"")+this.type};var aJ={"%":(e,t)=>(100*e).toFixed(t),b:e=>Math.round(e).toString(2),c:e=>e+"",d:function(e){return Math.abs(e=Math.round(e))>=1e21?e.toLocaleString("en").replace(/,/g,""):e.toString(10)},e:(e,t)=>e.toExponential(t),f:(e,t)=>e.toFixed(t),g:(e,t)=>e.toPrecision(t),o:e=>Math.round(e).toString(8),p:(e,t)=>aQ(100*e,t),r:aQ,s:function(e,t){var n=aK(e,t);if(!n)return e+"";var r=n[0],o=n[1],a=o-(b=3*Math.max(-8,Math.min(8,Math.floor(o/3))))+1,i=r.length;return a===i?r:a>i?r+Array(a-i+1).join("0"):a>0?r.slice(0,a)+"."+r.slice(a):"0."+Array(1-a).join("0")+aK(e,Math.max(0,t+a-1))[0]},X:e=>Math.round(e).toString(16).toUpperCase(),x:e=>Math.round(e).toString(16)};function a0(e){return e}var a1=Array.prototype.map,a2=["y","z","a","f","p","n","\xb5","m","","k","M","G","T","P","E","Z","Y"];function a4(e,t,n,r){var o,a,i=oQ(e,t,n);switch((r=aq(null==r?",f":r)).type){case"s":var l=Math.max(Math.abs(e),Math.abs(t));return 
null!=r.precision||isNaN(a=Math.max(0,3*Math.max(-8,Math.min(8,Math.floor(aX(l)/3)))-aX(Math.abs(i))))||(r.precision=a),E(r,l);case"":case"e":case"g":case"p":case"r":null!=r.precision||isNaN(a=Math.max(0,aX(Math.abs(Math.max(Math.abs(e),Math.abs(t)))-(o=Math.abs(o=i)))-aX(o))+1)||(r.precision=a-("e"===r.type));break;case"f":case"%":null!=r.precision||isNaN(a=Math.max(0,-aX(Math.abs(i))))||(r.precision=a-("%"===r.type)*2)}return y(r)}function a3(e){var t=e.domain;return e.ticks=function(e){var n=t();return oK(n[0],n[n.length-1],null==e?10:e)},e.tickFormat=function(e,n){var r=t();return a4(r[0],r[r.length-1],null==e?10:e,n)},e.nice=function(n){null==n&&(n=10);var r,o,a=t(),i=0,l=a.length-1,s=a[i],c=a[l],u=10;for(c0;){if((o=oX(s,c,n))===r)return a[i]=s,a[l]=c,t(a);if(o>0)s=Math.floor(s/o)*o,c=Math.ceil(c/o)*o;else if(o<0)s=Math.ceil(s*o)/o,c=Math.floor(c*o)/o;else break;r=o}return e},e}function a6(){var e=aW();return e.copy=function(){return aG(e,a6())},or.apply(e,arguments),a3(e)}function a5(e,t){e=e.slice();var n,r=0,o=e.length-1,a=e[r],i=e[o];return i-e(-t,n)}function io(e){let t,n;let r=e(a8,a9),o=r.domain,a=10;function i(){var i,l;return t=(i=a)===Math.E?Math.log:10===i&&Math.log10||2===i&&Math.log2||(i=Math.log(i),e=>Math.log(e)/i),n=10===(l=a)?it:l===Math.E?Math.exp:e=>Math.pow(l,e),o()[0]<0?(t=ir(t),n=ir(n),e(a7,ie)):e(a8,a9),r}return r.base=function(e){return arguments.length?(a=+e,i()):a},r.domain=function(e){return arguments.length?(o(e),i()):o()},r.ticks=e=>{let r,i;let l=o(),s=l[0],c=l[l.length-1],u=c0){for(;d<=p;++d)for(r=1;rc)break;m.push(i)}}else for(;d<=p;++d)for(r=a-1;r>=1;--r)if(!((i=d>0?r/n(-d):r*n(d))c)break;m.push(i)}2*m.length{if(null==e&&(e=10),null==o&&(o=10===a?"s":","),"function"!=typeof o&&(a%1||null!=(o=aq(o)).precision||(o.trim=!0),o=y(o)),e===1/0)return o;let i=Math.max(1,a*e/r.ticks().length);return e=>{let r=e/n(Math.round(t(e)));return r*ao(a5(o(),{floor:e=>n(Math.floor(t(e))),ceil:e=>n(Math.ceil(t(e)))})),r}function ia(e){return 
function(t){return Math.sign(t)*Math.log1p(Math.abs(t/e))}}function ii(e){return function(t){return Math.sign(t)*Math.expm1(Math.abs(t))*e}}function il(e){var t=1,n=e(ia(1),ii(t));return n.constant=function(n){return arguments.length?e(ia(t=+n),ii(t)):t},a3(n)}function is(e){return function(t){return t<0?-Math.pow(-t,e):Math.pow(t,e)}}function ic(e){return e<0?-Math.sqrt(-e):Math.sqrt(e)}function iu(e){return e<0?-e*e:e*e}function id(e){var t=e(aU,aU),n=1;return t.exponent=function(t){return arguments.length?1==(n=+t)?e(aU,aU):.5===n?e(ic,iu):e(is(n),is(1/n)):n},a3(t)}function ip(){var e=id(a$());return e.copy=function(){return aG(e,ip()).exponent(e.exponent())},or.apply(e,arguments),e}function im(){return ip.apply(null,arguments).exponent(.5)}function ig(e){return Math.sign(e)*e*e}function ih(e,t){let n;if(void 0===t)for(let t of e)null!=t&&(n=t)&&(n=t);else{let r=-1;for(let o of e)null!=(o=t(o,++r,e))&&(n=o)&&(n=o)}return n}function ib(e,t){let n;if(void 0===t)for(let t of e)null!=t&&(n>t||void 0===n&&t>=t)&&(n=t);else{let r=-1;for(let o of e)null!=(o=t(o,++r,e))&&(n>o||void 0===n&&o>=o)&&(n=o)}return n}function iv(e,t){return(null==e||!(e>=e))-(null==t||!(t>=t))||(et?1:0)}function iy(e,t,n){let r=e[t];e[t]=e[n],e[n]=r}y=(v=function(e){var t,n,r,o=void 0===e.grouping||void 0===e.thousands?a0:(t=a1.call(e.grouping,Number),n=e.thousands+"",function(e,r){for(var o=e.length,a=[],i=0,l=t[0],s=0;o>0&&l>0&&(s+l+1>r&&(l=Math.max(1,r-s)),a.push(e.substring(o-=l,o+l)),!((s+=l+1)>r));)l=t[i=(i+1)%t.length];return a.reverse().join(n)}),a=void 0===e.currency?"":e.currency[0]+"",i=void 0===e.currency?"":e.currency[1]+"",l=void 0===e.decimal?".":e.decimal+"",s=void 0===e.numerals?a0:(r=a1.call(e.numerals,String),function(e){return e.replace(/[0-9]/g,function(e){return r[+e]})}),c=void 0===e.percent?"%":e.percent+"",u=void 0===e.minus?"−":e.minus+"",d=void 0===e.nan?"NaN":e.nan+"";function p(e){var 
t=(e=aq(e)).fill,n=e.align,r=e.sign,p=e.symbol,f=e.zero,m=e.width,g=e.comma,h=e.precision,v=e.trim,y=e.type;"n"===y?(g=!0,y="g"):aJ[y]||(void 0===h&&(h=12),v=!0,y="g"),(f||"0"===t&&"="===n)&&(f=!0,t="0",n="=");var E="$"===p?a:"#"===p&&/[boxX]/.test(y)?"0"+y.toLowerCase():"",S="$"===p?i:/[%p]/.test(y)?c:"",w=aJ[y],x=/[defgprs%]/.test(y);function O(e){var a,i,c,p=E,O=S;if("c"===y)O=w(e)+O,e="";else{var C=(e=+e)<0||1/e<0;if(e=isNaN(e)?d:w(Math.abs(e),h),v&&(e=function(e){e:for(var t,n=e.length,r=1,o=-1;r0&&(o=0)}return o>0?e.slice(0,o)+e.slice(t+1):e}(e)),C&&0==+e&&"+"!==r&&(C=!1),p=(C?"("===r?r:u:"-"===r||"("===r?"":r)+p,O=("s"===y?a2[8+b/3]:"")+O+(C&&"("===r?")":""),x){for(a=-1,i=e.length;++a(c=e.charCodeAt(a))||c>57){O=(46===c?l+e.slice(a+1):e.slice(a))+O,e=e.slice(0,a);break}}}g&&!f&&(e=o(e,1/0));var T=p.length+e.length+O.length,k=T>1)+p+e+O+k.slice(T);break;default:e=k+p+e+O}return s(e)}return h=void 0===h?6:/[gprs]/.test(y)?Math.max(1,Math.min(21,h)):Math.max(0,Math.min(20,h)),O.toString=function(){return e+""},O}return{format:p,formatPrefix:function(e,t){var n=p(((e=aq(e)).type="f",e)),r=3*Math.max(-8,Math.min(8,Math.floor(aX(t)/3))),o=Math.pow(10,-r),a=a2[8+r/3];return function(e){return n(o*e)+a}}}}({thousands:",",grouping:[3],currency:["$",""]})).format,E=v.formatPrefix;let iE=new Date,iS=new Date;function iw(e,t,n,r){function o(t){return e(t=0==arguments.length?new Date:new Date(+t)),t}return o.floor=t=>(e(t=new Date(+t)),t),o.ceil=n=>(e(n=new Date(n-1)),t(n,1),e(n),n),o.round=e=>{let t=o(e),n=o.ceil(e);return e-t(t(e=new Date(+e),null==n?1:Math.floor(n)),e),o.range=(n,r,a)=>{let i;let l=[];if(n=o.ceil(n),a=null==a?1:Math.floor(a),!(n0))return l;do l.push(i=new Date(+n)),t(n,a),e(n);while(iiw(t=>{if(t>=t)for(;e(t),!n(t);)t.setTime(t-1)},(e,r)=>{if(e>=e){if(r<0)for(;++r<=0;)for(;t(e,-1),!n(e););else 
for(;--r>=0;)for(;t(e,1),!n(e););}}),n&&(o.count=(t,r)=>(iE.setTime(+t),iS.setTime(+r),e(iE),e(iS),Math.floor(n(iE,iS))),o.every=e=>isFinite(e=Math.floor(e))&&e>0?e>1?o.filter(r?t=>r(t)%e==0:t=>o.count(0,t)%e==0):o:null),o}let ix=iw(()=>{},(e,t)=>{e.setTime(+e+t)},(e,t)=>t-e);ix.every=e=>isFinite(e=Math.floor(e))&&e>0?e>1?iw(t=>{t.setTime(Math.floor(t/e)*e)},(t,n)=>{t.setTime(+t+n*e)},(t,n)=>(n-t)/e):ix:null,ix.range;let iO=iw(e=>{e.setTime(e-e.getMilliseconds())},(e,t)=>{e.setTime(+e+1e3*t)},(e,t)=>(t-e)/1e3,e=>e.getUTCSeconds());iO.range;let iC=iw(e=>{e.setTime(e-e.getMilliseconds()-1e3*e.getSeconds())},(e,t)=>{e.setTime(+e+6e4*t)},(e,t)=>(t-e)/6e4,e=>e.getMinutes());iC.range;let iT=iw(e=>{e.setUTCSeconds(0,0)},(e,t)=>{e.setTime(+e+6e4*t)},(e,t)=>(t-e)/6e4,e=>e.getUTCMinutes());iT.range;let ik=iw(e=>{e.setTime(e-e.getMilliseconds()-1e3*e.getSeconds()-6e4*e.getMinutes())},(e,t)=>{e.setTime(+e+36e5*t)},(e,t)=>(t-e)/36e5,e=>e.getHours());ik.range;let iA=iw(e=>{e.setUTCMinutes(0,0,0)},(e,t)=>{e.setTime(+e+36e5*t)},(e,t)=>(t-e)/36e5,e=>e.getUTCHours());iA.range;let iI=iw(e=>e.setHours(0,0,0,0),(e,t)=>e.setDate(e.getDate()+t),(e,t)=>(t-e-(t.getTimezoneOffset()-e.getTimezoneOffset())*6e4)/864e5,e=>e.getDate()-1);iI.range;let iR=iw(e=>{e.setUTCHours(0,0,0,0)},(e,t)=>{e.setUTCDate(e.getUTCDate()+t)},(e,t)=>(t-e)/864e5,e=>e.getUTCDate()-1);iR.range;let iN=iw(e=>{e.setUTCHours(0,0,0,0)},(e,t)=>{e.setUTCDate(e.getUTCDate()+t)},(e,t)=>(t-e)/864e5,e=>Math.floor(e/864e5));function i_(e){return iw(t=>{t.setDate(t.getDate()-(t.getDay()+7-e)%7),t.setHours(0,0,0,0)},(e,t)=>{e.setDate(e.getDate()+7*t)},(e,t)=>(t-e-(t.getTimezoneOffset()-e.getTimezoneOffset())*6e4)/6048e5)}iN.range;let iP=i_(0),iM=i_(1),iL=i_(2),iD=i_(3),ij=i_(4),iF=i_(5),iB=i_(6);function iU(e){return iw(t=>{t.setUTCDate(t.getUTCDate()-(t.getUTCDay()+7-e)%7),t.setUTCHours(0,0,0,0)},(e,t)=>{e.setUTCDate(e.getUTCDate()+7*t)},(e,t)=>(t-e)/6048e5)}iP.range,iM.range,iL.range,iD.range,ij.range,iF.range,iB.range;let 
iZ=iU(0),iz=iU(1),iH=iU(2),iG=iU(3),i$=iU(4),iW=iU(5),iV=iU(6);iZ.range,iz.range,iH.range,iG.range,i$.range,iW.range,iV.range;let iq=iw(e=>{e.setDate(1),e.setHours(0,0,0,0)},(e,t)=>{e.setMonth(e.getMonth()+t)},(e,t)=>t.getMonth()-e.getMonth()+(t.getFullYear()-e.getFullYear())*12,e=>e.getMonth());iq.range;let iY=iw(e=>{e.setUTCDate(1),e.setUTCHours(0,0,0,0)},(e,t)=>{e.setUTCMonth(e.getUTCMonth()+t)},(e,t)=>t.getUTCMonth()-e.getUTCMonth()+(t.getUTCFullYear()-e.getUTCFullYear())*12,e=>e.getUTCMonth());iY.range;let iK=iw(e=>{e.setMonth(0,1),e.setHours(0,0,0,0)},(e,t)=>{e.setFullYear(e.getFullYear()+t)},(e,t)=>t.getFullYear()-e.getFullYear(),e=>e.getFullYear());iK.every=e=>isFinite(e=Math.floor(e))&&e>0?iw(t=>{t.setFullYear(Math.floor(t.getFullYear()/e)*e),t.setMonth(0,1),t.setHours(0,0,0,0)},(t,n)=>{t.setFullYear(t.getFullYear()+n*e)}):null,iK.range;let iX=iw(e=>{e.setUTCMonth(0,1),e.setUTCHours(0,0,0,0)},(e,t)=>{e.setUTCFullYear(e.getUTCFullYear()+t)},(e,t)=>t.getUTCFullYear()-e.getUTCFullYear(),e=>e.getUTCFullYear());function iQ(e,t,n,r,o,a){let i=[[iO,1,1e3],[iO,5,5e3],[iO,15,15e3],[iO,30,3e4],[a,1,6e4],[a,5,3e5],[a,15,9e5],[a,30,18e5],[o,1,36e5],[o,3,108e5],[o,6,216e5],[o,12,432e5],[r,1,864e5],[r,2,1728e5],[n,1,6048e5],[t,1,2592e6],[t,3,7776e6],[e,1,31536e6]];function l(t,n,r){let o=Math.abs(n-t)/r,a=o1(e=>{let[,,t]=e;return t}).right(i,o);if(a===i.length)return e.every(oQ(t/31536e6,n/31536e6,r));if(0===a)return ix.every(Math.max(oQ(t,n,r),1));let[l,s]=i[o/i[a-1][2]isFinite(e=Math.floor(e))&&e>0?iw(t=>{t.setUTCFullYear(Math.floor(t.getUTCFullYear()/e)*e),t.setUTCMonth(0,1),t.setUTCHours(0,0,0,0)},(t,n)=>{t.setUTCFullYear(t.getUTCFullYear()+n*e)}):null,iX.range;let[iJ,i0]=iQ(iX,iY,iZ,iN,iA,iT),[i1,i2]=iQ(iK,iq,iP,iI,ik,iC);function i4(e){if(0<=e.y&&e.y<100){var t=new Date(-1,e.m,e.d,e.H,e.M,e.S,e.L);return t.setFullYear(e.y),t}return new Date(e.y,e.m,e.d,e.H,e.M,e.S,e.L)}function i3(e){if(0<=e.y&&e.y<100){var t=new Date(Date.UTC(-1,e.m,e.d,e.H,e.M,e.S,e.L));return 
t.setUTCFullYear(e.y),t}return new Date(Date.UTC(e.y,e.m,e.d,e.H,e.M,e.S,e.L))}function i6(e,t,n){return{y:e,m:t,d:n,H:0,M:0,S:0,L:0}}var i5={"-":"",_:" ",0:"0"},i8=/^\s*\d+/,i9=/^%/,i7=/[\\^$*+?|[\]().{}]/g;function le(e,t,n){var r=e<0?"-":"",o=(r?-e:e)+"",a=o.length;return r+(a[e.toLowerCase(),t]))}function lo(e,t,n){var r=i8.exec(t.slice(n,n+1));return r?(e.w=+r[0],n+r[0].length):-1}function la(e,t,n){var r=i8.exec(t.slice(n,n+1));return r?(e.u=+r[0],n+r[0].length):-1}function li(e,t,n){var r=i8.exec(t.slice(n,n+2));return r?(e.U=+r[0],n+r[0].length):-1}function ll(e,t,n){var r=i8.exec(t.slice(n,n+2));return r?(e.V=+r[0],n+r[0].length):-1}function ls(e,t,n){var r=i8.exec(t.slice(n,n+2));return r?(e.W=+r[0],n+r[0].length):-1}function lc(e,t,n){var r=i8.exec(t.slice(n,n+4));return r?(e.y=+r[0],n+r[0].length):-1}function lu(e,t,n){var r=i8.exec(t.slice(n,n+2));return r?(e.y=+r[0]+(+r[0]>68?1900:2e3),n+r[0].length):-1}function ld(e,t,n){var r=/^(Z)|([+-]\d\d)(?::?(\d\d))?/.exec(t.slice(n,n+6));return r?(e.Z=r[1]?0:-(r[2]+(r[3]||"00")),n+r[0].length):-1}function lp(e,t,n){var r=i8.exec(t.slice(n,n+1));return r?(e.q=3*r[0]-3,n+r[0].length):-1}function lf(e,t,n){var r=i8.exec(t.slice(n,n+2));return r?(e.m=r[0]-1,n+r[0].length):-1}function lm(e,t,n){var r=i8.exec(t.slice(n,n+2));return r?(e.d=+r[0],n+r[0].length):-1}function lg(e,t,n){var r=i8.exec(t.slice(n,n+3));return r?(e.m=0,e.d=+r[0],n+r[0].length):-1}function lh(e,t,n){var r=i8.exec(t.slice(n,n+2));return r?(e.H=+r[0],n+r[0].length):-1}function lb(e,t,n){var r=i8.exec(t.slice(n,n+2));return r?(e.M=+r[0],n+r[0].length):-1}function lv(e,t,n){var r=i8.exec(t.slice(n,n+2));return r?(e.S=+r[0],n+r[0].length):-1}function ly(e,t,n){var r=i8.exec(t.slice(n,n+3));return r?(e.L=+r[0],n+r[0].length):-1}function lE(e,t,n){var r=i8.exec(t.slice(n,n+6));return r?(e.L=Math.floor(r[0]/1e3),n+r[0].length):-1}function lS(e,t,n){var r=i9.exec(t.slice(n,n+1));return r?n+r[0].length:-1}function lw(e,t,n){var 
r=i8.exec(t.slice(n));return r?(e.Q=+r[0],n+r[0].length):-1}function lx(e,t,n){var r=i8.exec(t.slice(n));return r?(e.s=+r[0],n+r[0].length):-1}function lO(e,t){return le(e.getDate(),t,2)}function lC(e,t){return le(e.getHours(),t,2)}function lT(e,t){return le(e.getHours()%12||12,t,2)}function lk(e,t){return le(1+iI.count(iK(e),e),t,3)}function lA(e,t){return le(e.getMilliseconds(),t,3)}function lI(e,t){return lA(e,t)+"000"}function lR(e,t){return le(e.getMonth()+1,t,2)}function lN(e,t){return le(e.getMinutes(),t,2)}function l_(e,t){return le(e.getSeconds(),t,2)}function lP(e){var t=e.getDay();return 0===t?7:t}function lM(e,t){return le(iP.count(iK(e)-1,e),t,2)}function lL(e){var t=e.getDay();return t>=4||0===t?ij(e):ij.ceil(e)}function lD(e,t){return e=lL(e),le(ij.count(iK(e),e)+(4===iK(e).getDay()),t,2)}function lj(e){return e.getDay()}function lF(e,t){return le(iM.count(iK(e)-1,e),t,2)}function lB(e,t){return le(e.getFullYear()%100,t,2)}function lU(e,t){return le((e=lL(e)).getFullYear()%100,t,2)}function lZ(e,t){return le(e.getFullYear()%1e4,t,4)}function lz(e,t){var n=e.getDay();return le((e=n>=4||0===n?ij(e):ij.ceil(e)).getFullYear()%1e4,t,4)}function lH(e){var t=e.getTimezoneOffset();return(t>0?"-":(t*=-1,"+"))+le(t/60|0,"0",2)+le(t%60,"0",2)}function lG(e,t){return le(e.getUTCDate(),t,2)}function l$(e,t){return le(e.getUTCHours(),t,2)}function lW(e,t){return le(e.getUTCHours()%12||12,t,2)}function lV(e,t){return le(1+iR.count(iX(e),e),t,3)}function lq(e,t){return le(e.getUTCMilliseconds(),t,3)}function lY(e,t){return lq(e,t)+"000"}function lK(e,t){return le(e.getUTCMonth()+1,t,2)}function lX(e,t){return le(e.getUTCMinutes(),t,2)}function lQ(e,t){return le(e.getUTCSeconds(),t,2)}function lJ(e){var t=e.getUTCDay();return 0===t?7:t}function l0(e,t){return le(iZ.count(iX(e)-1,e),t,2)}function l1(e){var t=e.getUTCDay();return t>=4||0===t?i$(e):i$.ceil(e)}function l2(e,t){return e=l1(e),le(i$.count(iX(e),e)+(4===iX(e).getUTCDay()),t,2)}function l4(e){return 
e.getUTCDay()}function l3(e,t){return le(iz.count(iX(e)-1,e),t,2)}function l6(e,t){return le(e.getUTCFullYear()%100,t,2)}function l5(e,t){return le((e=l1(e)).getUTCFullYear()%100,t,2)}function l8(e,t){return le(e.getUTCFullYear()%1e4,t,4)}function l9(e,t){var n=e.getUTCDay();return le((e=n>=4||0===n?i$(e):i$.ceil(e)).getUTCFullYear()%1e4,t,4)}function l7(){return"+0000"}function se(){return"%"}function st(e){return+e}function sn(e){return Math.floor(+e/1e3)}function sr(e){return new Date(e)}function so(e){return e instanceof Date?+e:+new Date(+e)}function sa(e,t,n,r,o,a,i,l,s,c){var u=aW(),d=u.invert,p=u.domain,f=c(".%L"),m=c(":%S"),g=c("%I:%M"),h=c("%I %p"),b=c("%a %d"),v=c("%b %d"),y=c("%B"),E=c("%Y");function S(e){return(s(e)1)for(var n,r,o,a=1,i=e[t[0]],l=i.length;a=0;)n[t]=t;return n}function sv(e,t){return e[t]}function sy(e){let t=[];return t.key=e,t}w=(S=function(e){var t=e.dateTime,n=e.date,r=e.time,o=e.periods,a=e.days,i=e.shortDays,l=e.months,s=e.shortMonths,c=ln(o),u=lr(o),d=ln(a),p=lr(a),f=ln(i),m=lr(i),g=ln(l),h=lr(l),b=ln(s),v=lr(s),y={a:function(e){return i[e.getDay()]},A:function(e){return a[e.getDay()]},b:function(e){return s[e.getMonth()]},B:function(e){return l[e.getMonth()]},c:null,d:lO,e:lO,f:lI,g:lU,G:lz,H:lC,I:lT,j:lk,L:lA,m:lR,M:lN,p:function(e){return o[+(e.getHours()>=12)]},q:function(e){return 1+~~(e.getMonth()/3)},Q:st,s:sn,S:l_,u:lP,U:lM,V:lD,w:lj,W:lF,x:null,X:null,y:lB,Y:lZ,Z:lH,"%":se},E={a:function(e){return i[e.getUTCDay()]},A:function(e){return a[e.getUTCDay()]},b:function(e){return s[e.getUTCMonth()]},B:function(e){return l[e.getUTCMonth()]},c:null,d:lG,e:lG,f:lY,g:l5,G:l9,H:l$,I:lW,j:lV,L:lq,m:lK,M:lX,p:function(e){return o[+(e.getUTCHours()>=12)]},q:function(e){return 1+~~(e.getUTCMonth()/3)},Q:st,s:sn,S:lQ,u:lJ,U:l0,V:l2,w:l4,W:l3,x:null,X:null,y:l6,Y:l8,Z:l7,"%":se},S={a:function(e,t,n){var r=f.exec(t.slice(n));return r?(e.w=m.get(r[0].toLowerCase()),n+r[0].length):-1},A:function(e,t,n){var r=d.exec(t.slice(n));return 
r?(e.w=p.get(r[0].toLowerCase()),n+r[0].length):-1},b:function(e,t,n){var r=b.exec(t.slice(n));return r?(e.m=v.get(r[0].toLowerCase()),n+r[0].length):-1},B:function(e,t,n){var r=g.exec(t.slice(n));return r?(e.m=h.get(r[0].toLowerCase()),n+r[0].length):-1},c:function(e,n,r){return O(e,t,n,r)},d:lm,e:lm,f:lE,g:lu,G:lc,H:lh,I:lh,j:lg,L:ly,m:lf,M:lb,p:function(e,t,n){var r=c.exec(t.slice(n));return r?(e.p=u.get(r[0].toLowerCase()),n+r[0].length):-1},q:lp,Q:lw,s:lx,S:lv,u:la,U:li,V:ll,w:lo,W:ls,x:function(e,t,r){return O(e,n,t,r)},X:function(e,t,n){return O(e,r,t,n)},y:lu,Y:lc,Z:ld,"%":lS};function w(e,t){return function(n){var r,o,a,i=[],l=-1,s=0,c=e.length;for(n instanceof Date||(n=new Date(+n));++l53)return null;"w"in a||(a.w=1),"Z"in a?(r=(o=(r=i3(i6(a.y,0,1))).getUTCDay())>4||0===o?iz.ceil(r):iz(r),r=iR.offset(r,(a.V-1)*7),a.y=r.getUTCFullYear(),a.m=r.getUTCMonth(),a.d=r.getUTCDate()+(a.w+6)%7):(r=(o=(r=i4(i6(a.y,0,1))).getDay())>4||0===o?iM.ceil(r):iM(r),r=iI.offset(r,(a.V-1)*7),a.y=r.getFullYear(),a.m=r.getMonth(),a.d=r.getDate()+(a.w+6)%7)}else("W"in a||"U"in a)&&("w"in a||(a.w="u"in a?a.u%7:"W"in a?1:0),o="Z"in a?i3(i6(a.y,0,1)).getUTCDay():i4(i6(a.y,0,1)).getDay(),a.m=0,a.d="W"in a?(a.w+6)%7+7*a.W-(o+5)%7:a.w+7*a.U-(o+6)%7);return"Z"in a?(a.H+=a.Z/100|0,a.M+=a.Z%100,i3(a)):i4(a)}}function O(e,t,n,r){for(var o,a,i=0,l=t.length,s=n.length;i=s)return -1;if(37===(o=t.charCodeAt(i++))){if(!(a=S[(o=t.charAt(i++))in i5?t.charAt(i++):o])||(r=a(e,n,r))<0)return -1}else if(o!=n.charCodeAt(r++))return -1}return r}return y.x=w(n,y),y.X=w(r,y),y.c=w(t,y),E.x=w(n,E),E.X=w(r,E),E.c=w(t,E),{format:function(e){var t=w(e+="",y);return t.toString=function(){return e},t},parse:function(e){var t=x(e+="",!1);return t.toString=function(){return e},t},utcFormat:function(e){var t=w(e+="",E);return t.toString=function(){return e},t},utcParse:function(e){var t=x(e+="",!0);return t.toString=function(){return e},t}}}({dateTime:"%x, %X",date:"%-m/%-d/%Y",time:"%-I:%M:%S 
%p",periods:["AM","PM"],days:["Sunday","Monday","Tuesday","Wednesday","Thursday","Friday","Saturday"],shortDays:["Sun","Mon","Tue","Wed","Thu","Fri","Sat"],months:["January","February","March","April","May","June","July","August","September","October","November","December"],shortMonths:["Jan","Feb","Mar","Apr","May","Jun","Jul","Aug","Sep","Oct","Nov","Dec"]})).format,S.parse,x=S.utcFormat,S.utcParse,Array.prototype.slice;var sE=n(5037),sS=n.n(sE),sw=n(30264),sx=n.n(sw),sO=n(20734),sC=n.n(sO),sT=n(93574),sk=n.n(sT),sA=n(6122),sI=n.n(sA);function sR(e,t){(null==t||t>e.length)&&(t=e.length);for(var n=0,r=Array(t);n=t?n.apply(void 0,o):e(t-i,sM(function(){for(var e=arguments.length,t=Array(e),r=0;re.length)&&(t=e.length);for(var n=0,r=Array(t);nr&&(o=r,a=n),[o,a]}function sV(e,t,n){if(e.lte(0))return new(sI())(0);var r=sZ.getDigitCount(e.toNumber()),o=new(sI())(10).pow(r),a=e.div(o),i=1!==r?.05:.1,l=new(sI())(Math.ceil(a.div(i).toNumber())).add(n).mul(i).mul(o);return t?l:new(sI())(Math.ceil(l))}function sq(e,t,n){var r=1,o=new(sI())(e);if(!o.isint()&&n){var a=Math.abs(e);a<1?(r=new(sI())(10).pow(sZ.getDigitCount(e)-1),o=new(sI())(Math.floor(o.div(r).toNumber())).mul(r)):a>1&&(o=new(sI())(Math.floor(e)))}else 0===e?o=new(sI())(Math.floor((t-1)/2)):n||(o=new(sI())(Math.floor(e)));var i=Math.floor((t-1)/2);return sF(sj(function(e){return o.add(new(sI())(e-i).mul(r)).toNumber()}),sD)(0,t)}var sY=sU(function(e){var t=sH(e,2),n=t[0],r=t[1],o=arguments.length>1&&void 0!==arguments[1]?arguments[1]:6,a=!(arguments.length>2)||void 0===arguments[2]||arguments[2],i=Math.max(o,2),l=sH(sW([n,r]),2),s=l[0],c=l[1];if(s===-1/0||c===1/0){var u=c===1/0?[s].concat(sz(sD(0,o-1).map(function(){return 1/0}))):[].concat(sz(sD(0,o-1).map(function(){return-1/0})),[c]);return n>r?sB(u):u}if(s===c)return sq(s,o,a);var d=function e(t,n,r,o){var a,i=arguments.length>4&&void 
0!==arguments[4]?arguments[4]:0;if(!Number.isFinite((n-t)/(r-1)))return{step:new(sI())(0),tickMin:new(sI())(0),tickMax:new(sI())(0)};var l=sV(new(sI())(n).sub(t).div(r-1),o,i),s=Math.ceil((a=t<=0&&n>=0?new(sI())(0):(a=new(sI())(t).add(n).div(2)).sub(new(sI())(a).mod(l))).sub(t).div(l).toNumber()),c=Math.ceil(new(sI())(n).sub(a).div(l).toNumber()),u=s+c+1;return u>r?e(t,n,r,o,i+1):(u0?c+(r-u):c,s=n>0?s:s+(r-u)),{step:l,tickMin:a.sub(new(sI())(s).mul(l)),tickMax:a.add(new(sI())(c).mul(l))})}(s,c,i,a),p=d.step,f=d.tickMin,m=d.tickMax,g=sZ.rangeStep(f,m.add(new(sI())(.1).mul(p)),p);return n>r?sB(g):g});sU(function(e){var t=sH(e,2),n=t[0],r=t[1],o=arguments.length>1&&void 0!==arguments[1]?arguments[1]:6,a=!(arguments.length>2)||void 0===arguments[2]||arguments[2],i=Math.max(o,2),l=sH(sW([n,r]),2),s=l[0],c=l[1];if(s===-1/0||c===1/0)return[n,r];if(s===c)return sq(s,o,a);var u=sV(new(sI())(c).sub(s).div(i-1),a,0),d=sF(sj(function(e){return new(sI())(s).add(new(sI())(e).mul(u)).toNumber()}),sD)(0,i).filter(function(e){return e>=s&&e<=c});return n>r?sB(d):d});var sK=sU(function(e,t){var n=sH(e,2),r=n[0],o=n[1],a=!(arguments.length>2)||void 0===arguments[2]||arguments[2],i=sH(sW([r,o]),2),l=i[0],s=i[1];if(l===-1/0||s===1/0)return[r,o];if(l===s)return[l];var c=sV(new(sI())(s).sub(l).div(Math.max(t,2)-1),a,0),u=[].concat(sz(sZ.rangeStep(new(sI())(l),new(sI())(s).sub(new(sI())(.99).mul(c)),c)),[s]);return r>o?sB(u):u}),sX=["offset","layout","width","dataKey","data","dataPointFormatter","xAxis","yAxis"];function sQ(){return(sQ=Object.assign?Object.assign.bind():function(e){for(var t=1;te.length)&&(t=e.length);for(var n=0,r=Array(t);n=0||(o[n]=e[n]);return o}(e,t);if(Object.getOwnPropertySymbols){var a=Object.getOwnPropertySymbols(e);for(r=0;r=0)&&Object.prototype.propertyIsEnumerable.call(e,n)&&(o[n]=e[n])}return o}(e,sX),!1);"x"===e.direction&&"number"!==l.type&&eW(!1);var u=a.map(function(e){var a,u,d=i(e,o),p=d.x,f=d.y,m=d.value,g=d.errorVal;if(!g)return null;var 
h=[];if(Array.isArray(g)){var b=function(e){if(Array.isArray(e))return e}(g)||function(e,t){var n=null==e?null:"undefined"!=typeof Symbol&&e[Symbol.iterator]||e["@@iterator"];if(null!=n){var r,o,a,i,l=[],s=!0,c=!1;try{if(a=(n=n.call(e)).next,0===t){if(Object(n)!==n)return;s=!1}else for(;!(s=(r=a.call(n)).done)&&(l.push(r.value),l.length!==t);s=!0);}catch(e){c=!0,o=e}finally{try{if(!s&&null!=n.return&&(i=n.return(),Object(i)!==i))return}finally{if(c)throw o}}return l}}(g,2)||function(e,t){if(e){if("string"==typeof e)return sJ(e,t);var n=Object.prototype.toString.call(e).slice(8,-1);if("Object"===n&&e.constructor&&(n=e.constructor.name),"Map"===n||"Set"===n)return Array.from(e);if("Arguments"===n||/^(?:Ui|I)nt(?:8|16|32)(?:Clamped)?Array$/.test(n))return sJ(e,t)}}(g,2)||function(){throw TypeError("Invalid attempt to destructure non-iterable instance.\nIn order to be iterable, non-array objects must have a [Symbol.iterator]() method.")}();a=b[0],u=b[1]}else a=u=g;if("vertical"===n){var v=l.scale,y=f+t,E=y+r,S=y-r,w=v(m-a),x=v(m+u);h.push({x1:x,y1:E,x2:x,y2:S}),h.push({x1:w,y1:y,x2:x,y2:y}),h.push({x1:w,y1:E,x2:w,y2:S})}else if("horizontal"===n){var O=s.scale,C=p+t,T=C-r,k=C+r,A=O(m-a),I=O(m+u);h.push({x1:T,y1:I,x2:k,y2:I}),h.push({x1:C,y1:A,x2:C,y2:I}),h.push({x1:T,y1:A,x2:k,y2:A})}return R.createElement(eQ,sQ({className:"recharts-errorBar",key:"bar-".concat(h.map(function(e){return"".concat(e.x1,"-").concat(e.x2,"-").concat(e.y1,"-").concat(e.y2)}))},c),h.map(function(e){return R.createElement("line",sQ({},e,{key:"line-".concat(e.x1,"-").concat(e.x2,"-").concat(e.y1,"-").concat(e.y2)}))}))});return R.createElement(eQ,{className:"recharts-errorBars"},u)}function s1(e){return(s1="function"==typeof Symbol&&"symbol"==typeof Symbol.iterator?function(e){return typeof e}:function(e){return e&&"function"==typeof Symbol&&e.constructor===Symbol&&e!==Symbol.prototype?"symbol":typeof e})(e)}function s2(e,t){var n=Object.keys(e);if(Object.getOwnPropertySymbols){var 
r=Object.getOwnPropertySymbols(e);t&&(r=r.filter(function(t){return Object.getOwnPropertyDescriptor(e,t).enumerable})),n.push.apply(n,r)}return n}function s4(e){for(var t=1;te.length)&&(t=e.length);for(var n=0,r=Array(t);n1&&void 0!==arguments[1]?arguments[1]:[],r=arguments.length>2?arguments[2]:void 0,o=arguments.length>3?arguments[3]:void 0,a=-1,i=null!==(t=null==n?void 0:n.length)&&void 0!==t?t:0;if(i<=1)return 0;if(o&&"angleAxis"===o.axisType&&1e-6>=Math.abs(Math.abs(o.range[1]-o.range[0])-360))for(var l=o.range,s=0;s0?r[s-1].coordinate:r[i-1].coordinate,u=r[s].coordinate,d=s>=i-1?r[0].coordinate:r[s+1].coordinate,p=void 0;if(H(u-c)!==H(d-u)){var f=[];if(H(d-u)===H(l[1]-l[0])){p=d;var m=u+l[1]-l[0];f[0]=Math.min(m,(m+c)/2),f[1]=Math.max(m,(m+c)/2)}else{p=c;var g=d+l[1]-l[0];f[0]=Math.min(u,(g+u)/2),f[1]=Math.max(u,(g+u)/2)}var h=[Math.min(u,(p+u)/2),Math.max(u,(p+u)/2)];if(e>h[0]&&e<=h[1]||e>=f[0]&&e<=f[1]){a=r[s].index;break}}else{var b=Math.min(c,d),v=Math.max(c,d);if(e>(b+u)/2&&e<=(v+u)/2){a=r[s].index;break}}}else for(var y=0;y0&&y(n[y].coordinate+n[y-1].coordinate)/2&&e<=(n[y].coordinate+n[y+1].coordinate)/2||y===i-1&&e>(n[y].coordinate+n[y-1].coordinate)/2){a=n[y].index;break}return a},co=function(e){var t,n=e.type.displayName,r=e.props,o=r.stroke,a=r.fill;switch(n){case"Line":t=o;break;case"Area":case"Radar":t=o&&"none"!==o?o:a;break;default:t=a}return t},ca=function(e){var t=e.barSize,n=e.stackGroups,r=void 0===n?{}:n;if(!r)return{};for(var o={},a=Object.keys(r),i=0,l=a.length;i=0});if(g&&g.length){var h=g[0].props.barSize,b=g[0].props[m];o[b]||(o[b]=[]),o[b].push({item:g[0],stackList:g.slice(1),barSize:en()(h)?t:h})}}return o},ci=function(e){var t,n=e.barGap,r=e.barCategoryGap,o=e.bandSize,a=e.sizeList,i=void 0===a?[]:a,l=e.maxBarSize,s=i.length;if(s<1)return null;var c=Y(n,o,0,!0),u=[];if(i[0].barSize===+i[0].barSize){var d=!1,p=o/s,f=i.reduce(function(e,t){return e+t.barSize||0},0);(f+=(s-1)*c)>=o&&(f-=(s-1)*c,c=0),f>=o&&p>0&&(d=!0,p*=.9,f=s*p);var 
m={offset:((o-f)/2>>0)-c,size:0};t=i.reduce(function(e,t){var n={item:t.item,position:{offset:m.offset+m.size+c,size:d?p:t.barSize}},r=[].concat(s7(e),[n]);return m=r[r.length-1].position,t.stackList&&t.stackList.length&&t.stackList.forEach(function(e){r.push({item:e,position:m})}),r},u)}else{var g=Y(r,o,0,!0);o-2*g-(s-1)*c<=0&&(c=0);var h=(o-2*g-(s-1)*c)/s;h>1&&(h>>=0);var b=l===+l?Math.min(h,l):h;t=i.reduce(function(e,t,n){var r=[].concat(s7(e),[{item:t.item,position:{offset:g+(h+c)*n+(h-b)/2,size:b}}]);return t.stackList&&t.stackList.length&&t.stackList.forEach(function(e){r.push({item:e,position:r[r.length-1].position})}),r},u)}return t},cl=function(e,t,n,r){var o=n.children,a=n.width,i=n.margin,l=s3({children:o,legendWidth:a-(i.left||0)-(i.right||0)});if(l){var s=r||{},c=s.width,u=s.height,d=l.align,p=l.verticalAlign,f=l.layout;if(("vertical"===f||"horizontal"===f&&"middle"===p)&&"center"!==d&&$(e[d]))return s8(s8({},e),{},s9({},d,e[d]+(c||0)));if(("horizontal"===f||"vertical"===f&&"center"===d)&&"middle"!==p&&$(e[p]))return s8(s8({},e),{},s9({},p,e[p]+(u||0)))}return e},cs=function(e,t,n,r,o){var a=ex(t.props.children,s0).filter(function(e){var t;return t=e.props.direction,!!en()(o)||("horizontal"===r?"yAxis"===o:"vertical"===r||"x"===t?"xAxis"===o:"y"!==t||"yAxis"===o)});if(a&&a.length){var i=a.map(function(e){return e.props.dataKey});return e.reduce(function(e,t){var r=ct(t,n,0),o=Array.isArray(r)?[sx()(r),sS()(r)]:[r,r],a=i.reduce(function(e,n){var r=ct(t,n,0),a=o[0]-Math.abs(Array.isArray(r)?r[0]:r),i=o[1]+Math.abs(Array.isArray(r)?r[1]:r);return[Math.min(a,e[0]),Math.max(i,e[1])]},[1/0,-1/0]);return[Math.min(a[0],e[0]),Math.max(a[1],e[1])]},[1/0,-1/0])}return null},cc=function(e,t,n,r,o){var a=t.map(function(t){return cs(e,t,n,o,r)}).filter(function(e){return!en()(e)});return a&&a.length?a.reduce(function(e,t){return[Math.min(e[0],t[0]),Math.max(e[1],t[1])]},[1/0,-1/0]):null},cu=function(e,t,n,r,o){var a=t.map(function(t){var 
a=t.props.dataKey;return"number"===n&&a&&cs(e,t,a,r)||cn(e,a,n,o)});if("number"===n)return a.reduce(function(e,t){return[Math.min(e[0],t[0]),Math.max(e[1],t[1])]},[1/0,-1/0]);var i={};return a.reduce(function(e,t){for(var n=0,r=t.length;n=2?2*H(i[0]-i[1])*s:s,t&&(e.ticks||e.niceTicks))?(e.ticks||e.niceTicks).map(function(e){return{coordinate:r(o?o.indexOf(e):e)+s,value:e,offset:s}}).filter(function(e){return!F()(e.coordinate)}):e.isCategorical&&e.categoricalDomain?e.categoricalDomain.map(function(e,t){return{coordinate:r(e)+s,value:e,index:t,offset:s}}):r.ticks&&!n?r.ticks(e.tickCount).map(function(e){return{coordinate:r(e)+s,value:e,offset:s}}):r.domain().map(function(e,t){return{coordinate:r(e)+s,value:o?o[e]:e,index:t,offset:s}})},cm=new WeakMap,cg=function(e,t){if("function"!=typeof t)return e;cm.has(e)||cm.set(e,new WeakMap);var n=cm.get(e);if(n.has(t))return n.get(t);var r=function(){e.apply(void 0,arguments),t.apply(void 0,arguments)};return n.set(t,r),r},ch=function(e,t,n){var r=e.scale,o=e.type,a=e.layout,i=e.axisType;if("auto"===r)return"radial"===a&&"radiusAxis"===i?{scale:ou(),realScaleType:"band"}:"radial"===a&&"angleAxis"===i?{scale:a6(),realScaleType:"linear"}:"category"===o&&t&&(t.indexOf("LineChart")>=0||t.indexOf("AreaChart")>=0||t.indexOf("ComposedChart")>=0&&!n)?{scale:od(),realScaleType:"point"}:"category"===o?{scale:ou(),realScaleType:"band"}:{scale:a6(),realScaleType:"linear"};if(D()(r)){var l="scale".concat(nQ()(r));return{scale:(O[l]||od)(),realScaleType:O[l]?l:"point"}}return eo()(r)?{scale:r}:{scale:od(),realScaleType:"point"}},cb=function(e){var t=e.domain();if(t&&!(t.length<=2)){var n=t.length,r=e.range(),o=Math.min(r[0],r[1])-1e-4,a=Math.max(r[0],r[1])+1e-4,i=e(t[0]),l=e(t[n-1]);(ia||la)&&e.domain([t[0],t[n-1]])}},cv=function(e,t){if(!e)return null;for(var 
n=0,r=e.length;nr)&&(o[1]=r),o[0]>r&&(o[0]=r),o[1]=0?(e[i][n][0]=o,e[i][n][1]=o+l,o=e[i][n][1]):(e[i][n][0]=a,e[i][n][1]=a+l,a=e[i][n][1])}},expand:function(e,t){if((r=e.length)>0){for(var n,r,o,a=0,i=e[0].length;a0){for(var n,r=0,o=e[t[0]],a=o.length;r0&&(r=(n=e[t[0]]).length)>0){for(var n,r,o,a=0,i=1;i=0?(e[a][n][0]=o,e[a][n][1]=o+i,o=e[a][n][1]):(e[a][n][0]=0,e[a][n][1]=0)}}},cS=function(e,t,n){var r=t.map(function(e){return e.props.dataKey}),o=cE[n];return(function(){var e=ro([]),t=sb,n=sg,r=sv;function o(o){var a,i,l=Array.from(e.apply(this,arguments),sy),s=l.length,c=-1;for(let e of o)for(a=0,++c;a=0?0:o<0?o:r}return n[0]},cT=function(e,t){var n=e.props.stackId;if(W(n)){var r=t[n];if(r){var o=r.items.indexOf(e);return o>=0?r.stackedData[o]:null}}return null},ck=function(e,t,n){return Object.keys(e).reduce(function(r,o){var a=e[o].stackedData.reduce(function(e,r){var o=r.slice(t,n+1).reduce(function(e,t){return[sx()(t.concat([e[0]]).filter($)),sS()(t.concat([e[1]]).filter($))]},[1/0,-1/0]);return[Math.min(e[0],o[0]),Math.max(e[1],o[1])]},[1/0,-1/0]);return[Math.min(a[0],r[0]),Math.max(a[1],r[1])]},[1/0,-1/0]).map(function(e){return e===1/0||e===-1/0?0:e})},cA=/^dataMin[\s]*-[\s]*([0-9]+([.]{1}[0-9]+){0,1})$/,cI=/^dataMax[\s]*\+[\s]*([0-9]+([.]{1}[0-9]+){0,1})$/,cR=function(e,t,n){if(eo()(e))return e(t,n);if(!Array.isArray(e))return t;var r=[];if($(e[0]))r[0]=n?e[0]:Math.min(e[0],t[0]);else if(cA.test(e[0])){var o=+cA.exec(e[0])[1];r[0]=t[0]-o}else eo()(e[0])?r[0]=e[0](t[0]):r[0]=t[0];if($(e[1]))r[1]=n?e[1]:Math.max(e[1],t[1]);else if(cI.test(e[1])){var a=+cI.exec(e[1])[1];r[1]=t[1]+a}else eo()(e[1])?r[1]=e[1](t[1]):r[1]=t[1];return r},cN=function(e,t,n){if(e&&e.scale&&e.scale.bandwidth){var r=e.scale.bandwidth();if(!n||r>0)return r}if(e&&t&&t.length>=2){for(var o=eZ()(t,function(e){return 
e.coordinate}),a=1/0,i=1,l=o.length;i0&&t.handleDrag(e.changedTouches[0])}),cq(cW(t),"handleDragEnd",function(){t.setState({isTravellerMoving:!1,isSlideMoving:!1},function(){var e=t.props,n=e.endIndex,r=e.onDragEnd,o=e.startIndex;null==r||r({endIndex:n,startIndex:o})}),t.detachDragEndListener()}),cq(cW(t),"handleLeaveWrapper",function(){(t.state.isTravellerMoving||t.state.isSlideMoving)&&(t.leaveTimer=window.setTimeout(t.handleDragEnd,t.props.leaveTimeOut))}),cq(cW(t),"handleEnterSlideOrTraveller",function(){t.setState({isTextActive:!0})}),cq(cW(t),"handleLeaveSlideOrTraveller",function(){t.setState({isTextActive:!1})}),cq(cW(t),"handleSlideDragStart",function(e){var n=cX(e)?e.changedTouches[0]:e;t.setState({isTravellerMoving:!1,isSlideMoving:!0,slideMoveStartX:n.pageX}),t.attachDragEndListener()}),t.travellerDragStartHandlers={startX:t.handleTravellerDragStart.bind(cW(t),"startX"),endX:t.handleTravellerDragStart.bind(cW(t),"endX")},t.state={},t}return n=[{key:"componentWillUnmount",value:function(){this.leaveTimer&&(clearTimeout(this.leaveTimer),this.leaveTimer=null),this.detachDragEndListener()}},{key:"getIndex",value:function(e){var t=e.startX,n=e.endX,r=this.state.scaleValues,o=this.props,i=o.gap,l=o.data.length-1,s=a.getIndexInRange(r,Math.min(t,n)),c=a.getIndexInRange(r,Math.max(t,n));return{startIndex:s-s%i,endIndex:c===l?l:c-c%i}}},{key:"getTextOfTick",value:function(e){var t=this.props,n=t.data,r=t.tickFormatter,o=t.dataKey,a=ct(n[e],o,e);return eo()(r)?r(a,e):a}},{key:"attachDragEndListener",value:function(){window.addEventListener("mouseup",this.handleDragEnd,!0),window.addEventListener("touchend",this.handleDragEnd,!0),window.addEventListener("mousemove",this.handleDrag,!0)}},{key:"detachDragEndListener",value:function(){window.removeEventListener("mouseup",this.handleDragEnd,!0),window.removeEventListener("touchend",this.handleDragEnd,!0),window.removeEventListener("mousemove",this.handleDrag,!0)}},{key:"handleSlideDrag",value:function(e){var 
t=this.state,n=t.slideMoveStartX,r=t.startX,o=t.endX,a=this.props,i=a.x,l=a.width,s=a.travellerWidth,c=a.startIndex,u=a.endIndex,d=a.onChange,p=e.pageX-n;p>0?p=Math.min(p,i+l-s-o,i+l-s-r):p<0&&(p=Math.max(p,i-r,i-o));var f=this.getIndex({startX:r+p,endX:o+p});(f.startIndex!==c||f.endIndex!==u)&&d&&d(f),this.setState({startX:r+p,endX:o+p,slideMoveStartX:e.pageX})}},{key:"handleTravellerDragStart",value:function(e,t){var n=cX(t)?t.changedTouches[0]:t;this.setState({isSlideMoving:!1,isTravellerMoving:!0,movingTravellerId:e,brushMoveStartX:n.pageX}),this.attachDragEndListener()}},{key:"handleTravellerMove",value:function(e){var t,n=this.state,r=n.brushMoveStartX,o=n.movingTravellerId,a=n.endX,i=n.startX,l=this.state[o],s=this.props,c=s.x,u=s.width,d=s.travellerWidth,p=s.onChange,f=s.gap,m=s.data,g={startX:this.state.startX,endX:this.state.endX},h=e.pageX-r;h>0?h=Math.min(h,c+u-d-l):h<0&&(h=Math.max(h,c-l)),g[o]=l+h;var b=this.getIndex(g),v=b.startIndex,y=b.endIndex,E=function(){var e=m.length-1;return"startX"===o&&(a>i?v%f==0:y%f==0)||ai?y%f==0:v%f==0)||a>i&&y===e};this.setState((cq(t={},o,l+h),cq(t,"brushMoveStartX",e.pageX),t),function(){p&&E()&&p(b)})}},{key:"handleTravellerMoveKeyboard",value:function(e,t){var n=this,r=this.state,o=r.scaleValues,a=r.startX,i=r.endX,l=this.state[t],s=o.indexOf(l);if(-1!==s){var c=s+e;if(-1!==c&&!(c>=o.length)){var u=o[c];"startX"===t&&u>=i||"endX"===t&&u<=a||this.setState(cq({},t,u),function(){n.props.onChange(n.getIndex({startX:n.state.startX,endX:n.state.endX}))})}}}},{key:"renderBackground",value:function(){var e=this.props,t=e.x,n=e.y,r=e.width,o=e.height,a=e.fill,i=e.stroke;return R.createElement("rect",{stroke:i,fill:a,x:t,y:n,width:r,height:o})}},{key:"renderPanorama",value:function(){var e=this.props,t=e.x,n=e.y,r=e.width,o=e.height,a=e.data,i=e.children,l=e.padding,s=R.Children.only(i);return s?R.cloneElement(s,{x:t,y:n,width:r,height:o,margin:l,compact:!0,data:a}):null}},{key:"renderTravellerLayer",value:function(e,t){var 
n=this,r=this.props,o=r.y,i=r.travellerWidth,l=r.height,s=r.traveller,c=r.ariaLabel,u=r.data,d=r.startIndex,p=r.endIndex,f=Math.max(e,this.props.x),m=cH(cH({},eA(this.props,!1)),{},{x:f,y:o,width:i,height:l}),g=c||"Min value: ".concat(u[d].name,", Max value: ").concat(u[p].name);return R.createElement(eQ,{tabIndex:0,role:"slider","aria-label":g,"aria-valuenow":e,className:"recharts-brush-traveller",onMouseEnter:this.handleEnterSlideOrTraveller,onMouseLeave:this.handleLeaveSlideOrTraveller,onMouseDown:this.travellerDragStartHandlers[t],onTouchStart:this.travellerDragStartHandlers[t],onKeyDown:function(e){["ArrowLeft","ArrowRight"].includes(e.key)&&(e.preventDefault(),e.stopPropagation(),n.handleTravellerMoveKeyboard("ArrowRight"===e.key?1:-1,t))},onFocus:function(){n.setState({isTravellerFocused:!0})},onBlur:function(){n.setState({isTravellerFocused:!1})},style:{cursor:"col-resize"}},a.renderTraveller(s,m))}},{key:"renderSlide",value:function(e,t){var n=this.props,r=n.y,o=n.height,a=n.stroke,i=n.travellerWidth;return R.createElement("rect",{className:"recharts-brush-slide",onMouseEnter:this.handleEnterSlideOrTraveller,onMouseLeave:this.handleLeaveSlideOrTraveller,onMouseDown:this.handleSlideDragStart,onTouchStart:this.handleSlideDragStart,style:{cursor:"move"},stroke:"none",fill:a,fillOpacity:.2,x:Math.min(e,t)+i,y:r,width:Math.max(Math.abs(t-e)-i,0),height:o})}},{key:"renderText",value:function(){var e=this.props,t=e.startIndex,n=e.endIndex,r=e.y,o=e.height,a=e.travellerWidth,i=e.stroke,l=this.state,s=l.startX,c=l.endX,u={pointerEvents:"none",fill:i};return R.createElement(eQ,{className:"recharts-brush-texts"},R.createElement(o$,cZ({textAnchor:"end",verticalAnchor:"middle",x:Math.min(s,c)-5,y:r+o/2},u),this.getTextOfTick(t)),R.createElement(o$,cZ({textAnchor:"start",verticalAnchor:"middle",x:Math.max(s,c)+a+5,y:r+o/2},u),this.getTextOfTick(n)))}},{key:"render",value:function(){var 
e=this.props,t=e.data,n=e.className,r=e.children,o=e.x,a=e.y,i=e.width,l=e.height,s=e.alwaysShowText,c=this.state,u=c.startX,d=c.endX,p=c.isTextActive,f=c.isSlideMoving,m=c.isTravellerMoving,g=c.isTravellerFocused;if(!t||!t.length||!$(o)||!$(a)||!$(i)||!$(l)||i<=0||l<=0)return null;var h=N("recharts-brush",n),b=1===R.Children.count(r),v=cB("userSelect","none");return R.createElement(eQ,{className:h,onMouseLeave:this.handleLeaveWrapper,onTouchMove:this.handleTouchMove,style:v},this.renderBackground(),b&&this.renderPanorama(),this.renderSlide(u,d),this.renderTravellerLayer(u,"startX"),this.renderTravellerLayer(d,"endX"),(p||f||m||g||s)&&this.renderText())}}],r=[{key:"renderDefaultTraveller",value:function(e){var t=e.x,n=e.y,r=e.width,o=e.height,a=e.stroke,i=Math.floor(n+o/2)-1;return R.createElement(R.Fragment,null,R.createElement("rect",{x:t,y:n,width:r,height:o,fill:a,stroke:"none"}),R.createElement("line",{x1:t+1,y1:i,x2:t+r-1,y2:i,fill:"none",stroke:"#fff"}),R.createElement("line",{x1:t+1,y1:i+2,x2:t+r-1,y2:i+2,fill:"none",stroke:"#fff"}))}},{key:"renderTraveller",value:function(e,t){return R.isValidElement(e)?R.cloneElement(e,t):eo()(e)?e(t):a.renderDefaultTraveller(t)}},{key:"getDerivedStateFromProps",value:function(e,t){var n=e.data,r=e.width,o=e.x,a=e.travellerWidth,i=e.updateId,l=e.startIndex,s=e.endIndex;if(n!==t.prevData||i!==t.prevUpdateId)return cH({prevData:n,prevTravellerWidth:a,prevUpdateId:i,prevX:o,prevWidth:r},n&&n.length?cK({data:n,width:r,x:o,travellerWidth:a,startIndex:l,endIndex:s}):{scale:null,scaleValues:null});if(t.scale&&(r!==t.prevWidth||o!==t.prevX||a!==t.prevTravellerWidth)){t.scale.range([o,o+r-a]);var c=t.scale.domain().map(function(e){return t.scale(e)});return{prevData:n,prevTravellerWidth:a,prevUpdateId:i,prevX:o,prevWidth:r,startX:t.scale(e.startIndex),endX:t.scale(e.endIndex),scaleValues:c}}return null}},{key:"getIndexInRange",value:function(e,t){for(var n=e.length,r=0,o=n-1;o-r>1;){var a=Math.floor((r+o)/2);e[a]>t?o=a:r=a}return 
t>=e[o]?o:r}}],n&&cG(a.prototype,n),r&&cG(a,r),Object.defineProperty(a,"prototype",{writable:!1}),a}(R.PureComponent);function cJ(e){return(cJ="function"==typeof Symbol&&"symbol"==typeof Symbol.iterator?function(e){return typeof e}:function(e){return e&&"function"==typeof Symbol&&e.constructor===Symbol&&e!==Symbol.prototype?"symbol":typeof e})(e)}function c0(e,t){var n=Object.keys(e);if(Object.getOwnPropertySymbols){var r=Object.getOwnPropertySymbols(e);t&&(r=r.filter(function(t){return Object.getOwnPropertyDescriptor(e,t).enumerable})),n.push.apply(n,r)}return n}function c1(e){for(var t=1;ta&&(l=2*Math.PI-l),{radius:i,angle:180*l/Math.PI,angleInRadian:l}},c5=function(e){var t=e.startAngle,n=e.endAngle,r=Math.min(Math.floor(t/360),Math.floor(n/360));return{startAngle:t-360*r,endAngle:n-360*r}},c8=function(e,t){var n,r=c6({x:e.x,y:e.y},t),o=r.radius,a=r.angle,i=t.innerRadius,l=t.outerRadius;if(ol)return!1;if(0===o)return!0;var s=c5(t),c=s.startAngle,u=s.endAngle,d=a;if(c<=u){for(;d>u;)d-=360;for(;d=c&&d<=u}else{for(;d>c;)d-=360;for(;d=u&&d<=c}return n?c1(c1({},t),{},{radius:o,angle:d+360*Math.min(Math.floor(t.startAngle/360),Math.floor(t.endAngle/360))}):null};function c9(e){return(c9="function"==typeof Symbol&&"symbol"==typeof Symbol.iterator?function(e){return typeof e}:function(e){return e&&"function"==typeof Symbol&&e.constructor===Symbol&&e!==Symbol.prototype?"symbol":typeof e})(e)}var c7=["offset"];function ue(e,t){(null==t||t>e.length)&&(t=e.length);for(var n=0,r=Array(t);n=0?1:-1;"insideStart"===a?(r=f+v*l,o=g):"insideEnd"===a?(r=m-v*l,o=!g):"end"===a&&(r=m+v*l,o=g),o=b<=0?o:!o;var y=c4(c,u,h,r),E=c4(c,u,h,r+(o?1:-1)*359),S="M".concat(y.x,",").concat(y.y,"\n A").concat(h,",").concat(h,",0,1,").concat(o?0:1,",\n ").concat(E.x,",").concat(E.y),w=en()(e.id)?q("recharts-radial-line-"):e.id;return 
R.createElement("text",ur({},n,{dominantBaseline:"central",className:N("recharts-radial-bar-label",s)}),R.createElement("defs",null,R.createElement("path",{id:w,d:S})),R.createElement("textPath",{xlinkHref:"#".concat(w)},t))},ui=function(e){var t=e.viewBox,n=e.offset,r=e.position,o=t.cx,a=t.cy,i=t.innerRadius,l=t.outerRadius,s=(t.startAngle+t.endAngle)/2;if("outside"===r){var c=c4(o,a,l+n,s),u=c.x;return{x:u,y:c.y,textAnchor:u>=o?"start":"end",verticalAnchor:"middle"}}if("center"===r)return{x:o,y:a,textAnchor:"middle",verticalAnchor:"middle"};if("centerTop"===r)return{x:o,y:a,textAnchor:"middle",verticalAnchor:"start"};if("centerBottom"===r)return{x:o,y:a,textAnchor:"middle",verticalAnchor:"end"};var d=c4(o,a,(i+l)/2,s);return{x:d.x,y:d.y,textAnchor:"middle",verticalAnchor:"middle"}},ul=function(e){var t=e.viewBox,n=e.parentViewBox,r=e.offset,o=e.position,a=t.x,i=t.y,l=t.width,s=t.height,c=s>=0?1:-1,u=c*r,d=c>0?"end":"start",p=c>0?"start":"end",f=l>=0?1:-1,m=f*r,g=f>0?"end":"start",h=f>0?"start":"end";if("top"===o)return un(un({},{x:a+l/2,y:i-c*r,textAnchor:"middle",verticalAnchor:d}),n?{height:Math.max(i-n.y,0),width:l}:{});if("bottom"===o)return un(un({},{x:a+l/2,y:i+s+u,textAnchor:"middle",verticalAnchor:p}),n?{height:Math.max(n.y+n.height-(i+s),0),width:l}:{});if("left"===o){var b={x:a-m,y:i+s/2,textAnchor:g,verticalAnchor:"middle"};return un(un({},b),n?{width:Math.max(b.x-n.x,0),height:s}:{})}if("right"===o){var v={x:a+l+m,y:i+s/2,textAnchor:h,verticalAnchor:"middle"};return un(un({},v),n?{width:Math.max(n.x+n.width-v.x,0),height:s}:{})}var 
y=n?{width:l,height:s}:{};return"insideLeft"===o?un({x:a+m,y:i+s/2,textAnchor:h,verticalAnchor:"middle"},y):"insideRight"===o?un({x:a+l-m,y:i+s/2,textAnchor:g,verticalAnchor:"middle"},y):"insideTop"===o?un({x:a+l/2,y:i+u,textAnchor:"middle",verticalAnchor:p},y):"insideBottom"===o?un({x:a+l/2,y:i+s-u,textAnchor:"middle",verticalAnchor:d},y):"insideTopLeft"===o?un({x:a+m,y:i+u,textAnchor:h,verticalAnchor:p},y):"insideTopRight"===o?un({x:a+l-m,y:i+u,textAnchor:g,verticalAnchor:p},y):"insideBottomLeft"===o?un({x:a+m,y:i+s-u,textAnchor:h,verticalAnchor:d},y):"insideBottomRight"===o?un({x:a+l-m,y:i+s-u,textAnchor:g,verticalAnchor:d},y):ei()(o)&&($(o.x)||G(o.x))&&($(o.y)||G(o.y))?un({x:a+Y(o.x,l),y:i+Y(o.y,s),textAnchor:"end",verticalAnchor:"end"},y):un({x:a+l/2,y:i+s/2,textAnchor:"middle",verticalAnchor:"middle"},y)};function us(e){var t,n=e.offset,r=un({offset:void 0===n?5:n},function(e,t){if(null==e)return{};var n,r,o=function(e,t){if(null==e)return{};var n,r,o={},a=Object.keys(e);for(r=0;r=0||(o[n]=e[n]);return o}(e,t);if(Object.getOwnPropertySymbols){var a=Object.getOwnPropertySymbols(e);for(r=0;r=0)&&Object.prototype.propertyIsEnumerable.call(e,n)&&(o[n]=e[n])}return o}(e,c7)),o=r.viewBox,a=r.position,i=r.value,l=r.children,s=r.content,c=r.className,u=r.textBreakAll;if(!o||en()(i)&&en()(l)&&!(0,R.isValidElement)(s)&&!eo()(s))return null;if((0,R.isValidElement)(s))return(0,R.cloneElement)(s,r);if(eo()(s)){if(t=(0,R.createElement)(s,r),(0,R.isValidElement)(t))return t}else t=uo(r);var d="cx"in o&&$(o.cx),p=eA(r,!0);if(d&&("insideStart"===a||"insideEnd"===a||"end"===a))return ua(r,t,p);var f=d?ui(r):ul(r);return R.createElement(o$,ur({className:N("recharts-label",void 0===c?"":c)},p,f,{breakAll:u}),t)}us.displayName="Label";var uc=function(e){var t=e.cx,n=e.cy,r=e.angle,o=e.startAngle,a=e.endAngle,i=e.r,l=e.radius,s=e.innerRadius,c=e.outerRadius,u=e.x,d=e.y,p=e.top,f=e.left,m=e.width,g=e.height,h=e.clockWise,b=e.labelViewBox;if(b)return 
b;if($(m)&&$(g)){if($(u)&&$(d))return{x:u,y:d,width:m,height:g};if($(p)&&$(f))return{x:p,y:f,width:m,height:g}}return $(u)&&$(d)?{x:u,y:d,width:0,height:0}:$(t)&&$(n)?{cx:t,cy:n,startAngle:o||r||0,endAngle:a||r||0,innerRadius:s||0,outerRadius:c||l||i||0,clockWise:h}:e.viewBox?e.viewBox:{}};us.parseViewBox=uc,us.renderCallByParent=function(e,t){var n,r,o=!(arguments.length>2)||void 0===arguments[2]||arguments[2];if(!e||!e.children&&o&&!e.label)return null;var a=e.children,i=uc(e),l=ex(a,us).map(function(e,n){return(0,R.cloneElement)(e,{viewBox:t||i,key:"label-".concat(n)})});return o?[(n=e.label,r=t||i,n?!0===n?R.createElement(us,{key:"label-implicit",viewBox:r}):W(n)?R.createElement(us,{key:"label-implicit",viewBox:r,value:n}):(0,R.isValidElement)(n)?n.type===us?(0,R.cloneElement)(n,{key:"label-implicit",viewBox:r}):R.createElement(us,{key:"label-implicit",content:n,viewBox:r}):eo()(n)?R.createElement(us,{key:"label-implicit",content:n,viewBox:r}):ei()(n)?R.createElement(us,ur({viewBox:r},n,{key:"label-implicit"})):null:null)].concat(function(e){if(Array.isArray(e))return ue(e)}(l)||function(e){if("undefined"!=typeof Symbol&&null!=e[Symbol.iterator]||null!=e["@@iterator"])return Array.from(e)}(l)||function(e,t){if(e){if("string"==typeof e)return ue(e,t);var n=Object.prototype.toString.call(e).slice(8,-1);if("Object"===n&&e.constructor&&(n=e.constructor.name),"Map"===n||"Set"===n)return Array.from(e);if("Arguments"===n||/^(?:Ui|I)nt(?:8|16|32)(?:Clamped)?Array$/.test(n))return ue(e,t)}}(l)||function(){throw TypeError("Invalid attempt to spread non-iterable instance.\nIn order to be iterable, non-array objects must have a [Symbol.iterator]() method.")}()):l};var uu=function(e,t){var n=e.alwaysShow,r=e.ifOverflow;return n&&(r="extendDomain"),r===t},ud=n(50924),up=n.n(ud),uf=function(e){return null};uf.displayName="Cell";var um=n(36887),ug=n.n(um);function uh(e){return(uh="function"==typeof Symbol&&"symbol"==typeof Symbol.iterator?function(e){return typeof 
e}:function(e){return e&&"function"==typeof Symbol&&e.constructor===Symbol&&e!==Symbol.prototype?"symbol":typeof e})(e)}var ub=["valueAccessor"],uv=["data","dataKey","clockWise","id","textBreakAll"];function uy(e,t){(null==t||t>e.length)&&(t=e.length);for(var n=0,r=Array(t);n=0||(o[n]=e[n]);return o}(e,t);if(Object.getOwnPropertySymbols){var a=Object.getOwnPropertySymbols(e);for(r=0;r=0)&&Object.prototype.propertyIsEnumerable.call(e,n)&&(o[n]=e[n])}return o}var uO=function(e){return Array.isArray(e.value)?ug()(e.value):e.value};function uC(e){var t=e.valueAccessor,n=void 0===t?uO:t,r=ux(e,ub),o=r.data,a=r.dataKey,i=r.clockWise,l=r.id,s=r.textBreakAll,c=ux(r,uv);return o&&o.length?R.createElement(eQ,{className:"recharts-label-list"},o.map(function(e,t){var r=en()(a)?n(e,t):ct(e&&e.payload,a),o=en()(l)?{}:{id:"".concat(l,"-").concat(t)};return R.createElement(us,uE({},eA(e,!0),c,o,{parentViewBox:e.parentViewBox,value:r,textBreakAll:s,viewBox:us.parseViewBox(en()(i)?e:uw(uw({},e),{},{clockWise:i})),key:"label-".concat(t),index:t}))})):null}uC.displayName="LabelList",uC.renderCallByParent=function(e,t){var n,r=!(arguments.length>2)||void 0===arguments[2]||arguments[2];if(!e||!e.children&&r&&!e.label)return null;var o=ex(e.children,uC).map(function(e,n){return(0,R.cloneElement)(e,{data:t,key:"labelList-".concat(n)})});return r?[(n=e.label)?!0===n?R.createElement(uC,{key:"labelList-implicit",data:t}):R.isValidElement(n)||eo()(n)?R.createElement(uC,{key:"labelList-implicit",data:t,content:n}):ei()(n)?R.createElement(uC,uE({data:t},n,{key:"labelList-implicit"})):null:null].concat(function(e){if(Array.isArray(e))return uy(e)}(o)||function(e){if("undefined"!=typeof Symbol&&null!=e[Symbol.iterator]||null!=e["@@iterator"])return Array.from(e)}(o)||function(e,t){if(e){if("string"==typeof e)return uy(e,t);var n=Object.prototype.toString.call(e).slice(8,-1);if("Object"===n&&e.constructor&&(n=e.constructor.name),"Map"===n||"Set"===n)return 
Array.from(e);if("Arguments"===n||/^(?:Ui|I)nt(?:8|16|32)(?:Clamped)?Array$/.test(n))return uy(e,t)}}(o)||function(){throw TypeError("Invalid attempt to spread non-iterable instance.\nIn order to be iterable, non-array objects must have a [Symbol.iterator]() method.")}()):o};var uT=n(23393),uk=n.n(uT),uA=n(90849),uI=n.n(uA);function uR(e){return(uR="function"==typeof Symbol&&"symbol"==typeof Symbol.iterator?function(e){return typeof e}:function(e){return e&&"function"==typeof Symbol&&e.constructor===Symbol&&e!==Symbol.prototype?"symbol":typeof e})(e)}function uN(){return(uN=Object.assign?Object.assign.bind():function(e){for(var t=1;te.length)&&(t=e.length);for(var n=0,r=Array(t);n0,from:{upperWidth:0,lowerWidth:0,height:d,x:l,y:s},to:{upperWidth:c,lowerWidth:u,height:d,x:l,y:s},duration:m,animationEasing:f,isActive:h},function(e){var t=e.upperWidth,o=e.lowerWidth,i=e.height,l=e.x,s=e.y;return R.createElement(ni,{canBegin:a>0,from:"0px ".concat(-1===a?1:a,"px"),to:"".concat(a,"px 0px"),attributeName:"strokeDasharray",begin:g,duration:m,easing:f},R.createElement("path",uN({},eA(n,!0),{className:b,d:uL(l,s,t,o,i),ref:r})))}):R.createElement("g",null,R.createElement("path",uN({},eA(n,!0),{className:b,d:uL(l,s,c,u,d)})))};function uF(e){return(uF="function"==typeof Symbol&&"symbol"==typeof Symbol.iterator?function(e){return typeof e}:function(e){return e&&"function"==typeof Symbol&&e.constructor===Symbol&&e!==Symbol.prototype?"symbol":typeof e})(e)}function uB(){return(uB=Object.assign?Object.assign.bind():function(e){for(var t=1;t180),",").concat(+(i>s),",\n ").concat(u.x,",").concat(u.y,"\n ");if(o>0){var p=c4(n,r,o,i),f=c4(n,r,o,s);d+="L ".concat(f.x,",").concat(f.y,"\n A ").concat(o,",").concat(o,",0,\n ").concat(+(Math.abs(l)>180),",").concat(+(i<=s),",\n ").concat(p.x,",").concat(p.y," Z")}else d+="L ".concat(n,",").concat(r," Z");return d},uG=function(e){var 
t=e.cx,n=e.cy,r=e.innerRadius,o=e.outerRadius,a=e.cornerRadius,i=e.forceCornerRadius,l=e.cornerIsExternal,s=e.startAngle,c=e.endAngle,u=H(c-s),d=uz({cx:t,cy:n,radius:o,angle:s,sign:u,cornerRadius:a,cornerIsExternal:l}),p=d.circleTangency,f=d.lineTangency,m=d.theta,g=uz({cx:t,cy:n,radius:o,angle:c,sign:-u,cornerRadius:a,cornerIsExternal:l}),h=g.circleTangency,b=g.lineTangency,v=g.theta,y=l?Math.abs(s-c):Math.abs(s-c)-m-v;if(y<0)return i?"M ".concat(f.x,",").concat(f.y,"\n a").concat(a,",").concat(a,",0,0,1,").concat(2*a,",0\n a").concat(a,",").concat(a,",0,0,1,").concat(-(2*a),",0\n "):uH({cx:t,cy:n,innerRadius:r,outerRadius:o,startAngle:s,endAngle:c});var E="M ".concat(f.x,",").concat(f.y,"\n A").concat(a,",").concat(a,",0,0,").concat(+(u<0),",").concat(p.x,",").concat(p.y,"\n A").concat(o,",").concat(o,",0,").concat(+(y>180),",").concat(+(u<0),",").concat(h.x,",").concat(h.y,"\n A").concat(a,",").concat(a,",0,0,").concat(+(u<0),",").concat(b.x,",").concat(b.y,"\n ");if(r>0){var S=uz({cx:t,cy:n,radius:r,angle:s,sign:u,isExternal:!0,cornerRadius:a,cornerIsExternal:l}),w=S.circleTangency,x=S.lineTangency,O=S.theta,C=uz({cx:t,cy:n,radius:r,angle:c,sign:-u,isExternal:!0,cornerRadius:a,cornerIsExternal:l}),T=C.circleTangency,k=C.lineTangency,A=C.theta,I=l?Math.abs(s-c):Math.abs(s-c)-O-A;if(I<0&&0===a)return"".concat(E,"L").concat(t,",").concat(n,"Z");E+="L".concat(k.x,",").concat(k.y,"\n A").concat(a,",").concat(a,",0,0,").concat(+(u<0),",").concat(T.x,",").concat(T.y,"\n A").concat(r,",").concat(r,",0,").concat(+(I>180),",").concat(+(u>0),",").concat(w.x,",").concat(w.y,"\n A").concat(a,",").concat(a,",0,0,").concat(+(u<0),",").concat(x.x,",").concat(x.y,"Z")}else E+="L".concat(t,",").concat(n,"Z");return E},u$={cx:0,cy:0,innerRadius:0,outerRadius:0,startAngle:0,endAngle:0,cornerRadius:0,forceCornerRadius:!1,cornerIsExternal:!1},uW=function(e){var 
t,n=uZ(uZ({},u$),e),r=n.cx,o=n.cy,a=n.innerRadius,i=n.outerRadius,l=n.cornerRadius,s=n.forceCornerRadius,c=n.cornerIsExternal,u=n.startAngle,d=n.endAngle,p=n.className;if(i0&&360>Math.abs(u-d)?uG({cx:r,cy:o,innerRadius:a,outerRadius:i,cornerRadius:Math.min(g,m/2),forceCornerRadius:s,cornerIsExternal:c,startAngle:u,endAngle:d}):uH({cx:r,cy:o,innerRadius:a,outerRadius:i,startAngle:u,endAngle:d}),R.createElement("path",uB({},eA(n,!0),{className:f,d:t,role:"img"}))},uV=["option","shapeType","propTransformer","activeClassName","isActive"];function uq(e){return(uq="function"==typeof Symbol&&"symbol"==typeof Symbol.iterator?function(e){return typeof e}:function(e){return e&&"function"==typeof Symbol&&e.constructor===Symbol&&e!==Symbol.prototype?"symbol":typeof e})(e)}function uY(e,t){var n=Object.keys(e);if(Object.getOwnPropertySymbols){var r=Object.getOwnPropertySymbols(e);t&&(r=r.filter(function(t){return Object.getOwnPropertyDescriptor(e,t).enumerable})),n.push.apply(n,r)}return n}function uK(e){for(var t=1;t=0||(o[n]=e[n]);return o}(e,t);if(Object.getOwnPropertySymbols){var a=Object.getOwnPropertySymbols(e);for(r=0;r=0)&&Object.prototype.propertyIsEnumerable.call(e,n)&&(o[n]=e[n])}return o}(e,uV);if((0,R.isValidElement)(n))t=(0,R.cloneElement)(n,uK(uK({},l),(0,R.isValidElement)(n)?n.props:n));else if(eo()(n))t=n(l);else if(uk()(n)&&!uI()(n)){var s=(void 0===o?function(e,t){return uK(uK({},t),e)}:o)(n,l);t=R.createElement(uX,{shapeType:r,elementProps:s})}else t=R.createElement(uX,{shapeType:r,elementProps:l});return i?R.createElement(eQ,{className:void 0===a?"recharts-active-shape":a},t):t}function uJ(e,t){return null!=t&&"trapezoids"in e.props}function u0(e,t){return null!=t&&"sectors"in e.props}function u1(e,t){return null!=t&&"points"in e.props}function u2(e,t){var n,r,o=e.x===(null==t||null===(n=t.labelViewBox)||void 0===n?void 0:n.x)||e.x===t.x,a=e.y===(null==t||null===(r=t.labelViewBox)||void 0===r?void 0:r.y)||e.y===t.y;return o&&a}function u4(e,t){var 
n=e.endAngle===t.endAngle,r=e.startAngle===t.startAngle;return n&&r}function u3(e,t){var n=e.x===t.x,r=e.y===t.y,o=e.z===t.z;return n&&r&&o}function u6(e){return(u6="function"==typeof Symbol&&"symbol"==typeof Symbol.iterator?function(e){return typeof e}:function(e){return e&&"function"==typeof Symbol&&e.constructor===Symbol&&e!==Symbol.prototype?"symbol":typeof e})(e)}var u5=["x","y"];function u8(){return(u8=Object.assign?Object.assign.bind():function(e){for(var t=1;t=0||(o[n]=e[n]);return o}(e,t);if(Object.getOwnPropertySymbols){var a=Object.getOwnPropertySymbols(e);for(r=0;r=0)&&Object.prototype.propertyIsEnumerable.call(e,n)&&(o[n]=e[n])}return o}(e,u5),a=parseInt("".concat(n),10),i=parseInt("".concat(r),10),l=parseInt("".concat(t.height||o.height),10),s=parseInt("".concat(t.width||o.width),10);return u7(u7(u7(u7(u7({},t),o),a?{x:a}:{}),i?{y:i}:{}),{},{height:l,width:s,name:t.name,radius:t.radius})}function dt(e){return R.createElement(uQ,u8({shapeType:"rectangle",propTransformer:de,activeClassName:"recharts-active-bar"},e))}var dn=["value","background"];function dr(e){return(dr="function"==typeof Symbol&&"symbol"==typeof Symbol.iterator?function(e){return typeof e}:function(e){return e&&"function"==typeof Symbol&&e.constructor===Symbol&&e!==Symbol.prototype?"symbol":typeof e})(e)}function da(){return(da=Object.assign?Object.assign.bind():function(e){for(var t=1;t=0||(o[n]=e[n]);return o}(e,t);if(Object.getOwnPropertySymbols){var a=Object.getOwnPropertySymbols(e);for(r=0;r=0)&&Object.prototype.propertyIsEnumerable.call(e,n)&&(o[n]=e[n])}return o}(t,dn);if(!i)return null;var s=dl(dl(dl(dl(dl({},l),{},{fill:"#eee"},i),a),em(e.props,t,n)),{},{onAnimationStart:e.handleAnimationStart,onAnimationEnd:e.handleAnimationEnd,dataKey:r,index:n,key:"background-bar-".concat(n),className:"recharts-bar-background-rectangle"});return 
R.createElement(dt,da({option:e.props.background,isActive:n===o},s))})}},{key:"renderErrorBar",value:function(e,t){if(this.props.isAnimationActive&&!this.state.isAnimationFinished)return null;var n=this.props,r=n.data,o=n.xAxis,a=n.yAxis,i=n.layout,l=ex(n.children,s0);if(!l)return null;var s="vertical"===i?r[0].height/2:r[0].width/2,c=function(e,t){var n=Array.isArray(e.value)?e.value[1]:e.value;return{x:e.x,y:e.y,value:n,errorVal:ct(e,t)}};return R.createElement(eQ,{clipPath:e?"url(#clipPath-".concat(t,")"):null},l.map(function(e){return R.cloneElement(e,{key:"error-bar-".concat(t,"-").concat(e.props.dataKey),data:r,xAxis:o,yAxis:a,layout:i,offset:s,dataPointFormatter:c})}))}},{key:"render",value:function(){var e=this.props,t=e.hide,n=e.data,r=e.className,o=e.xAxis,a=e.yAxis,i=e.left,l=e.top,s=e.width,c=e.height,u=e.isAnimationActive,d=e.background,p=e.id;if(t||!n||!n.length)return null;var f=this.state.isAnimationFinished,m=N("recharts-bar",r),g=o&&o.allowDataOverflow,h=a&&a.allowDataOverflow,b=g||h,v=en()(p)?this.id:p;return R.createElement(eQ,{className:m},g||h?R.createElement("defs",null,R.createElement("clipPath",{id:"clipPath-".concat(v)},R.createElement("rect",{x:g?i:i-s/2,y:h?l:l-c/2,width:g?s:2*s,height:h?c:2*c}))):null,R.createElement(eQ,{className:"recharts-bar-rectangles",clipPath:b?"url(#clipPath-".concat(v,")"):null},d?this.renderBackground():null,this.renderRectangles()),this.renderErrorBar(b,v),(!u||f)&&uC.renderCallByParent(this.props,n))}}],r=[{key:"getDerivedStateFromProps",value:function(e,t){return e.animationId!==t.prevAnimationId?{prevAnimationId:e.animationId,curData:e.data,prevData:t.curData}:e.data!==t.curData?{curData:e.data}:null}}],n&&ds(a.prototype,n),r&&ds(a,r),Object.defineProperty(a,"prototype",{writable:!1}),a}(R.PureComponent);function dg(e){return(dg="function"==typeof Symbol&&"symbol"==typeof Symbol.iterator?function(e){return typeof e}:function(e){return e&&"function"==typeof 
Symbol&&e.constructor===Symbol&&e!==Symbol.prototype?"symbol":typeof e})(e)}function dh(e,t){for(var n=0;n0&&Math.abs(b)0&&Math.abs(g)1&&void 0!==arguments[1]?arguments[1]:{},n=t.bandAware,r=t.position;if(void 0!==e){if(r)switch(r){case"start":default:return this.scale(e);case"middle":var o=this.bandwidth?this.bandwidth()/2:0;return this.scale(e)+o;case"end":var a=this.bandwidth?this.bandwidth():0;return this.scale(e)+a}if(n){var i=this.bandwidth?this.bandwidth()/2:0;return this.scale(e)+i}return this.scale(e)}}},{key:"isInRange",value:function(e){var t=this.range(),n=t[0],r=t[t.length-1];return n<=r?e>=n&&e<=r:e>=r&&e<=n}}],t=[{key:"create",value:function(e){return new n(e)}}],e&&dh(n.prototype,e),t&&dh(n,t),Object.defineProperty(n,"prototype",{writable:!1}),n}();dy(dw,"EPS",1e-4);var dx=function(e){var t=Object.keys(e).reduce(function(t,n){return dv(dv({},t),{},dy({},n,dw.create(e[n])))},{});return dv(dv({},t),{},{apply:function(e){var n=arguments.length>1&&void 0!==arguments[1]?arguments[1]:{},r=n.bandAware,o=n.position;return up()(e,function(e,n){return t[n].apply(e,{bandAware:r,position:o})})},isInRange:function(e){return e$()(e,function(e,n){return t[n].isInRange(e)})}})},dO=function(e){var t=e.width,n=e.height,r=arguments.length>1&&void 0!==arguments[1]?arguments[1]:0,o=(r%180+180)%180*Math.PI/180,a=Math.atan(n/t);return Math.abs(o>a&&oe.length)&&(t=e.length);for(var n=0,r=Array(t);ne.length)&&(t=e.length);for(var n=0,r=Array(t);n=0;--t)o[t]=(i[t]-o[t+1])/a[t];for(t=0,a[r-1]=(e[r]+o[r-1])/2;t=d;--p)l.point(b[p],v[p]);l.lineEnd(),l.areaEnd()}}h&&(b[u]=+e(f,u,c),v[u]=+t(f,u,c),l.point(r?+r(f,u,c):b[u],n?+n(f,u,c):v[u]))}if(m)return l=null,m+""||null}function u(){return pk().defined(o).curve(i).context(a)}return e="function"==typeof e?e:void 0===e?pC:ro(+e),t="function"==typeof t?t:void 0===t?ro(0):ro(+t),n="function"==typeof n?n:void 0===n?pT:ro(+n),c.x=function(t){return arguments.length?(e="function"==typeof t?t:ro(+t),r=null,c):e},c.x0=function(t){return 
arguments.length?(e="function"==typeof t?t:ro(+t),c):e},c.x1=function(e){return arguments.length?(r=null==e?null:"function"==typeof e?e:ro(+e),c):r},c.y=function(e){return arguments.length?(t="function"==typeof e?e:ro(+e),n=null,c):t},c.y0=function(e){return arguments.length?(t="function"==typeof e?e:ro(+e),c):t},c.y1=function(e){return arguments.length?(n=null==e?null:"function"==typeof e?e:ro(+e),c):n},c.lineX0=c.lineY0=function(){return u().x(e).y(t)},c.lineY1=function(){return u().x(e).y(n)},c.lineX1=function(){return u().x(r).y(t)},c.defined=function(e){return arguments.length?(o="function"==typeof e?e:ro(!!e),c):o},c.curve=function(e){return arguments.length?(i=e,null!=a&&(l=i(a)),c):i},c.context=function(e){return arguments.length?(null==e?a=l=null:l=i(a=e),c):a},c}function pI(e){return(pI="function"==typeof Symbol&&"symbol"==typeof Symbol.iterator?function(e){return typeof e}:function(e){return e&&"function"==typeof Symbol&&e.constructor===Symbol&&e!==Symbol.prototype?"symbol":typeof e})(e)}function pR(){return(pR=Object.assign?Object.assign.bind():function(e){for(var t=1;t=0&&(this._t=1-this._t,this._line=1-this._line)},point:function(e,t){switch(e=+e,t=+t,this._point){case 0:this._point=1,this._line?this._context.lineTo(e,t):this._context.moveTo(e,t);break;case 1:this._point=2;default:if(this._t<=0)this._context.lineTo(this._x,t),this._context.lineTo(e,t);else{var n=this._x*(1-this._t)+e*this._t;this._context.lineTo(n,this._y),this._context.lineTo(n,t)}}this._x=e,this._y=t}};var pP={curveBasisClosed:function(e){return new pu(e)},curveBasisOpen:function(e){return new pd(e)},curveBasis:function(e){return new pc(e)},curveBumpX:function(e){return new pp(e,!0)},curveBumpY:function(e){return new pp(e,!1)},curveLinearClosed:function(e){return new pf(e)},curveLinear:pg,curveMonotoneX:function(e){return new py(e)},curveMonotoneY:function(e){return new pE(e)},curveNatural:function(e){return new pw(e)},curveStep:function(e){return new 
pO(e,.5)},curveStepAfter:function(e){return new pO(e,1)},curveStepBefore:function(e){return new pO(e,0)}},pM=function(e){return e.x===+e.x&&e.y===+e.y},pL=function(e){return e.x},pD=function(e){return e.y},pj=function(e,t){if(eo()(e))return e;var n="curve".concat(nQ()(e));return("curveMonotone"===n||"curveBump"===n)&&t?pP["".concat(n).concat("vertical"===t?"Y":"X")]:pP[n]||pg},pF=function(e){var t,n=e.type,r=e.points,o=void 0===r?[]:r,a=e.baseLine,i=e.layout,l=e.connectNulls,s=void 0!==l&&l,c=pj(void 0===n?"linear":n,i),u=s?o.filter(function(e){return pM(e)}):o;if(Array.isArray(a)){var d=s?a.filter(function(e){return pM(e)}):a,p=u.map(function(e,t){return p_(p_({},e),{},{base:d[t]})});return(t="vertical"===i?pA().y(pD).x1(pL).x0(function(e){return e.base.x}):pA().x(pL).y1(pD).y0(function(e){return e.base.y})).defined(pM).curve(c),t(p)}return(t="vertical"===i&&$(a)?pA().y(pD).x1(pL).x0(a):$(a)?pA().x(pL).y1(pD).y0(a):pk().x(pL).y(pD)).defined(pM).curve(c),t(u)},pB=function(e){var t=e.className,n=e.points,r=e.path,o=e.pathRef;if((!n||!n.length)&&!r)return null;var a=n&&n.length?pF(e):r;return R.createElement("path",pR({},eA(e,!1),ef(e),{className:N("recharts-curve",t),d:a,ref:o}))};function pU(e){return(pU="function"==typeof Symbol&&"symbol"==typeof Symbol.iterator?function(e){return typeof e}:function(e){return e&&"function"==typeof Symbol&&e.constructor===Symbol&&e!==Symbol.prototype?"symbol":typeof e})(e)}var pZ=["x","y","top","left","width","height","className"];function pz(){return(pz=Object.assign?Object.assign.bind():function(e){for(var t=1;t=0||(o[n]=e[n]);return o}(e,t);if(Object.getOwnPropertySymbols){var a=Object.getOwnPropertySymbols(e);for(r=0;r=0)&&Object.prototype.propertyIsEnumerable.call(e,n)&&(o[n]=e[n])}return o}(e,pZ));return $(n)&&$(o)&&$(u)&&$(p)&&$(i)&&$(s)?R.createElement("path",pz({},eA(m,!0),{className:N("recharts-cross",f),d:"M".concat(n,",").concat(i,"v").concat(p,"M").concat(s,",").concat(o,"h").concat(u)})):null};function p$(e){var 
t=e.cx,n=e.cy,r=e.radius,o=e.startAngle,a=e.endAngle;return{points:[c4(t,n,r,o),c4(t,n,r,a)],cx:t,cy:n,radius:r,startAngle:o,endAngle:a}}function pW(e){return(pW="function"==typeof Symbol&&"symbol"==typeof Symbol.iterator?function(e){return typeof e}:function(e){return e&&"function"==typeof Symbol&&e.constructor===Symbol&&e!==Symbol.prototype?"symbol":typeof e})(e)}function pV(e,t){var n=Object.keys(e);if(Object.getOwnPropertySymbols){var r=Object.getOwnPropertySymbols(e);t&&(r=r.filter(function(t){return Object.getOwnPropertyDescriptor(e,t).enumerable})),n.push.apply(n,r)}return n}function pq(e){for(var t=1;t=0||(o[n]=e[n]);return o}(e,t);if(Object.getOwnPropertySymbols){var a=Object.getOwnPropertySymbols(e);for(r=0;r=0)&&Object.prototype.propertyIsEnumerable.call(e,n)&&(o[n]=e[n])}return o}function p2(e,t){for(var n=0;ne.length)&&(t=e.length);for(var n=0,r=Array(t);n0?a:e&&e.length&&$(r)&&$(o)?e.slice(r,o+1):[]};function fc(e){return"number"===e?[0,"auto"]:void 0}var fu=function(e,t,n,r){var o=e.graphicalItems,a=e.tooltipAxis,i=fs(t,e);return n<0||!o||!o.length||n>=i.length?null:o.reduce(function(o,l){var s,c,u=null!==(s=l.props.data)&&void 0!==s?s:t;return(u&&e.dataStartIndex+e.dataEndIndex!==0&&(u=u.slice(e.dataStartIndex,e.dataEndIndex+1)),c=a.dataKey&&!a.allowDuplicatedCategory?J(void 0===u?i:u,a.dataKey,r):u&&u[n]||i[n])?[].concat(p5(o),[cP(l,c)]):o},[])},fd=function(e,t,n,r){var o=r||{x:e.chartX,y:e.chartY},a="horizontal"===n?o.x:"vertical"===n?o.y:"centric"===n?o.angle:o.radius,i=e.orderedTooltipTicks,l=e.tooltipAxis,s=e.tooltipTicks,c=cr(a,i,s,l);if(c>=0&&s){var u=s[c]&&s[c].value,d=fu(e,t,c,u),p=fl(n,i,c,o);return{activeTooltipIndex:c,activeLabel:u,activePayload:d,activeCoordinate:p}}return null},fp=function(e,t){var n=t.axes,r=t.graphicalItems,o=t.axisType,a=t.axisIdKey,i=t.stackGroups,l=t.dataStartIndex,s=t.dataEndIndex,c=e.layout,u=e.children,d=e.stackOffset,p=cd(c,o);return n.reduce(function(t,n){var 
f=n.props,m=f.type,g=f.dataKey,h=f.allowDataOverflow,b=f.allowDuplicatedCategory,v=f.scale,y=f.ticks,E=f.includeHidden,S=n.props[a];if(t[S])return t;var w=fs(e.data,{graphicalItems:r.filter(function(e){return e.props[a]===S}),dataStartIndex:l,dataEndIndex:s}),x=w.length;(function(e,t,n){if("number"===n&&!0===t&&Array.isArray(e)){var r=null==e?void 0:e[0],o=null==e?void 0:e[1];if(r&&o&&$(r)&&$(o))return!0}return!1})(n.props.domain,h,m)&&(T=cR(n.props.domain,null,h),p&&("number"===m||"auto"!==v)&&(A=cn(w,g,"category")));var O=fc(m);if(!T||0===T.length){var C,T,k,A,I,R=null!==(I=n.props.domain)&&void 0!==I?I:O;if(g){if(T=cn(w,g,m),"category"===m&&p){var N=X(T);b&&N?(k=T,T=eB()(0,x)):b||(T=c_(R,T,n).reduce(function(e,t){return e.indexOf(t)>=0?e:[].concat(p5(e),[t])},[]))}else if("category"===m)T=b?T.filter(function(e){return""!==e&&!en()(e)}):c_(R,T,n).reduce(function(e,t){return e.indexOf(t)>=0||""===t||en()(t)?e:[].concat(p5(e),[t])},[]);else if("number"===m){var _=cc(w,r.filter(function(e){return e.props[a]===S&&(E||!e.props.hide)}),g,o,c);_&&(T=_)}p&&("number"===m||"auto"!==v)&&(A=cn(w,g,"category"))}else T=p?eB()(0,x):i&&i[S]&&i[S].hasStack&&"number"===m?"expand"===d?[0,1]:ck(i[S].stackGroups,l,s):cu(w,r.filter(function(e){return e.props[a]===S&&(E||!e.props.hide)}),m,c,!0);"number"===m?(T=d9(u,T,S,o,y),R&&(T=cR(R,T,h))):"category"===m&&R&&T.every(function(e){return R.indexOf(e)>=0})&&(T=R)}return fe(fe({},t),{},ft({},S,fe(fe({},n.props),{},{axisType:o,domain:T,categoricalDomain:A,duplicateDomain:k,originalDomain:null!==(C=n.props.domain)&&void 0!==C?C:O,isCategorical:p,layout:c})))},{})},ff=function(e,t){var n=t.graphicalItems,r=t.Axis,o=t.axisType,a=t.axisIdKey,i=t.stackGroups,l=t.dataStartIndex,s=t.dataEndIndex,c=e.layout,u=e.children,d=fs(e.data,{graphicalItems:n,dataStartIndex:l,dataEndIndex:s}),p=d.length,f=cd(c,o),m=-1;return n.reduce(function(e,t){var g,h=t.props[a],b=fc("number");return 
e[h]?e:(m++,g=f?eB()(0,p):i&&i[h]&&i[h].hasStack?d9(u,g=ck(i[h].stackGroups,l,s),h,o):d9(u,g=cR(b,cu(d,n.filter(function(e){return e.props[a]===h&&!e.props.hide}),"number",c),r.defaultProps.allowDataOverflow),h,o),fe(fe({},e),{},ft({},h,fe(fe({axisType:o},r.defaultProps),{},{hide:!0,orientation:U()(fr,"".concat(o,".").concat(m%2),null),domain:g,originalDomain:b,isCategorical:f,layout:c}))))},{})},fm=function(e,t){var n=t.axisType,r=void 0===n?"xAxis":n,o=t.AxisComp,a=t.graphicalItems,i=t.stackGroups,l=t.dataStartIndex,s=t.dataEndIndex,c=e.children,u="".concat(r,"Id"),d=ex(c,o),p={};return d&&d.length?p=fp(e,{axes:d,graphicalItems:a,axisType:r,axisIdKey:u,stackGroups:i,dataStartIndex:l,dataEndIndex:s}):a&&a.length&&(p=ff(e,{Axis:o,graphicalItems:a,axisType:r,axisIdKey:u,stackGroups:i,dataStartIndex:l,dataEndIndex:s})),p},fg=function(e){var t=K(e),n=cf(t,!1,!0);return{tooltipTicks:n,orderedTooltipTicks:eZ()(n,function(e){return e.coordinate}),tooltipAxis:t,tooltipAxisBandSize:cN(t,n)}},fh=function(e){var t=e.children,n=e.defaultShowTooltip,r=eO(t,cQ),o=0,a=0;return e.data&&0!==e.data.length&&(a=e.data.length-1),r&&r.props&&(r.props.startIndex>=0&&(o=r.props.startIndex),r.props.endIndex>=0&&(a=r.props.endIndex)),{chartX:0,chartY:0,dataStartIndex:o,dataEndIndex:a,activeTooltipIndex:-1,isTooltipActive:!!n}},fb=function(e){return"horizontal"===e?{numericAxisName:"yAxis",cateAxisName:"xAxis"}:"vertical"===e?{numericAxisName:"xAxis",cateAxisName:"yAxis"}:"centric"===e?{numericAxisName:"radiusAxis",cateAxisName:"angleAxis"}:{numericAxisName:"angleAxis",cateAxisName:"radiusAxis"}},fv=function(e,t){var n=e.props,r=e.graphicalItems,o=e.xAxisMap,a=void 0===o?{}:o,i=e.yAxisMap,l=void 0===i?{}:i,s=n.width,c=n.height,u=n.children,d=n.margin||{},p=eO(u,cQ),f=eO(u,r1),m=Object.keys(l).reduce(function(e,t){var n=l[t],r=n.orientation;return n.mirror||n.hide?e:fe(fe({},e),{},ft({},r,e[r]+n.width))},{left:d.left||0,right:d.right||0}),g=Object.keys(a).reduce(function(e,t){var 
n=a[t],r=n.orientation;return n.mirror||n.hide?e:fe(fe({},e),{},ft({},r,U()(e,"".concat(r))+n.height))},{top:d.top||0,bottom:d.bottom||0}),h=fe(fe({},g),m),b=h.bottom;p&&(h.bottom+=p.props.height||cQ.defaultProps.height),f&&t&&(h=cl(h,r,n,t));var v=s-h.left-h.right,y=c-h.top-h.bottom;return fe(fe({brushBottom:b},h),{},{width:Math.max(v,0),height:Math.max(y,0)})};function fy(e,t,n){if(t<1)return[];if(1===t&&void 0===n)return e;for(var r=[],o=0;oe*o)return!1;var a=n();return e*(t-e*a/2-r)>=0&&e*(t+e*a/2-o)<=0}function fS(e){return(fS="function"==typeof Symbol&&"symbol"==typeof Symbol.iterator?function(e){return typeof e}:function(e){return e&&"function"==typeof Symbol&&e.constructor===Symbol&&e!==Symbol.prototype?"symbol":typeof e})(e)}function fw(e,t){var n=Object.keys(e);if(Object.getOwnPropertySymbols){var r=Object.getOwnPropertySymbols(e);t&&(r=r.filter(function(t){return Object.getOwnPropertyDescriptor(e,t).enumerable})),n.push.apply(n,r)}return n}function fx(e){for(var t=1;t=2?H(c[1].coordinate-c[0].coordinate):1,S=(r="width"===b,o=u.x,a=u.y,i=u.width,l=u.height,1===E?{start:r?o:a,end:r?o+i:a+l}:{start:r?o+i:a+l,end:r?o:a});return"equidistantPreserveStart"===f?function(e,t,n,r,o){for(var a,i=(r||[]).slice(),l=t.start,s=t.end,c=0,u=1,d=l;u<=i.length;)if(a=function(){var t,a=null==r?void 0:r[c];if(void 0===a)return{v:fy(r,u)};var i=c,p=function(){return void 0===t&&(t=n(a,i)),t},f=a.coordinate,m=0===c||fE(e,f,p,d,s);m||(c=0,d=l,u+=1),m&&(d=f+e*(p()/2+o),c+=u)}())return a.v;return[]}(E,S,y,c,d):("preserveStart"===f||"preserveStartEnd"===f?function(e,t,n,r,o,a){var i=(r||[]).slice(),l=i.length,s=t.start,c=t.end;if(a){var u=r[l-1],d=n(u,l-1),p=e*(u.coordinate+e*d/2-c);i[l-1]=u=fx(fx({},u),{},{tickCoord:p>0?u.coordinate-p*e:u.coordinate}),fE(e,u.tickCoord,function(){return d},s,c)&&(c=u.tickCoord-e*(d/2+o),i[l-1]=fx(fx({},u),{},{isShow:!0}))}for(var f=a?l-1:l,m=function(t){var r,a=i[t],l=function(){return void 0===r&&(r=n(a,t)),r};if(0===t){var 
u=e*(a.coordinate-e*l()/2-s);i[t]=a=fx(fx({},a),{},{tickCoord:u<0?a.coordinate-u*e:a.coordinate})}else i[t]=a=fx(fx({},a),{},{tickCoord:a.coordinate});fE(e,a.tickCoord,l,s,c)&&(s=a.tickCoord+e*(l()/2+o),i[t]=fx(fx({},a),{},{isShow:!0}))},g=0;g0?c.coordinate-d*e:c.coordinate})}else a[t]=c=fx(fx({},c),{},{tickCoord:c.coordinate});fE(e,c.tickCoord,u,l,s)&&(s=c.tickCoord-e*(u()/2+o),a[t]=fx(fx({},c),{},{isShow:!0}))},u=i-1;u>=0;u--)c(u);return a}(E,S,y,c,d)).filter(function(e){return e.isShow})}var fC=["viewBox"],fT=["viewBox"],fk=["ticks"];function fA(e){return(fA="function"==typeof Symbol&&"symbol"==typeof Symbol.iterator?function(e){return typeof e}:function(e){return e&&"function"==typeof Symbol&&e.constructor===Symbol&&e!==Symbol.prototype?"symbol":typeof e})(e)}function fI(){return(fI=Object.assign?Object.assign.bind():function(e){for(var t=1;t=0||(o[n]=e[n]);return o}(e,t);if(Object.getOwnPropertySymbols){var a=Object.getOwnPropertySymbols(e);for(r=0;r=0)&&Object.prototype.propertyIsEnumerable.call(e,n)&&(o[n]=e[n])}return o}function fP(e,t){for(var n=0;n0?this.props:c)),r<=0||o<=0||!u||!u.length)?null:R.createElement(eQ,{className:N("recharts-cartesian-axis",i),ref:function(t){e.layerReference=t}},n&&this.renderAxisLine(),this.renderTicks(u,this.state.fontSize,this.state.letterSpacing),us.renderCallByParent(this.props))}}],r=[{key:"renderTickItem",value:function(e,t,n){return R.isValidElement(e)?R.cloneElement(e,t):eo()(e)?e(t):R.createElement(o$,fI({},t,{className:"recharts-cartesian-axis-tick-value"}),n)}}],n&&fP(a.prototype,n),r&&fP(a,r),Object.defineProperty(a,"prototype",{writable:!1}),a}(R.Component);function fB(){return(fB=Object.assign?Object.assign.bind():function(e){for(var t=1;t0&&(O=Math.min((e||0)-(C[t-1]||0),O))});var T=O/x,k="vertical"===g.layout?n.height:n.width;if("gap"===g.padding&&(s=T*k/2),"no-gap"===g.padding){var 
A=Y(e.barCategoryGap,T*k),I=T*k/2;s=I-A-(I-A)/k*A}}c="xAxis"===r?[n.left+(y.left||0)+(s||0),n.left+n.width-(y.right||0)-(s||0)]:"yAxis"===r?"horizontal"===l?[n.top+n.height-(y.bottom||0),n.top+(y.top||0)]:[n.top+(y.top||0)+(s||0),n.top+n.height-(y.bottom||0)-(s||0)]:g.range,S&&(c=[c[1],c[0]]);var R=ch(g,o,d),N=R.scale,_=R.realScaleType;N.domain(b).range(c),cb(N);var P=cx(N,dv(dv({},g),{},{realScaleType:_}));"xAxis"===r?(m="top"===h&&!E||"bottom"===h&&E,p=n.left,f=u[w]-m*g.height):"yAxis"===r&&(m="left"===h&&!E||"right"===h&&E,p=u[w]-m*g.width,f=n.top);var M=dv(dv(dv({},g),P),{},{realScaleType:_,x:p,y:f,scale:N,width:"xAxis"===r?n.width:g.width,height:"yAxis"===r?n.height:g.height});return M.bandSize=cN(M,P),g.hide||"xAxis"!==r?g.hide||(u[w]+=(m?-1:1)*M.width):u[w]+=(m?-1:1)*M.height,dv(dv({},a),{},dy({},i,M))},{})}}).chartName,i=r.GraphicalChild,s=void 0===(l=r.defaultTooltipEventType)?"axis":l,u=void 0===(c=r.validateTooltipEventTypes)?["axis"]:c,d=r.axisComponents,p=r.legendContent,f=r.formatAxisMap,m=r.defaultProps,g=function(e,t){var n=t.graphicalItems,r=t.stackGroups,o=t.offset,a=t.updateId,i=t.dataStartIndex,l=t.dataEndIndex,s=e.barSize,c=e.layout,u=e.barGap,p=e.barCategoryGap,f=e.maxBarSize,m=fb(c),g=m.numericAxisName,h=m.cateAxisName,b=!!n&&!!n.length&&n.some(function(e){var t=ey(e&&e.type);return t&&t.indexOf("Bar")>=0})&&ca({barSize:s,stackGroups:r}),v=[];return n.forEach(function(n,s){var m,y=fs(e.data,{graphicalItems:[n],dataStartIndex:i,dataEndIndex:l}),E=n.props,S=E.dataKey,w=E.maxBarSize,x=n.props["".concat(g,"Id")],O=n.props["".concat(h,"Id")],C=d.reduce(function(e,r){var o,a=t["".concat(r.axisType,"Map")],i=n.props["".concat(r.axisType,"Id")];a&&a[i]||"zAxis"===r.axisType||eW(!1);var l=a[i];return fe(fe({},e),{},(ft(o={},r.axisType,l),ft(o,"".concat(r.axisType,"Ticks"),cf(l)),o))},{}),T=C[h],k=C["".concat(h,"Ticks")],A=r&&r[x]&&r[x].hasStack&&cT(n,r[x].stackGroups),I=ey(n.type).indexOf("Bar")>=0,R=cN(T,k),N=[];if(I){var 
_,P,M=en()(w)?f:w,L=null!==(_=null!==(P=cN(T,k,!0))&&void 0!==P?P:M)&&void 0!==_?_:0;N=ci({barGap:u,barCategoryGap:p,bandSize:L!==R?L:R,sizeList:b[O],maxBarSize:M}),L!==R&&(N=N.map(function(e){return fe(fe({},e),{},{position:fe(fe({},e.position),{},{offset:e.position.offset-L/2})})}))}var D=n&&n.type&&n.type.getComposedData;D&&v.push({props:fe(fe({},D(fe(fe({},C),{},{displayedData:y,props:e,dataKey:S,item:n,bandSize:R,barPosition:N,offset:o,stackedData:A,layout:c,dataStartIndex:i,dataEndIndex:l}))),{},(ft(m={key:n.key||"item-".concat(s)},g,C[g]),ft(m,h,C[h]),ft(m,"animationId",a),m)),childIndex:ew(e.children).indexOf(n),item:n})}),v},h=function(e,t){var n=e.props,r=e.dataStartIndex,o=e.dataEndIndex,l=e.updateId;if(!eC({props:n}))return null;var s=n.children,c=n.layout,u=n.stackOffset,p=n.data,m=n.reverseStackOrder,h=fb(c),b=h.numericAxisName,v=h.cateAxisName,y=ex(s,i),E=cw(p,y,"".concat(b,"Id"),"".concat(v,"Id"),u,m),S=d.reduce(function(e,t){var a="".concat(t.axisType,"Map");return fe(fe({},e),{},ft({},a,fm(n,fe(fe({},t),{},{graphicalItems:y,stackGroups:t.axisType===b&&E,dataStartIndex:r,dataEndIndex:o}))))},{}),w=fv(fe(fe({},S),{},{props:n,graphicalItems:y}),null==t?void 0:t.legendBBox);Object.keys(S).forEach(function(e){S[e]=f(n,S[e],w,e.replace("Map",""),a)});var x=fg(S["".concat(v,"Map")]),O=g(n,fe(fe({},S),{},{dataStartIndex:r,dataEndIndex:o,updateId:l,graphicalItems:y,stackGroups:E,offset:w}));return fe(fe({formattedGraphicalItems:O,graphicalItems:y,offset:w,stackGroups:E},x),S)},o=function(e){(function(e,t){if("function"!=typeof t&&null!==t)throw TypeError("Super expression must either be null or a function");e.prototype=Object.create(t&&t.prototype,{constructor:{value:e,writable:!0,configurable:!0}}),Object.defineProperty(e,"prototype",{writable:!1}),t&&p4(e,t)})(i,e);var t,n,r,o=(t=function(){if("undefined"==typeof Reflect||!Reflect.construct||Reflect.construct.sham)return!1;if("function"==typeof Proxy)return!0;try{return 
Boolean.prototype.valueOf.call(Reflect.construct(Boolean,[],function(){})),!0}catch(e){return!1}}(),function(){var e,n=p6(i);if(t){var r=p6(this).constructor;e=Reflect.construct(n,arguments,r)}else e=n.apply(this,arguments);return function(e,t){if(t&&("object"===pQ(t)||"function"==typeof t))return t;if(void 0!==t)throw TypeError("Derived constructors may only return object or undefined");return p3(e)}(this,e)});function i(e){var t,n,r;return function(e,t){if(!(e instanceof t))throw TypeError("Cannot call a class as a function")}(this,i),ft(p3(r=o.call(this,e)),"eventEmitterSymbol",Symbol("rechartsEventEmitter")),ft(p3(r),"accessibilityManager",new pi),ft(p3(r),"handleLegendBBoxUpdate",function(e){if(e){var t=r.state,n=t.dataStartIndex,o=t.dataEndIndex,a=t.updateId;r.setState(fe({legendBBox:e},h({props:r.props,dataStartIndex:n,dataEndIndex:o,updateId:a},fe(fe({},r.state),{},{legendBBox:e}))))}}),ft(p3(r),"handleReceiveSyncEvent",function(e,t,n){r.props.syncId===e&&(n!==r.eventEmitterSymbol||"function"==typeof r.props.syncMethod)&&r.applySyncEvent(t)}),ft(p3(r),"handleBrushChange",function(e){var t=e.startIndex,n=e.endIndex;if(t!==r.state.dataStartIndex||n!==r.state.dataEndIndex){var o=r.state.updateId;r.setState(function(){return fe({dataStartIndex:t,dataEndIndex:n},h({props:r.props,dataStartIndex:t,dataEndIndex:n,updateId:o},r.state))}),r.triggerSyncEvent({dataStartIndex:t,dataEndIndex:n})}}),ft(p3(r),"handleMouseEnter",function(e){var t=r.getMouseInfo(e);if(t){var n=fe(fe({},t),{},{isTooltipActive:!0});r.setState(n),r.triggerSyncEvent(n);var o=r.props.onMouseEnter;eo()(o)&&o(n,e)}}),ft(p3(r),"triggeredAfterMouseMove",function(e){var t=r.getMouseInfo(e),n=t?fe(fe({},t),{},{isTooltipActive:!0}):{isTooltipActive:!1};r.setState(n),r.triggerSyncEvent(n);var 
o=r.props.onMouseMove;eo()(o)&&o(n,e)}),ft(p3(r),"handleItemMouseEnter",function(e){r.setState(function(){return{isTooltipActive:!0,activeItem:e,activePayload:e.tooltipPayload,activeCoordinate:e.tooltipPosition||{x:e.cx,y:e.cy}}})}),ft(p3(r),"handleItemMouseLeave",function(){r.setState(function(){return{isTooltipActive:!1}})}),ft(p3(r),"handleMouseMove",function(e){e.persist(),r.throttleTriggeredAfterMouseMove(e)}),ft(p3(r),"handleMouseLeave",function(e){var t={isTooltipActive:!1};r.setState(t),r.triggerSyncEvent(t);var n=r.props.onMouseLeave;eo()(n)&&n(t,e)}),ft(p3(r),"handleOuterEvent",function(e){var t,n=e_(e),o=U()(r.props,"".concat(n));n&&eo()(o)&&o(null!==(t=/.*touch.*/i.test(n)?r.getMouseInfo(e.changedTouches[0]):r.getMouseInfo(e))&&void 0!==t?t:{},e)}),ft(p3(r),"handleClick",function(e){var t=r.getMouseInfo(e);if(t){var n=fe(fe({},t),{},{isTooltipActive:!0});r.setState(n),r.triggerSyncEvent(n);var o=r.props.onClick;eo()(o)&&o(n,e)}}),ft(p3(r),"handleMouseDown",function(e){var t=r.props.onMouseDown;eo()(t)&&t(r.getMouseInfo(e),e)}),ft(p3(r),"handleMouseUp",function(e){var t=r.props.onMouseUp;eo()(t)&&t(r.getMouseInfo(e),e)}),ft(p3(r),"handleTouchMove",function(e){null!=e.changedTouches&&e.changedTouches.length>0&&r.throttleTriggeredAfterMouseMove(e.changedTouches[0])}),ft(p3(r),"handleTouchStart",function(e){null!=e.changedTouches&&e.changedTouches.length>0&&r.handleMouseDown(e.changedTouches[0])}),ft(p3(r),"handleTouchEnd",function(e){null!=e.changedTouches&&e.changedTouches.length>0&&r.handleMouseUp(e.changedTouches[0])}),ft(p3(r),"triggerSyncEvent",function(e){void 0!==r.props.syncId&&pe.emit(pt,r.props.syncId,e,r.eventEmitterSymbol)}),ft(p3(r),"applySyncEvent",function(e){var t=r.props,n=t.layout,o=t.syncMethod,a=r.state.updateId,i=e.dataStartIndex,l=e.dataEndIndex;if(void 0!==e.dataStartIndex||void 0!==e.dataEndIndex)r.setState(fe({dataStartIndex:i,dataEndIndex:l},h({props:r.props,dataStartIndex:i,dataEndIndex:l,updateId:a},r.state)));else if(void 
0!==e.activeTooltipIndex){var s=e.chartX,c=e.chartY,u=e.activeTooltipIndex,d=r.state,p=d.offset,f=d.tooltipTicks;if(!p)return;if("function"==typeof o)u=o(f,e);else if("value"===o){u=-1;for(var m=0;m=0){if(s.dataKey&&!s.allowDuplicatedCategory){var x="function"==typeof s.dataKey?function(e){return"function"==typeof s.dataKey?s.dataKey(e.payload):null}:"payload.".concat(s.dataKey.toString());C=J(f,x,u),T=m&&g&&J(g,x,u)}else C=null==f?void 0:f[c],T=m&&g&&g[c];if(E||y){var O=void 0!==e.props.activeIndex?e.props.activeIndex:c;return[(0,R.cloneElement)(e,fe(fe(fe({},o.props),S),{},{activeIndex:O})),null,null]}if(!en()(C))return[w].concat(p5(r.renderActivePoints({item:o,activePoint:C,basePoint:T,childIndex:c,isRange:m})))}else{var C,T,k,A=(null!==(k=r.getItemByXY(r.state.activeCoordinate))&&void 0!==k?k:{graphicalItem:w}).graphicalItem,I=A.item,N=void 0===I?e:I,_=A.childIndex,P=fe(fe(fe({},o.props),S),{},{activeIndex:_});return[(0,R.cloneElement)(N,P),null,null]}}return m?[w,null,null]:[w,null]}),ft(p3(r),"renderCustomized",function(e,t,n){return(0,R.cloneElement)(e,fe(fe({key:"recharts-customized-".concat(n)},r.props),r.state))}),ft(p3(r),"renderMap",{CartesianGrid:{handler:r.renderGrid,once:!0},ReferenceArea:{handler:r.renderReferenceElement},ReferenceLine:{handler:fi},ReferenceDot:{handler:r.renderReferenceElement},XAxis:{handler:fi},YAxis:{handler:fi},Brush:{handler:r.renderBrush,once:!0},Bar:{handler:r.renderGraphicChild},Line:{handler:r.renderGraphicChild},Area:{handler:r.renderGraphicChild},Radar:{handler:r.renderGraphicChild},RadialBar:{handler:r.renderGraphicChild},Scatter:{handler:r.renderGraphicChild},Pie:{handler:r.renderGraphicChild},Funnel:{handler:r.renderGraphicChild},Tooltip:{handler:r.renderCursor,once:!0},PolarGrid:{handler:r.renderPolarGrid,once:!0},PolarAngleAxis:{handler:r.renderPolarAxis},PolarRadiusAxis:{handler:r.renderPolarAxis},Customized:{handler:r.renderCustomized}}),r.clipPathId="".concat(null!==(t=e.id)&&void 
0!==t?t:q("recharts"),"-clip"),r.throttleTriggeredAfterMouseMove=P()(r.triggeredAfterMouseMove,null!==(n=e.throttleDelay)&&void 0!==n?n:1e3/60),r.state={},r}return n=[{key:"componentDidMount",value:function(){var e,t;this.addListener(),this.accessibilityManager.setDetails({container:this.container,offset:{left:null!==(e=this.props.margin.left)&&void 0!==e?e:0,top:null!==(t=this.props.margin.top)&&void 0!==t?t:0},coordinateList:this.state.tooltipTicks,mouseHandlerCallback:this.triggeredAfterMouseMove,layout:this.props.layout}),this.displayDefaultTooltip()}},{key:"displayDefaultTooltip",value:function(){var e=this.props,t=e.children,n=e.data,r=e.height,o=e.layout,a=eO(t,nK);if(a){var i=a.props.defaultIndex;if("number"==typeof i&&!(i<0)&&!(i>this.state.tooltipTicks.length)){var l=this.state.tooltipTicks[i]&&this.state.tooltipTicks[i].value,s=fu(this.state,n,i,l),c=this.state.tooltipTicks[i].coordinate,u=(this.state.offset.top+r)/2,d="horizontal"===o?{x:c,y:u}:{y:c,x:u},p=this.state.formattedGraphicalItems.find(function(e){return"Scatter"===e.item.type.name});p&&(d=fe(fe({},d),p.props.points[i].tooltipPosition),s=p.props.points[i].tooltipPayload);var f={activeTooltipIndex:i,isTooltipActive:!0,activeLabel:l,activePayload:s,activeCoordinate:d};this.setState(f),this.renderCursor(a),this.accessibilityManager.setIndex(i)}}}},{key:"getSnapshotBeforeUpdate",value:function(e,t){if(!this.props.accessibilityLayer)return null;if(this.state.tooltipTicks!==t.tooltipTicks&&this.accessibilityManager.setDetails({coordinateList:this.state.tooltipTicks}),this.props.layout!==e.layout&&this.accessibilityManager.setDetails({layout:this.props.layout}),this.props.margin!==e.margin){var n,r;this.accessibilityManager.setDetails({offset:{left:null!==(n=this.props.margin.left)&&void 0!==n?n:0,top:null!==(r=this.props.margin.top)&&void 0!==r?r:0}})}return 
null}},{key:"componentDidUpdate",value:function(e){eI([eO(e.children,nK)],[eO(this.props.children,nK)])||this.displayDefaultTooltip()}},{key:"componentWillUnmount",value:function(){this.removeListener(),this.throttleTriggeredAfterMouseMove.cancel()}},{key:"getTooltipEventType",value:function(){var e=eO(this.props.children,nK);if(e&&"boolean"==typeof e.props.shared){var t=e.props.shared?"axis":"item";return u.indexOf(t)>=0?t:s}return s}},{key:"getMouseInfo",value:function(e){if(!this.container)return null;var t=this.container,n=t.getBoundingClientRect(),r={top:n.top+window.scrollY-document.documentElement.clientTop,left:n.left+window.scrollX-document.documentElement.clientLeft},o={chartX:Math.round(e.pageX-r.left),chartY:Math.round(e.pageY-r.top)},a=n.width/t.offsetWidth||1,i=this.inRange(o.chartX,o.chartY,a);if(!i)return null;var l=this.state,s=l.xAxisMap,c=l.yAxisMap;if("axis"!==this.getTooltipEventType()&&s&&c){var u=K(s).scale,d=K(c).scale,p=u&&u.invert?u.invert(o.chartX):null,f=d&&d.invert?d.invert(o.chartY):null;return fe(fe({},o),{},{xValue:p,yValue:f})}var m=fd(this.state,this.props.data,this.props.layout,i);return m?fe(fe({},o),m):null}},{key:"inRange",value:function(e,t){var n=arguments.length>2&&void 0!==arguments[2]?arguments[2]:1,r=this.props.layout,o=e/n,a=t/n;if("horizontal"===r||"vertical"===r){var i=this.state.offset;return o>=i.left&&o<=i.left+i.width&&a>=i.top&&a<=i.top+i.height?{x:o,y:a}:null}var l=this.state,s=l.angleAxisMap,c=l.radiusAxisMap;return s&&c?c8({x:o,y:a},K(s)):null}},{key:"parseEventsOfWrapper",value:function(){var e=this.props.children,t=this.getTooltipEventType(),n=eO(e,nK),r={};return 
n&&"axis"===t&&(r="click"===n.props.trigger?{onClick:this.handleClick}:{onMouseEnter:this.handleMouseEnter,onMouseMove:this.handleMouseMove,onMouseLeave:this.handleMouseLeave,onTouchMove:this.handleTouchMove,onTouchStart:this.handleTouchStart,onTouchEnd:this.handleTouchEnd}),fe(fe({},ef(this.props,this.handleOuterEvent)),r)}},{key:"addListener",value:function(){pe.on(pt,this.handleReceiveSyncEvent)}},{key:"removeListener",value:function(){pe.removeListener(pt,this.handleReceiveSyncEvent)}},{key:"filterFormatItem",value:function(e,t,n){for(var r=this.state.formattedGraphicalItems,o=0,a=r.length;o=0||(o[n]=e[n]);return o}(e,t);if(Object.getOwnPropertySymbols){var a=Object.getOwnPropertySymbols(e);for(r=0;r=0)&&Object.prototype.propertyIsEnumerable.call(e,n)&&(o[n]=e[n])}return o}var fX=function(e){var t=e.fill;if(!t||"none"===t)return null;var n=e.fillOpacity,r=e.x,o=e.y,a=e.width,i=e.height;return R.createElement("rect",{x:r,y:o,width:a,height:i,stroke:"none",fill:t,fillOpacity:n,className:"recharts-cartesian-grid-bg"})};function fQ(e,t){var n;if(R.isValidElement(e))n=R.cloneElement(e,t);else if(eo()(e))n=e(t);else{var r=t.x1,o=t.y1,a=t.x2,i=t.y2,l=t.key,s=eA(fK(t,fG),!1),c=(s.offset,fK(s,f$));n=R.createElement("line",fY({},c,{x1:r,y1:o,x2:a,y2:i,fill:"none",key:l}))}return n}function fJ(e){var t=e.x,n=e.width,r=e.horizontal,o=void 0===r||r,a=e.horizontalPoints;if(!o||!a||!a.length)return null;var i=a.map(function(r,a){return fQ(o,fq(fq({},e),{},{x1:t,y1:r,x2:t+n,y2:r,key:"line-".concat(a),index:a}))});return R.createElement("g",{className:"recharts-cartesian-grid-horizontal"},i)}function f0(e){var t=e.y,n=e.height,r=e.vertical,o=void 0===r||r,a=e.verticalPoints;if(!o||!a||!a.length)return null;var i=a.map(function(r,a){return fQ(o,fq(fq({},e),{},{x1:r,y1:t,x2:r,y2:t+n,key:"line-".concat(a),index:a}))});return R.createElement("g",{className:"recharts-cartesian-grid-vertical"},i)}function f1(e){var 
t=e.horizontalFill,n=e.fillOpacity,r=e.x,o=e.y,a=e.width,i=e.height,l=e.horizontalPoints,s=e.horizontal;if(!(void 0===s||s)||!t||!t.length)return null;var c=l.map(function(e){return Math.round(e+o-o)}).sort(function(e,t){return e-t});o!==c[0]&&c.unshift(0);var u=c.map(function(e,l){var s=c[l+1]?c[l+1]-e:o+i-e;if(s<=0)return null;var u=l%t.length;return R.createElement("rect",{key:"react-".concat(l),y:e,x:r,height:s,width:a,stroke:"none",fill:t[u],fillOpacity:n,className:"recharts-cartesian-grid-bg"})});return R.createElement("g",{className:"recharts-cartesian-gridstripes-horizontal"},u)}function f2(e){var t=e.vertical,n=e.verticalFill,r=e.fillOpacity,o=e.x,a=e.y,i=e.width,l=e.height,s=e.verticalPoints;if(!(void 0===t||t)||!n||!n.length)return null;var c=s.map(function(e){return Math.round(e+o-o)}).sort(function(e,t){return e-t});o!==c[0]&&c.unshift(0);var u=c.map(function(e,t){var s=c[t+1]?c[t+1]-e:o+i-e;if(s<=0)return null;var u=t%n.length;return R.createElement("rect",{key:"react-".concat(t),x:e,y:a,width:s,height:l,stroke:"none",fill:n[u],fillOpacity:r,className:"recharts-cartesian-grid-bg"})});return R.createElement("g",{className:"recharts-cartesian-gridstripes-vertical"},u)}var f4=function(e,t){var n=e.xAxis,r=e.width,o=e.height,a=e.offset;return cp(fO(fq(fq(fq({},fF.defaultProps),n),{},{ticks:cf(n,!0),viewBox:{x:0,y:0,width:r,height:o}})),a.left,a.left+a.width,t)},f3=function(e,t){var n=e.yAxis,r=e.width,o=e.height,a=e.offset;return cp(fO(fq(fq(fq({},fF.defaultProps),n),{},{ticks:cf(n,!0),viewBox:{x:0,y:0,width:r,height:o}})),a.top,a.top+a.height,t)},f6={horizontal:!0,vertical:!0,stroke:"#ccc",fill:"none",verticalFill:[],horizontalFill:[]};function f5(e){var t,n,r,o,a,i,l=d$(),s=dW(),c=(0,R.useContext)(dF),u=fq(fq({},e),{},{stroke:null!==(t=e.stroke)&&void 0!==t?t:f6.stroke,fill:null!==(n=e.fill)&&void 0!==n?n:f6.fill,horizontal:null!==(r=e.horizontal)&&void 0!==r?r:f6.horizontal,horizontalFill:null!==(o=e.horizontalFill)&&void 
0!==o?o:f6.horizontalFill,vertical:null!==(a=e.vertical)&&void 0!==a?a:f6.vertical,verticalFill:null!==(i=e.verticalFill)&&void 0!==i?i:f6.verticalFill}),d=u.x,p=u.y,f=u.width,m=u.height,g=u.xAxis,h=u.yAxis,b=u.syncWithTicks,v=u.horizontalValues,y=u.verticalValues;if(!$(f)||f<=0||!$(m)||m<=0||!$(d)||d!==+d||!$(p)||p!==+p)return null;var E=u.verticalCoordinatesGenerator||f4,S=u.horizontalCoordinatesGenerator||f3,w=u.horizontalPoints,x=u.verticalPoints;if((!w||!w.length)&&eo()(S)){var O=v&&v.length,C=S({yAxis:h?fq(fq({},h),{},{ticks:O?v:h.ticks}):void 0,width:l,height:s,offset:c},!!O||b);ee(Array.isArray(C),"horizontalCoordinatesGenerator should return Array but instead it returned [".concat(fW(C),"]")),Array.isArray(C)&&(w=C)}if((!x||!x.length)&&eo()(E)){var T=y&&y.length,k=E({xAxis:g?fq(fq({},g),{},{ticks:T?y:g.ticks}):void 0,width:l,height:s,offset:c},!!T||b);ee(Array.isArray(k),"verticalCoordinatesGenerator should return Array but instead it returned [".concat(fW(k),"]")),Array.isArray(k)&&(x=k)}return R.createElement("g",{className:"recharts-cartesian-grid"},R.createElement(fX,{fill:u.fill,fillOpacity:u.fillOpacity,x:u.x,y:u.y,width:u.width,height:u.height}),R.createElement(fJ,fY({},u,{offset:c,horizontalPoints:w})),R.createElement(f0,fY({},u,{offset:c,verticalPoints:x})),R.createElement(f1,fY({},u,{horizontalPoints:w})),R.createElement(f2,fY({},u,{verticalPoints:x})))}f5.displayName="CartesianGrid";let f8=(e,t)=>{let[n,r]=(0,R.useState)(t);(0,R.useEffect)(()=>{let t=()=>{r(window.innerWidth),e()};return t(),window.addEventListener("resize",t),()=>window.removeEventListener("resize",t)},[e,n])},f9=e=>{var t=(0,C._T)(e,[]);return R.createElement("svg",Object.assign({},t,{xmlns:"http://www.w3.org/2000/svg",viewBox:"0 0 24 24",fill:"currentColor"}),R.createElement("path",{d:"M8 12L14 6V18L8 12Z"}))},f7=e=>{var t=(0,C._T)(e,[]);return R.createElement("svg",Object.assign({},t,{xmlns:"http://www.w3.org/2000/svg",viewBox:"0 0 24 
24",fill:"currentColor"}),R.createElement("path",{d:"M16 12L10 18V6L16 12Z"}))},me=(0,I.fn)("Legend"),mt=e=>{let{name:t,color:n,onClick:r,activeLegend:o}=e,a=!!r;return R.createElement("li",{className:(0,A.q)(me("legendItem"),"group inline-flex items-center px-2 py-0.5 rounded-tremor-small transition whitespace-nowrap",a?"cursor-pointer":"cursor-default","text-tremor-content",a?"hover:bg-tremor-background-subtle":"","dark:text-dark-tremor-content",a?"dark:hover:bg-dark-tremor-background-subtle":""),onClick:e=>{e.stopPropagation(),null==r||r(t,n)}},R.createElement("svg",{className:(0,A.q)("flex-none h-2 w-2 mr-1.5",(0,I.bM)(n,k.K.text).textColor,o&&o!==t?"opacity-40":"opacity-100"),fill:"currentColor",viewBox:"0 0 8 8"},R.createElement("circle",{cx:4,cy:4,r:4})),R.createElement("p",{className:(0,A.q)("whitespace-nowrap truncate text-tremor-default","text-tremor-content",a?"group-hover:text-tremor-content-emphasis":"","dark:text-dark-tremor-content",o&&o!==t?"opacity-40":"opacity-100",a?"dark:group-hover:text-dark-tremor-content-emphasis":"")},t))},mn=e=>{let{icon:t,onClick:n,disabled:r}=e,[o,a]=R.useState(!1),i=R.useRef(null);return R.useEffect(()=>(o?i.current=setInterval(()=>{null==n||n()},300):clearInterval(i.current),()=>clearInterval(i.current)),[o,n]),(0,R.useEffect)(()=>{r&&(clearInterval(i.current),a(!1))},[r]),R.createElement("button",{type:"button",className:(0,A.q)(me("legendSliderButton"),"w-5 group inline-flex items-center truncate rounded-tremor-small transition",r?"cursor-not-allowed":"cursor-pointer",r?"text-tremor-content-subtle":"text-tremor-content hover:text-tremor-content-emphasis hover:bg-tremor-background-subtle",r?"dark:text-dark-tremor-subtle":"dark:text-dark-tremor dark:hover:text-tremor-content-emphasis 
dark:hover:bg-dark-tremor-background-subtle"),disabled:r,onClick:e=>{e.stopPropagation(),null==n||n()},onMouseDown:e=>{e.stopPropagation(),a(!0)},onMouseUp:e=>{e.stopPropagation(),a(!1)}},R.createElement(t,{className:"w-full"}))},mr=R.forwardRef((e,t)=>{var n,r;let{categories:o,colors:a=k.s,className:i,onClickLegendItem:l,activeLegend:s,enableLegendSlider:c=!1}=e,u=(0,C._T)(e,["categories","colors","className","onClickLegendItem","activeLegend","enableLegendSlider"]),d=R.useRef(null),[p,f]=R.useState(null),[m,g]=R.useState(null),h=R.useRef(null),b=(0,R.useCallback)(()=>{let e=null==d?void 0:d.current;e&&f({left:e.scrollLeft>0,right:e.scrollWidth-e.clientWidth>e.scrollLeft})},[f]),v=(0,R.useCallback)(e=>{var t;let n=null==d?void 0:d.current,r=null!==(t=null==n?void 0:n.clientWidth)&&void 0!==t?t:0;n&&c&&(n.scrollTo({left:"left"===e?n.scrollLeft-r:n.scrollLeft+r,behavior:"smooth"}),setTimeout(()=>{b()},400))},[c,b]);R.useEffect(()=>{let e=e=>{"ArrowLeft"===e?v("left"):"ArrowRight"===e&&v("right")};return m?(e(m),h.current=setInterval(()=>{e(m)},300)):clearInterval(h.current),()=>clearInterval(h.current)},[m,v]);let y=e=>{e.stopPropagation(),"ArrowLeft"!==e.key&&"ArrowRight"!==e.key||(e.preventDefault(),g(e.key))},E=e=>{e.stopPropagation(),g(null)};return R.useEffect(()=>{let e=null==d?void 0:d.current;return c&&(b(),null==e||e.addEventListener("keydown",y),null==e||e.addEventListener("keyup",E)),()=>{null==e||e.removeEventListener("keydown",y),null==e||e.removeEventListener("keyup",E)}},[b,c]),R.createElement("ol",Object.assign({ref:t,className:(0,A.q)(me("root"),"relative overflow-hidden",i)},u),R.createElement("div",{ref:d,tabIndex:0,className:(0,A.q)("h-full flex",c?(null==p?void 0:p.right)||(null==p?void 0:p.left)?"pl-4 pr-12 items-center overflow-auto snap-mandatory [&::-webkit-scrollbar]:hidden [scrollbar-width:none]":"":"flex-wrap")},o.map((e,t)=>R.createElement(mt,{key:"item-".concat(t),name:e,color:a[t],onClick:l,activeLegend:s}))),c&&((null==p?void 
0:p.right)||(null==p?void 0:p.left))?R.createElement(R.Fragment,null,R.createElement("div",{className:(0,A.q)("from-tremor-background","dark:from-dark-tremor-background","absolute top-0 bottom-0 left-0 w-4 bg-gradient-to-r to-transparent pointer-events-none")}),R.createElement("div",{className:(0,A.q)("to-tremor-background","dark:to-dark-tremor-background","absolute top-0 bottom-0 right-10 w-4 bg-gradient-to-r from-transparent pointer-events-none")}),R.createElement("div",{className:(0,A.q)("bg-tremor-background","dark:bg-dark-tremor-background","absolute flex top-0 pr-1 bottom-0 right-0 items-center justify-center h-full")},R.createElement(mn,{icon:f9,onClick:()=>{g(null),v("left")},disabled:!(null==p?void 0:p.left)}),R.createElement(mn,{icon:f7,onClick:()=>{g(null),v("right")},disabled:!(null==p?void 0:p.right)}))):null)});mr.displayName="Legend";let mo=(e,t,n,r,o,a)=>{let{payload:i}=e,l=(0,R.useRef)(null);f8(()=>{var e,t;n((t=null===(e=l.current)||void 0===e?void 0:e.clientHeight)?Number(t)+20:60)});let s=i.filter(e=>"none"!==e.type);return R.createElement("div",{ref:l,className:"flex items-center justify-end"},R.createElement(mr,{categories:s.map(e=>e.value),colors:s.map(e=>t.get(e.value)),onClickLegendItem:o,activeLegend:r,enableLegendSlider:a}))},ma=e=>{let{children:t}=e;return R.createElement("div",{className:(0,A.q)("rounded-tremor-default text-tremor-default border","bg-tremor-background shadow-tremor-dropdown border-tremor-border","dark:bg-dark-tremor-background dark:shadow-dark-tremor-dropdown dark:border-dark-tremor-border")},t)},mi=e=>{let{value:t,name:n,color:r}=e;return R.createElement("div",{className:"flex items-center justify-between space-x-8"},R.createElement("div",{className:"flex items-center space-x-2"},R.createElement("span",{className:(0,A.q)("shrink-0 rounded-tremor-full border-2 h-3 w-3","border-tremor-background shadow-tremor-card","dark:border-dark-tremor-background 
dark:shadow-dark-tremor-card",(0,I.bM)(r,k.K.background).bgColor)}),R.createElement("p",{className:(0,A.q)("text-right whitespace-nowrap","text-tremor-content","dark:text-dark-tremor-content")},n)),R.createElement("p",{className:(0,A.q)("font-medium tabular-nums text-right whitespace-nowrap","text-tremor-content-emphasis","dark:text-dark-tremor-content-emphasis")},t))},ml=e=>{let{active:t,payload:n,label:r,categoryColors:o,valueFormatter:a}=e;if(t&&n){let e=n.filter(e=>"none"!==e.type);return R.createElement(ma,null,R.createElement("div",{className:(0,A.q)("border-tremor-border border-b px-4 py-2","dark:border-dark-tremor-border")},R.createElement("p",{className:(0,A.q)("font-medium","text-tremor-content-emphasis","dark:text-dark-tremor-content-emphasis")},r)),R.createElement("div",{className:(0,A.q)("px-4 py-2 space-y-1")},e.map((e,t)=>{var n;let{value:r,name:i}=e;return R.createElement(mi,{key:"id-".concat(t),value:a(r),name:i,color:null!==(n=o.get(i))&&void 0!==n?n:T.fr.Blue})})))}return null},ms=(0,I.fn)("Flex"),mc={start:"justify-start",end:"justify-end",center:"justify-center",between:"justify-between",around:"justify-around",evenly:"justify-evenly"},mu={start:"items-start",end:"items-end",center:"items-center",baseline:"items-baseline",stretch:"items-stretch"},md={row:"flex-row",col:"flex-col","row-reverse":"flex-row-reverse","col-reverse":"flex-col-reverse"},mp=R.forwardRef((e,t)=>{let{flexDirection:n="row",justifyContent:r="between",alignItems:o="center",children:a,className:i}=e,l=(0,C._T)(e,["flexDirection","justifyContent","alignItems","children","className"]);return R.createElement("div",Object.assign({ref:t,className:(0,A.q)(ms("root"),"flex w-full",md[n],mc[r],mu[o],i)},l),a)});mp.displayName="Flex";var mf=n(71801);let mm=e=>{let{noDataText:t="No data"}=e;return R.createElement(mp,{alignItems:"center",justifyContent:"center",className:(0,A.q)("w-full h-full border border-dashed 
rounded-tremor-default","border-tremor-border","dark:border-dark-tremor-border")},R.createElement(mf.Z,{className:(0,A.q)("text-tremor-content","dark:text-dark-tremor-content")},t))},mg=(e,t)=>{let n=new Map;return e.forEach((e,r)=>{n.set(e,t[r])}),n},mh=(e,t,n)=>[e?"auto":null!=t?t:0,null!=n?n:"auto"];function mb(e,t){if(e===t)return!0;if("object"!=typeof e||"object"!=typeof t||null===e||null===t)return!1;let n=Object.keys(e),r=Object.keys(t);if(n.length!==r.length)return!1;for(let o of n)if(!r.includes(o)||!mb(e[o],t[o]))return!1;return!0}let mv=R.forwardRef((e,t)=>{let{data:n=[],categories:r=[],index:o,colors:a=k.s,valueFormatter:i=I.Cj,layout:l="horizontal",stack:s=!1,relative:c=!1,startEndOnly:u=!1,animationDuration:d=900,showAnimation:p=!1,showXAxis:f=!0,showYAxis:m=!0,yAxisWidth:g=56,intervalType:h="equidistantPreserveStart",showTooltip:b=!0,showLegend:v=!0,showGridLines:y=!0,autoMinValue:E=!1,minValue:S,maxValue:w,allowDecimals:x=!0,noDataText:O,onValueChange:N,enableLegendSlider:_=!1,customTooltip:P,rotateLabelX:M,tickGap:L=5,className:D}=e,j=(0,C._T)(e,["data","categories","index","colors","valueFormatter","layout","stack","relative","startEndOnly","animationDuration","showAnimation","showXAxis","showYAxis","yAxisWidth","intervalType","showTooltip","showLegend","showGridLines","autoMinValue","minValue","maxValue","allowDecimals","noDataText","onValueChange","enableLegendSlider","customTooltip","rotateLabelX","tickGap","className"]),F=f||m?20:0,[B,U]=(0,R.useState)(60),Z=mg(r,a),[z,H]=R.useState(void 0),[G,$]=(0,R.useState)(void 0),W=!!N;function V(e,t,n){var r,o,a,i;n.stopPropagation(),N&&(mb(z,Object.assign(Object.assign({},e.payload),{value:e.value}))?($(void 0),H(void 0),null==N||N(null)):($(null===(o=null===(r=e.tooltipPayload)||void 0===r?void 0:r[0])||void 0===o?void 0:o.dataKey),H(Object.assign(Object.assign({},e.payload),{value:e.value})),null==N||N(Object.assign({eventType:"bar",categoryClicked:null===(i=null===(a=e.tooltipPayload)||void 
0===a?void 0:a[0])||void 0===i?void 0:i.dataKey},e.payload))))}let q=mh(E,S,w);return R.createElement("div",Object.assign({ref:t,className:(0,A.q)("w-full h-80",D)},j),R.createElement(ej,{className:"h-full w-full"},(null==n?void 0:n.length)?R.createElement(fH,{data:n,stackOffset:s?"sign":c?"expand":"none",layout:"vertical"===l?"vertical":"horizontal",onClick:W&&(G||z)?()=>{H(void 0),$(void 0),null==N||N(null)}:void 0},y?R.createElement(f5,{className:(0,A.q)("stroke-1","stroke-tremor-border","dark:stroke-dark-tremor-border"),horizontal:"vertical"!==l,vertical:"vertical"===l}):null,"vertical"!==l?R.createElement(fU,{padding:{left:F,right:F},hide:!f,dataKey:o,interval:u?"preserveStartEnd":h,tick:{transform:"translate(0, 6)"},ticks:u?[n[0][o],n[n.length-1][o]]:void 0,fill:"",stroke:"",className:(0,A.q)("mt-4 text-tremor-label","fill-tremor-content","dark:fill-dark-tremor-content"),tickLine:!1,axisLine:!1,angle:null==M?void 0:M.angle,dy:null==M?void 0:M.verticalShift,height:null==M?void 0:M.xAxisHeight,minTickGap:L}):R.createElement(fU,{hide:!f,type:"number",tick:{transform:"translate(-3, 0)"},domain:q,fill:"",stroke:"",className:(0,A.q)("text-tremor-label","fill-tremor-content","dark:fill-dark-tremor-content"),tickLine:!1,axisLine:!1,tickFormatter:i,minTickGap:L,allowDecimals:x,angle:null==M?void 0:M.angle,dy:null==M?void 0:M.verticalShift,height:null==M?void 0:M.xAxisHeight}),"vertical"!==l?R.createElement(fz,{width:g,hide:!m,axisLine:!1,tickLine:!1,type:"number",domain:q,tick:{transform:"translate(-3, 0)"},fill:"",stroke:"",className:(0,A.q)("text-tremor-label","fill-tremor-content","dark:fill-dark-tremor-content"),tickFormatter:c?e=>"".concat((100*e).toString()," %"):i,allowDecimals:x}):R.createElement(fz,{width:g,hide:!m,dataKey:o,axisLine:!1,tickLine:!1,ticks:u?[n[0][o],n[n.length-1][o]]:void 0,type:"category",interval:"preserveStartEnd",tick:{transform:"translate(0, 
6)"},fill:"",stroke:"",className:(0,A.q)("text-tremor-label","fill-tremor-content","dark:fill-dark-tremor-content")}),R.createElement(nK,{wrapperStyle:{outline:"none"},isAnimationActive:!1,cursor:{fill:"#d1d5db",opacity:"0.15"},content:b?e=>{let{active:t,payload:n,label:r}=e;return P?R.createElement(P,{payload:null==n?void 0:n.map(e=>{var t;return Object.assign(Object.assign({},e),{color:null!==(t=Z.get(e.dataKey))&&void 0!==t?t:T.fr.Gray})}),active:t,label:r}):R.createElement(ml,{active:t,payload:n,label:r,valueFormatter:i,categoryColors:Z})}:R.createElement(R.Fragment,null),position:{y:0}}),v?R.createElement(r1,{verticalAlign:"top",height:B,content:e=>{let{payload:t}=e;return mo({payload:t},Z,U,G,W?e=>{W&&(e!==G||z?($(e),null==N||N({eventType:"category",categoryClicked:e})):($(void 0),null==N||N(null)),H(void 0))}:void 0,_)}}):null,r.map(e=>{var t;return R.createElement(dm,{className:(0,A.q)((0,I.bM)(null!==(t=Z.get(e))&&void 0!==t?t:T.fr.Gray,k.K.background).fillColor,N?"cursor-pointer":""),key:e,name:e,type:"linear",stackId:s||c?"a":void 0,dataKey:e,fill:"",isAnimationActive:p,animationDuration:d,shape:e=>((e,t,n,r)=>{let{fillOpacity:o,name:a,payload:i,value:l}=e,{x:s,width:c,y:u,height:d}=e;return"horizontal"===r&&d<0?(u+=d,d=Math.abs(d)):"vertical"===r&&c<0&&(s+=c,c=Math.abs(c)),R.createElement("rect",{x:s,y:u,width:c,height:d,opacity:t||n&&n!==a?mb(t,Object.assign(Object.assign({},i),{value:l}))?o:.3:o})})(e,z,G,l),onClick:V})})):R.createElement(mm,{noDataText:O})))});mv.displayName="BarChart"},5:function(e,t,n){n.d(t,{Z:function(){return f}});var r=n(69703),o=n(64090),a=n(58437),i=n(54942),l=n(2898),s=n(99250),c=n(65492);let 
u={xs:{paddingX:"px-2",paddingY:"py-0.5",fontSize:"text-xs"},sm:{paddingX:"px-2.5",paddingY:"py-0.5",fontSize:"text-sm"},md:{paddingX:"px-3",paddingY:"py-0.5",fontSize:"text-md"},lg:{paddingX:"px-3.5",paddingY:"py-0.5",fontSize:"text-lg"},xl:{paddingX:"px-4",paddingY:"py-1",fontSize:"text-xl"}},d={xs:{height:"h-4",width:"w-4"},sm:{height:"h-4",width:"w-4"},md:{height:"h-4",width:"w-4"},lg:{height:"h-5",width:"w-5"},xl:{height:"h-6",width:"w-6"}},p=(0,c.fn)("Badge"),f=o.forwardRef((e,t)=>{let{color:n,icon:f,size:m=i.u8.SM,tooltip:g,className:h,children:b}=e,v=(0,r._T)(e,["color","icon","size","tooltip","className","children"]),y=f||null,{tooltipProps:E,getReferenceProps:S}=(0,a.l)();return o.createElement("span",Object.assign({ref:(0,c.lq)([t,E.refs.setReference]),className:(0,s.q)(p("root"),"w-max flex-shrink-0 inline-flex justify-center items-center cursor-default rounded-tremor-full",n?(0,s.q)((0,c.bM)(n,l.K.background).bgColor,(0,c.bM)(n,l.K.text).textColor,"bg-opacity-20 dark:bg-opacity-25"):(0,s.q)("bg-tremor-brand-muted text-tremor-brand-emphasis","dark:bg-dark-tremor-brand-muted dark:text-dark-tremor-brand-emphasis"),u[m].paddingX,u[m].paddingY,u[m].fontSize,h)},S,v),o.createElement(a.Z,Object.assign({text:g},E)),y?o.createElement(y,{className:(0,s.q)(p("icon"),"shrink-0 -ml-1 mr-1.5",d[m].height,d[m].width)}):null,o.createElement("p",{className:(0,s.q)(p("text"),"text-sm whitespace-nowrap")},b))});f.displayName="Badge"},61244:function(e,t,n){n.d(t,{Z:function(){return g}});var r=n(69703),o=n(64090),a=n(58437),i=n(54942),l=n(99250),s=n(65492),c=n(2898);let 
u={xs:{paddingX:"px-1.5",paddingY:"py-1.5"},sm:{paddingX:"px-1.5",paddingY:"py-1.5"},md:{paddingX:"px-2",paddingY:"py-2"},lg:{paddingX:"px-2",paddingY:"py-2"},xl:{paddingX:"px-2.5",paddingY:"py-2.5"}},d={xs:{height:"h-3",width:"w-3"},sm:{height:"h-5",width:"w-5"},md:{height:"h-5",width:"w-5"},lg:{height:"h-7",width:"w-7"},xl:{height:"h-9",width:"w-9"}},p={simple:{rounded:"",border:"",ring:"",shadow:""},light:{rounded:"rounded-tremor-default",border:"",ring:"",shadow:""},shadow:{rounded:"rounded-tremor-default",border:"border",ring:"",shadow:"shadow-tremor-card dark:shadow-dark-tremor-card"},solid:{rounded:"rounded-tremor-default",border:"border-2",ring:"ring-1",shadow:""},outlined:{rounded:"rounded-tremor-default",border:"border",ring:"ring-2",shadow:""}},f=(e,t)=>{switch(e){case"simple":return{textColor:t?(0,s.bM)(t,c.K.text).textColor:"text-tremor-brand dark:text-dark-tremor-brand",bgColor:"",borderColor:"",ringColor:""};case"light":return{textColor:t?(0,s.bM)(t,c.K.text).textColor:"text-tremor-brand dark:text-dark-tremor-brand",bgColor:t?(0,l.q)((0,s.bM)(t,c.K.background).bgColor,"bg-opacity-20"):"bg-tremor-brand-muted dark:bg-dark-tremor-brand-muted",borderColor:"",ringColor:""};case"shadow":return{textColor:t?(0,s.bM)(t,c.K.text).textColor:"text-tremor-brand dark:text-dark-tremor-brand",bgColor:t?(0,l.q)((0,s.bM)(t,c.K.background).bgColor,"bg-opacity-20"):"bg-tremor-background dark:bg-dark-tremor-background",borderColor:"border-tremor-border dark:border-dark-tremor-border",ringColor:""};case"solid":return{textColor:t?(0,s.bM)(t,c.K.text).textColor:"text-tremor-brand-inverted dark:text-dark-tremor-brand-inverted",bgColor:t?(0,l.q)((0,s.bM)(t,c.K.background).bgColor,"bg-opacity-20"):"bg-tremor-brand dark:bg-dark-tremor-brand",borderColor:"border-tremor-brand-inverted dark:border-dark-tremor-brand-inverted",ringColor:"ring-tremor-ring dark:ring-dark-tremor-ring"};case"outlined":return{textColor:t?(0,s.bM)(t,c.K.text).textColor:"text-tremor-brand 
dark:text-dark-tremor-brand",bgColor:t?(0,l.q)((0,s.bM)(t,c.K.background).bgColor,"bg-opacity-20"):"bg-tremor-background dark:bg-dark-tremor-background",borderColor:t?(0,s.bM)(t,c.K.ring).borderColor:"border-tremor-brand-subtle dark:border-dark-tremor-brand-subtle",ringColor:t?(0,l.q)((0,s.bM)(t,c.K.ring).ringColor,"ring-opacity-40"):"ring-tremor-brand-muted dark:ring-dark-tremor-brand-muted"}}},m=(0,s.fn)("Icon"),g=o.forwardRef((e,t)=>{let{icon:n,variant:c="simple",tooltip:g,size:h=i.u8.SM,color:b,className:v}=e,y=(0,r._T)(e,["icon","variant","tooltip","size","color","className"]),E=f(c,b),{tooltipProps:S,getReferenceProps:w}=(0,a.l)();return o.createElement("span",Object.assign({ref:(0,s.lq)([t,S.refs.setReference]),className:(0,l.q)(m("root"),"inline-flex flex-shrink-0 items-center",E.bgColor,E.textColor,E.borderColor,E.ringColor,p[c].rounded,p[c].border,p[c].shadow,p[c].ring,u[h].paddingX,u[h].paddingY,v)},w,y),o.createElement(a.Z,Object.assign({text:g},S)),o.createElement(n,{className:(0,l.q)(m("icon"),"shrink-0",d[h].height,d[h].width)}))});g.displayName="Icon"},2179:function(e,t,n){n.d(t,{Z:function(){return O}});var r=n(69703),o=n(58437),a=n(64090);let i=["preEnter","entering","entered","preExit","exiting","exited","unmounted"],l=e=>({_s:e,status:i[e],isEnter:e<3,isMounted:6!==e,isResolved:2===e||e>4}),s=e=>e?6:5,c=(e,t)=>{switch(e){case 1:case 0:return 2;case 4:case 3:return s(t)}},u=e=>"object"==typeof e?[e.enter,e.exit]:[e,e],d=(e,t)=>setTimeout(()=>{isNaN(document.body.offsetTop)||e(t+1)},0),p=(e,t,n,r,o)=>{clearTimeout(r.current);let a=l(e);t(a),n.current=a,o&&o({current:a})},f=function(){let{enter:e=!0,exit:t=!0,preEnter:n,preExit:r,timeout:o,initialEntered:i,mountOnEnter:f,unmountOnExit:m,onStateChange:g}=arguments.length>0&&void 0!==arguments[0]?arguments[0]:{},[h,b]=(0,a.useState)(()=>l(i?2:s(f))),v=(0,a.useRef)(h),y=(0,a.useRef)(),[E,S]=u(o),w=(0,a.useCallback)(()=>{let e=c(v.current._s,m);e&&p(e,b,v,y,g)},[g,m]),x=(0,a.useCallback)(o=>{let 
a=e=>{switch(p(e,b,v,y,g),e){case 1:E>=0&&(y.current=setTimeout(w,E));break;case 4:S>=0&&(y.current=setTimeout(w,S));break;case 0:case 3:y.current=d(a,e)}},i=v.current.isEnter;"boolean"!=typeof o&&(o=!i),o?i||a(e?n?0:1:2):i&&a(t?r?3:4:s(m))},[w,g,e,t,n,r,E,S,m]);return(0,a.useEffect)(()=>()=>clearTimeout(y.current),[]),[h,x,w]};var m=n(54942),g=n(99250),h=n(65492);let b=e=>{var t=(0,r._T)(e,[]);return a.createElement("svg",Object.assign({},t,{xmlns:"http://www.w3.org/2000/svg",viewBox:"0 0 24 24",fill:"currentColor"}),a.createElement("path",{fill:"none",d:"M0 0h24v24H0z"}),a.createElement("path",{d:"M18.364 5.636L16.95 7.05A7 7 0 1 0 19 12h2a9 9 0 1 1-2.636-6.364z"}))};var v=n(2898);let y={xs:{height:"h-4",width:"w-4"},sm:{height:"h-5",width:"w-5"},md:{height:"h-5",width:"w-5"},lg:{height:"h-6",width:"w-6"},xl:{height:"h-6",width:"w-6"}},E=e=>"light"!==e?{xs:{paddingX:"px-2.5",paddingY:"py-1.5",fontSize:"text-xs"},sm:{paddingX:"px-4",paddingY:"py-2",fontSize:"text-sm"},md:{paddingX:"px-4",paddingY:"py-2",fontSize:"text-md"},lg:{paddingX:"px-4",paddingY:"py-2.5",fontSize:"text-lg"},xl:{paddingX:"px-4",paddingY:"py-3",fontSize:"text-xl"}}:{xs:{paddingX:"",paddingY:"",fontSize:"text-xs"},sm:{paddingX:"",paddingY:"",fontSize:"text-sm"},md:{paddingX:"",paddingY:"",fontSize:"text-md"},lg:{paddingX:"",paddingY:"",fontSize:"text-lg"},xl:{paddingX:"",paddingY:"",fontSize:"text-xl"}},S=(e,t)=>{switch(e){case"primary":return{textColor:t?(0,h.bM)("white").textColor:"text-tremor-brand-inverted dark:text-dark-tremor-brand-inverted",hoverTextColor:t?(0,h.bM)("white").textColor:"text-tremor-brand-inverted dark:text-dark-tremor-brand-inverted",bgColor:t?(0,h.bM)(t,v.K.background).bgColor:"bg-tremor-brand dark:bg-dark-tremor-brand",hoverBgColor:t?(0,h.bM)(t,v.K.darkBackground).hoverBgColor:"hover:bg-tremor-brand-emphasis dark:hover:bg-dark-tremor-brand-emphasis",borderColor:t?(0,h.bM)(t,v.K.border).borderColor:"border-tremor-brand 
dark:border-dark-tremor-brand",hoverBorderColor:t?(0,h.bM)(t,v.K.darkBorder).hoverBorderColor:"hover:border-tremor-brand-emphasis dark:hover:border-dark-tremor-brand-emphasis"};case"secondary":return{textColor:t?(0,h.bM)(t,v.K.text).textColor:"text-tremor-brand dark:text-dark-tremor-brand",hoverTextColor:t?(0,h.bM)(t,v.K.text).textColor:"hover:text-tremor-brand-emphasis dark:hover:text-dark-tremor-brand-emphasis",bgColor:(0,h.bM)("transparent").bgColor,hoverBgColor:t?(0,g.q)((0,h.bM)(t,v.K.background).hoverBgColor,"hover:bg-opacity-20 dark:hover:bg-opacity-20"):"hover:bg-tremor-brand-faint dark:hover:bg-dark-tremor-brand-faint",borderColor:t?(0,h.bM)(t,v.K.border).borderColor:"border-tremor-brand dark:border-dark-tremor-brand"};case"light":return{textColor:t?(0,h.bM)(t,v.K.text).textColor:"text-tremor-brand dark:text-dark-tremor-brand",hoverTextColor:t?(0,h.bM)(t,v.K.darkText).hoverTextColor:"hover:text-tremor-brand-emphasis dark:hover:text-dark-tremor-brand-emphasis",bgColor:(0,h.bM)("transparent").bgColor,borderColor:"",hoverBorderColor:""}}},w=(0,h.fn)("Button"),x=e=>{let{loading:t,iconSize:n,iconPosition:r,Icon:o,needMargin:i,transitionStatus:l}=e,s=i?r===m.zS.Left?(0,g.q)("-ml-1","mr-1.5"):(0,g.q)("-mr-1","ml-1.5"):"",c=(0,g.q)("w-0 h-0"),u={default:c,entering:c,entered:n,exiting:n,exited:c};return t?a.createElement(b,{className:(0,g.q)(w("icon"),"animate-spin shrink-0",s,u.default,u[l]),style:{transition:"width 150ms"}}):a.createElement(o,{className:(0,g.q)(w("icon"),"shrink-0",n,s)})},O=a.forwardRef((e,t)=>{let{icon:n,iconPosition:i=m.zS.Left,size:l=m.u8.SM,color:s,variant:c="primary",disabled:u,loading:d=!1,loadingText:p,children:b,tooltip:v,className:O}=e,C=(0,r._T)(e,["icon","iconPosition","size","color","variant","disabled","loading","loadingText","children","tooltip","className"]),T=d||u,k=void 0!==n||d,A=d&&p,I=!(!b&&!A),R=(0,g.q)(y[l].height,y[l].width),N="light"!==c?(0,g.q)("rounded-tremor-default 
border","shadow-tremor-input","dark:shadow-dark-tremor-input"):"",_=S(c,s),P=E(c)[l],{tooltipProps:M,getReferenceProps:L}=(0,o.l)(300),[D,j]=f({timeout:50});return(0,a.useEffect)(()=>{j(d)},[d]),a.createElement("button",Object.assign({ref:(0,h.lq)([t,M.refs.setReference]),className:(0,g.q)(w("root"),"flex-shrink-0 inline-flex justify-center items-center group font-medium outline-none",N,P.paddingX,P.paddingY,P.fontSize,_.textColor,_.bgColor,_.borderColor,_.hoverBorderColor,T?"opacity-50 cursor-not-allowed":(0,g.q)(S(c,s).hoverTextColor,S(c,s).hoverBgColor,S(c,s).hoverBorderColor),O),disabled:T},L,C),a.createElement(o.Z,Object.assign({text:v},M)),k&&i!==m.zS.Right?a.createElement(x,{loading:d,iconSize:R,iconPosition:i,Icon:n,transitionStatus:D.status,needMargin:I}):null,A||b?a.createElement("span",{className:(0,g.q)(w("text"),"text-tremor-default whitespace-nowrap")},A?p:b):null,k&&i===m.zS.Right?a.createElement(x,{loading:d,iconSize:R,iconPosition:i,Icon:n,transitionStatus:D.status,needMargin:I}):null)});O.displayName="Button"},55901:function(e,t,n){n.d(t,{Z:function(){return m}});var r=n(69703),o=n(64090);let a=e=>{var t=(0,r._T)(e,[]);return o.createElement("svg",Object.assign({xmlns:"http://www.w3.org/2000/svg",viewBox:"0 0 24 24",fill:"currentColor"},t),o.createElement("path",{d:"M11.9999 13.1714L16.9497 8.22168L18.3639 9.63589L11.9999 15.9999L5.63599 9.63589L7.0502 8.22168L11.9999 13.1714Z"}))},i=e=>{var t=(0,r._T)(e,[]);return o.createElement("svg",Object.assign({xmlns:"http://www.w3.org/2000/svg",viewBox:"0 0 24 24",fill:"currentColor"},t),o.createElement("path",{d:"M12 22C6.47715 22 2 17.5228 2 12C2 6.47715 6.47715 2 12 2C17.5228 2 22 6.47715 22 12C22 17.5228 17.5228 22 12 22ZM12 10.5858L9.17157 7.75736L7.75736 9.17157L10.5858 12L7.75736 14.8284L9.17157 16.2426L12 13.4142L14.8284 16.2426L16.2426 14.8284L13.4142 12L16.2426 9.17157L14.8284 7.75736L12 10.5858Z"}))};var l=n(99250),s=n(65492),c=n(91753),u=n(63416),d=n(70129);let p=(e,t)=>{let n=void 
0!==t,[r,a]=(0,o.useState)(e);return[n?t:r,e=>{n||a(e)}]},f=(0,s.fn)("Select"),m=o.forwardRef((e,t)=>{let{defaultValue:n,value:s,onValueChange:m,placeholder:g="Select...",disabled:h=!1,icon:b,enableClear:v=!0,children:y,className:E}=e,S=(0,r._T)(e,["defaultValue","value","onValueChange","placeholder","disabled","icon","enableClear","children","className"]),[w,x]=p(n,s),O=(0,o.useMemo)(()=>{let e=o.Children.toArray(y).filter(o.isValidElement);return(0,c.sl)(e)},[y]);return o.createElement(u.R,Object.assign({as:"div",ref:t,defaultValue:w,value:w,onChange:e=>{null==m||m(e),x(e)},disabled:h,className:(0,l.q)("w-full min-w-[10rem] relative text-tremor-default",E)},S),e=>{var t;let{value:n}=e;return o.createElement(o.Fragment,null,o.createElement(u.R.Button,{className:(0,l.q)("w-full outline-none text-left whitespace-nowrap truncate rounded-tremor-default focus:ring-2 transition duration-100 border pr-8 py-2","border-tremor-border shadow-tremor-input focus:border-tremor-brand-subtle focus:ring-tremor-brand-muted","dark:border-dark-tremor-border dark:shadow-dark-tremor-input dark:focus:border-dark-tremor-brand-subtle dark:focus:ring-dark-tremor-brand-muted",b?"pl-10":"pl-3",(0,c.um)((0,c.Uh)(n),h))},b&&o.createElement("span",{className:(0,l.q)("absolute inset-y-0 left-0 flex items-center ml-px pl-2.5")},o.createElement(b,{className:(0,l.q)(f("Icon"),"flex-none h-5 w-5","text-tremor-content-subtle","dark:text-dark-tremor-content-subtle")})),o.createElement("span",{className:"w-[90%] block truncate"},n&&null!==(t=O.get(n))&&void 0!==t?t:g),o.createElement("span",{className:(0,l.q)("absolute inset-y-0 right-0 flex items-center mr-3")},o.createElement(a,{className:(0,l.q)(f("arrowDownIcon"),"flex-none h-5 w-5","text-tremor-content-subtle","dark:text-dark-tremor-content-subtle")}))),v&&w?o.createElement("button",{type:"button",className:(0,l.q)("absolute inset-y-0 right-0 flex items-center 
mr-8"),onClick:e=>{e.preventDefault(),x(""),null==m||m("")}},o.createElement(i,{className:(0,l.q)(f("clearIcon"),"flex-none h-4 w-4","text-tremor-content-subtle","dark:text-dark-tremor-content-subtle")})):null,o.createElement(d.u,{className:"absolute z-10 w-full",enter:"transition ease duration-100 transform",enterFrom:"opacity-0 -translate-y-4",enterTo:"opacity-100 translate-y-0",leave:"transition ease duration-100 transform",leaveFrom:"opacity-100 translate-y-0",leaveTo:"opacity-0 -translate-y-4"},o.createElement(u.R.Options,{className:(0,l.q)("divide-y overflow-y-auto outline-none rounded-tremor-default max-h-[228px] left-0 border my-1","bg-tremor-background border-tremor-border divide-tremor-border shadow-tremor-dropdown","dark:bg-dark-tremor-background dark:border-dark-tremor-border dark:divide-dark-tremor-border dark:shadow-dark-tremor-dropdown")},y)))})});m.displayName="Select"},27166:function(e,t,n){n.d(t,{Z:function(){return s}});var r=n(69703),o=n(64090),a=n(63416),i=n(99250);let l=(0,n(65492).fn)("SelectItem"),s=o.forwardRef((e,t)=>{let{value:n,icon:s,className:c,children:u}=e,d=(0,r._T)(e,["value","icon","className","children"]);return o.createElement(a.R.Option,Object.assign({className:(0,i.q)(l("root"),"flex justify-start items-center cursor-default text-tremor-default px-2.5 py-2.5","ui-active:bg-tremor-background-muted ui-active:text-tremor-content-strong ui-selected:text-tremor-content-strong ui-selected:bg-tremor-background-muted text-tremor-content-emphasis","dark:ui-active:bg-dark-tremor-background-muted dark:ui-active:text-dark-tremor-content-strong dark:ui-selected:text-dark-tremor-content-strong dark:ui-selected:bg-dark-tremor-background-muted dark:text-dark-tremor-content-emphasis",c),ref:t,key:n,value:n},d),s&&o.createElement(s,{className:(0,i.q)(l("icon"),"flex-none w-5 h-5 mr-1.5","text-tremor-content-subtle","dark:text-dark-tremor-content-subtle")}),o.createElement("span",{className:"whitespace-nowrap 
truncate"},null!=u?u:n))});s.displayName="SelectItem"},92836:function(e,t,n){n.d(t,{Z:function(){return p}});var r=n(69703),o=n(80991),a=n(2898),i=n(99250),l=n(65492),s=n(64090),c=n(41608),u=n(50027);n(18174),n(21871),n(41213);let d=(0,l.fn)("Tab"),p=s.forwardRef((e,t)=>{let{icon:n,className:p,children:f}=e,m=(0,r._T)(e,["icon","className","children"]),g=(0,s.useContext)(c.O),h=(0,s.useContext)(u.Z);return s.createElement(o.O,Object.assign({ref:t,className:(0,i.q)(d("root"),"flex whitespace-nowrap truncate max-w-xs outline-none focus:ring-0 text-tremor-default transition duration-100",h?(0,l.bM)(h,a.K.text).selectTextColor:"solid"===g?"ui-selected:text-tremor-content-emphasis dark:ui-selected:text-dark-tremor-content-emphasis":"ui-selected:text-tremor-brand dark:ui-selected:text-dark-tremor-brand",function(e,t){switch(e){case"line":return(0,i.q)("ui-selected:border-b-2 hover:border-b-2 border-transparent transition duration-100 -mb-px px-2 py-2","hover:border-tremor-content hover:text-tremor-content-emphasis text-tremor-content","dark:hover:border-dark-tremor-content-emphasis dark:hover:text-dark-tremor-content-emphasis dark:text-dark-tremor-content",t?(0,l.bM)(t,a.K.border).selectBorderColor:"ui-selected:border-tremor-brand dark:ui-selected:border-dark-tremor-brand");case"solid":return(0,i.q)("border-transparent border rounded-tremor-small px-2.5 py-1","ui-selected:border-tremor-border ui-selected:bg-tremor-background ui-selected:shadow-tremor-input hover:text-tremor-content-emphasis ui-selected:text-tremor-brand","dark:ui-selected:border-dark-tremor-border dark:ui-selected:bg-dark-tremor-background dark:ui-selected:shadow-dark-tremor-input dark:hover:text-dark-tremor-content-emphasis dark:ui-selected:text-dark-tremor-brand",t?(0,l.bM)(t,a.K.text).selectTextColor:"text-tremor-content dark:text-dark-tremor-content")}}(g,h),p)},m),n?s.createElement(n,{className:(0,i.q)(d("icon"),"flex-none h-5 
w-5",f?"mr-2":"")}):null,f?s.createElement("span",null,f):null)});p.displayName="Tab"},26734:function(e,t,n){n.d(t,{Z:function(){return c}});var r=n(69703),o=n(80991),a=n(99250),i=n(65492),l=n(64090);let s=(0,i.fn)("TabGroup"),c=l.forwardRef((e,t)=>{let{defaultIndex:n,index:i,onIndexChange:c,children:u,className:d}=e,p=(0,r._T)(e,["defaultIndex","index","onIndexChange","children","className"]);return l.createElement(o.O.Group,Object.assign({as:"div",ref:t,defaultIndex:n,selectedIndex:i,onChange:c,className:(0,a.q)(s("root"),"w-full",d)},p),u)});c.displayName="TabGroup"},41608:function(e,t,n){n.d(t,{O:function(){return c},Z:function(){return d}});var r=n(69703),o=n(64090),a=n(50027);n(18174),n(21871),n(41213);var i=n(80991),l=n(99250);let s=(0,n(65492).fn)("TabList"),c=(0,o.createContext)("line"),u={line:(0,l.q)("flex border-b space-x-4","border-tremor-border","dark:border-dark-tremor-border"),solid:(0,l.q)("inline-flex p-0.5 rounded-tremor-default space-x-1.5","bg-tremor-background-subtle","dark:bg-dark-tremor-background-subtle")},d=o.forwardRef((e,t)=>{let{color:n,variant:d="line",children:p,className:f}=e,m=(0,r._T)(e,["color","variant","children","className"]);return o.createElement(i.O.List,Object.assign({ref:t,className:(0,l.q)(s("root"),"justify-start overflow-x-clip",u[d],f)},m),o.createElement(c.Provider,{value:d},o.createElement(a.Z.Provider,{value:n},p)))});d.displayName="TabList"},32126:function(e,t,n){n.d(t,{Z:function(){return u}});var r=n(69703);n(50027);var o=n(18174);n(21871);var a=n(41213),i=n(99250),l=n(65492),s=n(64090);let c=(0,l.fn)("TabPanel"),u=s.forwardRef((e,t)=>{let{children:n,className:l}=e,u=(0,r._T)(e,["children","className"]),{selectedValue:d}=(0,s.useContext)(a.Z),p=d===(0,s.useContext)(o.Z);return s.createElement("div",Object.assign({ref:t,className:(0,i.q)(c("root"),"w-full mt-2",p?"":"hidden",l),"aria-selected":p?"true":"false"},u),n)});u.displayName="TabPanel"},23682:function(e,t,n){n.d(t,{Z:function(){return d}});var 
r=n(69703),o=n(80991);n(50027);var a=n(18174);n(21871);var i=n(41213),l=n(99250),s=n(65492),c=n(64090);let u=(0,s.fn)("TabPanels"),d=c.forwardRef((e,t)=>{let{children:n,className:s}=e,d=(0,r._T)(e,["children","className"]);return c.createElement(o.O.Panels,Object.assign({as:"div",ref:t,className:(0,l.q)(u("root"),"w-full",s)},d),e=>{let{selectedIndex:t}=e;return c.createElement(i.Z.Provider,{value:{selectedValue:t}},c.Children.map(n,(e,t)=>c.createElement(a.Z.Provider,{value:t},e)))})});d.displayName="TabPanels"},52273:function(e,t,n){n.d(t,{Z:function(){return f}});var r=n(69703),o=n(64090),a=n(99250),i=n(65492);let l=e=>{var t=(0,r._T)(e,[]);return o.createElement("svg",Object.assign({xmlns:"http://www.w3.org/2000/svg",viewBox:"0 0 24 24",fill:"currentColor"},t),o.createElement("path",{d:"M12 22C6.47715 22 2 17.5228 2 12C2 6.47715 6.47715 2 12 2C17.5228 2 22 6.47715 22 12C22 17.5228 17.5228 22 12 22ZM11 15V17H13V15H11ZM11 7V13H13V7H11Z"}))},s=e=>{var t=(0,r._T)(e,[]);return o.createElement("svg",Object.assign({xmlns:"http://www.w3.org/2000/svg",viewBox:"0 0 24 24",fill:"currentColor"},t),o.createElement("path",{d:"M1.18164 12C2.12215 6.87976 6.60812 3 12.0003 3C17.3924 3 21.8784 6.87976 22.8189 12C21.8784 17.1202 17.3924 21 12.0003 21C6.60812 21 2.12215 17.1202 1.18164 12ZM12.0003 17C14.7617 17 17.0003 14.7614 17.0003 12C17.0003 9.23858 14.7617 7 12.0003 7C9.23884 7 7.00026 9.23858 7.00026 12C7.00026 14.7614 9.23884 17 12.0003 17ZM12.0003 15C10.3434 15 9.00026 13.6569 9.00026 12C9.00026 10.3431 10.3434 9 12.0003 9C13.6571 9 15.0003 10.3431 15.0003 12C15.0003 13.6569 13.6571 15 12.0003 15Z"}))},c=e=>{var t=(0,r._T)(e,[]);return o.createElement("svg",Object.assign({xmlns:"http://www.w3.org/2000/svg",viewBox:"0 0 24 24",fill:"currentColor"},t),o.createElement("path",{d:"M4.52047 5.93457L1.39366 2.80777L2.80788 1.39355L22.6069 21.1925L21.1927 22.6068L17.8827 19.2968C16.1814 20.3755 14.1638 21.0002 12.0003 21.0002C6.60812 21.0002 2.12215 17.1204 1.18164 
12.0002C1.61832 9.62282 2.81932 7.5129 4.52047 5.93457ZM14.7577 16.1718L13.2937 14.7078C12.902 14.8952 12.4634 15.0002 12.0003 15.0002C10.3434 15.0002 9.00026 13.657 9.00026 12.0002C9.00026 11.537 9.10522 11.0984 9.29263 10.7067L7.82866 9.24277C7.30514 10.0332 7.00026 10.9811 7.00026 12.0002C7.00026 14.7616 9.23884 17.0002 12.0003 17.0002C13.0193 17.0002 13.9672 16.6953 14.7577 16.1718ZM7.97446 3.76015C9.22127 3.26959 10.5793 3.00016 12.0003 3.00016C17.3924 3.00016 21.8784 6.87992 22.8189 12.0002C22.5067 13.6998 21.8038 15.2628 20.8068 16.5925L16.947 12.7327C16.9821 12.4936 17.0003 12.249 17.0003 12.0002C17.0003 9.23873 14.7617 7.00016 12.0003 7.00016C11.7514 7.00016 11.5068 7.01833 11.2677 7.05343L7.97446 3.76015Z"}))};var u=n(91753);let d=o.forwardRef((e,t)=>{let{value:n,defaultValue:d,type:p,placeholder:f="Type...",icon:m,error:g=!1,errorMessage:h,disabled:b=!1,stepper:v,makeInputClassName:y,className:E,onChange:S,onValueChange:w,autoFocus:x}=e,O=(0,r._T)(e,["value","defaultValue","type","placeholder","icon","error","errorMessage","disabled","stepper","makeInputClassName","className","onChange","onValueChange","autoFocus"]),[C,T]=(0,o.useState)(x||!1),[k,A]=(0,o.useState)(!1),I=(0,o.useCallback)(()=>A(!k),[k,A]),R=(0,o.useRef)(null),N=(0,u.Uh)(n||d);return o.useEffect(()=>{let e=()=>T(!0),t=()=>T(!1),n=R.current;return n&&(n.addEventListener("focus",e),n.addEventListener("blur",t),x&&n.focus()),()=>{n&&(n.removeEventListener("focus",e),n.removeEventListener("blur",t))}},[x]),o.createElement(o.Fragment,null,o.createElement("div",{className:(0,a.q)(y("root"),"relative w-full flex items-center min-w-[10rem] outline-none rounded-tremor-default transition duration-100 border","shadow-tremor-input","dark:shadow-dark-tremor-input",(0,u.um)(N,b,g),C&&(0,a.q)("ring-2","border-tremor-brand-subtle ring-tremor-brand-muted","dark:border-dark-tremor-brand-subtle dark:ring-dark-tremor-brand-muted"),E)},m?o.createElement(m,{className:(0,a.q)(y("icon"),"shrink-0 h-5 w-5 
ml-2.5","text-tremor-content-subtle","dark:text-dark-tremor-content-subtle")}):null,o.createElement("input",Object.assign({ref:(0,i.lq)([R,t]),defaultValue:d,value:n,type:k?"text":p,className:(0,a.q)(y("input"),"w-full focus:outline-none focus:ring-0 border-none bg-transparent text-tremor-default rounded-tremor-default transition duration-100 py-2","text-tremor-content-emphasis","dark:text-dark-tremor-content-emphasis","[appearance:textfield] [&::-webkit-outer-spin-button]:appearance-none [&::-webkit-inner-spin-button]:appearance-none",m?"pl-2":"pl-3",g?"pr-3":"pr-4",b?"placeholder:text-tremor-content-subtle dark:placeholder:text-dark-tremor-content-subtle":"placeholder:text-tremor-content dark:placeholder:text-dark-tremor-content"),placeholder:f,disabled:b,"data-testid":"base-input",onChange:e=>{null==S||S(e),null==w||w(e.target.value)}},O)),"password"!==p||b?null:o.createElement("button",{className:(0,a.q)(y("toggleButton"),"mr-2"),type:"button",onClick:()=>I(),"aria-label":k?"Hide password":"Show Password"},k?o.createElement(c,{className:(0,a.q)("flex-none h-5 w-5 transition","text-tremor-content-subtle hover:text-tremor-content","dark:text-dark-tremor-content-subtle hover:dark:text-dark-tremor-content"),"aria-hidden":!0}):o.createElement(s,{className:(0,a.q)("flex-none h-5 w-5 transition","text-tremor-content-subtle hover:text-tremor-content","dark:text-dark-tremor-content-subtle hover:dark:text-dark-tremor-content"),"aria-hidden":!0})),g?o.createElement(l,{className:(0,a.q)(y("errorIcon"),"text-red-500 shrink-0 w-5 h-5 mr-2.5")}):null,null!=v?v:null),g&&h?o.createElement("p",{className:(0,a.q)(y("errorMessage"),"text-sm text-red-500 mt-1")},h):null)});d.displayName="BaseInput";let p=(0,i.fn)("TextInput"),f=o.forwardRef((e,t)=>{let{type:n="text"}=e,a=(0,r._T)(e,["type"]);return o.createElement(d,Object.assign({ref:t,type:n,makeInputClassName:p},a))});f.displayName="TextInput"},91753:function(e,t,n){n.d(t,{Uh:function(){return s},sl:function(){return 
i},um:function(){return l}});var r=n(99250),o=n(64090);let a=e=>["string","number"].includes(typeof e)?e:e instanceof Array?e.map(a).join(""):"object"==typeof e&&e?a(e.props.children):void 0;function i(e){let t=new Map;return o.Children.map(e,e=>{var n;t.set(e.props.value,null!==(n=a(e))&&void 0!==n?n:e.props.value)}),t}let l=function(e,t){let n=arguments.length>2&&void 0!==arguments[2]&&arguments[2];return(0,r.q)(t?"bg-tremor-background-subtle dark:bg-dark-tremor-background-subtle":"bg-tremor-background dark:bg-dark-tremor-background",!t&&"hover:bg-tremor-background-muted dark:hover:bg-dark-tremor-background-muted",e?"text-tremor-content-emphasis dark:text-dark-tremor-content-emphasis":"text-tremor-content dark:text-dark-tremor-content",t&&"text-tremor-content-subtle dark:text-dark-tremor-content-subtle",n&&"text-red-500",n?"border-red-500":"border-tremor-border dark:border-dark-tremor-border")};function s(e){return null!=e&&""!==e}},13810:function(e,t,n){n.d(t,{Z:function(){return d}});var r=n(69703),o=n(64090),a=n(54942),i=n(2898),l=n(99250),s=n(65492);let c=(0,s.fn)("Card"),u=e=>{if(!e)return"";switch(e){case a.zS.Left:return"border-l-4";case a.m.Top:return"border-t-4";case a.zS.Right:return"border-r-4";case a.m.Bottom:return"border-b-4";default:return""}},d=o.forwardRef((e,t)=>{let{decoration:n="",decorationColor:a,children:d,className:p}=e,f=(0,r._T)(e,["decoration","decorationColor","children","className"]);return o.createElement("div",Object.assign({ref:t,className:(0,l.q)(c("root"),"relative w-full text-left ring-1 rounded-tremor-default p-6","bg-tremor-background ring-tremor-ring shadow-tremor-card","dark:bg-dark-tremor-background dark:ring-dark-tremor-ring dark:shadow-dark-tremor-card",a?(0,s.bM)(a,i.K.border).borderColor:"border-tremor-brand dark:border-dark-tremor-brand",u(n),p)},f),d)});d.displayName="Card"},39290:function(e,t,n){n.d(t,{Z:function(){return c}});var r=n(69703),o=n(64090),a=n(70129),i=n(68637),l=n(99250);let 
s=(0,n(65492).fn)("dialog"),c=o.forwardRef((e,t)=>{let{children:n,className:c}=e,u=(0,r._T)(e,["children","className"]);return o.createElement(a.u,{as:o.Fragment,appear:!0,show:e.open},o.createElement(i.V,Object.assign({as:"div",ref:t},u,{className:(0,l.q)(s("root"),"relative z-50",c)}),o.createElement(a.u.Child,{as:o.Fragment,enter:"ease-out duration-300",enterFrom:"opacity-0",enterTo:"opacity-100",leave:"ease-in duration-200",leaveFrom:"opacity-100",leaveTo:"opacity-0"},o.createElement("div",{className:(0,l.q)("fixed inset-0 bg-slate-950/30 dark:bg-slate-950/50 transition-opacity")})),o.createElement("div",{className:"fixed inset-0 overflow-y-auto w-screen"},o.createElement("div",{className:"flex min-h-full items-center justify-center p-4"},n))))});c.displayName="Dialog"},66952:function(e,t,n){n.d(t,{Z:function(){return d}});var r=n(69703),o=n(64090),a=n(70129),i=n(68637),l=n(99250),s=n(65492);n(50027),n(18174);var c=n(21871);n(41213);let u=(0,s.fn)("dialog"),d=o.forwardRef((e,t)=>{var n;let{children:s,className:d}=e,p=(0,r._T)(e,["children","className"]),f=null!==(n=o.useContext(c.Z))&&void 0!==n?n:(0,l.q)("rounded-tremor-default p-6");return o.createElement(a.u.Child,{as:o.Fragment,enter:"ease-out duration-300",enterFrom:"opacity-0 scale-95",enterTo:"opacity-100 scale-100",leave:"ease-in duration-200",leaveFrom:"opacity-100 scale-100",leaveTo:"opacity-0 scale-95"},o.createElement(i.V.Panel,Object.assign({ref:t,className:(0,l.q)(u("panel"),"w-full max-w-lg overflow-hidden text-left ring-1 shadow-tremor transition-all transform","bg-tremor-background text-tremor-content ring-tremor-ring","dark:bg-dark-tremor-background dark:text-dark-tremor-content dark:ring-dark-tremor-ring",f,d)},p),s))});d.displayName="DialogPanel"},10384:function(e,t,n){n.d(t,{Z:function(){return c}});var r=n(69703),o=n(99250),a=n(65492),i=n(64090),l=n(50217);let 
s=(0,a.fn)("Col"),c=i.forwardRef((e,t)=>{let{numColSpan:n=1,numColSpanSm:a,numColSpanMd:c,numColSpanLg:u,children:d,className:p}=e,f=(0,r._T)(e,["numColSpan","numColSpanSm","numColSpanMd","numColSpanLg","children","className"]),m=(e,t)=>e&&Object.keys(t).includes(String(e))?t[e]:"";return i.createElement("div",Object.assign({ref:t,className:(0,o.q)(s("root"),(()=>{let e=m(n,l.PT),t=m(a,l.SP),r=m(c,l.VS),i=m(u,l._w);return(0,o.q)(e,t,r,i)})(),p)},f),d)});c.displayName="Col"},46453:function(e,t,n){n.d(t,{Z:function(){return u}});var r=n(69703),o=n(99250),a=n(65492),i=n(64090),l=n(50217);let s=(0,a.fn)("Grid"),c=(e,t)=>e&&Object.keys(t).includes(String(e))?t[e]:"",u=i.forwardRef((e,t)=>{let{numItems:n=1,numItemsSm:a,numItemsMd:u,numItemsLg:d,children:p,className:f}=e,m=(0,r._T)(e,["numItems","numItemsSm","numItemsMd","numItemsLg","children","className"]),g=c(n,l._m),h=c(a,l.LH),b=c(u,l.l5),v=c(d,l.N4),y=(0,o.q)(g,h,b,v);return i.createElement("div",Object.assign({ref:t,className:(0,o.q)(s("root"),"grid",y,f)},m),p)});u.displayName="Grid"},50217:function(e,t,n){n.d(t,{LH:function(){return o},N4:function(){return i},PT:function(){return l},SP:function(){return s},VS:function(){return c},_m:function(){return r},_w:function(){return u},l5:function(){return a}});let 
r={0:"grid-cols-none",1:"grid-cols-1",2:"grid-cols-2",3:"grid-cols-3",4:"grid-cols-4",5:"grid-cols-5",6:"grid-cols-6",7:"grid-cols-7",8:"grid-cols-8",9:"grid-cols-9",10:"grid-cols-10",11:"grid-cols-11",12:"grid-cols-12"},o={0:"sm:grid-cols-none",1:"sm:grid-cols-1",2:"sm:grid-cols-2",3:"sm:grid-cols-3",4:"sm:grid-cols-4",5:"sm:grid-cols-5",6:"sm:grid-cols-6",7:"sm:grid-cols-7",8:"sm:grid-cols-8",9:"sm:grid-cols-9",10:"sm:grid-cols-10",11:"sm:grid-cols-11",12:"sm:grid-cols-12"},a={0:"md:grid-cols-none",1:"md:grid-cols-1",2:"md:grid-cols-2",3:"md:grid-cols-3",4:"md:grid-cols-4",5:"md:grid-cols-5",6:"md:grid-cols-6",7:"md:grid-cols-7",8:"md:grid-cols-8",9:"md:grid-cols-9",10:"md:grid-cols-10",11:"md:grid-cols-11",12:"md:grid-cols-12"},i={0:"lg:grid-cols-none",1:"lg:grid-cols-1",2:"lg:grid-cols-2",3:"lg:grid-cols-3",4:"lg:grid-cols-4",5:"lg:grid-cols-5",6:"lg:grid-cols-6",7:"lg:grid-cols-7",8:"lg:grid-cols-8",9:"lg:grid-cols-9",10:"lg:grid-cols-10",11:"lg:grid-cols-11",12:"lg:grid-cols-12"},l={1:"col-span-1",2:"col-span-2",3:"col-span-3",4:"col-span-4",5:"col-span-5",6:"col-span-6",7:"col-span-7",8:"col-span-8",9:"col-span-9",10:"col-span-10",11:"col-span-11",12:"col-span-12",13:"col-span-13"},s={1:"sm:col-span-1",2:"sm:col-span-2",3:"sm:col-span-3",4:"sm:col-span-4",5:"sm:col-span-5",6:"sm:col-span-6",7:"sm:col-span-7",8:"sm:col-span-8",9:"sm:col-span-9",10:"sm:col-span-10",11:"sm:col-span-11",12:"sm:col-span-12",13:"sm:col-span-13"},c={1:"md:col-span-1",2:"md:col-span-2",3:"md:col-span-3",4:"md:col-span-4",5:"md:col-span-5",6:"md:col-span-6",7:"md:col-span-7",8:"md:col-span-8",9:"md:col-span-9",10:"md:col-span-10",11:"md:col-span-11",12:"md:col-span-12",13:"md:col-span-13"},u={1:"lg:col-span-1",2:"lg:col-span-2",3:"lg:col-span-3",4:"lg:col-span-4",5:"lg:col-span-5",6:"lg:col-span-6",7:"lg:col-span-7",8:"lg:col-span-8",9:"lg:col-span-9",10:"lg:col-span-10",11:"lg:col-span-11",12:"lg:col-span-12",13:"lg:col-span-13"}},10827:function(e,t,n){n.d(t,{Z:function(){return 
l}});var r=n(69703),o=n(64090),a=n(99250);let i=(0,n(65492).fn)("Table"),l=o.forwardRef((e,t)=>{let{children:n,className:l}=e,s=(0,r._T)(e,["children","className"]);return o.createElement("div",{className:(0,a.q)(i("root"),"overflow-auto",l)},o.createElement("table",Object.assign({ref:t,className:(0,a.q)(i("table"),"w-full text-tremor-default","text-tremor-content","dark:text-dark-tremor-content")},s),n))});l.displayName="Table"},3851:function(e,t,n){n.d(t,{Z:function(){return l}});var r=n(69703),o=n(64090),a=n(99250);let i=(0,n(65492).fn)("TableBody"),l=o.forwardRef((e,t)=>{let{children:n,className:l}=e,s=(0,r._T)(e,["children","className"]);return o.createElement(o.Fragment,null,o.createElement("tbody",Object.assign({ref:t,className:(0,a.q)(i("root"),"align-top divide-y","divide-tremor-border","dark:divide-dark-tremor-border",l)},s),n))});l.displayName="TableBody"},2044:function(e,t,n){n.d(t,{Z:function(){return l}});var r=n(69703),o=n(64090),a=n(99250);let i=(0,n(65492).fn)("TableCell"),l=o.forwardRef((e,t)=>{let{children:n,className:l}=e,s=(0,r._T)(e,["children","className"]);return o.createElement(o.Fragment,null,o.createElement("td",Object.assign({ref:t,className:(0,a.q)(i("root"),"align-middle whitespace-nowrap text-left p-4",l)},s),n))});l.displayName="TableCell"},64167:function(e,t,n){n.d(t,{Z:function(){return l}});var r=n(69703),o=n(64090),a=n(99250);let i=(0,n(65492).fn)("TableHead"),l=o.forwardRef((e,t)=>{let{children:n,className:l}=e,s=(0,r._T)(e,["children","className"]);return o.createElement(o.Fragment,null,o.createElement("thead",Object.assign({ref:t,className:(0,a.q)(i("root"),"text-left","text-tremor-content","dark:text-dark-tremor-content",l)},s),n))});l.displayName="TableHead"},74480:function(e,t,n){n.d(t,{Z:function(){return l}});var r=n(69703),o=n(64090),a=n(99250);let i=(0,n(65492).fn)("TableHeaderCell"),l=o.forwardRef((e,t)=>{let{children:n,className:l}=e,s=(0,r._T)(e,["children","className"]);return 
o.createElement(o.Fragment,null,o.createElement("th",Object.assign({ref:t,className:(0,a.q)(i("root"),"whitespace-nowrap text-left font-semibold top-0 px-4 py-3.5","text-tremor-content","dark:text-dark-tremor-content",l)},s),n))});l.displayName="TableHeaderCell"},7178:function(e,t,n){n.d(t,{Z:function(){return l}});var r=n(69703),o=n(64090),a=n(99250);let i=(0,n(65492).fn)("TableRow"),l=o.forwardRef((e,t)=>{let{children:n,className:l}=e,s=(0,r._T)(e,["children","className"]);return o.createElement(o.Fragment,null,o.createElement("tr",Object.assign({ref:t,className:(0,a.q)(i("row"),l)},s),n))});l.displayName="TableRow"},18190:function(e,t,n){n.d(t,{Z:function(){return c}});var r=n(69703),o=n(64090),a=n(2898),i=n(99250),l=n(65492);let s=(0,l.fn)("Callout"),c=o.forwardRef((e,t)=>{let{title:n,icon:c,color:u,className:d,children:p}=e,f=(0,r._T)(e,["title","icon","color","className","children"]);return o.createElement("div",Object.assign({ref:t,className:(0,i.q)(s("root"),"flex flex-col overflow-hidden rounded-tremor-default text-tremor-default border-l-4 py-3 pr-3 pl-4",u?(0,i.q)((0,l.bM)(u,a.K.background).bgColor,(0,l.bM)(u,a.K.darkBorder).borderColor,(0,l.bM)(u,a.K.darkText).textColor,"dark:bg-opacity-10 bg-opacity-10"):(0,i.q)("bg-tremor-brand-faint border-tremor-brand-emphasis text-tremor-brand-emphasis","dark:bg-dark-tremor-brand-muted/70 dark:border-dark-tremor-brand-emphasis dark:text-dark-tremor-brand-emphasis"),d)},f),o.createElement("div",{className:(0,i.q)(s("header"),"flex items-start")},c?o.createElement(c,{className:(0,i.q)(s("icon"),"flex-none h-5 w-5 mr-1.5")}):null,o.createElement("h4",{className:(0,i.q)(s("title"),"font-semibold")},n)),o.createElement("p",{className:(0,i.q)(s("body"),"overflow-y-auto",p?"mt-2":"")},p))});c.displayName="Callout"},71801:function(e,t,n){n.d(t,{Z:function(){return l}});var r=n(2898),o=n(99250),a=n(65492),i=n(64090);let l=i.forwardRef((e,t)=>{let{color:n,className:l,children:s}=e;return 
i.createElement("p",{ref:t,className:(0,o.q)("text-tremor-default",n?(0,a.bM)(n,r.K.text).textColor:(0,o.q)("text-tremor-content","dark:text-dark-tremor-content"),l)},s)});l.displayName="Text"},42440:function(e,t,n){n.d(t,{Z:function(){return s}});var r=n(69703),o=n(2898),a=n(99250),i=n(65492),l=n(64090);let s=l.forwardRef((e,t)=>{let{color:n,children:s,className:c}=e,u=(0,r._T)(e,["color","children","className"]);return l.createElement("p",Object.assign({ref:t,className:(0,a.q)("font-medium text-tremor-title",n?(0,i.bM)(n,o.K.darkText).textColor:"text-tremor-content-emphasis dark:text-dark-tremor-content-emphasis",c)},u),s)});s.displayName="Title"},58437:function(e,t,n){n.d(t,{Z:function(){return eU},l:function(){return eB}});var r=n(64090),o=n.t(r,2),a=n(89542);function i(e){return c(e)?(e.nodeName||"").toLowerCase():"#document"}function l(e){var t;return(null==e||null==(t=e.ownerDocument)?void 0:t.defaultView)||window}function s(e){var t;return null==(t=(c(e)?e.ownerDocument:e.document)||window.document)?void 0:t.documentElement}function c(e){return e instanceof Node||e instanceof l(e).Node}function u(e){return e instanceof Element||e instanceof l(e).Element}function d(e){return e instanceof HTMLElement||e instanceof l(e).HTMLElement}function p(e){return"undefined"!=typeof ShadowRoot&&(e instanceof ShadowRoot||e instanceof l(e).ShadowRoot)}function f(e){let{overflow:t,overflowX:n,overflowY:r,display:o}=v(e);return/auto|scroll|overlay|hidden|clip/.test(t+r+n)&&!["inline","contents"].includes(o)}function m(e){let t=h(),n=v(e);return"none"!==n.transform||"none"!==n.perspective||!!n.containerType&&"normal"!==n.containerType||!t&&!!n.backdropFilter&&"none"!==n.backdropFilter||!t&&!!n.filter&&"none"!==n.filter||["transform","perspective","filter"].some(e=>(n.willChange||"").includes(e))||["paint","layout","strict","content"].some(e=>(n.contain||"").includes(e))}function g(e){let t=E(e);for(;d(t)&&!b(t);){if(m(t))return t;t=E(t)}return null}function 
h(){return"undefined"!=typeof CSS&&!!CSS.supports&&CSS.supports("-webkit-backdrop-filter","none")}function b(e){return["html","body","#document"].includes(i(e))}function v(e){return l(e).getComputedStyle(e)}function y(e){return u(e)?{scrollLeft:e.scrollLeft,scrollTop:e.scrollTop}:{scrollLeft:e.pageXOffset,scrollTop:e.pageYOffset}}function E(e){if("html"===i(e))return e;let t=e.assignedSlot||e.parentNode||p(e)&&e.host||s(e);return p(t)?t.host:t}function S(e,t,n){var r;void 0===t&&(t=[]),void 0===n&&(n=!0);let o=function e(t){let n=E(t);return b(n)?t.ownerDocument?t.ownerDocument.body:t.body:d(n)&&f(n)?n:e(n)}(e),a=o===(null==(r=e.ownerDocument)?void 0:r.body),i=l(o);return a?t.concat(i,i.visualViewport||[],f(o)?o:[],i.frameElement&&n?S(i.frameElement):[]):t.concat(o,S(o,[],n))}let w=Math.min,x=Math.max,O=Math.round,C=Math.floor,T=e=>({x:e,y:e}),k={left:"right",right:"left",bottom:"top",top:"bottom"},A={start:"end",end:"start"};function I(e,t){return"function"==typeof e?e(t):e}function R(e){return e.split("-")[0]}function N(e){return e.split("-")[1]}function _(e){return"x"===e?"y":"x"}function P(e){return"y"===e?"height":"width"}function M(e){return["top","bottom"].includes(R(e))?"y":"x"}function L(e){return e.replace(/start|end/g,e=>A[e])}function D(e){return e.replace(/left|right|bottom|top/g,e=>k[e])}function j(e){return{...e,top:e.y,left:e.x,right:e.x+e.width,bottom:e.y+e.height}}function F(e,t,n){let r,{reference:o,floating:a}=e,i=M(t),l=_(M(t)),s=P(l),c=R(t),u="y"===i,d=o.x+o.width/2-a.width/2,p=o.y+o.height/2-a.height/2,f=o[s]/2-a[s]/2;switch(c){case"top":r={x:d,y:o.y-a.height};break;case"bottom":r={x:d,y:o.y+o.height};break;case"right":r={x:o.x+o.width,y:p};break;case"left":r={x:o.x-a.width,y:p};break;default:r={x:o.x,y:o.y}}switch(N(t)){case"start":r[l]-=f*(n&&u?-1:1);break;case"end":r[l]+=f*(n&&u?-1:1)}return r}let B=async(e,t,n)=>{let{placement:r="bottom",strategy:o="absolute",middleware:a=[],platform:i}=n,l=a.filter(Boolean),s=await (null==i.isRTL?void 
0:i.isRTL(t)),c=await i.getElementRects({reference:e,floating:t,strategy:o}),{x:u,y:d}=F(c,r,s),p=r,f={},m=0;for(let n=0;n{!function(n){try{t=t||e.matches(n)}catch(e){}}(n)});let o=g(e);if(t&&o){let e=o.getBoundingClientRect();n=e.x,r=e.y}return[t,n,r]}function K(e){return V(s(e)).left+y(e).scrollLeft}function X(e,t,n){let r;if("viewport"===t)r=function(e,t){let n=l(e),r=s(e),o=n.visualViewport,a=r.clientWidth,i=r.clientHeight,c=0,u=0;if(o){a=o.width,i=o.height;let e=h();(!e||e&&"fixed"===t)&&(c=o.offsetLeft,u=o.offsetTop)}return{width:a,height:i,x:c,y:u}}(e,n);else if("document"===t)r=function(e){let t=s(e),n=y(e),r=e.ownerDocument.body,o=x(t.scrollWidth,t.clientWidth,r.scrollWidth,r.clientWidth),a=x(t.scrollHeight,t.clientHeight,r.scrollHeight,r.clientHeight),i=-n.scrollLeft+K(e),l=-n.scrollTop;return"rtl"===v(r).direction&&(i+=x(t.clientWidth,r.clientWidth)-o),{width:o,height:a,x:i,y:l}}(s(e));else if(u(t))r=function(e,t){let n=V(e,!0,"fixed"===t),r=n.top+e.clientTop,o=n.left+e.clientLeft,a=d(e)?G(e):T(1),i=e.clientWidth*a.x;return{width:i,height:e.clientHeight*a.y,x:o*a.x,y:r*a.y}}(t,n);else{let n=W(e);r={...t,x:t.x-n.x,y:t.y-n.y}}return j(r)}function Q(e,t){return d(e)&&"fixed"!==v(e).position?t?t(e):e.offsetParent:null}function J(e,t){let n=l(e);if(!d(e))return n;let r=Q(e,t);for(;r&&["table","td","th"].includes(i(r))&&"static"===v(r).position;)r=Q(r,t);return r&&("html"===i(r)||"body"===i(r)&&"static"===v(r).position&&!m(r))?n:r||g(e)||n}let ee=async function(e){let t=this.getOffsetParent||J,n=this.getDimensions;return{reference:function(e,t,n,r){let o=d(t),a=s(t),l="fixed"===n,c=V(e,!0,l,t),u={scrollLeft:0,scrollTop:0},p=T(0);if(o||!o&&!l){if(("body"!==i(t)||f(a))&&(u=y(t)),o){let e=V(t,!0,l,t);p.x=e.x+t.clientLeft,p.y=e.y+t.clientTop}else a&&(p.x=K(a))}let m=c.left+u.scrollLeft-p.x,g=c.top+u.scrollTop-p.y,[h,b,v]=Y(r);return h&&(m+=b,g+=v,o&&(m+=t.clientLeft,g+=t.clientTop)),{x:m,y:g,width:c.width,height:c.height}}(e.reference,await 
t(e.floating),e.strategy,e.floating),floating:{x:0,y:0,...await n(e.floating)}}},et={convertOffsetParentRelativeRectToViewportRelativeRect:function(e){let{elements:t,rect:n,offsetParent:r,strategy:o}=e,a=s(r),[l]=t?Y(t.floating):[!1];if(r===a||l)return n;let c={scrollLeft:0,scrollTop:0},u=T(1),p=T(0),m=d(r);if((m||!m&&"fixed"!==o)&&(("body"!==i(r)||f(a))&&(c=y(r)),d(r))){let e=V(r);u=G(r),p.x=e.x+r.clientLeft,p.y=e.y+r.clientTop}return{width:n.width*u.x,height:n.height*u.y,x:n.x*u.x-c.scrollLeft*u.x+p.x,y:n.y*u.y-c.scrollTop*u.y+p.y}},getDocumentElement:s,getClippingRect:function(e){let{element:t,boundary:n,rootBoundary:r,strategy:o}=e,a=[..."clippingAncestors"===n?function(e,t){let n=t.get(e);if(n)return n;let r=S(e,[],!1).filter(e=>u(e)&&"body"!==i(e)),o=null,a="fixed"===v(e).position,l=a?E(e):e;for(;u(l)&&!b(l);){let t=v(l),n=m(l);n||"fixed"!==t.position||(o=null),(a?!n&&!o:!n&&"static"===t.position&&!!o&&["absolute","fixed"].includes(o.position)||f(l)&&!n&&function e(t,n){let r=E(t);return!(r===n||!u(r)||b(r))&&("fixed"===v(r).position||e(r,n))}(e,l))?r=r.filter(e=>e!==l):o=t,l=E(l)}return t.set(e,r),r}(t,this._c):[].concat(n),r],l=a[0],s=a.reduce((e,n)=>{let r=X(t,n,o);return e.top=x(r.top,e.top),e.right=w(r.right,e.right),e.bottom=w(r.bottom,e.bottom),e.left=x(r.left,e.left),e},X(t,l,o));return{width:s.right-s.left,height:s.bottom-s.top,x:s.left,y:s.top}},getOffsetParent:J,getElementRects:ee,getClientRects:function(e){return Array.from(e.getClientRects())},getDimensions:function(e){let{width:t,height:n}=z(e);return{width:t,height:n}},getScale:G,isElement:u,isRTL:function(e){return"rtl"===v(e).direction}};function en(e,t,n,r){let o;void 0===r&&(r={});let{ancestorScroll:a=!0,ancestorResize:i=!0,elementResize:l="function"==typeof ResizeObserver,layoutShift:c="function"==typeof IntersectionObserver,animationFrame:u=!1}=r,d=H(e),p=a||i?[...d?S(d):[],...S(t)]:[];p.forEach(e=>{a&&e.addEventListener("scroll",n,{passive:!0}),i&&e.addEventListener("resize",n)});let 
f=d&&c?function(e,t){let n,r=null,o=s(e);function a(){var e;clearTimeout(n),null==(e=r)||e.disconnect(),r=null}return!function i(l,s){void 0===l&&(l=!1),void 0===s&&(s=1),a();let{left:c,top:u,width:d,height:p}=e.getBoundingClientRect();if(l||t(),!d||!p)return;let f=C(u),m=C(o.clientWidth-(c+d)),g={rootMargin:-f+"px "+-m+"px "+-C(o.clientHeight-(u+p))+"px "+-C(c)+"px",threshold:x(0,w(1,s))||1},h=!0;function b(e){let t=e[0].intersectionRatio;if(t!==s){if(!h)return i();t?i(!1,t):n=setTimeout(()=>{i(!1,1e-7)},100)}h=!1}try{r=new IntersectionObserver(b,{...g,root:o.ownerDocument})}catch(e){r=new IntersectionObserver(b,g)}r.observe(e)}(!0),a}(d,n):null,m=-1,g=null;l&&(g=new ResizeObserver(e=>{let[r]=e;r&&r.target===d&&g&&(g.unobserve(t),cancelAnimationFrame(m),m=requestAnimationFrame(()=>{var e;null==(e=g)||e.observe(t)})),n()}),d&&!u&&g.observe(d),g.observe(t));let h=u?V(e):null;return u&&function t(){let r=V(e);h&&(r.x!==h.x||r.y!==h.y||r.width!==h.width||r.height!==h.height)&&n(),h=r,o=requestAnimationFrame(t)}(),n(),()=>{var e;p.forEach(e=>{a&&e.removeEventListener("scroll",n),i&&e.removeEventListener("resize",n)}),null==f||f(),null==(e=g)||e.disconnect(),g=null,u&&cancelAnimationFrame(o)}}let er=(e,t,n)=>{let r=new Map,o={platform:et,...n},a={...o.platform,_c:r};return B(e,t,{...o,platform:a})};var eo="undefined"!=typeof document?r.useLayoutEffect:r.useEffect;function ea(e,t){let n,r,o;if(e===t)return!0;if(typeof e!=typeof t)return!1;if("function"==typeof e&&e.toString()===t.toString())return!0;if(e&&t&&"object"==typeof e){if(Array.isArray(e)){if((n=e.length)!=t.length)return!1;for(r=n;0!=r--;)if(!ea(e[r],t[r]))return!1;return!0}if((n=(o=Object.keys(e)).length)!==Object.keys(t).length)return!1;for(r=n;0!=r--;)if(!Object.prototype.hasOwnProperty.call(t,o[r]))return!1;for(r=n;0!=r--;){let n=o[r];if(("_owner"!==n||!e.$$typeof)&&!ea(e[n],t[n]))return!1}return!0}return e!=e&&t!=t}function ei(e){let t=r.useRef(e);return eo(()=>{t.current=e}),t}var el="undefined"!=typeof 
document?r.useLayoutEffect:r.useEffect;let es=!1,ec=0,eu=()=>"floating-ui-"+ec++,ed=o["useId".toString()]||function(){let[e,t]=r.useState(()=>es?eu():void 0);return el(()=>{null==e&&t(eu())},[]),r.useEffect(()=>{es||(es=!0)},[]),e},ep=r.createContext(null),ef=r.createContext(null),em=()=>{var e;return(null==(e=r.useContext(ep))?void 0:e.id)||null},eg=()=>r.useContext(ef);function eh(e){return(null==e?void 0:e.ownerDocument)||document}function eb(e){return eh(e).defaultView||window}function ev(e){return!!e&&e instanceof eb(e).Element}function ey(e){return!!e&&e instanceof eb(e).HTMLElement}function eE(e,t){let n=["mouse","pen"];return t||n.push("",void 0),n.includes(e)}function eS(e){let t=(0,r.useRef)(e);return el(()=>{t.current=e}),t}let ew="data-floating-ui-safe-polygon";function ex(e,t,n){return n&&!eE(n)?0:"number"==typeof e?e:null==e?void 0:e[t]}let eO=function(e,t){let{enabled:n=!0,delay:o=0,handleClose:a=null,mouseOnly:i=!1,restMs:l=0,move:s=!0}=void 0===t?{}:t,{open:c,onOpenChange:u,dataRef:d,events:p,elements:{domReference:f,floating:m},refs:g}=e,h=eg(),b=em(),v=eS(a),y=eS(o),E=r.useRef(),S=r.useRef(),w=r.useRef(),x=r.useRef(),O=r.useRef(!0),C=r.useRef(!1),T=r.useRef(()=>{}),k=r.useCallback(()=>{var e;let t=null==(e=d.current.openEvent)?void 0:e.type;return(null==t?void 0:t.includes("mouse"))&&"mousedown"!==t},[d]);r.useEffect(()=>{if(n)return p.on("dismiss",e),()=>{p.off("dismiss",e)};function e(){clearTimeout(S.current),clearTimeout(x.current),O.current=!0}},[n,p]),r.useEffect(()=>{if(!n||!v.current||!c)return;function e(){k()&&u(!1)}let t=eh(m).documentElement;return t.addEventListener("mouseleave",e),()=>{t.removeEventListener("mouseleave",e)}},[m,c,u,n,v,d,k]);let A=r.useCallback(function(e){void 0===e&&(e=!0);let t=ex(y.current,"close",E.current);t&&!w.current?(clearTimeout(S.current),S.current=setTimeout(()=>u(!1),t)):e&&(clearTimeout(S.current),u(!1))},[y,u]),I=r.useCallback(()=>{T.current(),w.current=void 
0},[]),R=r.useCallback(()=>{if(C.current){let e=eh(g.floating.current).body;e.style.pointerEvents="",e.removeAttribute(ew),C.current=!1}},[g]);return r.useEffect(()=>{if(n&&ev(f))return c&&f.addEventListener("mouseleave",a),null==m||m.addEventListener("mouseleave",a),s&&f.addEventListener("mousemove",r,{once:!0}),f.addEventListener("mouseenter",r),f.addEventListener("mouseleave",o),()=>{c&&f.removeEventListener("mouseleave",a),null==m||m.removeEventListener("mouseleave",a),s&&f.removeEventListener("mousemove",r),f.removeEventListener("mouseenter",r),f.removeEventListener("mouseleave",o)};function t(){return!!d.current.openEvent&&["click","mousedown"].includes(d.current.openEvent.type)}function r(e){if(clearTimeout(S.current),O.current=!1,i&&!eE(E.current)||l>0&&0===ex(y.current,"open"))return;d.current.openEvent=e;let t=ex(y.current,"open",E.current);t?S.current=setTimeout(()=>{u(!0)},t):u(!0)}function o(n){if(t())return;T.current();let r=eh(m);if(clearTimeout(x.current),v.current){c||clearTimeout(S.current),w.current=v.current({...e,tree:h,x:n.clientX,y:n.clientY,onClose(){R(),I(),A()}});let t=w.current;r.addEventListener("mousemove",t),T.current=()=>{r.removeEventListener("mousemove",t)};return}A()}function a(n){t()||null==v.current||v.current({...e,tree:h,x:n.clientX,y:n.clientY,onClose(){R(),I(),A()}})(n)}},[f,m,n,e,i,l,s,A,I,R,u,c,h,y,v,d]),el(()=>{var e,t,r;if(n&&c&&null!=(e=v.current)&&e.__options.blockPointerEvents&&k()){let e=eh(m).body;if(e.setAttribute(ew,""),e.style.pointerEvents="none",C.current=!0,ev(f)&&m){let e=null==h?void 0:null==(t=h.nodesRef.current.find(e=>e.id===b))?void 0:null==(r=t.context)?void 0:r.elements.floating;return e&&(e.style.pointerEvents=""),f.style.pointerEvents="auto",m.style.pointerEvents="auto",()=>{f.style.pointerEvents="",m.style.pointerEvents=""}}}},[n,c,b,m,f,h,v,d,k]),el(()=>{c||(E.current=void 
0,I(),R())},[c,I,R]),r.useEffect(()=>()=>{I(),clearTimeout(S.current),clearTimeout(x.current),R()},[n,I,R]),r.useMemo(()=>{if(!n)return{};function e(e){E.current=e.pointerType}return{reference:{onPointerDown:e,onPointerEnter:e,onMouseMove(){c||0===l||(clearTimeout(x.current),x.current=setTimeout(()=>{O.current||u(!0)},l))}},floating:{onMouseEnter(){clearTimeout(S.current)},onMouseLeave(){p.emit("dismiss",{type:"mouseLeave",data:{returnFocus:!1}}),A(!1)}}}},[p,n,l,c,u,A])};function eC(e,t){if(!e||!t)return!1;let n=t.getRootNode&&t.getRootNode();if(e.contains(t))return!0;if(n&&function(e){if("undefined"==typeof ShadowRoot)return!1;let t=eb(e).ShadowRoot;return e instanceof t||e instanceof ShadowRoot}(n)){let n=t;do{if(n&&e===n)return!0;n=n.parentNode||n.host}while(n)}return!1}function eT(e,t){let n=e.filter(e=>{var n;return e.parentId===t&&(null==(n=e.context)?void 0:n.open)})||[],r=n;for(;r.length;)r=e.filter(e=>{var t;return null==(t=r)?void 0:t.some(t=>{var n;return e.parentId===t.id&&(null==(n=e.context)?void 0:n.open)})})||[],n=n.concat(r);return n}let ek=o["useInsertionEffect".toString()]||(e=>e());function eA(e){let t=r.useRef(()=>{});return ek(()=>{t.current=e}),r.useCallback(function(){for(var e=arguments.length,n=Array(e),r=0;r!1),x="function"==typeof f?w:f,O=r.useRef(!1),{escapeKeyBubbles:C,outsidePressBubbles:T}=e_(v);return r.useEffect(()=>{if(!n||!d)return;function e(e){if("Escape"===e.key){let e=y?eT(y.nodesRef.current,i):[];if(e.length>0){let t=!0;if(e.forEach(e=>{var n;if(null!=(n=e.context)&&n.open&&!e.context.dataRef.current.__escapeKeyBubbles){t=!1;return}}),!t)return}a.emit("dismiss",{type:"escapeKey",data:{returnFocus:{preventScroll:!1}}}),o(!1)}}function t(e){var t;let n=O.current;if(O.current=!1,n||"function"==typeof x&&!x(e))return;let r="composedPath"in e?e.composedPath()[0]:e.target;if(ey(r)&&c){let 
t=c.ownerDocument.defaultView||window,n=r.scrollWidth>r.clientWidth,o=r.scrollHeight>r.clientHeight,a=o&&e.offsetX>r.clientWidth;if(o&&"rtl"===t.getComputedStyle(r).direction&&(a=e.offsetX<=r.offsetWidth-r.clientWidth),a||n&&e.offsetY>r.clientHeight)return}let l=y&&eT(y.nodesRef.current,i).some(t=>{var n;return eI(e,null==(n=t.context)?void 0:n.elements.floating)});if(eI(e,c)||eI(e,s)||l)return;let u=y?eT(y.nodesRef.current,i):[];if(u.length>0){let e=!0;if(u.forEach(t=>{var n;if(null!=(n=t.context)&&n.open&&!t.context.dataRef.current.__outsidePressBubbles){e=!1;return}}),!e)return}a.emit("dismiss",{type:"outsidePress",data:{returnFocus:E?{preventScroll:!0}:function(e){if(0===e.mozInputSource&&e.isTrusted)return!0;let t=/Android/i;return(t.test(function(){let e=navigator.userAgentData;return null!=e&&e.platform?e.platform:navigator.platform}())||t.test(function(){let e=navigator.userAgentData;return e&&Array.isArray(e.brands)?e.brands.map(e=>{let{brand:t,version:n}=e;return t+"/"+n}).join(" "):navigator.userAgent}()))&&e.pointerType?"click"===e.type&&1===e.buttons:0===e.detail&&!e.pointerType}(e)||0===(t=e).width&&0===t.height||1===t.width&&1===t.height&&0===t.pressure&&0===t.detail&&"mouse"!==t.pointerType||t.width<1&&t.height<1&&0===t.pressure&&0===t.detail}}),o(!1)}function r(){o(!1)}u.current.__escapeKeyBubbles=C,u.current.__outsidePressBubbles=T;let f=eh(c);p&&f.addEventListener("keydown",e),x&&f.addEventListener(m,t);let g=[];return b&&(ev(s)&&(g=S(s)),ev(c)&&(g=g.concat(S(c))),!ev(l)&&l&&l.contextElement&&(g=g.concat(S(l.contextElement)))),(g=g.filter(e=>{var t;return e!==(null==(t=f.defaultView)?void 
0:t.visualViewport)})).forEach(e=>{e.addEventListener("scroll",r,{passive:!0})}),()=>{p&&f.removeEventListener("keydown",e),x&&f.removeEventListener(m,t),g.forEach(e=>{e.removeEventListener("scroll",r)})}},[u,c,s,l,p,x,m,a,y,i,n,o,b,d,C,T,E]),r.useEffect(()=>{O.current=!1},[x,m]),r.useMemo(()=>d?{reference:{[eR[h]]:()=>{g&&(a.emit("dismiss",{type:"referencePress",data:{returnFocus:!1}}),o(!1))}},floating:{[eN[m]]:()=>{O.current=!0}}}:{},[d,a,g,m,h,o])},eM=function(e,t){let{open:n,onOpenChange:o,dataRef:a,events:i,refs:l,elements:{floating:s,domReference:c}}=e,{enabled:u=!0,keyboardOnly:d=!0}=void 0===t?{}:t,p=r.useRef(""),f=r.useRef(!1),m=r.useRef();return r.useEffect(()=>{if(!u)return;let e=eh(s).defaultView||window;function t(){!n&&ey(c)&&c===function(e){let t=e.activeElement;for(;(null==(n=t)?void 0:null==(r=n.shadowRoot)?void 0:r.activeElement)!=null;){var n,r;t=t.shadowRoot.activeElement}return t}(eh(c))&&(f.current=!0)}return e.addEventListener("blur",t),()=>{e.removeEventListener("blur",t)}},[s,c,n,u]),r.useEffect(()=>{if(u)return i.on("dismiss",e),()=>{i.off("dismiss",e)};function e(e){("referencePress"===e.type||"escapeKey"===e.type)&&(f.current=!0)}},[i,u]),r.useEffect(()=>()=>{clearTimeout(m.current)},[]),r.useMemo(()=>u?{reference:{onPointerDown(e){let{pointerType:t}=e;p.current=t,f.current=!!(t&&d)},onMouseLeave(){f.current=!1},onFocus(e){var t;f.current||"focus"===e.type&&(null==(t=a.current.openEvent)?void 0:t.type)==="mousedown"&&a.current.openEvent&&eI(a.current.openEvent,c)||(a.current.openEvent=e.nativeEvent,o(!0))},onBlur(e){f.current=!1;let t=e.relatedTarget,n=ev(t)&&t.hasAttribute("data-floating-ui-focus-guard")&&"outside"===t.getAttribute("data-type");m.current=setTimeout(()=>{eC(l.floating.current,t)||eC(c,t)||n||o(!1)})}}}:{},[u,d,c,l,a,o])},eL=function(e,t){let{open:n}=e,{enabled:o=!0,role:a="dialog"}=void 0===t?{}:t,i=ed(),l=ed();return r.useMemo(()=>{let e={id:i,role:a};return o?"tooltip"===a?{reference:{"aria-describedby":n?i:void 
0},floating:e}:{reference:{"aria-expanded":n?"true":"false","aria-haspopup":"alertdialog"===a?"dialog":a,"aria-controls":n?i:void 0,..."listbox"===a&&{role:"combobox"},..."menu"===a&&{id:l}},floating:{...e,..."menu"===a&&{"aria-labelledby":l}}}:{}},[o,a,n,i,l])};function eD(e,t,n){let r=new Map;return{..."floating"===n&&{tabIndex:-1},...e,...t.map(e=>e?e[n]:null).concat(e).reduce((e,t)=>(t&&Object.entries(t).forEach(t=>{let[n,o]=t;if(0===n.indexOf("on")){if(r.has(n)||r.set(n,[]),"function"==typeof o){var a;null==(a=r.get(n))||a.push(o),e[n]=function(){for(var e,t=arguments.length,o=Array(t),a=0;ae(...o))}}}else e[n]=o}),e),{})}}let ej=function(e){void 0===e&&(e=[]);let t=e,n=r.useCallback(t=>eD(t,e,"reference"),t),o=r.useCallback(t=>eD(t,e,"floating"),t),a=r.useCallback(t=>eD(t,e,"item"),e.map(e=>null==e?void 0:e.item));return r.useMemo(()=>({getReferenceProps:n,getFloatingProps:o,getItemProps:a}),[n,o,a])};var eF=n(99250);let eB=e=>{var t,n;let[o,i]=(0,r.useState)(!1),[l,s]=(0,r.useState)(),{x:c,y:u,refs:d,strategy:p,context:f}=function(e){void 0===e&&(e={});let{open:t=!1,onOpenChange:n,nodeId:o}=e,i=function(e){void 0===e&&(e={});let{placement:t="bottom",strategy:n="absolute",middleware:o=[],platform:i,whileElementsMounted:l,open:s}=e,[c,u]=r.useState({x:null,y:null,strategy:n,placement:t,middlewareData:{},isPositioned:!1}),[d,p]=r.useState(o);ea(d,o)||p(o);let f=r.useRef(null),m=r.useRef(null),g=r.useRef(c),h=ei(l),b=ei(i),[v,y]=r.useState(null),[E,S]=r.useState(null),w=r.useCallback(e=>{f.current!==e&&(f.current=e,y(e))},[]),x=r.useCallback(e=>{m.current!==e&&(m.current=e,S(e))},[]),O=r.useCallback(()=>{if(!f.current||!m.current)return;let e={placement:t,strategy:n,middleware:d};b.current&&(e.platform=b.current),er(f.current,m.current,e).then(e=>{let t={...e,isPositioned:!0};C.current&&!ea(g.current,t)&&(g.current=t,a.flushSync(()=>{u(t)}))})},[d,t,n,b]);eo(()=>{!1===s&&g.current.isPositioned&&(g.current.isPositioned=!1,u(e=>({...e,isPositioned:!1})))},[s]);let 
C=r.useRef(!1);eo(()=>(C.current=!0,()=>{C.current=!1}),[]),eo(()=>{if(v&&E){if(h.current)return h.current(v,E,O);O()}},[v,E,O,h]);let T=r.useMemo(()=>({reference:f,floating:m,setReference:w,setFloating:x}),[w,x]),k=r.useMemo(()=>({reference:v,floating:E}),[v,E]);return r.useMemo(()=>({...c,update:O,refs:T,elements:k,reference:w,floating:x}),[c,O,T,k,w,x])}(e),l=eg(),s=r.useRef(null),c=r.useRef({}),u=r.useState(()=>(function(){let e=new Map;return{emit(t,n){var r;null==(r=e.get(t))||r.forEach(e=>e(n))},on(t,n){e.set(t,[...e.get(t)||[],n])},off(t,n){e.set(t,(e.get(t)||[]).filter(e=>e!==n))}}})())[0],[d,p]=r.useState(null),f=r.useCallback(e=>{let t=ev(e)?{getBoundingClientRect:()=>e.getBoundingClientRect(),contextElement:e}:e;i.refs.setReference(t)},[i.refs]),m=r.useCallback(e=>{(ev(e)||null===e)&&(s.current=e,p(e)),(ev(i.refs.reference.current)||null===i.refs.reference.current||null!==e&&!ev(e))&&i.refs.setReference(e)},[i.refs]),g=r.useMemo(()=>({...i.refs,setReference:m,setPositionReference:f,domReference:s}),[i.refs,m,f]),h=r.useMemo(()=>({...i.elements,domReference:d}),[i.elements,d]),b=eA(n),v=r.useMemo(()=>({...i,refs:g,elements:h,dataRef:c,nodeId:o,events:u,open:t,onOpenChange:b}),[i,o,u,t,b,g,h]);return el(()=>{let e=null==l?void 0:l.nodesRef.current.find(e=>e.id===o);e&&(e.context=v)}),r.useMemo(()=>({...i,context:v,refs:g,reference:m,positionReference:f}),[i,g,v,m,f])}({open:o,onOpenChange:t=>{t&&e?s(setTimeout(()=>{i(t)},e)):(clearTimeout(l),i(t))},placement:"top",whileElementsMounted:en,middleware:[{name:"offset",options:5,async fn(e){var t,n;let{x:r,y:o,placement:a,middlewareData:i}=e,l=await Z(e,5);return a===(null==(t=i.offset)?void 0:t.placement)&&null!=(n=i.arrow)&&n.alignmentOffset?{}:{x:r+l.x,y:o+l.y,data:{...l,placement:a}}}},{name:"flip",options:t={fallbackAxisSideDirection:"start"},async fn(e){var 
n,r,o,a,i;let{placement:l,middlewareData:s,rects:c,initialPlacement:u,platform:d,elements:p}=e,{mainAxis:f=!0,crossAxis:m=!0,fallbackPlacements:g,fallbackStrategy:h="bestFit",fallbackAxisSideDirection:b="none",flipAlignment:v=!0,...y}=I(t,e);if(null!=(n=s.arrow)&&n.alignmentOffset)return{};let E=R(l),S=R(u)===u,w=await (null==d.isRTL?void 0:d.isRTL(p.floating)),x=g||(S||!v?[D(u)]:function(e){let t=D(e);return[L(e),t,L(t)]}(u));g||"none"===b||x.push(...function(e,t,n,r){let o=N(e),a=function(e,t,n){let r=["left","right"],o=["right","left"];switch(e){case"top":case"bottom":if(n)return t?o:r;return t?r:o;case"left":case"right":return t?["top","bottom"]:["bottom","top"];default:return[]}}(R(e),"start"===n,r);return o&&(a=a.map(e=>e+"-"+o),t&&(a=a.concat(a.map(L)))),a}(u,v,b,w));let O=[u,...x],C=await U(e,y),T=[],k=(null==(r=s.flip)?void 0:r.overflows)||[];if(f&&T.push(C[E]),m){let e=function(e,t,n){void 0===n&&(n=!1);let r=N(e),o=_(M(e)),a=P(o),i="x"===o?r===(n?"end":"start")?"right":"left":"start"===r?"bottom":"top";return t.reference[a]>t.floating[a]&&(i=D(i)),[i,D(i)]}(l,c,w);T.push(C[e[0]],C[e[1]])}if(k=[...k,{placement:l,overflows:T}],!T.every(e=>e<=0)){let e=((null==(o=s.flip)?void 0:o.index)||0)+1,t=O[e];if(t)return{data:{index:e,overflows:k},reset:{placement:t}};let n=null==(a=k.filter(e=>e.overflows[0]<=0).sort((e,t)=>e.overflows[1]-t.overflows[1])[0])?void 0:a.placement;if(!n)switch(h){case"bestFit":{let e=null==(i=k.map(e=>[e.placement,e.overflows.filter(e=>e>0).reduce((e,t)=>e+t,0)]).sort((e,t)=>e[1]-t[1])[0])?void 0:i[0];e&&(n=e);break}case"initialPlacement":n=u}if(l!==n)return{reset:{placement:n}}}return{}}},(void 0===n&&(n={}),{name:"shift",options:n,async fn(e){let{x:t,y:r,placement:o}=e,{mainAxis:a=!0,crossAxis:i=!1,limiter:l={fn:e=>{let{x:t,y:n}=e;return{x:t,y:n}}},...s}=I(n,e),c={x:t,y:r},u=await U(e,s),d=M(R(o)),p=_(d),f=c[p],m=c[d];if(a){let e="y"===p?"top":"left",t="y"===p?"bottom":"right",n=f+u[e],r=f-u[t];f=x(n,w(f,r))}if(i){let 
e="y"===d?"top":"left",t="y"===d?"bottom":"right",n=m+u[e],r=m-u[t];m=x(n,w(m,r))}let g=l.fn({...e,[p]:f,[d]:m});return{...g,data:{x:g.x-t,y:g.y-r}}}})]}),m=eO(f,{move:!1}),{getReferenceProps:g,getFloatingProps:h}=ej([m,eM(f),eP(f),eL(f,{role:"tooltip"})]);return{tooltipProps:{open:o,x:c,y:u,refs:d,strategy:p,getFloatingProps:h},getReferenceProps:g}},eU=e=>{let{text:t,open:n,x:o,y:a,refs:i,strategy:l,getFloatingProps:s}=e;return n&&t?r.createElement("div",Object.assign({className:(0,eF.q)("max-w-xs text-sm z-20 rounded-tremor-default opacity-100 px-2.5 py-1","text-white bg-tremor-background-emphasis","text-white dark:bg-dark-tremor-background-subtle"),ref:i.setFloating,style:{position:l,top:null!=a?a:0,left:null!=o?o:0}},s()),t):null};eU.displayName="Tooltip"},67989:function(e,t,n){n.d(t,{Z:function(){return c}});var r=n(69703),o=n(64090),a=n(2898),i=n(99250),l=n(65492);let s=(0,l.fn)("BarList"),c=o.forwardRef((e,t)=>{var n;let c;let{data:u=[],color:d,valueFormatter:p=l.Cj,showAnimation:f=!1,className:m}=e,g=(0,r._T)(e,["data","color","valueFormatter","showAnimation","className"]),h=(n=u.map(e=>e.value),c=-1/0,n.forEach(e=>{c=Math.max(c,e)}),n.map(e=>0===e?0:Math.max(e/c*100,1)));return o.createElement("div",Object.assign({ref:t,className:(0,i.q)(s("root"),"flex justify-between space-x-6",m)},g),o.createElement("div",{className:(0,i.q)(s("bars"),"relative w-full")},u.map((e,t)=>{var n,r,c;let p=e.icon;return o.createElement("div",{key:null!==(n=e.key)&&void 0!==n?n:e.name,className:(0,i.q)(s("bar"),"flex items-center rounded-tremor-small bg-opacity-30","h-9",e.color||d?(0,l.bM)(null!==(r=e.color)&&void 0!==r?r:d,a.K.background).bgColor:"bg-tremor-brand-subtle dark:bg-dark-tremor-brand-subtle dark:bg-opacity-30",t===u.length-1?"mb-0":"mb-2"),style:{width:"".concat(h[t],"%"),transition:f?"all 1s":""}},o.createElement("div",{className:(0,i.q)("absolute max-w-full flex left-2")},p?o.createElement(p,{className:(0,i.q)(s("barIcon"),"flex-none h-5 w-5 
mr-2","text-tremor-content","dark:text-dark-tremor-content")}):null,e.href?o.createElement("a",{href:e.href,target:null!==(c=e.target)&&void 0!==c?c:"_blank",rel:"noreferrer",className:(0,i.q)(s("barLink"),"whitespace-nowrap hover:underline truncate text-tremor-default","text-tremor-content-emphasis","dark:text-dark-tremor-content-emphasis")},e.name):o.createElement("p",{className:(0,i.q)(s("barText"),"whitespace-nowrap truncate text-tremor-default","text-tremor-content-emphasis","dark:text-dark-tremor-content-emphasis")},e.name)))})),o.createElement("div",{className:"text-right min-w-min"},u.map((e,t)=>{var n;return o.createElement("div",{key:null!==(n=e.key)&&void 0!==n?n:e.name,className:(0,i.q)(s("labelWrapper"),"flex justify-end items-center","h-9",t===u.length-1?"mb-0":"mb-2")},o.createElement("p",{className:(0,i.q)(s("labelText"),"whitespace-nowrap truncate text-tremor-default","text-tremor-content-emphasis","dark:text-dark-tremor-content-emphasis")},p(e.value)))})))});c.displayName="BarList"},50027:function(e,t,n){n.d(t,{Z:function(){return a}});var r=n(64090),o=n(54942);n(99250);let a=(0,r.createContext)(o.fr.Blue)},18174:function(e,t,n){n.d(t,{Z:function(){return r}});let r=(0,n(64090).createContext)(0)},21871:function(e,t,n){n.d(t,{Z:function(){return r}});let r=(0,n(64090).createContext)(void 0)},41213:function(e,t,n){n.d(t,{Z:function(){return r}});let r=(0,n(64090).createContext)({selectedValue:void 0,handleValueChange:void 0})},54942:function(e,t,n){n.d(t,{fr:function(){return r},m:function(){return i},u8:function(){return o},zS:function(){return a}});let 
r={Slate:"slate",Gray:"gray",Zinc:"zinc",Neutral:"neutral",Stone:"stone",Red:"red",Orange:"orange",Amber:"amber",Yellow:"yellow",Lime:"lime",Green:"green",Emerald:"emerald",Teal:"teal",Cyan:"cyan",Sky:"sky",Blue:"blue",Indigo:"indigo",Violet:"violet",Purple:"purple",Fuchsia:"fuchsia",Pink:"pink",Rose:"rose"},o={XS:"xs",SM:"sm",MD:"md",LG:"lg",XL:"xl"},a={Left:"left",Right:"right"},i={Top:"top",Bottom:"bottom"}},2898:function(e,t,n){n.d(t,{K:function(){return o},s:function(){return a}});var r=n(54942);let o={canvasBackground:50,lightBackground:100,background:500,darkBackground:600,darkestBackground:800,lightBorder:200,border:500,darkBorder:700,lightRing:200,ring:300,lightText:400,text:500,darkText:700,darkestText:900,icon:500},a=[r.fr.Blue,r.fr.Cyan,r.fr.Sky,r.fr.Indigo,r.fr.Violet,r.fr.Purple,r.fr.Fuchsia,r.fr.Slate,r.fr.Gray,r.fr.Zinc,r.fr.Neutral,r.fr.Stone,r.fr.Red,r.fr.Orange,r.fr.Amber,r.fr.Yellow,r.fr.Lime,r.fr.Green,r.fr.Emerald,r.fr.Teal,r.fr.Pink,r.fr.Rose]},99250:function(e,t,n){n.d(t,{q:function(){return j}});var r=/^\[(.+)\]$/;function o(e,t){var n=e;return t.split("-").forEach(function(e){n.nextPart.has(e)||n.nextPart.set(e,{nextPart:new Map,validators:[]}),n=n.nextPart.get(e)}),n}var a=/\s+/;function i(){for(var e,t,n=0,r="";ne&&(t=0,r=n,n=new Map)}return{get:function(e){var t=n.get(e);return void 0!==t?t:void 0!==(t=r.get(e))?(o(e,t),t):void 0},set:function(e,t){n.has(e)?n.set(e,t):o(e,t)}}}(e.cacheSize),splitModifiers:(n=1===(t=e.separator||":").length,a=t[0],i=t.length,function(e){for(var r,o=[],l=0,s=0,c=0;cs?r-s:void 0}}),...(u=e.theme,d=e.prefix,p={nextPart:new Map,validators:[]},(f=Object.entries(e.classGroups),d?f.map(function(e){return[e[0],e[1].map(function(e){return"string"==typeof e?d+e:"object"==typeof e?Object.fromEntries(Object.entries(e).map(function(e){return[d+e[0],e[1]]})):e})]}):f).forEach(function(e){var t=e[0];(function e(t,n,r,a){t.forEach(function(t){if("string"==typeof 
t){(""===t?n:o(n,t)).classGroupId=r;return}if("function"==typeof t){if(t.isThemeGetter){e(t(a),n,r,a);return}n.validators.push({validator:t,classGroupId:r});return}Object.entries(t).forEach(function(t){var i=t[0];e(t[1],o(n,i),r,a)})})})(e[1],p,t,u)}),l=e.conflictingClassGroups,c=void 0===(s=e.conflictingClassGroupModifiers)?{}:s,{getClassGroupId:function(e){var t=e.split("-");return""===t[0]&&1!==t.length&&t.shift(),function e(t,n){if(0===t.length)return n.classGroupId;var r,o=t[0],a=n.nextPart.get(o),i=a?e(t.slice(1),a):void 0;if(i)return i;if(0!==n.validators.length){var l=t.join("-");return null===(r=n.validators.find(function(e){return(0,e.validator)(l)}))||void 0===r?void 0:r.classGroupId}}(t,p)||function(e){if(r.test(e)){var t=r.exec(e)[1],n=null==t?void 0:t.substring(0,t.indexOf(":"));if(n)return"arbitrary.."+n}}(e)},getConflictingClassGroupIds:function(e,t){var n=l[e]||[];return t&&c[e]?[].concat(n,c[e]):n}})}}(s.slice(1).reduce(function(e,t){return t(e)},i()))).cache.get,n=e.cache.set,u=d,d(a)};function d(r){var o,i,l,s,c,u=t(r);if(u)return u;var d=(i=(o=e).splitModifiers,l=o.getClassGroupId,s=o.getConflictingClassGroupIds,c=new Set,r.trim().split(a).map(function(e){var t=i(e),n=t.modifiers,r=t.hasImportantModifier,o=t.baseClassName,a=t.maybePostfixModifierPosition,s=l(a?o.substring(0,a):o),c=!!a;if(!s){if(!a||!(s=l(o)))return{isTailwindClass:!1,originalClassName:e};c=!1}var u=(function(e){if(e.length<=1)return e;var t=[],n=[];return e.forEach(function(e){"["===e[0]?(t.push.apply(t,n.sort().concat([e])),n=[]):n.push(e)}),t.push.apply(t,n.sort()),t})(n).join(":");return{isTailwindClass:!0,modifierId:r?u+"!":u,classGroupId:s,originalClassName:e,hasPostfixModifier:c}}).reverse().filter(function(e){if(!e.isTailwindClass)return!0;var t=e.modifierId,n=e.classGroupId,r=e.hasPostfixModifier,o=t+n;return!c.has(o)&&(c.add(o),s(n,r).forEach(function(e){return c.add(t+e)}),!0)}).reverse().map(function(e){return e.originalClassName}).join(" "));return n(r,d),d}return 
function(){return u(i.apply(null,arguments))}}function s(e){var t=function(t){return t[e]||[]};return t.isThemeGetter=!0,t}var c=/^\[(?:([a-z-]+):)?(.+)\]$/i,u=/^\d+\/\d+$/,d=new Set(["px","full","screen"]),p=/^(\d+(\.\d+)?)?(xs|sm|md|lg|xl)$/,f=/\d+(%|px|r?em|[sdl]?v([hwib]|min|max)|pt|pc|in|cm|mm|cap|ch|ex|r?lh|cq(w|h|i|b|min|max))|\b(calc|min|max|clamp)\(.+\)|^0$/,m=/^-?((\d+)?\.?(\d+)[a-z]+|0)_-?((\d+)?\.?(\d+)[a-z]+|0)/;function g(e){return S(e)||d.has(e)||u.test(e)||h(e)}function h(e){return A(e,"length",I)}function b(e){return A(e,"size",R)}function v(e){return A(e,"position",R)}function y(e){return A(e,"url",N)}function E(e){return A(e,"number",S)}function S(e){return!Number.isNaN(Number(e))}function w(e){return e.endsWith("%")&&S(e.slice(0,-1))}function x(e){return _(e)||A(e,"number",_)}function O(e){return c.test(e)}function C(){return!0}function T(e){return p.test(e)}function k(e){return A(e,"",P)}function A(e,t,n){var r=c.exec(e);return!!r&&(r[1]?r[1]===t:n(r[2]))}function I(e){return f.test(e)}function R(){return!1}function N(e){return e.startsWith("url(")}function _(e){return Number.isInteger(Number(e))}function P(e){return m.test(e)}function M(){var 
e=s("colors"),t=s("spacing"),n=s("blur"),r=s("brightness"),o=s("borderColor"),a=s("borderRadius"),i=s("borderSpacing"),l=s("borderWidth"),c=s("contrast"),u=s("grayscale"),d=s("hueRotate"),p=s("invert"),f=s("gap"),m=s("gradientColorStops"),A=s("gradientColorStopPositions"),I=s("inset"),R=s("margin"),N=s("opacity"),_=s("padding"),P=s("saturate"),M=s("scale"),L=s("sepia"),D=s("skew"),j=s("space"),F=s("translate"),B=function(){return["auto","contain","none"]},U=function(){return["auto","hidden","clip","visible","scroll"]},Z=function(){return["auto",O,t]},z=function(){return[O,t]},H=function(){return["",g]},G=function(){return["auto",S,O]},$=function(){return["bottom","center","left","left-bottom","left-top","right","right-bottom","right-top","top"]},W=function(){return["solid","dashed","dotted","double","none"]},V=function(){return["normal","multiply","screen","overlay","darken","lighten","color-dodge","color-burn","hard-light","soft-light","difference","exclusion","hue","saturation","color","luminosity","plus-lighter"]},q=function(){return["start","end","center","between","around","evenly","stretch"]},Y=function(){return["","0",O]},K=function(){return["auto","avoid","all","avoid-page","page","left","right","column"]},X=function(){return[S,E]},Q=function(){return[S,O]};return{cacheSize:500,theme:{colors:[C],spacing:[g],blur:["none","",T,O],brightness:X(),borderColor:[e],borderRadius:["none","","full",T,O],borderSpacing:z(),borderWidth:H(),contrast:X(),grayscale:Y(),hueRotate:Q(),invert:Y(),gap:z(),gradientColorStops:[e],gradientColorStopPositions:[w,h],inset:Z(),margin:Z(),opacity:X(),padding:z(),saturate:X(),scale:X(),sepia:Y(),skew:Q(),space:z(),translate:z()},classGroups:{aspect:[{aspect:["auto","square","video",O]}],container:["container"],columns:[{columns:[T]}],"break-after":[{"break-after":K()}],"break-before":[{"break-before":K()}],"break-inside":[{"break-inside":["auto","avoid","avoid-page","avoid-column"]}],"box-decoration":[{"box-decoration":["slice","clone"]
}],box:[{box:["border","content"]}],display:["block","inline-block","inline","flex","inline-flex","table","inline-table","table-caption","table-cell","table-column","table-column-group","table-footer-group","table-header-group","table-row-group","table-row","flow-root","grid","inline-grid","contents","list-item","hidden"],float:[{float:["right","left","none"]}],clear:[{clear:["left","right","both","none"]}],isolation:["isolate","isolation-auto"],"object-fit":[{object:["contain","cover","fill","none","scale-down"]}],"object-position":[{object:[].concat($(),[O])}],overflow:[{overflow:U()}],"overflow-x":[{"overflow-x":U()}],"overflow-y":[{"overflow-y":U()}],overscroll:[{overscroll:B()}],"overscroll-x":[{"overscroll-x":B()}],"overscroll-y":[{"overscroll-y":B()}],position:["static","fixed","absolute","relative","sticky"],inset:[{inset:[I]}],"inset-x":[{"inset-x":[I]}],"inset-y":[{"inset-y":[I]}],start:[{start:[I]}],end:[{end:[I]}],top:[{top:[I]}],right:[{right:[I]}],bottom:[{bottom:[I]}],left:[{left:[I]}],visibility:["visible","invisible","collapse"],z:[{z:["auto",x]}],basis:[{basis:Z()}],"flex-direction":[{flex:["row","row-reverse","col","col-reverse"]}],"flex-wrap":[{flex:["wrap","wrap-reverse","nowrap"]}],flex:[{flex:["1","auto","initial","none",O]}],grow:[{grow:Y()}],shrink:[{shrink:Y()}],order:[{order:["first","last","none",x]}],"grid-cols":[{"grid-cols":[C]}],"col-start-end":[{col:["auto",{span:["full",x]},O]}],"col-start":[{"col-start":G()}],"col-end":[{"col-end":G()}],"grid-rows":[{"grid-rows":[C]}],"row-start-end":[{row:["auto",{span:[x]},O]}],"row-start":[{"row-start":G()}],"row-end":[{"row-end":G()}],"grid-flow":[{"grid-flow":["row","col","dense","row-dense","col-dense"]}],"auto-cols":[{"auto-cols":["auto","min","max","fr",O]}],"auto-rows":[{"auto-rows":["auto","min","max","fr",O]}],gap:[{gap:[f]}],"gap-x":[{"gap-x":[f]}],"gap-y":[{"gap-y":[f]}],"justify-content":[{justify:["normal"].concat(q())}],"justify-items":[{"justify-items":["start","end","center","stre
tch"]}],"justify-self":[{"justify-self":["auto","start","end","center","stretch"]}],"align-content":[{content:["normal"].concat(q(),["baseline"])}],"align-items":[{items:["start","end","center","baseline","stretch"]}],"align-self":[{self:["auto","start","end","center","stretch","baseline"]}],"place-content":[{"place-content":[].concat(q(),["baseline"])}],"place-items":[{"place-items":["start","end","center","baseline","stretch"]}],"place-self":[{"place-self":["auto","start","end","center","stretch"]}],p:[{p:[_]}],px:[{px:[_]}],py:[{py:[_]}],ps:[{ps:[_]}],pe:[{pe:[_]}],pt:[{pt:[_]}],pr:[{pr:[_]}],pb:[{pb:[_]}],pl:[{pl:[_]}],m:[{m:[R]}],mx:[{mx:[R]}],my:[{my:[R]}],ms:[{ms:[R]}],me:[{me:[R]}],mt:[{mt:[R]}],mr:[{mr:[R]}],mb:[{mb:[R]}],ml:[{ml:[R]}],"space-x":[{"space-x":[j]}],"space-x-reverse":["space-x-reverse"],"space-y":[{"space-y":[j]}],"space-y-reverse":["space-y-reverse"],w:[{w:["auto","min","max","fit",O,t]}],"min-w":[{"min-w":["min","max","fit",O,g]}],"max-w":[{"max-w":["0","none","full","min","max","fit","prose",{screen:[T]},T,O]}],h:[{h:[O,t,"auto","min","max","fit"]}],"min-h":[{"min-h":["min","max","fit",O,g]}],"max-h":[{"max-h":[O,t,"min","max","fit"]}],"font-size":[{text:["base",T,h]}],"font-smoothing":["antialiased","subpixel-antialiased"],"font-style":["italic","not-italic"],"font-weight":[{font:["thin","extralight","light","normal","medium","semibold","bold","extrabold","black",E]}],"font-family":[{font:[C]}],"fvn-normal":["normal-nums"],"fvn-ordinal":["ordinal"],"fvn-slashed-zero":["slashed-zero"],"fvn-figure":["lining-nums","oldstyle-nums"],"fvn-spacing":["proportional-nums","tabular-nums"],"fvn-fraction":["diagonal-fractions","stacked-fractons"],tracking:[{tracking:["tighter","tight","normal","wide","wider","widest",O]}],"line-clamp":[{"line-clamp":["none",S,E]}],leading:[{leading:["none","tight","snug","normal","relaxed","loose",O,g]}],"list-image":[{"list-image":["none",O]}],"list-style-type":[{list:["none","disc","decimal",O]}],"list-style-position
":[{list:["inside","outside"]}],"placeholder-color":[{placeholder:[e]}],"placeholder-opacity":[{"placeholder-opacity":[N]}],"text-alignment":[{text:["left","center","right","justify","start","end"]}],"text-color":[{text:[e]}],"text-opacity":[{"text-opacity":[N]}],"text-decoration":["underline","overline","line-through","no-underline"],"text-decoration-style":[{decoration:[].concat(W(),["wavy"])}],"text-decoration-thickness":[{decoration:["auto","from-font",g]}],"underline-offset":[{"underline-offset":["auto",O,g]}],"text-decoration-color":[{decoration:[e]}],"text-transform":["uppercase","lowercase","capitalize","normal-case"],"text-overflow":["truncate","text-ellipsis","text-clip"],indent:[{indent:z()}],"vertical-align":[{align:["baseline","top","middle","bottom","text-top","text-bottom","sub","super",O]}],whitespace:[{whitespace:["normal","nowrap","pre","pre-line","pre-wrap","break-spaces"]}],break:[{break:["normal","words","all","keep"]}],hyphens:[{hyphens:["none","manual","auto"]}],content:[{content:["none",O]}],"bg-attachment":[{bg:["fixed","local","scroll"]}],"bg-clip":[{"bg-clip":["border","padding","content","text"]}],"bg-opacity":[{"bg-opacity":[N]}],"bg-origin":[{"bg-origin":["border","padding","content"]}],"bg-position":[{bg:[].concat($(),[v])}],"bg-repeat":[{bg:["no-repeat",{repeat:["","x","y","round","space"]}]}],"bg-size":[{bg:["auto","cover","contain",b]}],"bg-image":[{bg:["none",{"gradient-to":["t","tr","r","br","b","bl","l","tl"]},y]}],"bg-color":[{bg:[e]}],"gradient-from-pos":[{from:[A]}],"gradient-via-pos":[{via:[A]}],"gradient-to-pos":[{to:[A]}],"gradient-from":[{from:[m]}],"gradient-via":[{via:[m]}],"gradient-to":[{to:[m]}],rounded:[{rounded:[a]}],"rounded-s":[{"rounded-s":[a]}],"rounded-e":[{"rounded-e":[a]}],"rounded-t":[{"rounded-t":[a]}],"rounded-r":[{"rounded-r":[a]}],"rounded-b":[{"rounded-b":[a]}],"rounded-l":[{"rounded-l":[a]}],"rounded-ss":[{"rounded-ss":[a]}],"rounded-se":[{"rounded-se":[a]}],"rounded-ee":[{"rounded-ee":[a]}],"rounded-e
s":[{"rounded-es":[a]}],"rounded-tl":[{"rounded-tl":[a]}],"rounded-tr":[{"rounded-tr":[a]}],"rounded-br":[{"rounded-br":[a]}],"rounded-bl":[{"rounded-bl":[a]}],"border-w":[{border:[l]}],"border-w-x":[{"border-x":[l]}],"border-w-y":[{"border-y":[l]}],"border-w-s":[{"border-s":[l]}],"border-w-e":[{"border-e":[l]}],"border-w-t":[{"border-t":[l]}],"border-w-r":[{"border-r":[l]}],"border-w-b":[{"border-b":[l]}],"border-w-l":[{"border-l":[l]}],"border-opacity":[{"border-opacity":[N]}],"border-style":[{border:[].concat(W(),["hidden"])}],"divide-x":[{"divide-x":[l]}],"divide-x-reverse":["divide-x-reverse"],"divide-y":[{"divide-y":[l]}],"divide-y-reverse":["divide-y-reverse"],"divide-opacity":[{"divide-opacity":[N]}],"divide-style":[{divide:W()}],"border-color":[{border:[o]}],"border-color-x":[{"border-x":[o]}],"border-color-y":[{"border-y":[o]}],"border-color-t":[{"border-t":[o]}],"border-color-r":[{"border-r":[o]}],"border-color-b":[{"border-b":[o]}],"border-color-l":[{"border-l":[o]}],"divide-color":[{divide:[o]}],"outline-style":[{outline:[""].concat(W())}],"outline-offset":[{"outline-offset":[O,g]}],"outline-w":[{outline:[g]}],"outline-color":[{outline:[e]}],"ring-w":[{ring:H()}],"ring-w-inset":["ring-inset"],"ring-color":[{ring:[e]}],"ring-opacity":[{"ring-opacity":[N]}],"ring-offset-w":[{"ring-offset":[g]}],"ring-offset-color":[{"ring-offset":[e]}],shadow:[{shadow:["","inner","none",T,k]}],"shadow-color":[{shadow:[C]}],opacity:[{opacity:[N]}],"mix-blend":[{"mix-blend":V()}],"bg-blend":[{"bg-blend":V()}],filter:[{filter:["","none"]}],blur:[{blur:[n]}],brightness:[{brightness:[r]}],contrast:[{contrast:[c]}],"drop-shadow":[{"drop-shadow":["","none",T,O]}],grayscale:[{grayscale:[u]}],"hue-rotate":[{"hue-rotate":[d]}],invert:[{invert:[p]}],saturate:[{saturate:[P]}],sepia:[{sepia:[L]}],"backdrop-filter":[{"backdrop-filter":["","none"]}],"backdrop-blur":[{"backdrop-blur":[n]}],"backdrop-brightness":[{"backdrop-brightness":[r]}],"backdrop-contrast":[{"backdrop-contrast":[c]}]
,"backdrop-grayscale":[{"backdrop-grayscale":[u]}],"backdrop-hue-rotate":[{"backdrop-hue-rotate":[d]}],"backdrop-invert":[{"backdrop-invert":[p]}],"backdrop-opacity":[{"backdrop-opacity":[N]}],"backdrop-saturate":[{"backdrop-saturate":[P]}],"backdrop-sepia":[{"backdrop-sepia":[L]}],"border-collapse":[{border:["collapse","separate"]}],"border-spacing":[{"border-spacing":[i]}],"border-spacing-x":[{"border-spacing-x":[i]}],"border-spacing-y":[{"border-spacing-y":[i]}],"table-layout":[{table:["auto","fixed"]}],caption:[{caption:["top","bottom"]}],transition:[{transition:["none","all","","colors","opacity","shadow","transform",O]}],duration:[{duration:Q()}],ease:[{ease:["linear","in","out","in-out",O]}],delay:[{delay:Q()}],animate:[{animate:["none","spin","ping","pulse","bounce",O]}],transform:[{transform:["","gpu","none"]}],scale:[{scale:[M]}],"scale-x":[{"scale-x":[M]}],"scale-y":[{"scale-y":[M]}],rotate:[{rotate:[x,O]}],"translate-x":[{"translate-x":[F]}],"translate-y":[{"translate-y":[F]}],"skew-x":[{"skew-x":[D]}],"skew-y":[{"skew-y":[D]}],"transform-origin":[{origin:["center","top","top-right","right","bottom-right","bottom","bottom-left","left","top-left",O]}],accent:[{accent:["auto",e]}],appearance:["appearance-none"],cursor:[{cursor:["auto","default","pointer","wait","text","move","help","not-allowed","none","context-menu","progress","cell","crosshair","vertical-text","alias","copy","no-drop","grab","grabbing","all-scroll","col-resize","row-resize","n-resize","e-resize","s-resize","w-resize","ne-resize","nw-resize","se-resize","sw-resize","ew-resize","ns-resize","nesw-resize","nwse-resize","zoom-in","zoom-out",O]}],"caret-color":[{caret:[e]}],"pointer-events":[{"pointer-events":["none","auto"]}],resize:[{resize:["none","y","x",""]}],"scroll-behavior":[{scroll:["auto","smooth"]}],"scroll-m":[{"scroll-m":z()}],"scroll-mx":[{"scroll-mx":z()}],"scroll-my":[{"scroll-my":z()}],"scroll-ms":[{"scroll-ms":z()}],"scroll-me":[{"scroll-me":z()}],"scroll-mt":[{"scroll-mt":z(
)}],"scroll-mr":[{"scroll-mr":z()}],"scroll-mb":[{"scroll-mb":z()}],"scroll-ml":[{"scroll-ml":z()}],"scroll-p":[{"scroll-p":z()}],"scroll-px":[{"scroll-px":z()}],"scroll-py":[{"scroll-py":z()}],"scroll-ps":[{"scroll-ps":z()}],"scroll-pe":[{"scroll-pe":z()}],"scroll-pt":[{"scroll-pt":z()}],"scroll-pr":[{"scroll-pr":z()}],"scroll-pb":[{"scroll-pb":z()}],"scroll-pl":[{"scroll-pl":z()}],"snap-align":[{snap:["start","end","center","align-none"]}],"snap-stop":[{snap:["normal","always"]}],"snap-type":[{snap:["none","x","y","both"]}],"snap-strictness":[{snap:["mandatory","proximity"]}],touch:[{touch:["auto","none","pinch-zoom","manipulation",{pan:["x","left","right","y","up","down"]}]}],select:[{select:["none","text","all","auto"]}],"will-change":[{"will-change":["auto","scroll","contents","transform",O]}],fill:[{fill:[e,"none"]}],"stroke-w":[{stroke:[g,E]}],stroke:[{stroke:[e,"none"]}],sr:["sr-only","not-sr-only"]},conflictingClassGroups:{overflow:["overflow-x","overflow-y"],overscroll:["overscroll-x","overscroll-y"],inset:["inset-x","inset-y","start","end","top","right","bottom","left"],"inset-x":["right","left"],"inset-y":["top","bottom"],flex:["basis","grow","shrink"],gap:["gap-x","gap-y"],p:["px","py","ps","pe","pt","pr","pb","pl"],px:["pr","pl"],py:["pt","pb"],m:["mx","my","ms","me","mt","mr","mb","ml"],mx:["mr","ml"],my:["mt","mb"],"font-size":["leading"],"fvn-normal":["fvn-ordinal","fvn-slashed-zero","fvn-figure","fvn-spacing","fvn-fraction"],"fvn-ordinal":["fvn-normal"],"fvn-slashed-zero":["fvn-normal"],"fvn-figure":["fvn-normal"],"fvn-spacing":["fvn-normal"],"fvn-fraction":["fvn-normal"],rounded:["rounded-s","rounded-e","rounded-t","rounded-r","rounded-b","rounded-l","rounded-ss","rounded-se","rounded-ee","rounded-es","rounded-tl","rounded-tr","rounded-br","rounded-bl"],"rounded-s":["rounded-ss","rounded-es"],"rounded-e":["rounded-se","rounded-ee"],"rounded-t":["rounded-tl","rounded-tr"],"rounded-r":["rounded-tr","rounded-br"],"rounded-b":["rounded-br","rounded-bl
"],"rounded-l":["rounded-tl","rounded-bl"],"border-spacing":["border-spacing-x","border-spacing-y"],"border-w":["border-w-s","border-w-e","border-w-t","border-w-r","border-w-b","border-w-l"],"border-w-x":["border-w-r","border-w-l"],"border-w-y":["border-w-t","border-w-b"],"border-color":["border-color-t","border-color-r","border-color-b","border-color-l"],"border-color-x":["border-color-r","border-color-l"],"border-color-y":["border-color-t","border-color-b"],"scroll-m":["scroll-mx","scroll-my","scroll-ms","scroll-me","scroll-mt","scroll-mr","scroll-mb","scroll-ml"],"scroll-mx":["scroll-mr","scroll-ml"],"scroll-my":["scroll-mt","scroll-mb"],"scroll-p":["scroll-px","scroll-py","scroll-ps","scroll-pe","scroll-pt","scroll-pr","scroll-pb","scroll-pl"],"scroll-px":["scroll-pr","scroll-pl"],"scroll-py":["scroll-pt","scroll-pb"]},conflictingClassGroupModifiers:{"font-size":["leading"]}}}var L=Object.prototype.hasOwnProperty,D=new Set(["string","number","boolean"]);let j=function(e){for(var t=arguments.length,n=Array(t>1?t-1:0),r=1;rr.includes(e),a=e=>e.toString();function i(e){return t=>{e.forEach(e=>{"function"==typeof e?e(t):null!=e&&(e.current=t)})}}function l(e){return t=>"tremor-".concat(e,"-").concat(t)}function s(e,t){let n=o(e);if("white"===e||"black"===e||"transparent"===e||!t||!n){let 
t=e.includes("#")||e.includes("--")||e.includes("rgb")?"[".concat(e,"]"):e;return{bgColor:"bg-".concat(t),hoverBgColor:"hover:bg-".concat(t),selectBgColor:"ui-selected:bg-".concat(t),textColor:"text-".concat(t),selectTextColor:"ui-selected:text-".concat(t),hoverTextColor:"hover:text-".concat(t),borderColor:"border-".concat(t),selectBorderColor:"ui-selected:border-".concat(t),hoverBorderColor:"hover:border-".concat(t),ringColor:"ring-".concat(t),strokeColor:"stroke-".concat(t),fillColor:"fill-".concat(t)}}return{bgColor:"bg-".concat(e,"-").concat(t),selectBgColor:"ui-selected:bg-".concat(e,"-").concat(t),hoverBgColor:"hover:bg-".concat(e,"-").concat(t),textColor:"text-".concat(e,"-").concat(t),selectTextColor:"ui-selected:text-".concat(e,"-").concat(t),hoverTextColor:"hover:text-".concat(e,"-").concat(t),borderColor:"border-".concat(e,"-").concat(t),selectBorderColor:"ui-selected:border-".concat(e,"-").concat(t),hoverBorderColor:"hover:border-".concat(e,"-").concat(t),ringColor:"ring-".concat(e,"-").concat(t),strokeColor:"stroke-".concat(e,"-").concat(t),fillColor:"fill-".concat(e,"-").concat(t)}}},21467:function(e,t,n){n.d(t,{i:function(){return l}});var r=n(64090),o=n(44329),a=n(54165),i=n(57499);function l(e){return t=>r.createElement(a.ZP,{theme:{token:{motion:!1,zIndexPopupBase:0}}},r.createElement(e,Object.assign({},t)))}t.Z=(e,t,n,a)=>l(l=>{let{prefixCls:s,style:c}=l,u=r.useRef(null),[d,p]=r.useState(0),[f,m]=r.useState(0),[g,h]=(0,o.Z)(!1,{value:l.open}),{getPrefixCls:b}=r.useContext(i.E_),v=b(t||"select",s);r.useEffect(()=>{if(h(!0),"undefined"!=typeof ResizeObserver){let e=new ResizeObserver(e=>{let t=e[0].target;p(t.offsetHeight+8),m(t.offsetWidth)}),t=setInterval(()=>{var r;let o=n?".".concat(n(v)):".".concat(v,"-dropdown"),a=null===(r=u.current)||void 0===r?void 0:r.querySelector(o);a&&(clearInterval(t),e.observe(a))},10);return()=>{clearInterval(t),e.disconnect()}}},[]);let 
y=Object.assign(Object.assign({},l),{style:Object.assign(Object.assign({},c),{margin:0}),open:g,visible:g,getPopupContainer:()=>u.current});return a&&(y=a(y)),r.createElement("div",{ref:u,style:{paddingBottom:d,position:"relative",minWidth:f}},r.createElement(e,Object.assign({},y)))})},51761:function(e,t,n){n.d(t,{Cn:function(){return c},u6:function(){return i}});var r=n(64090),o=n(24750),a=n(86718);let i=1e3,l={Modal:100,Drawer:100,Popover:100,Popconfirm:100,Tooltip:100,Tour:100},s={SelectLike:50,Dropdown:50,DatePicker:50,Menu:50,ImagePreview:1};function c(e,t){let[,n]=(0,o.ZP)(),c=r.useContext(a.Z);if(void 0!==t)return[t,t];let u=null!=c?c:0;return e in l?(u+=(c?0:n.zIndexPopupBase)+l[e],u=Math.min(u,n.zIndexPopupBase+i)):u+=s[e],[void 0===c?t:u,u]}},47387:function(e,t,n){n.d(t,{m:function(){return l}});let r=()=>({height:0,opacity:0}),o=e=>{let{scrollHeight:t}=e;return{height:t,opacity:1}},a=e=>({height:e?e.offsetHeight:0}),i=(e,t)=>(null==t?void 0:t.deadline)===!0||"height"===t.propertyName,l=(e,t,n)=>void 0!==n?n:"".concat(e,"-").concat(t);t.Z=function(){let e=arguments.length>0&&void 0!==arguments[0]?arguments[0]:"ant";return{motionName:"".concat(e,"-motion-collapse"),onAppearStart:r,onEnterStart:r,onAppearActive:o,onEnterActive:o,onLeaveStart:a,onLeaveActive:r,onAppearEnd:i,onEnterEnd:i,onLeaveEnd:i,motionDeadline:500}}},67966:function(e,t,n){n.d(t,{Z:function(){return l}});var r=n(89869);let 
o={left:{points:["cr","cl"]},right:{points:["cl","cr"]},top:{points:["bc","tc"]},bottom:{points:["tc","bc"]},topLeft:{points:["bl","tl"]},leftTop:{points:["tr","tl"]},topRight:{points:["br","tr"]},rightTop:{points:["tl","tr"]},bottomRight:{points:["tr","br"]},rightBottom:{points:["bl","br"]},bottomLeft:{points:["tl","bl"]},leftBottom:{points:["br","bl"]}},a={topLeft:{points:["bl","tc"]},leftTop:{points:["tr","cl"]},topRight:{points:["br","tc"]},rightTop:{points:["tl","cr"]},bottomRight:{points:["tr","bc"]},rightBottom:{points:["bl","cr"]},bottomLeft:{points:["tl","bc"]},leftBottom:{points:["br","cl"]}},i=new Set(["topLeft","topRight","bottomLeft","bottomRight","leftTop","leftBottom","rightTop","rightBottom"]);function l(e){let{arrowWidth:t,autoAdjustOverflow:n,arrowPointAtCenter:l,offset:s,borderRadius:c,visibleFirst:u}=e,d=t/2,p={};return Object.keys(o).forEach(e=>{let f=Object.assign(Object.assign({},l&&a[e]||o[e]),{offset:[0,0],dynamicInset:!0});switch(p[e]=f,i.has(e)&&(f.autoArrow=!1),e){case"top":case"topLeft":case"topRight":f.offset[1]=-d-s;break;case"bottom":case"bottomLeft":case"bottomRight":f.offset[1]=d+s;break;case"left":case"leftTop":case"leftBottom":f.offset[0]=-d-s;break;case"right":case"rightTop":case"rightBottom":f.offset[0]=d+s}let m=(0,r.wZ)({contentRadius:c,limitVerticalRadius:!0});if(l)switch(e){case"topLeft":case"bottomLeft":f.offset[0]=-m.arrowOffsetHorizontal-d;break;case"topRight":case"bottomRight":f.offset[0]=m.arrowOffsetHorizontal+d;break;case"leftTop":case"rightTop":f.offset[1]=-m.arrowOffsetHorizontal-d;break;case"leftBottom":case"rightBottom":f.offset[1]=m.arrowOffsetHorizontal+d}f.overflow=function(e,t,n,r){if(!1===r)return{adjustX:!1,adjustY:!1};let o={};switch(e){case"top":case"bottom":o.shiftX=2*t.arrowOffsetHorizontal+n,o.shiftY=!0,o.adjustY=!0;break;case"left":case"right":o.shiftY=2*t.arrowOffsetVertical+n,o.shiftX=!0,o.adjustX=!0}let a=Object.assign(Object.assign({},o),r&&"object"==typeof r?r:{});return 
a.shiftX||(a.adjustX=!0),a.shiftY||(a.adjustY=!0),a}(e,m,t,n),u&&(f.htmlRegion="visibleFirst")}),p}},65823:function(e,t,n){n.d(t,{M2:function(){return i},Tm:function(){return l},l$:function(){return a}});var r,o=n(64090);let{isValidElement:a}=r||(r=n.t(o,2));function i(e){return e&&a(e)&&e.type===o.Fragment}function l(e,t){return a(e)?o.cloneElement(e,"function"==typeof t?t(e.props||{}):t):e}},47794:function(e,t,n){n.d(t,{F:function(){return i},Z:function(){return a}});var r=n(16480),o=n.n(r);function a(e,t,n){return o()({["".concat(e,"-status-success")]:"success"===t,["".concat(e,"-status-warning")]:"warning"===t,["".concat(e,"-status-error")]:"error"===t,["".concat(e,"-status-validating")]:"validating"===t,["".concat(e,"-has-feedback")]:n})}let i=(e,t)=>t||e},76564:function(e,t,n){n.d(t,{G8:function(){return a},ln:function(){return i}});var r=n(64090);function o(){}n(53850);let a=r.createContext({}),i=()=>{let e=()=>{};return e.deprecated=o,e}},86718:function(e,t,n){let r=n(64090).createContext(void 0);t.Z=r},51350:function(e,t,n){n.d(t,{Te:function(){return c},aG:function(){return i},hU:function(){return u},nx:function(){return l}});var r=n(64090),o=n(65823);let a=/^[\u4e00-\u9fa5]{2}$/,i=a.test.bind(a);function l(e){return"danger"===e?{danger:!0}:{type:e}}function s(e){return"string"==typeof e}function c(e){return"text"===e||"link"===e}function u(e,t){let n=!1,a=[];return r.Children.forEach(e,e=>{let t=typeof e,r="string"===t||"number"===t;if(n&&r){let t=a.length-1,n=a[t];a[t]="".concat(n).concat(e)}else a.push(e);n=r}),r.Children.map(a,e=>(function(e,t){if(null==e)return;let n=t?" 
":"";return"string"!=typeof e&&"number"!=typeof e&&s(e.type)&&i(e.props.children)?(0,o.Tm)(e,{children:e.props.children.split("").join(n)}):s(e)?i(e)?r.createElement("span",null,e.split("").join(n)):r.createElement("span",null,e):(0,o.M2)(e)?r.createElement("span",null,e):e})(e,t))}},1861:function(e,t,n){n.d(t,{ZP:function(){return eb}});var r=n(64090),o=n(16480),a=n.n(o),i=n(35704),l=n(74084),s=n(73193),c=n(57499),u=n(65823),d=n(76585);let p=e=>{let{componentCls:t,colorPrimary:n}=e;return{[t]:{position:"absolute",background:"transparent",pointerEvents:"none",boxSizing:"border-box",color:"var(--wave-color, ".concat(n,")"),boxShadow:"0 0 0 0 currentcolor",opacity:.2,"&.wave-motion-appear":{transition:["box-shadow 0.4s ".concat(e.motionEaseOutCirc),"opacity 2s ".concat(e.motionEaseOutCirc)].join(","),"&-active":{boxShadow:"0 0 0 6px currentcolor",opacity:0},"&.wave-quick":{transition:["box-shadow 0.3s ".concat(e.motionEaseInOut),"opacity 0.35s ".concat(e.motionEaseInOut)].join(",")}}}}};var f=(0,d.ZP)("Wave",e=>[p(e)]),m=n(48563),g=n(19223),h=n(49367),b=n(37274);function v(e){return e&&"#fff"!==e&&"#ffffff"!==e&&"rgb(255, 255, 255)"!==e&&"rgba(255, 255, 255, 1)"!==e&&function(e){let t=(e||"").match(/rgba?\((\d*), (\d*), (\d*)(, [\d.]*)?\)/);return!t||!t[1]||!t[2]||!t[3]||!(t[1]===t[2]&&t[2]===t[3])}(e)&&!/rgba\((?:\d*, ){3}0\)/.test(e)&&"transparent"!==e}let y="ant-wave-target";function E(e){return Number.isNaN(e)?0:e}let S=e=>{let{className:t,target:n,component:o}=e,i=r.useRef(null),[l,s]=r.useState(null),[c,u]=r.useState([]),[d,p]=r.useState(0),[f,m]=r.useState(0),[S,w]=r.useState(0),[x,O]=r.useState(0),[C,T]=r.useState(!1),k={left:d,top:f,width:S,height:x,borderRadius:c.map(e=>"".concat(e,"px")).join(" ")};function A(){let e=getComputedStyle(n);s(function(e){let{borderTopColor:t,borderColor:n,backgroundColor:r}=getComputedStyle(e);return v(t)?t:v(n)?n:v(r)?r:null}(n));let 
t="static"===e.position,{borderLeftWidth:r,borderTopWidth:o}=e;p(t?n.offsetLeft:E(-parseFloat(r))),m(t?n.offsetTop:E(-parseFloat(o))),w(n.offsetWidth),O(n.offsetHeight);let{borderTopLeftRadius:a,borderTopRightRadius:i,borderBottomLeftRadius:l,borderBottomRightRadius:c}=e;u([a,i,c,l].map(e=>E(parseFloat(e))))}if(l&&(k["--wave-color"]=l),r.useEffect(()=>{if(n){let e;let t=(0,g.Z)(()=>{A(),T(!0)});return"undefined"!=typeof ResizeObserver&&(e=new ResizeObserver(A)).observe(n),()=>{g.Z.cancel(t),null==e||e.disconnect()}}},[]),!C)return null;let I=("Checkbox"===o||"Radio"===o)&&(null==n?void 0:n.classList.contains(y));return r.createElement(h.ZP,{visible:!0,motionAppear:!0,motionName:"wave-motion",motionDeadline:5e3,onAppearEnd:(e,t)=>{var n;if(t.deadline||"opacity"===t.propertyName){let e=null===(n=i.current)||void 0===n?void 0:n.parentElement;(0,b.v)(e).then(()=>{null==e||e.remove()})}return!1}},e=>{let{className:n}=e;return r.createElement("div",{ref:i,className:a()(t,{"wave-quick":I},n),style:k})})};var w=(e,t)=>{var n;let{component:o}=t;if("Checkbox"===o&&!(null===(n=e.querySelector("input"))||void 0===n?void 0:n.checked))return;let a=document.createElement("div");a.style.position="absolute",a.style.left="0px",a.style.top="0px",null==e||e.insertBefore(a,null==e?void 0:e.firstChild),(0,b.s)(r.createElement(S,Object.assign({},t,{target:e})),a)},x=n(24750),O=e=>{let{children:t,disabled:n,component:o}=e,{getPrefixCls:i}=(0,r.useContext)(c.E_),d=(0,r.useRef)(null),p=i("wave"),[,h]=f(p),b=function(e,t,n){let{wave:o}=r.useContext(c.E_),[,a,i]=(0,x.ZP)(),l=(0,m.zX)(r=>{let l=e.current;if((null==o?void 0:o.disabled)||!l)return;let s=l.querySelector(".".concat(y))||l,{showEffect:c}=o||{};(c||w)(s,{className:t,token:a,component:n,event:r,hashId:i})}),s=r.useRef();return e=>{g.Z.cancel(s.current),s.current=(0,g.Z)(()=>{l(e)})}}(d,a()(p,h),o);if(r.useEffect(()=>{let e=d.current;if(!e||1!==e.nodeType||n)return;let 
t=t=>{!(0,s.Z)(t.target)||!e.getAttribute||e.getAttribute("disabled")||e.disabled||e.className.includes("disabled")||e.className.includes("-leave")||b(t)};return e.addEventListener("click",t,!0),()=>{e.removeEventListener("click",t,!0)}},[n]),!r.isValidElement(t))return null!=t?t:null;let v=(0,l.Yr)(t)?(0,l.sQ)(t.ref,d):d;return(0,u.Tm)(t,{ref:v})},C=n(17094),T=n(10693),k=n(92801),A=function(e,t){var n={};for(var r in e)Object.prototype.hasOwnProperty.call(e,r)&&0>t.indexOf(r)&&(n[r]=e[r]);if(null!=e&&"function"==typeof Object.getOwnPropertySymbols)for(var o=0,r=Object.getOwnPropertySymbols(e);ot.indexOf(r[o])&&Object.prototype.propertyIsEnumerable.call(e,r[o])&&(n[r[o]]=e[r[o]]);return n};let I=r.createContext(void 0);var R=n(51350);let N=(0,r.forwardRef)((e,t)=>{let{className:n,style:o,children:i,prefixCls:l}=e,s=a()("".concat(l,"-icon"),n);return r.createElement("span",{ref:t,className:s,style:o},i)});var _=n(66155);let P=(0,r.forwardRef)((e,t)=>{let{prefixCls:n,className:o,style:i,iconClassName:l}=e,s=a()("".concat(n,"-loading-icon"),o);return r.createElement(N,{prefixCls:n,className:s,style:i,ref:t},r.createElement(_.Z,{className:l}))}),M=()=>({width:0,opacity:0,transform:"scale(0)"}),L=e=>({width:e.scrollWidth,opacity:1,transform:"scale(1)"});var D=e=>{let{prefixCls:t,loading:n,existIcon:o,className:a,style:i}=e,l=!!n;return o?r.createElement(P,{prefixCls:t,className:a,style:i}):r.createElement(h.ZP,{visible:l,motionName:"".concat(t,"-loading-icon-motion"),motionLeave:l,removeOnLeave:!0,onAppearStart:M,onAppearActive:L,onEnterStart:M,onEnterActive:L,onLeaveStart:L,onLeaveActive:M},(e,n)=>{let{className:o,style:l}=e;return r.createElement(P,{prefixCls:t,className:a,style:Object.assign(Object.assign({},i),l),ref:n,iconClassName:o})})},j=n(8985),F=n(11303),B=n(80316);let U=(e,t)=>({["> span, > ".concat(e)]:{"&:not(:last-child)":{["&, & > ".concat(e)]:{"&:not(:disabled)":{borderInlineEndColor:t}}},"&:not(:first-child)":{["&, & > 
".concat(e)]:{"&:not(:disabled)":{borderInlineStartColor:t}}}}});var Z=e=>{let{componentCls:t,fontSize:n,lineWidth:r,groupBorderColor:o,colorErrorHover:a}=e;return{["".concat(t,"-group")]:[{position:"relative",display:"inline-flex",["> span, > ".concat(t)]:{"&:not(:last-child)":{["&, & > ".concat(t)]:{borderStartEndRadius:0,borderEndEndRadius:0}},"&:not(:first-child)":{marginInlineStart:e.calc(r).mul(-1).equal(),["&, & > ".concat(t)]:{borderStartStartRadius:0,borderEndStartRadius:0}}},[t]:{position:"relative",zIndex:1,"&:hover,\n &:focus,\n &:active":{zIndex:2},"&[disabled]":{zIndex:0}},["".concat(t,"-icon-only")]:{fontSize:n}},U("".concat(t,"-primary"),o),U("".concat(t,"-danger"),a)]}},z=n(49202);let H=e=>{let{paddingInline:t,onlyIconSize:n,paddingBlock:r}=e;return(0,B.TS)(e,{buttonPaddingHorizontal:t,buttonPaddingVertical:r,buttonIconOnlyFontSize:n})},G=e=>{var t,n,r,o,a,i;let l=null!==(t=e.contentFontSize)&&void 0!==t?t:e.fontSize,s=null!==(n=e.contentFontSizeSM)&&void 0!==n?n:e.fontSize,c=null!==(r=e.contentFontSizeLG)&&void 0!==r?r:e.fontSizeLG,u=null!==(o=e.contentLineHeight)&&void 0!==o?o:(0,z.D)(l),d=null!==(a=e.contentLineHeightSM)&&void 0!==a?a:(0,z.D)(s),p=null!==(i=e.contentLineHeightLG)&&void 0!==i?i:(0,z.D)(c);return{fontWeight:400,defaultShadow:"0 ".concat(e.controlOutlineWidth,"px 0 ").concat(e.controlTmpOutline),primaryShadow:"0 ".concat(e.controlOutlineWidth,"px 0 ").concat(e.controlOutline),dangerShadow:"0 ".concat(e.controlOutlineWidth,"px 0 
").concat(e.colorErrorOutline),primaryColor:e.colorTextLightSolid,dangerColor:e.colorTextLightSolid,borderColorDisabled:e.colorBorder,defaultGhostColor:e.colorBgContainer,ghostBg:"transparent",defaultGhostBorderColor:e.colorBgContainer,paddingInline:e.paddingContentHorizontal-e.lineWidth,paddingInlineLG:e.paddingContentHorizontal-e.lineWidth,paddingInlineSM:8-e.lineWidth,onlyIconSize:e.fontSizeLG,onlyIconSizeSM:e.fontSizeLG-2,onlyIconSizeLG:e.fontSizeLG+2,groupBorderColor:e.colorPrimaryHover,linkHoverBg:"transparent",textHoverBg:e.colorBgTextHover,defaultColor:e.colorText,defaultBg:e.colorBgContainer,defaultBorderColor:e.colorBorder,defaultBorderColorDisabled:e.colorBorder,contentFontSize:l,contentFontSizeSM:s,contentFontSizeLG:c,contentLineHeight:u,contentLineHeightSM:d,contentLineHeightLG:p,paddingBlock:Math.max((e.controlHeight-l*u)/2-e.lineWidth,0),paddingBlockSM:Math.max((e.controlHeightSM-s*d)/2-e.lineWidth,0),paddingBlockLG:Math.max((e.controlHeightLG-c*p)/2-e.lineWidth,0)}},$=e=>{let{componentCls:t,iconCls:n,fontWeight:r}=e;return{[t]:{outline:"none",position:"relative",display:"inline-block",fontWeight:r,whiteSpace:"nowrap",textAlign:"center",backgroundImage:"none",background:"transparent",border:"".concat((0,j.bf)(e.lineWidth)," ").concat(e.lineType," transparent"),cursor:"pointer",transition:"all ".concat(e.motionDurationMid," ").concat(e.motionEaseInOut),userSelect:"none",touchAction:"manipulation",color:e.colorText,"&:disabled > *":{pointerEvents:"none"},"> span":{display:"inline-block"},["".concat(t,"-icon")]:{lineHeight:0},["> ".concat(n," + span, > span + ").concat(n)]:{marginInlineStart:e.marginXS},["&:not(".concat(t,"-icon-only) > ").concat(t,"-icon")]:{["&".concat(t,"-loading-icon, &:not(:last-child)")]:{marginInlineEnd:e.marginXS}},"> a":{color:"currentColor"},"&:not(:disabled)":Object.assign({},(0,F.Qy)(e)),["&".concat(t,"-two-chinese-chars::first-letter")]:{letterSpacing:"0.34em"},["&".concat(t,"-two-chinese-chars > 
*:not(").concat(n,")")]:{marginInlineEnd:"-0.34em",letterSpacing:"0.34em"},["&-icon-only".concat(t,"-compact-item")]:{flex:"none"}}}},W=(e,t,n)=>({["&:not(:disabled):not(".concat(e,"-disabled)")]:{"&:hover":t,"&:active":n}}),V=e=>({minWidth:e.controlHeight,paddingInlineStart:0,paddingInlineEnd:0,borderRadius:"50%"}),q=e=>({borderRadius:e.controlHeight,paddingInlineStart:e.calc(e.controlHeight).div(2).equal(),paddingInlineEnd:e.calc(e.controlHeight).div(2).equal()}),Y=e=>({cursor:"not-allowed",borderColor:e.borderColorDisabled,color:e.colorTextDisabled,background:e.colorBgContainerDisabled,boxShadow:"none"}),K=(e,t,n,r,o,a,i,l)=>({["&".concat(e,"-background-ghost")]:Object.assign(Object.assign({color:n||void 0,background:t,borderColor:r||void 0,boxShadow:"none"},W(e,Object.assign({background:t},i),Object.assign({background:t},l))),{"&:disabled":{cursor:"not-allowed",color:o||void 0,borderColor:a||void 0}})}),X=e=>({["&:disabled, &".concat(e.componentCls,"-disabled")]:Object.assign({},Y(e))}),Q=e=>Object.assign({},X(e)),J=e=>({["&:disabled, 
&".concat(e.componentCls,"-disabled")]:{cursor:"not-allowed",color:e.colorTextDisabled}}),ee=e=>Object.assign(Object.assign(Object.assign(Object.assign(Object.assign({},Q(e)),{background:e.defaultBg,borderColor:e.defaultBorderColor,color:e.defaultColor,boxShadow:e.defaultShadow}),W(e.componentCls,{color:e.colorPrimaryHover,borderColor:e.colorPrimaryHover},{color:e.colorPrimaryActive,borderColor:e.colorPrimaryActive})),K(e.componentCls,e.ghostBg,e.defaultGhostColor,e.defaultGhostBorderColor,e.colorTextDisabled,e.colorBorder)),{["&".concat(e.componentCls,"-dangerous")]:Object.assign(Object.assign(Object.assign({color:e.colorError,borderColor:e.colorError},W(e.componentCls,{color:e.colorErrorHover,borderColor:e.colorErrorBorderHover},{color:e.colorErrorActive,borderColor:e.colorErrorActive})),K(e.componentCls,e.ghostBg,e.colorError,e.colorError,e.colorTextDisabled,e.colorBorder)),X(e))}),et=e=>Object.assign(Object.assign(Object.assign(Object.assign(Object.assign({},Q(e)),{color:e.primaryColor,background:e.colorPrimary,boxShadow:e.primaryShadow}),W(e.componentCls,{color:e.colorTextLightSolid,background:e.colorPrimaryHover},{color:e.colorTextLightSolid,background:e.colorPrimaryActive})),K(e.componentCls,e.ghostBg,e.colorPrimary,e.colorPrimary,e.colorTextDisabled,e.colorBorder,{color:e.colorPrimaryHover,borderColor:e.colorPrimaryHover},{color:e.colorPrimaryActive,borderColor:e.colorPrimaryActive})),{["&".concat(e.componentCls,"-dangerous")]:Object.assign(Object.assign(Object.assign({background:e.colorError,boxShadow:e.dangerShadow,color:e.dangerColor},W(e.componentCls,{background:e.colorErrorHover},{background:e.colorErrorActive})),K(e.componentCls,e.ghostBg,e.colorError,e.colorError,e.colorTextDisabled,e.colorBorder,{color:e.colorErrorHover,borderColor:e.colorErrorHover},{color:e.colorErrorActive,borderColor:e.colorErrorActive})),X(e))}),en=e=>Object.assign(Object.assign({},ee(e)),{borderStyle:"dashed"}),er=e=>Object.assign(Object.assign(Object.assign({color:e.colorLink}
,W(e.componentCls,{color:e.colorLinkHover,background:e.linkHoverBg},{color:e.colorLinkActive})),J(e)),{["&".concat(e.componentCls,"-dangerous")]:Object.assign(Object.assign({color:e.colorError},W(e.componentCls,{color:e.colorErrorHover},{color:e.colorErrorActive})),J(e))}),eo=e=>Object.assign(Object.assign(Object.assign({},W(e.componentCls,{color:e.colorText,background:e.textHoverBg},{color:e.colorText,background:e.colorBgTextActive})),J(e)),{["&".concat(e.componentCls,"-dangerous")]:Object.assign(Object.assign({color:e.colorError},J(e)),W(e.componentCls,{color:e.colorErrorHover,background:e.colorErrorBg},{color:e.colorErrorHover,background:e.colorErrorBg}))}),ea=e=>{let{componentCls:t}=e;return{["".concat(t,"-default")]:ee(e),["".concat(t,"-primary")]:et(e),["".concat(t,"-dashed")]:en(e),["".concat(t,"-link")]:er(e),["".concat(t,"-text")]:eo(e),["".concat(t,"-ghost")]:K(e.componentCls,e.ghostBg,e.colorBgContainer,e.colorBgContainer,e.colorTextDisabled,e.colorBorder)}},ei=function(e){let t=arguments.length>1&&void 0!==arguments[1]?arguments[1]:"",{componentCls:n,controlHeight:r,fontSize:o,lineHeight:a,borderRadius:i,buttonPaddingHorizontal:l,iconCls:s,buttonPaddingVertical:c}=e,u="".concat(n,"-icon-only");return[{["".concat(n).concat(t)]:{fontSize:o,lineHeight:a,height:r,padding:"".concat((0,j.bf)(c)," ").concat((0,j.bf)(l)),borderRadius:i,["&".concat(u)]:{width:r,paddingInlineStart:0,paddingInlineEnd:0,["&".concat(n,"-round")]:{width:"auto"},[s]:{fontSize:e.buttonIconOnlyFontSize}},["&".concat(n,"-loading")]:{opacity:e.opacityLoading,cursor:"default"},["".concat(n,"-loading-icon")]:{transition:"width ".concat(e.motionDurationSlow," ").concat(e.motionEaseInOut,", opacity ").concat(e.motionDurationSlow," 
").concat(e.motionEaseInOut)}}},{["".concat(n).concat(n,"-circle").concat(t)]:V(e)},{["".concat(n).concat(n,"-round").concat(t)]:q(e)}]},el=e=>ei((0,B.TS)(e,{fontSize:e.contentFontSize,lineHeight:e.contentLineHeight})),es=e=>ei((0,B.TS)(e,{controlHeight:e.controlHeightSM,fontSize:e.contentFontSizeSM,lineHeight:e.contentLineHeightSM,padding:e.paddingXS,buttonPaddingHorizontal:e.paddingInlineSM,buttonPaddingVertical:e.paddingBlockSM,borderRadius:e.borderRadiusSM,buttonIconOnlyFontSize:e.onlyIconSizeSM}),"".concat(e.componentCls,"-sm")),ec=e=>ei((0,B.TS)(e,{controlHeight:e.controlHeightLG,fontSize:e.contentFontSizeLG,lineHeight:e.contentLineHeightLG,buttonPaddingHorizontal:e.paddingInlineLG,buttonPaddingVertical:e.paddingBlockLG,borderRadius:e.borderRadiusLG,buttonIconOnlyFontSize:e.onlyIconSizeLG}),"".concat(e.componentCls,"-lg")),eu=e=>{let{componentCls:t}=e;return{[t]:{["&".concat(t,"-block")]:{width:"100%"}}}};var ed=(0,d.I$)("Button",e=>{let t=H(e);return[$(t),es(t),el(t),ec(t),eu(t),ea(t),Z(t)]},G,{unitless:{fontWeight:!0,contentLineHeight:!0,contentLineHeightSM:!0,contentLineHeightLG:!0}}),ep=n(12288);let ef=e=>{let{componentCls:t,calc:n}=e;return{[t]:{["&-compact-item".concat(t,"-primary")]:{["&:not([disabled]) + ".concat(t,"-compact-item").concat(t,"-primary:not([disabled])")]:{position:"relative","&:before":{position:"absolute",top:n(e.lineWidth).mul(-1).equal(),insetInlineStart:n(e.lineWidth).mul(-1).equal(),display:"inline-block",width:e.lineWidth,height:"calc(100% + ".concat((0,j.bf)(e.lineWidth)," * 2)"),backgroundColor:e.colorPrimaryHover,content:'""'}}},"&-compact-vertical-item":{["&".concat(t,"-primary")]:{["&:not([disabled]) + ".concat(t,"-compact-vertical-item").concat(t,"-primary:not([disabled])")]:{position:"relative","&:before":{position:"absolute",top:n(e.lineWidth).mul(-1).equal(),insetInlineStart:n(e.lineWidth).mul(-1).equal(),display:"inline-block",width:"calc(100% + ".concat((0,j.bf)(e.lineWidth)," * 
2)"),height:e.lineWidth,backgroundColor:e.colorPrimaryHover,content:'""'}}}}}}};var em=(0,d.bk)(["Button","compact"],e=>{let t=H(e);return[(0,ep.c)(t),function(e){var t;let n="".concat(e.componentCls,"-compact-vertical");return{[n]:Object.assign(Object.assign({},{["&-item:not(".concat(n,"-last-item)")]:{marginBottom:e.calc(e.lineWidth).mul(-1).equal()},"&-item":{"&:hover,&:focus,&:active":{zIndex:2},"&[disabled]":{zIndex:0}}}),(t=e.componentCls,{["&-item:not(".concat(n,"-first-item):not(").concat(n,"-last-item)")]:{borderRadius:0},["&-item".concat(n,"-first-item:not(").concat(n,"-last-item)")]:{["&, &".concat(t,"-sm, &").concat(t,"-lg")]:{borderEndEndRadius:0,borderEndStartRadius:0}},["&-item".concat(n,"-last-item:not(").concat(n,"-first-item)")]:{["&, &".concat(t,"-sm, &").concat(t,"-lg")]:{borderStartStartRadius:0,borderStartEndRadius:0}}}))}}(t),ef(t)]},G),eg=function(e,t){var n={};for(var r in e)Object.prototype.hasOwnProperty.call(e,r)&&0>t.indexOf(r)&&(n[r]=e[r]);if(null!=e&&"function"==typeof Object.getOwnPropertySymbols)for(var o=0,r=Object.getOwnPropertySymbols(e);ot.indexOf(r[o])&&Object.prototype.propertyIsEnumerable.call(e,r[o])&&(n[r[o]]=e[r[o]]);return n};let eh=(0,r.forwardRef)((e,t)=>{var n,o;let{loading:s=!1,prefixCls:u,type:d="default",danger:p,shape:f="default",size:m,styles:g,disabled:h,className:b,rootClassName:v,children:y,icon:E,ghost:S=!1,block:w=!1,htmlType:x="button",classNames:A,style:_={}}=e,P=eg(e,["loading","prefixCls","type","danger","shape","size","styles","disabled","className","rootClassName","children","icon","ghost","block","htmlType","classNames","style"]),{getPrefixCls:M,autoInsertSpaceInButton:L,direction:j,button:F}=(0,r.useContext)(c.E_),B=M("btn",u),[U,Z,z]=ed(B),H=(0,r.useContext)(C.Z),G=null!=h?h:H,$=(0,r.useContext)(I),W=(0,r.useMemo)(()=>(function(e){if("object"==typeof e&&e){let t=null==e?void 0:e.delay;return{loading:(t=Number.isNaN(t)||"number"!=typeof 
t?0:t)<=0,delay:t}}return{loading:!!e,delay:0}})(s),[s]),[V,q]=(0,r.useState)(W.loading),[Y,K]=(0,r.useState)(!1),X=(0,r.createRef)(),Q=(0,l.sQ)(t,X),J=1===r.Children.count(y)&&!E&&!(0,R.Te)(d);(0,r.useEffect)(()=>{let e=null;return W.delay>0?e=setTimeout(()=>{e=null,q(!0)},W.delay):q(W.loading),function(){e&&(clearTimeout(e),e=null)}},[W]),(0,r.useEffect)(()=>{if(!Q||!Q.current||!1===L)return;let e=Q.current.textContent;J&&(0,R.aG)(e)?Y||K(!0):Y&&K(!1)},[Q]);let ee=t=>{let{onClick:n}=e;if(V||G){t.preventDefault();return}null==n||n(t)},et=!1!==L,{compactSize:en,compactItemClassnames:er}=(0,k.ri)(B,j),eo=(0,T.Z)(e=>{var t,n;return null!==(n=null!==(t=null!=m?m:en)&&void 0!==t?t:$)&&void 0!==n?n:e}),ea=eo&&({large:"lg",small:"sm",middle:void 0})[eo]||"",ei=V?"loading":E,el=(0,i.Z)(P,["navigate"]),es=a()(B,Z,z,{["".concat(B,"-").concat(f)]:"default"!==f&&f,["".concat(B,"-").concat(d)]:d,["".concat(B,"-").concat(ea)]:ea,["".concat(B,"-icon-only")]:!y&&0!==y&&!!ei,["".concat(B,"-background-ghost")]:S&&!(0,R.Te)(d),["".concat(B,"-loading")]:V,["".concat(B,"-two-chinese-chars")]:Y&&et&&!V,["".concat(B,"-block")]:w,["".concat(B,"-dangerous")]:!!p,["".concat(B,"-rtl")]:"rtl"===j},er,b,v,null==F?void 0:F.className),ec=Object.assign(Object.assign({},null==F?void 0:F.style),_),eu=a()(null==A?void 0:A.icon,null===(n=null==F?void 0:F.classNames)||void 0===n?void 0:n.icon),ep=Object.assign(Object.assign({},(null==g?void 0:g.icon)||{}),(null===(o=null==F?void 0:F.styles)||void 0===o?void 0:o.icon)||{}),ef=E&&!V?r.createElement(N,{prefixCls:B,className:eu,style:ep},E):r.createElement(D,{existIcon:!!E,prefixCls:B,loading:!!V}),eh=y||0===y?(0,R.hU)(y,J&&et):null;if(void 0!==el.href)return U(r.createElement("a",Object.assign({},el,{className:a()(es,{["".concat(B,"-disabled")]:G}),href:G?void 0:el.href,style:ec,onClick:ee,ref:Q,tabIndex:G?-1:0}),ef,eh));let 
eb=r.createElement("button",Object.assign({},P,{type:x,className:es,style:ec,onClick:ee,disabled:G,ref:Q}),ef,eh,!!er&&r.createElement(em,{key:"compact",prefixCls:B}));return(0,R.Te)(d)||(eb=r.createElement(O,{component:"Button",disabled:!!V},eb)),U(eb)});eh.Group=e=>{let{getPrefixCls:t,direction:n}=r.useContext(c.E_),{prefixCls:o,size:i,className:l}=e,s=A(e,["prefixCls","size","className"]),u=t("btn-group",o),[,,d]=(0,x.ZP)(),p="";switch(i){case"large":p="lg";break;case"small":p="sm"}let f=a()(u,{["".concat(u,"-").concat(p)]:p,["".concat(u,"-rtl")]:"rtl"===n},l,d);return r.createElement(I.Provider,{value:i},r.createElement("div",Object.assign({},s,{className:f})))},eh.__ANT_BUTTON=!0;var eb=eh},28683:function(e,t,n){var r=n(90791);t.Z=r.Z},17094:function(e,t,n){n.d(t,{n:function(){return a}});var r=n(64090);let o=r.createContext(!1),a=e=>{let{children:t,disabled:n}=e,a=r.useContext(o);return r.createElement(o.Provider,{value:null!=n?n:a},t)};t.Z=o},97303:function(e,t,n){n.d(t,{q:function(){return a}});var r=n(64090);let o=r.createContext(void 0),a=e=>{let{children:t,size:n}=e,a=r.useContext(o);return r.createElement(o.Provider,{value:n||a},t)};t.Z=o},57499:function(e,t,n){n.d(t,{E_:function(){return a},oR:function(){return o}});var r=n(64090);let o="anticon",a=r.createContext({getPrefixCls:(e,t)=>t||(e?"ant-".concat(e):"ant"),iconPrefixCls:o}),{Consumer:i}=a},92935:function(e,t,n){var r=n(24750);t.Z=e=>{let[,,,,t]=(0,r.ZP)();return t?"".concat(e,"-css-var"):""}},10693:function(e,t,n){var r=n(64090),o=n(97303);t.Z=e=>{let t=r.useContext(o.Z);return r.useMemo(()=>e?"string"==typeof e?null!=e?e:t:e instanceof Function?e(t):t:t,[e,t])}},54165:function(e,t,n){let r,o,a,i;n.d(t,{ZP:function(){return G},w6:function(){return Z}});var l=n(64090),s=n.t(l,2),c=n(8985),u=n(67689),d=n(61475),p=n(36597),f=n(76564),m=n(12519),g=n(4678),h=n(33302),b=e=>{let{locale:t={},children:n,_ANT_MARK__:r}=e;l.useEffect(()=>(0,g.f)(t&&t.Modal),[t]);let 
o=l.useMemo(()=>Object.assign(Object.assign({},t),{exist:!0}),[t]);return l.createElement(h.Z.Provider,{value:o},n)},v=n(79474),y=n(43345),E=n(46864),S=n(57499),w=n(12215),x=n(6336),O=n(22127),C=n(24050);let T="-ant-".concat(Date.now(),"-").concat(Math.random());var k=n(17094),A=n(97303),I=n(92536);let{useId:R}=Object.assign({},s);var N=void 0===R?()=>"":R,_=n(49367),P=n(24750);function M(e){let{children:t}=e,[,n]=(0,P.ZP)(),{motion:r}=n,o=l.useRef(!1);return(o.current=o.current||!1===r,o.current)?l.createElement(_.zt,{motion:r},t):t}var L=()=>null,D=n(28030),j=function(e,t){var n={};for(var r in e)Object.prototype.hasOwnProperty.call(e,r)&&0>t.indexOf(r)&&(n[r]=e[r]);if(null!=e&&"function"==typeof Object.getOwnPropertySymbols)for(var o=0,r=Object.getOwnPropertySymbols(e);ot.indexOf(r[o])&&Object.prototype.propertyIsEnumerable.call(e,r[o])&&(n[r[o]]=e[r[o]]);return n};let F=["getTargetContainer","getPopupContainer","renderEmpty","pageHeader","input","pagination","form","select","button"];function B(){return r||"ant"}function U(){return o||S.oR}let 
Z=()=>({getPrefixCls:(e,t)=>t||(e?"".concat(B(),"-").concat(e):B()),getIconPrefixCls:U,getRootPrefixCls:()=>r||B(),getTheme:()=>a,holderRender:i}),z=e=>{let{children:t,csp:n,autoInsertSpaceInButton:r,alert:o,anchor:a,form:i,locale:s,componentSize:g,direction:h,space:w,virtual:x,dropdownMatchSelectWidth:O,popupMatchSelectWidth:C,popupOverflow:T,legacyLocale:R,parentContext:_,iconPrefixCls:P,theme:B,componentDisabled:U,segmented:Z,statistic:z,spin:H,calendar:G,carousel:$,cascader:W,collapse:V,typography:q,checkbox:Y,descriptions:K,divider:X,drawer:Q,skeleton:J,steps:ee,image:et,layout:en,list:er,mentions:eo,modal:ea,progress:ei,result:el,slider:es,breadcrumb:ec,menu:eu,pagination:ed,input:ep,empty:ef,badge:em,radio:eg,rate:eh,switch:eb,transfer:ev,avatar:ey,message:eE,tag:eS,table:ew,card:ex,tabs:eO,timeline:eC,timePicker:eT,upload:ek,notification:eA,tree:eI,colorPicker:eR,datePicker:eN,rangePicker:e_,flex:eP,wave:eM,dropdown:eL,warning:eD}=e,ej=l.useCallback((t,n)=>{let{prefixCls:r}=e;if(n)return n;let o=r||_.getPrefixCls("");return t?"".concat(o,"-").concat(t):o},[_.getPrefixCls,e.prefixCls]),eF=P||_.iconPrefixCls||S.oR,eB=n||_.csp;(0,D.Z)(eF,eB);let eU=function(e,t){(0,f.ln)("ConfigProvider");let n=e||{},r=!1!==n.inherit&&t?t:y.u_,o=N();return(0,d.Z)(()=>{var a,i;if(!e)return t;let l=Object.assign({},r.components);Object.keys(e.components||{}).forEach(t=>{l[t]=Object.assign(Object.assign({},l[t]),e.components[t])});let s="css-var-".concat(o.replace(/:/g,"")),c=(null!==(a=n.cssVar)&&void 0!==a?a:r.cssVar)&&Object.assign(Object.assign(Object.assign({prefix:"ant"},"object"==typeof r.cssVar?r.cssVar:{}),"object"==typeof n.cssVar?n.cssVar:{}),{key:"object"==typeof n.cssVar&&(null===(i=n.cssVar)||void 0===i?void 0:i.key)||s});return Object.assign(Object.assign(Object.assign({},r),n),{token:Object.assign(Object.assign({},r.token),n.token),components:l,cssVar:c})},[n,r],(e,t)=>e.some((e,n)=>{let 
r=t[n];return!(0,I.Z)(e,r,!0)}))}(B,_.theme),eZ={csp:eB,autoInsertSpaceInButton:r,alert:o,anchor:a,locale:s||R,direction:h,space:w,virtual:x,popupMatchSelectWidth:null!=C?C:O,popupOverflow:T,getPrefixCls:ej,iconPrefixCls:eF,theme:eU,segmented:Z,statistic:z,spin:H,calendar:G,carousel:$,cascader:W,collapse:V,typography:q,checkbox:Y,descriptions:K,divider:X,drawer:Q,skeleton:J,steps:ee,image:et,input:ep,layout:en,list:er,mentions:eo,modal:ea,progress:ei,result:el,slider:es,breadcrumb:ec,menu:eu,pagination:ed,empty:ef,badge:em,radio:eg,rate:eh,switch:eb,transfer:ev,avatar:ey,message:eE,tag:eS,table:ew,card:ex,tabs:eO,timeline:eC,timePicker:eT,upload:ek,notification:eA,tree:eI,colorPicker:eR,datePicker:eN,rangePicker:e_,flex:eP,wave:eM,dropdown:eL,warning:eD},ez=Object.assign({},_);Object.keys(eZ).forEach(e=>{void 0!==eZ[e]&&(ez[e]=eZ[e])}),F.forEach(t=>{let n=e[t];n&&(ez[t]=n)});let eH=(0,d.Z)(()=>ez,ez,(e,t)=>{let n=Object.keys(e),r=Object.keys(t);return n.length!==r.length||n.some(n=>e[n]!==t[n])}),eG=l.useMemo(()=>({prefixCls:eF,csp:eB}),[eF,eB]),e$=l.createElement(l.Fragment,null,l.createElement(L,{dropdownMatchSelectWidth:O}),t),eW=l.useMemo(()=>{var e,t,n,r;return(0,p.T)((null===(e=v.Z.Form)||void 0===e?void 0:e.defaultValidateMessages)||{},(null===(n=null===(t=eH.locale)||void 0===t?void 0:t.Form)||void 0===n?void 0:n.defaultValidateMessages)||{},(null===(r=eH.form)||void 0===r?void 0:r.validateMessages)||{},(null==i?void 0:i.validateMessages)||{})},[eH,null==i?void 0:i.validateMessages]);Object.keys(eW).length>0&&(e$=l.createElement(m.Z.Provider,{value:eW},e$)),s&&(e$=l.createElement(b,{locale:s,_ANT_MARK__:"internalMark"},e$)),(eF||eB)&&(e$=l.createElement(u.Z.Provider,{value:eG},e$)),g&&(e$=l.createElement(A.q,{size:g},e$)),e$=l.createElement(M,null,e$);let eV=l.useMemo(()=>{let 
e=eU||{},{algorithm:t,token:n,components:r,cssVar:o}=e,a=j(e,["algorithm","token","components","cssVar"]),i=t&&(!Array.isArray(t)||t.length>0)?(0,c.jG)(t):y.uH,l={};Object.entries(r||{}).forEach(e=>{let[t,n]=e,r=Object.assign({},n);"algorithm"in r&&(!0===r.algorithm?r.theme=i:(Array.isArray(r.algorithm)||"function"==typeof r.algorithm)&&(r.theme=(0,c.jG)(r.algorithm)),delete r.algorithm),l[t]=r});let s=Object.assign(Object.assign({},E.Z),n);return Object.assign(Object.assign({},a),{theme:i,token:s,components:l,override:Object.assign({override:s},l),cssVar:o})},[eU]);return B&&(e$=l.createElement(y.Mj.Provider,{value:eV},e$)),eH.warning&&(e$=l.createElement(f.G8.Provider,{value:eH.warning},e$)),void 0!==U&&(e$=l.createElement(k.n,{disabled:U},e$)),l.createElement(S.E_.Provider,{value:eH},e$)},H=e=>{let t=l.useContext(S.E_),n=l.useContext(h.Z);return l.createElement(z,Object.assign({parentContext:t,legacyLocale:n},e))};H.ConfigContext=S.E_,H.SizeContext=A.Z,H.config=e=>{let{prefixCls:t,iconPrefixCls:n,theme:l,holderRender:s}=e;void 0!==t&&(r=t),void 0!==n&&(o=n),"holderRender"in e&&(i=s),l&&(Object.keys(l).some(e=>e.endsWith("Color"))?function(e,t){let n=function(e,t){let n={},r=(e,t)=>{let n=e.clone();return(n=(null==t?void 0:t(n))||n).toRgbString()},o=(e,t)=>{let o=new x.C(e),a=(0,w.R_)(o.toRgbString());n["".concat(t,"-color")]=r(o),n["".concat(t,"-color-disabled")]=a[1],n["".concat(t,"-color-hover")]=a[4],n["".concat(t,"-color-active")]=a[6],n["".concat(t,"-color-outline")]=o.clone().setAlpha(.2).toRgbString(),n["".concat(t,"-color-deprecated-bg")]=a[0],n["".concat(t,"-color-deprecated-border")]=a[2]};if(t.primaryColor){o(t.primaryColor,"primary");let e=new 
x.C(t.primaryColor),a=(0,w.R_)(e.toRgbString());a.forEach((e,t)=>{n["primary-".concat(t+1)]=e}),n["primary-color-deprecated-l-35"]=r(e,e=>e.lighten(35)),n["primary-color-deprecated-l-20"]=r(e,e=>e.lighten(20)),n["primary-color-deprecated-t-20"]=r(e,e=>e.tint(20)),n["primary-color-deprecated-t-50"]=r(e,e=>e.tint(50)),n["primary-color-deprecated-f-12"]=r(e,e=>e.setAlpha(.12*e.getAlpha()));let i=new x.C(a[0]);n["primary-color-active-deprecated-f-30"]=r(i,e=>e.setAlpha(.3*e.getAlpha())),n["primary-color-active-deprecated-d-02"]=r(i,e=>e.darken(2))}t.successColor&&o(t.successColor,"success"),t.warningColor&&o(t.warningColor,"warning"),t.errorColor&&o(t.errorColor,"error"),t.infoColor&&o(t.infoColor,"info");let a=Object.keys(n).map(t=>"--".concat(e,"-").concat(t,": ").concat(n[t],";"));return"\n :root {\n ".concat(a.join("\n"),"\n }\n ").trim()}(e,t);(0,O.Z)()&&(0,C.hq)(n,"".concat(T,"-dynamic-theme"))}(B(),l):a=l)},H.useConfig=function(){return{componentDisabled:(0,l.useContext)(k.Z),componentSize:(0,l.useContext)(A.Z)}},Object.defineProperty(H,"SizeContext",{get:()=>A.Z});var G=H},40491:function(e,t,n){n.d(t,{Z:function(){return er}});var r=n(64090),o=n(50459),a=n(16480),i=n.n(a),l=n(14749),s=n(50833),c=n(80406),u=n(6787),d=n(44101),p=n(74084),f=n(4295),m=n(19223),g=f.Z.ESC,h=f.Z.TAB,b=(0,r.forwardRef)(function(e,t){var n=e.overlay,o=e.arrow,a=e.prefixCls,i=(0,r.useMemo)(function(){return"function"==typeof n?n():n},[n]),l=(0,p.sQ)(t,null==i?void 0:i.ref);return r.createElement(r.Fragment,null,o&&r.createElement("div",{className:"".concat(a,"-arrow")}),r.cloneElement(i,{ref:(0,p.Yr)(i)?l:void 
0}))}),v={adjustX:1,adjustY:1},y=[0,0],E={topLeft:{points:["bl","tl"],overflow:v,offset:[0,-4],targetOffset:y},top:{points:["bc","tc"],overflow:v,offset:[0,-4],targetOffset:y},topRight:{points:["br","tr"],overflow:v,offset:[0,-4],targetOffset:y},bottomLeft:{points:["tl","bl"],overflow:v,offset:[0,4],targetOffset:y},bottom:{points:["tc","bc"],overflow:v,offset:[0,4],targetOffset:y},bottomRight:{points:["tr","br"],overflow:v,offset:[0,4],targetOffset:y}},S=["arrow","prefixCls","transitionName","animation","align","placement","placements","getPopupContainer","showAction","hideAction","overlayClassName","overlayStyle","visible","trigger","autoFocus","overlay","children","onVisibleChange"],w=r.forwardRef(function(e,t){var n,o,a,f,v,y,w,x,O,C,T,k,A,I,R=e.arrow,N=void 0!==R&&R,_=e.prefixCls,P=void 0===_?"rc-dropdown":_,M=e.transitionName,L=e.animation,D=e.align,j=e.placement,F=e.placements,B=e.getPopupContainer,U=e.showAction,Z=e.hideAction,z=e.overlayClassName,H=e.overlayStyle,G=e.visible,$=e.trigger,W=void 0===$?["hover"]:$,V=e.autoFocus,q=e.overlay,Y=e.children,K=e.onVisibleChange,X=(0,u.Z)(e,S),Q=r.useState(),J=(0,c.Z)(Q,2),ee=J[0],et=J[1],en="visible"in e?G:ee,er=r.useRef(null),eo=r.useRef(null),ea=r.useRef(null);r.useImperativeHandle(t,function(){return er.current});var ei=function(e){et(e),null==K||K(e)};o=(n={visible:en,triggerRef:ea,onVisibleChange:ei,autoFocus:V,overlayRef:eo}).visible,a=n.triggerRef,f=n.onVisibleChange,v=n.autoFocus,y=n.overlayRef,w=r.useRef(!1),x=function(){if(o){var e,t;null===(e=a.current)||void 0===e||null===(t=e.focus)||void 0===t||t.call(e),null==f||f(!1)}},O=function(){var e;return null!==(e=y.current)&&void 0!==e&&!!e.focus&&(y.current.focus(),w.current=!0,!0)},C=function(e){switch(e.keyCode){case g:x();break;case h:var t=!1;w.current||(t=O()),t?e.preventDefault():x()}},r.useEffect(function(){return 
o?(window.addEventListener("keydown",C),v&&(0,m.Z)(O,3),function(){window.removeEventListener("keydown",C),w.current=!1}):function(){w.current=!1}},[o]);var el=function(){return r.createElement(b,{ref:eo,overlay:q,prefixCls:P,arrow:N})},es=r.cloneElement(Y,{className:i()(null===(I=Y.props)||void 0===I?void 0:I.className,en&&(void 0!==(T=e.openClassName)?T:"".concat(P,"-open"))),ref:(0,p.Yr)(Y)?(0,p.sQ)(ea,Y.ref):void 0}),ec=Z;return ec||-1===W.indexOf("contextMenu")||(ec=["click"]),r.createElement(d.Z,(0,l.Z)({builtinPlacements:void 0===F?E:F},X,{prefixCls:P,ref:er,popupClassName:i()(z,(0,s.Z)({},"".concat(P,"-show-arrow"),N)),popupStyle:H,action:W,showAction:U,hideAction:ec,popupPlacement:void 0===j?"bottomLeft":j,popupAlign:D,popupTransitionName:M,popupAnimation:L,popupVisible:en,stretch:(k=e.minOverlayWidthMatchTrigger,A=e.alignPoint,"minOverlayWidthMatchTrigger"in e?k:!A)?"minWidth":"",popup:"function"==typeof q?el:el(),onPopupVisibleChange:ei,onPopupClick:function(t){var n=e.onOverlayClick;et(!1),n&&n(t)},getPopupContainer:B}),es)}),x=n(48563),O=n(44329),C=n(35704),T=n(51761),k=n(67966),A=n(21467),I=n(65823),R=n(76564),N=n(86718),_=n(57499),P=n(95781),M=n(81642),L=n(24750),D=n(11303),j=n(202),F=n(25926),B=n(58854),U=n(89869),Z=n(76585),z=n(80316),H=e=>{let{componentCls:t,menuCls:n,colorError:r,colorTextLightSolid:o}=e,a="".concat(n,"-item");return{["".concat(t,", ").concat(t,"-menu-submenu")]:{["".concat(n," ").concat(a)]:{["&".concat(a,"-danger:not(").concat(a,"-disabled)")]:{color:r,"&:hover":{color:o,backgroundColor:r}}}}}},G=n(2638),$=n(8985);let 
W=e=>{let{componentCls:t,menuCls:n,zIndexPopup:r,dropdownArrowDistance:o,sizePopupArrow:a,antCls:i,iconCls:l,motionDurationMid:s,paddingBlock:c,fontSize:u,dropdownEdgeChildPadding:d,colorTextDisabled:p,fontSizeIcon:f,controlPaddingHorizontal:m,colorBgElevated:g}=e;return[{[t]:Object.assign(Object.assign({},(0,D.Wf)(e)),{position:"absolute",top:-9999,left:{_skip_check_:!0,value:-9999},zIndex:r,display:"block","&::before":{position:"absolute",insetBlock:e.calc(a).div(2).sub(o).equal(),zIndex:-9999,opacity:1e-4,content:'""'},["&-trigger".concat(i,"-btn")]:{["& > ".concat(l,"-down, & > ").concat(i,"-btn-icon > ").concat(l,"-down")]:{fontSize:f}},["".concat(t,"-wrap")]:{position:"relative",["".concat(i,"-btn > ").concat(l,"-down")]:{fontSize:f},["".concat(l,"-down::before")]:{transition:"transform ".concat(s)}},["".concat(t,"-wrap-open")]:{["".concat(l,"-down::before")]:{transform:"rotate(180deg)"}},"\n &-hidden,\n &-menu-hidden,\n &-menu-submenu-hidden\n ":{display:"none"},["&".concat(i,"-slide-down-enter").concat(i,"-slide-down-enter-active").concat(t,"-placement-bottomLeft,\n &").concat(i,"-slide-down-appear").concat(i,"-slide-down-appear-active").concat(t,"-placement-bottomLeft,\n &").concat(i,"-slide-down-enter").concat(i,"-slide-down-enter-active").concat(t,"-placement-bottom,\n &").concat(i,"-slide-down-appear").concat(i,"-slide-down-appear-active").concat(t,"-placement-bottom,\n &").concat(i,"-slide-down-enter").concat(i,"-slide-down-enter-active").concat(t,"-placement-bottomRight,\n &").concat(i,"-slide-down-appear").concat(i,"-slide-down-appear-active").concat(t,"-placement-bottomRight")]:{animationName:j.fJ},["&".concat(i,"-slide-up-enter").concat(i,"-slide-up-enter-active").concat(t,"-placement-topLeft,\n &").concat(i,"-slide-up-appear").concat(i,"-slide-up-appear-active").concat(t,"-placement-topLeft,\n &").concat(i,"-slide-up-enter").concat(i,"-slide-up-enter-active").concat(t,"-placement-top,\n 
&").concat(i,"-slide-up-appear").concat(i,"-slide-up-appear-active").concat(t,"-placement-top,\n &").concat(i,"-slide-up-enter").concat(i,"-slide-up-enter-active").concat(t,"-placement-topRight,\n &").concat(i,"-slide-up-appear").concat(i,"-slide-up-appear-active").concat(t,"-placement-topRight")]:{animationName:j.Qt},["&".concat(i,"-slide-down-leave").concat(i,"-slide-down-leave-active").concat(t,"-placement-bottomLeft,\n &").concat(i,"-slide-down-leave").concat(i,"-slide-down-leave-active").concat(t,"-placement-bottom,\n &").concat(i,"-slide-down-leave").concat(i,"-slide-down-leave-active").concat(t,"-placement-bottomRight")]:{animationName:j.Uw},["&".concat(i,"-slide-up-leave").concat(i,"-slide-up-leave-active").concat(t,"-placement-topLeft,\n &").concat(i,"-slide-up-leave").concat(i,"-slide-up-leave-active").concat(t,"-placement-top,\n &").concat(i,"-slide-up-leave").concat(i,"-slide-up-leave-active").concat(t,"-placement-topRight")]:{animationName:j.ly}})},(0,U.ZP)(e,g,{arrowPlacement:{top:!0,bottom:!0}}),{["".concat(t," ").concat(n)]:{position:"relative",margin:0},["".concat(n,"-submenu-popup")]:{position:"absolute",zIndex:r,background:"transparent",boxShadow:"none",transformOrigin:"0 0","ul, li":{listStyle:"none",margin:0}},["".concat(t,", ").concat(t,"-menu-submenu")]:{[n]:Object.assign(Object.assign({padding:d,listStyleType:"none",backgroundColor:g,backgroundClip:"padding-box",borderRadius:e.borderRadiusLG,outline:"none",boxShadow:e.boxShadowSecondary},(0,D.Qy)(e)),{["".concat(n,"-item-group-title")]:{padding:"".concat((0,$.bf)(c)," ").concat((0,$.bf)(m)),color:e.colorTextDescription,transition:"all ".concat(s)},["".concat(n,"-item")]:{position:"relative",display:"flex",alignItems:"center"},["".concat(n,"-item-icon")]:{minWidth:u,marginInlineEnd:e.marginXS,fontSize:e.fontSizeSM},["".concat(n,"-title-content")]:{flex:"auto","> a":{color:"inherit",transition:"all 
".concat(s),"&:hover":{color:"inherit"},"&::after":{position:"absolute",inset:0,content:'""'}}},["".concat(n,"-item, ").concat(n,"-submenu-title")]:Object.assign(Object.assign({clear:"both",margin:0,padding:"".concat((0,$.bf)(c)," ").concat((0,$.bf)(m)),color:e.colorText,fontWeight:"normal",fontSize:u,lineHeight:e.lineHeight,cursor:"pointer",transition:"all ".concat(s),borderRadius:e.borderRadiusSM,"&:hover, &-active":{backgroundColor:e.controlItemBgHover}},(0,D.Qy)(e)),{"&-selected":{color:e.colorPrimary,backgroundColor:e.controlItemBgActive,"&:hover, &-active":{backgroundColor:e.controlItemBgActiveHover}},"&-disabled":{color:p,cursor:"not-allowed","&:hover":{color:p,backgroundColor:g,cursor:"not-allowed"},a:{pointerEvents:"none"}},"&-divider":{height:1,margin:"".concat((0,$.bf)(e.marginXXS)," 0"),overflow:"hidden",lineHeight:0,backgroundColor:e.colorSplit},["".concat(t,"-menu-submenu-expand-icon")]:{position:"absolute",insetInlineEnd:e.paddingXS,["".concat(t,"-menu-submenu-arrow-icon")]:{marginInlineEnd:"0 !important",color:e.colorTextDescription,fontSize:f,fontStyle:"normal"}}}),["".concat(n,"-item-group-list")]:{margin:"0 ".concat((0,$.bf)(e.marginXS)),padding:0,listStyle:"none"},["".concat(n,"-submenu-title")]:{paddingInlineEnd:e.calc(m).add(e.fontSizeSM).equal()},["".concat(n,"-submenu-vertical")]:{position:"relative"},["".concat(n,"-submenu").concat(n,"-submenu-disabled ").concat(t,"-menu-submenu-title")]:{["&, ".concat(t,"-menu-submenu-arrow-icon")]:{color:p,backgroundColor:g,cursor:"not-allowed"}},["".concat(n,"-submenu-selected ").concat(t,"-menu-submenu-title")]:{color:e.colorPrimary}})}},[(0,j.oN)(e,"slide-up"),(0,j.oN)(e,"slide-down"),(0,F.Fm)(e,"move-up"),(0,F.Fm)(e,"move-down"),(0,B._y)(e,"zoom-big")]]};var 
V=(0,Z.I$)("Dropdown",e=>{let{marginXXS:t,sizePopupArrow:n,paddingXXS:r,componentCls:o}=e,a=(0,z.TS)(e,{menuCls:"".concat(o,"-menu"),dropdownArrowDistance:e.calc(n).div(2).add(t).equal(),dropdownEdgeChildPadding:r});return[W(a),H(a)]},e=>Object.assign(Object.assign({zIndexPopup:e.zIndexPopupBase+50,paddingBlock:(e.controlHeight-e.fontSize*e.lineHeight)/2},(0,U.wZ)({contentRadius:e.borderRadiusLG,limitVerticalRadius:!0})),(0,G.w)(e))),q=n(92935);let Y=e=>{let t;let{menu:n,arrow:a,prefixCls:l,children:s,trigger:c,disabled:u,dropdownRender:d,getPopupContainer:p,overlayClassName:f,rootClassName:m,overlayStyle:g,open:h,onOpenChange:b,visible:v,onVisibleChange:y,mouseEnterDelay:E=.15,mouseLeaveDelay:S=.1,autoAdjustOverflow:A=!0,placement:D="",overlay:j,transitionName:F}=e,{getPopupContainer:B,getPrefixCls:U,direction:Z,dropdown:z}=r.useContext(_.E_);(0,R.ln)("Dropdown");let H=r.useMemo(()=>{let e=U();return void 0!==F?F:D.includes("top")?"".concat(e,"-slide-down"):"".concat(e,"-slide-up")},[U,D,F]),G=r.useMemo(()=>D?D.includes("Center")?D.slice(0,D.indexOf("Center")):D:"rtl"===Z?"bottomRight":"bottomLeft",[D,Z]),$=U("dropdown",l),W=(0,q.Z)($),[Y,K,X]=V($,W),[,Q]=(0,L.ZP)(),J=r.Children.only(s),ee=(0,I.Tm)(J,{className:i()("".concat($,"-trigger"),{["".concat($,"-rtl")]:"rtl"===Z},J.props.className),disabled:u}),et=u?[]:c;et&&et.includes("contextMenu")&&(t=!0);let[en,er]=(0,O.Z)(!1,{value:null!=h?h:v}),eo=(0,x.zX)(e=>{null==b||b(e,{source:"trigger"}),null==y||y(e),er(e)}),ea=i()(f,m,K,X,W,null==z?void 0:z.className,{["".concat($,"-rtl")]:"rtl"===Z}),ei=(0,k.Z)({arrowPointAtCenter:"object"==typeof a&&a.pointAtCenter,autoAdjustOverflow:A,offset:Q.marginXXS,arrowWidth:a?Q.sizePopupArrow:0,borderRadius:Q.borderRadius}),el=r.useCallback(()=>{null!=n&&n.selectable&&null!=n&&n.multiple||(null==b||b(!1,{source:"menu"}),er(!1))},[null==n?void 0:n.selectable,null==n?void 0:n.multiple]),[es,ec]=(0,T.Cn)("Dropdown",null==g?void 
0:g.zIndex),eu=r.createElement(w,Object.assign({alignPoint:t},(0,C.Z)(e,["rootClassName"]),{mouseEnterDelay:E,mouseLeaveDelay:S,visible:en,builtinPlacements:ei,arrow:!!a,overlayClassName:ea,prefixCls:$,getPopupContainer:p||B,transitionName:H,trigger:et,overlay:()=>{let e;return e=(null==n?void 0:n.items)?r.createElement(P.Z,Object.assign({},n)):"function"==typeof j?j():j,d&&(e=d(e)),e=r.Children.only("string"==typeof e?r.createElement("span",null,e):e),r.createElement(M.J,{prefixCls:"".concat($,"-menu"),rootClassName:i()(X,W),expandIcon:r.createElement("span",{className:"".concat($,"-menu-submenu-arrow")},r.createElement(o.Z,{className:"".concat($,"-menu-submenu-arrow-icon")})),mode:"vertical",selectable:!1,onClick:el,validator:e=>{let{mode:t}=e}},e)},placement:G,onVisibleChange:eo,overlayStyle:Object.assign(Object.assign(Object.assign({},null==z?void 0:z.style),g),{zIndex:es})}),ee);return es&&(eu=r.createElement(N.Z.Provider,{value:ec},eu)),Y(eu)},K=(0,A.Z)(Y,"dropdown",e=>e,function(e){return Object.assign(Object.assign({},e),{align:{overflow:{adjustX:!1,adjustY:!1}}})});Y._InternalPanelDoNotUseOrYouWillBeFired=e=>r.createElement(K,Object.assign({},e),r.createElement("span",null));var X=n(31413),Q=n(1861),J=n(65270),ee=n(92801),et=function(e,t){var n={};for(var r in e)Object.prototype.hasOwnProperty.call(e,r)&&0>t.indexOf(r)&&(n[r]=e[r]);if(null!=e&&"function"==typeof Object.getOwnPropertySymbols)for(var o=0,r=Object.getOwnPropertySymbols(e);ot.indexOf(r[o])&&Object.prototype.propertyIsEnumerable.call(e,r[o])&&(n[r[o]]=e[r[o]]);return n};let 
en=e=>{let{getPopupContainer:t,getPrefixCls:n,direction:o}=r.useContext(_.E_),{prefixCls:a,type:l="default",danger:s,disabled:c,loading:u,onClick:d,htmlType:p,children:f,className:m,menu:g,arrow:h,autoFocus:b,overlay:v,trigger:y,align:E,open:S,onOpenChange:w,placement:x,getPopupContainer:O,href:C,icon:T=r.createElement(X.Z,null),title:k,buttonsRender:A=e=>e,mouseEnterDelay:I,mouseLeaveDelay:R,overlayClassName:N,overlayStyle:P,destroyPopupOnHide:M,dropdownRender:L}=e,D=et(e,["prefixCls","type","danger","disabled","loading","onClick","htmlType","children","className","menu","arrow","autoFocus","overlay","trigger","align","open","onOpenChange","placement","getPopupContainer","href","icon","title","buttonsRender","mouseEnterDelay","mouseLeaveDelay","overlayClassName","overlayStyle","destroyPopupOnHide","dropdownRender"]),j=n("dropdown",a),F={menu:g,arrow:h,autoFocus:b,align:E,disabled:c,trigger:c?[]:y,onOpenChange:w,getPopupContainer:O||t,mouseEnterDelay:I,mouseLeaveDelay:R,overlayClassName:N,overlayStyle:P,destroyPopupOnHide:M,dropdownRender:L},{compactSize:B,compactItemClassnames:U}=(0,ee.ri)(j,o),Z=i()("".concat(j,"-button"),U,m);"overlay"in e&&(F.overlay=v),"open"in e&&(F.open=S),"placement"in e?F.placement=x:F.placement="rtl"===o?"bottomLeft":"bottomRight";let[z,H]=A([r.createElement(Q.ZP,{type:l,danger:s,disabled:c,loading:u,onClick:d,htmlType:p,href:C,title:k},f),r.createElement(Q.ZP,{type:l,danger:s,icon:T})]);return r.createElement(J.Z.Compact,Object.assign({className:Z,size:B,block:!0},D),z,r.createElement(Y,Object.assign({},F),H))};en.__ANT_BUTTON=!0,Y.Button=en;var er=Y},47137:function(e,t,n){n.d(t,{RV:function(){return s},Rk:function(){return c},Ux:function(){return d},aM:function(){return u},pg:function(){return p},q3:function(){return i},qI:function(){return l}});var r=n(64090),o=n(76570),a=n(35704);let i=r.createContext({labelAlign:"right",vertical:!1,itemRef:()=>{}}),l=r.createContext(null),s=e=>{let t=(0,a.Z)(e,["prefixCls"]);return 
r.createElement(o.RV,Object.assign({},t))},c=r.createContext({prefixCls:""}),u=r.createContext({}),d=e=>{let{children:t,status:n,override:o}=e,a=(0,r.useContext)(u),i=(0,r.useMemo)(()=>{let e=Object.assign({},a);return o&&delete e.isFormItemInput,n&&(delete e.status,delete e.hasFeedback,delete e.feedbackIcon),e},[n,o,a]);return r.createElement(u.Provider,{value:i},t)},p=(0,r.createContext)(void 0)},8443:function(e,t,n){var r=n(64090),o=n(47137);let a=["outlined","borderless","filled"];t.Z=function(e){let t,n=arguments.length>1&&void 0!==arguments[1]?arguments[1]:void 0,i=(0,r.useContext)(o.pg);t=void 0!==e?e:!1===n?"borderless":null!=i?i:"outlined";let l=a.includes(t);return[t,l]}},81583:function(e,t,n){n.d(t,{Z:function(){return eD}});var r=n(63787),o=n(16480),a=n.n(o),i=n(49367),l=n(64090),s=n(47387),c=n(47137);function u(e){let[t,n]=l.useState(e);return l.useEffect(()=>{let t=setTimeout(()=>{n(e)},e.length?0:10);return()=>{clearTimeout(t)}},[e]),t}var d=n(8985),p=n(11303),f=n(58854),m=n(46154),g=n(80316),h=n(76585),b=e=>{let{componentCls:t}=e,n="".concat(t,"-show-help"),r="".concat(t,"-show-help-item");return{[n]:{transition:"opacity ".concat(e.motionDurationSlow," ").concat(e.motionEaseInOut),"&-appear, &-enter":{opacity:0,"&-active":{opacity:1}},"&-leave":{opacity:1,"&-active":{opacity:0}},[r]:{overflow:"hidden",transition:"height ".concat(e.motionDurationSlow," ").concat(e.motionEaseInOut,",\n opacity ").concat(e.motionDurationSlow," ").concat(e.motionEaseInOut,",\n transform ").concat(e.motionDurationSlow," ").concat(e.motionEaseInOut," !important"),["&".concat(r,"-appear, &").concat(r,"-enter")]:{transform:"translateY(-5px)",opacity:0,"&-active":{transform:"translateY(0)",opacity:1}},["&".concat(r,"-leave-active")]:{transform:"translateY(-5px)"}}}}};let v=e=>({legend:{display:"block",width:"100%",marginBottom:e.marginLG,padding:0,color:e.colorTextDescription,fontSize:e.fontSizeLG,lineHeight:"inherit",border:0,borderBottom:"".concat((0,d.bf)(e.lineWidth)," 
").concat(e.lineType," ").concat(e.colorBorder)},'input[type="search"]':{boxSizing:"border-box"},'input[type="radio"], input[type="checkbox"]':{lineHeight:"normal"},'input[type="file"]':{display:"block"},'input[type="range"]':{display:"block",width:"100%"},"select[multiple], select[size]":{height:"auto"},"input[type='file']:focus,\n input[type='radio']:focus,\n input[type='checkbox']:focus":{outline:0,boxShadow:"0 0 0 ".concat((0,d.bf)(e.controlOutlineWidth)," ").concat(e.controlOutline)},output:{display:"block",paddingTop:15,color:e.colorText,fontSize:e.fontSize,lineHeight:e.lineHeight}}),y=(e,t)=>{let{formItemCls:n}=e;return{[n]:{["".concat(n,"-label > label")]:{height:t},["".concat(n,"-control-input")]:{minHeight:t}}}},E=e=>{let{componentCls:t}=e;return{[e.componentCls]:Object.assign(Object.assign(Object.assign({},(0,p.Wf)(e)),v(e)),{["".concat(t,"-text")]:{display:"inline-block",paddingInlineEnd:e.paddingSM},"&-small":Object.assign({},y(e,e.controlHeightSM)),"&-large":Object.assign({},y(e,e.controlHeightLG))})}},S=e=>{let{formItemCls:t,iconCls:n,componentCls:r,rootPrefixCls:o,labelRequiredMarkColor:a,labelColor:i,labelFontSize:l,labelHeight:s,labelColonMarginInlineStart:c,labelColonMarginInlineEnd:u,itemMarginBottom:d}=e;return{[t]:Object.assign(Object.assign({},(0,p.Wf)(e)),{marginBottom:d,verticalAlign:"top","&-with-help":{transition:"none"},["&-hidden,\n &-hidden.".concat(o,"-row")]:{display:"none"},"&-has-warning":{["".concat(t,"-split")]:{color:e.colorError}},"&-has-error":{["".concat(t,"-split")]:{color:e.colorWarning}},["".concat(t,"-label")]:{flexGrow:0,overflow:"hidden",whiteSpace:"nowrap",textAlign:"end",verticalAlign:"middle","&-left":{textAlign:"start"},"&-wrap":{overflow:"unset",lineHeight:e.lineHeight,whiteSpace:"unset"},"> label":{position:"relative",display:"inline-flex",alignItems:"center",maxWidth:"100%",height:s,color:i,fontSize:l,["> 
".concat(n)]:{fontSize:e.fontSize,verticalAlign:"top"},["&".concat(t,"-required:not(").concat(t,"-required-mark-optional)::before")]:{display:"inline-block",marginInlineEnd:e.marginXXS,color:a,fontSize:e.fontSize,fontFamily:"SimSun, sans-serif",lineHeight:1,content:'"*"',["".concat(r,"-hide-required-mark &")]:{display:"none"}},["".concat(t,"-optional")]:{display:"inline-block",marginInlineStart:e.marginXXS,color:e.colorTextDescription,["".concat(r,"-hide-required-mark &")]:{display:"none"}},["".concat(t,"-tooltip")]:{color:e.colorTextDescription,cursor:"help",writingMode:"horizontal-tb",marginInlineStart:e.marginXXS},"&::after":{content:'":"',position:"relative",marginBlock:0,marginInlineStart:c,marginInlineEnd:u},["&".concat(t,"-no-colon::after")]:{content:'"\\a0"'}}},["".concat(t,"-control")]:{"--ant-display":"flex",flexDirection:"column",flexGrow:1,["&:first-child:not([class^=\"'".concat(o,"-col-'\"]):not([class*=\"' ").concat(o,"-col-'\"])")]:{width:"100%"},"&-input":{position:"relative",display:"flex",alignItems:"center",minHeight:e.controlHeight,"&-content":{flex:"auto",maxWidth:"100%"}}},[t]:{"&-explain, &-extra":{clear:"both",color:e.colorTextDescription,fontSize:e.fontSize,lineHeight:e.lineHeight},"&-explain-connected":{width:"100%"},"&-extra":{minHeight:e.controlHeightSM,transition:"color ".concat(e.motionDurationMid," ").concat(e.motionEaseOut)},"&-explain":{"&-error":{color:e.colorError},"&-warning":{color:e.colorWarning}}},["&-with-help 
".concat(t,"-explain")]:{height:"auto",opacity:1},["".concat(t,"-feedback-icon")]:{fontSize:e.fontSize,textAlign:"center",visibility:"visible",animationName:f.kr,animationDuration:e.motionDurationMid,animationTimingFunction:e.motionEaseOutBack,pointerEvents:"none","&-success":{color:e.colorSuccess},"&-error":{color:e.colorError},"&-warning":{color:e.colorWarning},"&-validating":{color:e.colorPrimary}}})}},w=e=>{let{componentCls:t,formItemCls:n}=e;return{["".concat(t,"-horizontal")]:{["".concat(n,"-label")]:{flexGrow:0},["".concat(n,"-control")]:{flex:"1 1 0",minWidth:0},["".concat(n,"-label[class$='-24'], ").concat(n,"-label[class*='-24 ']")]:{["& + ".concat(n,"-control")]:{minWidth:"unset"}}}}},x=e=>{let{componentCls:t,formItemCls:n}=e;return{["".concat(t,"-inline")]:{display:"flex",flexWrap:"wrap",[n]:{flex:"none",marginInlineEnd:e.margin,marginBottom:0,"&-row":{flexWrap:"nowrap"},["> ".concat(n,"-label,\n > ").concat(n,"-control")]:{display:"inline-block",verticalAlign:"top"},["> ".concat(n,"-label")]:{flex:"none"},["".concat(t,"-text")]:{display:"inline-block"},["".concat(n,"-has-feedback")]:{display:"inline-block"}}}}},O=e=>({padding:e.verticalLabelPadding,margin:e.verticalLabelMargin,whiteSpace:"initial",textAlign:"start","> label":{margin:0,"&::after":{visibility:"hidden"}}}),C=e=>{let{componentCls:t,formItemCls:n,rootPrefixCls:r}=e;return{["".concat(n," ").concat(n,"-label")]:O(e),["".concat(t,":not(").concat(t,"-inline)")]:{[n]:{flexWrap:"wrap",["".concat(n,"-label, ").concat(n,"-control")]:{['&:not([class*=" '.concat(r,'-col-xs"])')]:{flex:"0 0 100%",maxWidth:"100%"}}}}}},T=e=>{let{componentCls:t,formItemCls:n,rootPrefixCls:r}=e;return{["".concat(t,"-vertical")]:{[n]:{"&-row":{flexDirection:"column"},"&-label > label":{height:"auto"},["".concat(t,"-item-control")]:{width:"100%"}}},["".concat(t,"-vertical ").concat(n,"-label,\n .").concat(r,"-col-24").concat(n,"-label,\n .").concat(r,"-col-xl-24").concat(n,"-label")]:O(e),["@media (max-width: 
".concat((0,d.bf)(e.screenXSMax),")")]:[C(e),{[t]:{[".".concat(r,"-col-xs-24").concat(n,"-label")]:O(e)}}],["@media (max-width: ".concat((0,d.bf)(e.screenSMMax),")")]:{[t]:{[".".concat(r,"-col-sm-24").concat(n,"-label")]:O(e)}},["@media (max-width: ".concat((0,d.bf)(e.screenMDMax),")")]:{[t]:{[".".concat(r,"-col-md-24").concat(n,"-label")]:O(e)}},["@media (max-width: ".concat((0,d.bf)(e.screenLGMax),")")]:{[t]:{[".".concat(r,"-col-lg-24").concat(n,"-label")]:O(e)}}}},k=(e,t)=>(0,g.TS)(e,{formItemCls:"".concat(e.componentCls,"-item"),rootPrefixCls:t});var A=(0,h.I$)("Form",(e,t)=>{let{rootPrefixCls:n}=t,r=k(e,n);return[E(r),S(r),b(r),w(r),x(r),T(r),(0,m.Z)(r),f.kr]},e=>({labelRequiredMarkColor:e.colorError,labelColor:e.colorTextHeading,labelFontSize:e.fontSize,labelHeight:e.controlHeight,labelColonMarginInlineStart:e.marginXXS/2,labelColonMarginInlineEnd:e.marginXS,itemMarginBottom:e.marginLG,verticalLabelPadding:"0 0 ".concat(e.paddingXS,"px"),verticalLabelMargin:0}),{order:-1e3}),I=n(92935);let R=[];function N(e,t,n){let r=arguments.length>3&&void 0!==arguments[3]?arguments[3]:0;return{key:"string"==typeof e?e:"".concat(t,"-").concat(r),error:e,errorStatus:n}}var _=e=>{let{help:t,helpStatus:n,errors:o=R,warnings:d=R,className:p,fieldId:f,onVisibleChanged:m}=e,{prefixCls:g}=l.useContext(c.Rk),h="".concat(g,"-item-explain"),b=(0,I.Z)(g),[v,y,E]=A(g,b),S=(0,l.useMemo)(()=>(0,s.Z)(g),[g]),w=u(o),x=u(d),O=l.useMemo(()=>null!=t?[N(t,"help",n)]:[].concat((0,r.Z)(w.map((e,t)=>N(e,"error","error",t))),(0,r.Z)(x.map((e,t)=>N(e,"warning","warning",t)))),[t,n,w,x]),C={};return f&&(C.id="".concat(f,"_help")),v(l.createElement(i.ZP,{motionDeadline:S.motionDeadline,motionName:"".concat(g,"-show-help"),visible:!!O.length,onVisibleChanged:m},e=>{let{className:t,style:n}=e;return 
l.createElement("div",Object.assign({},C,{className:a()(h,t,E,b,p,y),style:n,role:"alert"}),l.createElement(i.V4,Object.assign({keys:O},(0,s.Z)(g),{motionName:"".concat(g,"-show-help-item"),component:!1}),e=>{let{key:t,error:n,errorStatus:r,className:o,style:i}=e;return l.createElement("div",{key:t,className:a()(o,{["".concat(h,"-").concat(r)]:r}),style:i},n)}))}))},P=n(76570),M=n(57499),L=n(17094),D=n(10693),j=n(97303);let F=e=>"object"==typeof e&&null!=e&&1===e.nodeType,B=(e,t)=>(!t||"hidden"!==e)&&"visible"!==e&&"clip"!==e,U=(e,t)=>{if(e.clientHeight{let t=(e=>{if(!e.ownerDocument||!e.ownerDocument.defaultView)return null;try{return e.ownerDocument.defaultView.frameElement}catch(e){return null}})(e);return!!t&&(t.clientHeightat||a>e&&i=t&&l>=n?a-e-r:i>t&&ln?i-t+o:0,z=e=>{let t=e.parentElement;return null==t?e.getRootNode().host||null:t},H=(e,t)=>{var n,r,o,a;if("undefined"==typeof document)return[];let{scrollMode:i,block:l,inline:s,boundary:c,skipOverflowHiddenElements:u}=t,d="function"==typeof c?c:e=>e!==c;if(!F(e))throw TypeError("Invalid target");let p=document.scrollingElement||document.documentElement,f=[],m=e;for(;F(m)&&d(m);){if((m=z(m))===p){f.push(m);break}null!=m&&m===document.body&&U(m)&&!U(document.documentElement)||null!=m&&U(m,u)&&f.push(m)}let g=null!=(r=null==(n=window.visualViewport)?void 0:n.width)?r:innerWidth,h=null!=(a=null==(o=window.visualViewport)?void 0:o.height)?a:innerHeight,{scrollX:b,scrollY:v}=window,{height:y,width:E,top:S,right:w,bottom:x,left:O}=e.getBoundingClientRect(),{top:C,right:T,bottom:k,left:A}=(e=>{let t=window.getComputedStyle(e);return{top:parseFloat(t.scrollMarginTop)||0,right:parseFloat(t.scrollMarginRight)||0,bottom:parseFloat(t.scrollMarginBottom)||0,left:parseFloat(t.scrollMarginLeft)||0}})(e),I="start"===l||"nearest"===l?S-C:"end"===l?x+k:S+y/2-C+k,R="center"===s?O+E/2-A+T:"end"===s?w+T:O-A,N=[];for(let e=0;e=0&&O>=0&&x<=h&&w<=g&&S>=o&&x<=c&&O>=u&&w<=a)break;let 
d=getComputedStyle(t),m=parseInt(d.borderLeftWidth,10),C=parseInt(d.borderTopWidth,10),T=parseInt(d.borderRightWidth,10),k=parseInt(d.borderBottomWidth,10),A=0,_=0,P="offsetWidth"in t?t.offsetWidth-t.clientWidth-m-T:0,M="offsetHeight"in t?t.offsetHeight-t.clientHeight-C-k:0,L="offsetWidth"in t?0===t.offsetWidth?0:r/t.offsetWidth:0,D="offsetHeight"in t?0===t.offsetHeight?0:n/t.offsetHeight:0;if(p===t)A="start"===l?I:"end"===l?I-h:"nearest"===l?Z(v,v+h,h,C,k,v+I,v+I+y,y):I-h/2,_="start"===s?R:"center"===s?R-g/2:"end"===s?R-g:Z(b,b+g,g,m,T,b+R,b+R+E,E),A=Math.max(0,A+v),_=Math.max(0,_+b);else{A="start"===l?I-o-C:"end"===l?I-c+k+M:"nearest"===l?Z(o,c,n,C,k+M,I,I+y,y):I-(o+n/2)+M/2,_="start"===s?R-u-m:"center"===s?R-(u+r/2)+P/2:"end"===s?R-a+T+P:Z(u,a,r,m,T+P,R,R+E,E);let{scrollLeft:e,scrollTop:i}=t;A=0===D?0:Math.max(0,Math.min(i+A/D,t.scrollHeight-n/D+M)),_=0===L?0:Math.max(0,Math.min(e+_/L,t.scrollWidth-r/L+P)),I+=i-A,R+=e-_}N.push({el:t,top:A,left:_})}return N},G=e=>!1===e?{block:"end",inline:"nearest"}:e===Object(e)&&0!==Object.keys(e).length?e:{block:"start",inline:"nearest"},$=["parentNode"];function W(e){return void 0===e||!1===e?[]:Array.isArray(e)?e:[e]}function V(e,t){if(!e.length)return;let n=e.join("_");return t?"".concat(t,"_").concat(n):$.includes(n)?"".concat("form_item","_").concat(n):n}function q(e,t,n,r,o,a){let i=r;return void 0!==a?i=a:n.validating?i="validating":e.length?i="error":t.length?i="warning":(n.touched||o&&n.validated)&&(i="success"),i}function Y(e){return W(e).join("_")}function K(e){let[t]=(0,P.cI)(),n=l.useRef({}),r=l.useMemo(()=>null!=e?e:Object.assign(Object.assign({},t),{__INTERNAL__:{itemRef:e=>t=>{let r=Y(e);t?n.current[r]=t:delete n.current[r]}},scrollToField:function(e){let t=arguments.length>1&&void 0!==arguments[1]?arguments[1]:{},n=V(W(e),r.__INTERNAL__.name),o=n?document.getElementById(n):null;o&&function(e,t){if(!e.isConnected||!(e=>{let t=e;for(;t&&t.parentNode;){if(t.parentNode===document)return!0;t=t.parentNode 
instanceof ShadowRoot?t.parentNode.host:t.parentNode}return!1})(e))return;let n=(e=>{let t=window.getComputedStyle(e);return{top:parseFloat(t.scrollMarginTop)||0,right:parseFloat(t.scrollMarginRight)||0,bottom:parseFloat(t.scrollMarginBottom)||0,left:parseFloat(t.scrollMarginLeft)||0}})(e);if("object"==typeof t&&"function"==typeof t.behavior)return t.behavior(H(e,t));let r="boolean"==typeof t||null==t?void 0:t.behavior;for(let{el:o,top:a,left:i}of H(e,G(t))){let e=a-n.top+n.bottom,t=i-n.left+n.right;o.scroll({top:e,left:t,behavior:r})}}(o,Object.assign({scrollMode:"if-needed",block:"nearest"},t))},getFieldInstance:e=>{let t=Y(e);return n.current[t]}}),[e,t]);return[r]}var X=n(12519),Q=function(e,t){var n={};for(var r in e)Object.prototype.hasOwnProperty.call(e,r)&&0>t.indexOf(r)&&(n[r]=e[r]);if(null!=e&&"function"==typeof Object.getOwnPropertySymbols)for(var o=0,r=Object.getOwnPropertySymbols(e);ot.indexOf(r[o])&&Object.prototype.propertyIsEnumerable.call(e,r[o])&&(n[r[o]]=e[r[o]]);return n};let J=l.forwardRef((e,t)=>{let n=l.useContext(L.Z),{getPrefixCls:r,direction:o,form:i}=l.useContext(M.E_),{prefixCls:s,className:u,rootClassName:d,size:p,disabled:f=n,form:m,colon:g,labelAlign:h,labelWrap:b,labelCol:v,wrapperCol:y,hideRequiredMark:E,layout:S="horizontal",scrollToFirstError:w,requiredMark:x,onFinishFailed:O,name:C,style:T,feedbackIcons:k,variant:R}=e,N=Q(e,["prefixCls","className","rootClassName","size","disabled","form","colon","labelAlign","labelWrap","labelCol","wrapperCol","hideRequiredMark","layout","scrollToFirstError","requiredMark","onFinishFailed","name","style","feedbackIcons","variant"]),_=(0,D.Z)(p),F=l.useContext(X.Z),B=(0,l.useMemo)(()=>void 0!==x?x:!E&&(!i||void 0===i.requiredMark||i.requiredMark),[E,x,i]),U=null!=g?g:null==i?void 0:i.colon,Z=r("form",s),z=(0,I.Z)(Z),[H,G,$]=A(Z,z),W=a()(Z,"".concat(Z,"-").concat(S),{["".concat(Z,"-hide-required-mark")]:!1===B,["".concat(Z,"-rtl")]:"rtl"===o,["".concat(Z,"-").concat(_)]:_},$,z,G,null==i?void 
0:i.className,u,d),[V]=K(m),{__INTERNAL__:q}=V;q.name=C;let Y=(0,l.useMemo)(()=>({name:C,labelAlign:h,labelCol:v,labelWrap:b,wrapperCol:y,vertical:"vertical"===S,colon:U,requiredMark:B,itemRef:q.itemRef,form:V,feedbackIcons:k}),[C,h,v,y,S,U,B,V,k]);l.useImperativeHandle(t,()=>V);let J=(e,t)=>{if(e){let n={block:"nearest"};"object"==typeof e&&(n=e),V.scrollToField(t,n)}};return H(l.createElement(c.pg.Provider,{value:R},l.createElement(L.n,{disabled:f},l.createElement(j.Z.Provider,{value:_},l.createElement(c.RV,{validateMessages:F},l.createElement(c.q3.Provider,{value:Y},l.createElement(P.ZP,Object.assign({id:C},N,{name:C,onFinishFailed:e=>{if(null==O||O(e),e.errorFields.length){let t=e.errorFields[0].name;if(void 0!==w){J(w,t);return}i&&void 0!==i.scrollToFirstError&&J(i.scrollToFirstError,t)}},form:V,style:Object.assign(Object.assign({},null==i?void 0:i.style),T),className:W}))))))))});var ee=n(89211),et=n(74084),en=n(65823),er=n(76564),eo=n(33054);let ea=()=>{let{status:e,errors:t=[],warnings:n=[]}=(0,l.useContext)(c.aM);return{status:e,errors:t,warnings:n}};ea.Context=c.aM;var ei=n(19223),el=n(73193),es=n(24800),ec=n(35704),eu=n(5056),ed=n(90791);let ep=e=>{let{formItemCls:t}=e;return{"@media screen and (-ms-high-contrast: active), (-ms-high-contrast: none)":{["".concat(t,"-control")]:{display:"flex"}}}};var ef=(0,h.bk)(["Form","item-item"],(e,t)=>{let{rootPrefixCls:n}=t;return[ep(k(e,n))]}),em=e=>{let{prefixCls:t,status:n,wrapperCol:r,children:o,errors:i,warnings:s,_internalItemRender:u,extra:d,help:p,fieldId:f,marginBottom:m,onErrorVisibleChanged:g}=e,h="".concat(t,"-item"),b=l.useContext(c.q3),v=r||b.wrapperCol||{},y=a()("".concat(h,"-control"),v.className),E=l.useMemo(()=>Object.assign({},b),[b]);delete E.labelCol,delete E.wrapperCol;let 
S=l.createElement("div",{className:"".concat(h,"-control-input")},l.createElement("div",{className:"".concat(h,"-control-input-content")},o)),w=l.useMemo(()=>({prefixCls:t,status:n}),[t,n]),x=null!==m||i.length||s.length?l.createElement("div",{style:{display:"flex",flexWrap:"nowrap"}},l.createElement(c.Rk.Provider,{value:w},l.createElement(_,{fieldId:f,errors:i,warnings:s,help:p,helpStatus:n,className:"".concat(h,"-explain-connected"),onVisibleChanged:g})),!!m&&l.createElement("div",{style:{width:0,height:m}})):null,O={};f&&(O.id="".concat(f,"_extra"));let C=d?l.createElement("div",Object.assign({},O,{className:"".concat(h,"-extra")}),d):null,T=u&&"pro_table_render"===u.mark&&u.render?u.render(e,{input:S,errorList:x,extra:C}):l.createElement(l.Fragment,null,S,x,C);return l.createElement(c.q3.Provider,{value:E},l.createElement(ed.Z,Object.assign({},v,{className:y}),T),l.createElement(ef,{prefixCls:t}))},eg=n(14749),eh={icon:{tag:"svg",attrs:{viewBox:"64 64 896 896",focusable:"false"},children:[{tag:"path",attrs:{d:"M512 64C264.6 64 64 264.6 64 512s200.6 448 448 448 448-200.6 448-448S759.4 64 512 64zm0 820c-205.4 0-372-166.6-372-372s166.6-372 372-372 372 166.6 372 372-166.6 372-372 372z"}},{tag:"path",attrs:{d:"M623.6 316.7C593.6 290.4 554 276 512 276s-81.6 14.5-111.6 40.7C369.2 344 352 380.7 352 420v7.6c0 4.4 3.6 8 8 8h48c4.4 0 8-3.6 8-8V420c0-44.1 43.1-80 96-80s96 35.9 96 80c0 31.1-22 59.6-56.1 72.7-21.2 8.1-39.2 22.3-52.1 40.9-13.1 19-19.9 41.8-19.9 64.9V620c0 4.4 3.6 8 8 8h48c4.4 0 8-3.6 8-8v-22.7a48.3 48.3 0 0130.9-44.8c59-22.7 97.1-74.7 97.1-132.5.1-39.3-17.1-76-48.3-103.3zM472 732a40 40 0 1080 0 40 40 0 10-80 0z"}}]},name:"question-circle",theme:"outlined"},eb=n(60688),ev=l.forwardRef(function(e,t){return l.createElement(eb.Z,(0,eg.Z)({},e,{ref:t,icon:eh}))}),ey=n(79474),eE=n(70595),eS=n(1460),ew=function(e,t){var n={};for(var r in e)Object.prototype.hasOwnProperty.call(e,r)&&0>t.indexOf(r)&&(n[r]=e[r]);if(null!=e&&"function"==typeof 
Object.getOwnPropertySymbols)for(var o=0,r=Object.getOwnPropertySymbols(e);ot.indexOf(r[o])&&Object.prototype.propertyIsEnumerable.call(e,r[o])&&(n[r[o]]=e[r[o]]);return n},ex=e=>{var t;let{prefixCls:n,label:r,htmlFor:o,labelCol:i,labelAlign:s,colon:u,required:d,requiredMark:p,tooltip:f}=e,[m]=(0,eE.Z)("Form"),{vertical:g,labelAlign:h,labelCol:b,labelWrap:v,colon:y}=l.useContext(c.q3);if(!r)return null;let E=i||b||{},S="".concat(n,"-item-label"),w=a()(S,"left"===(s||h)&&"".concat(S,"-left"),E.className,{["".concat(S,"-wrap")]:!!v}),x=r,O=!0===u||!1!==y&&!1!==u;O&&!g&&"string"==typeof r&&""!==r.trim()&&(x=r.replace(/[:|:]\s*$/,""));let C=f?"object"!=typeof f||l.isValidElement(f)?{title:f}:f:null;if(C){let{icon:e=l.createElement(ev,null)}=C,t=ew(C,["icon"]),r=l.createElement(eS.Z,Object.assign({},t),l.cloneElement(e,{className:"".concat(n,"-item-tooltip"),title:"",onClick:e=>{e.preventDefault()},tabIndex:null}));x=l.createElement(l.Fragment,null,x,r)}let T="optional"===p,k="function"==typeof p;k?x=p(x,{required:!!d}):T&&!d&&(x=l.createElement(l.Fragment,null,x,l.createElement("span",{className:"".concat(n,"-item-optional"),title:""},(null==m?void 0:m.optional)||(null===(t=ey.Z.Form)||void 0===t?void 0:t.optional))));let A=a()({["".concat(n,"-item-required")]:d,["".concat(n,"-item-required-mark-optional")]:T||k,["".concat(n,"-item-no-colon")]:!O});return l.createElement(ed.Z,Object.assign({},E,{className:w}),l.createElement("label",{htmlFor:o,className:A,title:"string"==typeof r?r:""},x))},eO=n(99537),eC=n(77136),eT=n(20653),ek=n(66155);let eA={success:eO.Z,warning:eT.Z,error:eC.Z,validating:ek.Z};function eI(e){let{children:t,errors:n,warnings:r,hasFeedback:o,validateStatus:i,prefixCls:s,meta:u,noStyle:d}=e,p="".concat(s,"-item"),{feedbackIcons:f}=l.useContext(c.q3),m=q(n,r,u,null,!!o,i),{isFormItemInput:g,status:h,hasFeedback:b,feedbackIcon:v}=l.useContext(c.aM),y=l.useMemo(()=>{var e;let t;if(o){let i=!0!==o&&o.icons||f,s=m&&(null===(e=null==i?void 
0:i({status:m,errors:n,warnings:r}))||void 0===e?void 0:e[m]),c=m&&eA[m];t=!1!==s&&c?l.createElement("span",{className:a()("".concat(p,"-feedback-icon"),"".concat(p,"-feedback-icon-").concat(m))},s||l.createElement(c,null)):null}let i={status:m||"",errors:n,warnings:r,hasFeedback:!!o,feedbackIcon:t,isFormItemInput:!0};return d&&(i.status=(null!=m?m:h)||"",i.isFormItemInput=g,i.hasFeedback=!!(null!=o?o:b),i.feedbackIcon=void 0!==o?i.feedbackIcon:v),i},[m,o,d,g,h]);return l.createElement(c.aM.Provider,{value:y},t)}var eR=function(e,t){var n={};for(var r in e)Object.prototype.hasOwnProperty.call(e,r)&&0>t.indexOf(r)&&(n[r]=e[r]);if(null!=e&&"function"==typeof Object.getOwnPropertySymbols)for(var o=0,r=Object.getOwnPropertySymbols(e);ot.indexOf(r[o])&&Object.prototype.propertyIsEnumerable.call(e,r[o])&&(n[r[o]]=e[r[o]]);return n};function eN(e){let{prefixCls:t,className:n,rootClassName:r,style:o,help:i,errors:s,warnings:d,validateStatus:p,meta:f,hasFeedback:m,hidden:g,children:h,fieldId:b,required:v,isRequired:y,onSubItemMetaChange:E}=e,S=eR(e,["prefixCls","className","rootClassName","style","help","errors","warnings","validateStatus","meta","hasFeedback","hidden","children","fieldId","required","isRequired","onSubItemMetaChange"]),w="".concat(t,"-item"),{requiredMark:x}=l.useContext(c.q3),O=l.useRef(null),C=u(s),T=u(d),k=null!=i,A=!!(k||s.length||d.length),I=!!O.current&&(0,el.Z)(O.current),[R,N]=l.useState(null);(0,es.Z)(()=>{A&&O.current&&N(parseInt(getComputedStyle(O.current).marginBottom,10))},[A,I]);let _=function(){let e=arguments.length>0&&void 0!==arguments[0]&&arguments[0];return q(e?C:f.errors,e?T:f.warnings,f,"",!!m,p)}(),P=a()(w,n,r,{["".concat(w,"-with-help")]:k||C.length||T.length,["".concat(w,"-has-feedback")]:_&&m,["".concat(w,"-has-success")]:"success"===_,["".concat(w,"-has-warning")]:"warning"===_,["".concat(w,"-has-error")]:"error"===_,["".concat(w,"-is-validating")]:"validating"===_,["".concat(w,"-hidden")]:g});return 
l.createElement("div",{className:P,style:o,ref:O},l.createElement(eu.Z,Object.assign({className:"".concat(w,"-row")},(0,ec.Z)(S,["_internalItemRender","colon","dependencies","extra","fieldKey","getValueFromEvent","getValueProps","htmlFor","id","initialValue","isListField","label","labelAlign","labelCol","labelWrap","messageVariables","name","normalize","noStyle","preserve","requiredMark","rules","shouldUpdate","trigger","tooltip","validateFirst","validateTrigger","valuePropName","wrapperCol","validateDebounce"])),l.createElement(ex,Object.assign({htmlFor:b},e,{requiredMark:x,required:null!=v?v:y,prefixCls:t})),l.createElement(em,Object.assign({},e,f,{errors:C,warnings:T,prefixCls:t,status:_,help:i,marginBottom:R,onErrorVisibleChanged:e=>{e||N(null)}}),l.createElement(c.qI.Provider,{value:E},l.createElement(eI,{prefixCls:t,meta:f,errors:f.errors,warnings:f.warnings,hasFeedback:m,validateStatus:_},h)))),!!R&&l.createElement("div",{className:"".concat(w,"-margin-offset"),style:{marginBottom:-R}}))}let e_=l.memo(e=>{let{children:t}=e;return t},(e,t)=>(function(e,t){let n=Object.keys(e),r=Object.keys(t);return n.length===r.length&&n.every(n=>{let r=e[n],o=t[n];return r===o||"function"==typeof r||"function"==typeof o})})(e.control,t.control)&&e.update===t.update&&e.childProps.length===t.childProps.length&&e.childProps.every((e,n)=>e===t.childProps[n]));function eP(){return{errors:[],warnings:[],touched:!1,validating:!1,name:[],validated:!1}}let eM=function(e){let{name:t,noStyle:n,className:o,dependencies:i,prefixCls:s,shouldUpdate:u,rules:d,children:p,required:f,label:m,messageVariables:g,trigger:h="onChange",validateTrigger:b,hidden:v,help:y}=e,{getPrefixCls:E}=l.useContext(M.E_),{name:S}=l.useContext(c.q3),w=function(e){if("function"==typeof e)return e;let t=(0,eo.Z)(e);return t.length<=1?t[0]:t}(p),x="function"==typeof w,O=l.useContext(c.qI),{validateTrigger:C}=l.useContext(P.zb),T=void 
0!==b?b:C,k=null!=t,R=E("form",s),N=(0,I.Z)(R),[_,L,D]=A(R,N);(0,er.ln)("Form.Item");let j=l.useContext(P.ZM),F=l.useRef(),[B,U]=function(e){let[t,n]=l.useState(e),r=(0,l.useRef)(null),o=(0,l.useRef)([]),a=(0,l.useRef)(!1);return l.useEffect(()=>(a.current=!1,()=>{a.current=!0,ei.Z.cancel(r.current),r.current=null}),[]),[t,function(e){a.current||(null===r.current&&(o.current=[],r.current=(0,ei.Z)(()=>{r.current=null,n(e=>{let t=e;return o.current.forEach(e=>{t=e(t)}),t})})),o.current.push(e))}]}({}),[Z,z]=(0,ee.Z)(()=>eP()),H=(e,t)=>{U(n=>{let o=Object.assign({},n),a=[].concat((0,r.Z)(e.name.slice(0,-1)),(0,r.Z)(t)).join("__SPLIT__");return e.destroy?delete o[a]:o[a]=e,o})},[G,$]=l.useMemo(()=>{let e=(0,r.Z)(Z.errors),t=(0,r.Z)(Z.warnings);return Object.values(B).forEach(n=>{e.push.apply(e,(0,r.Z)(n.errors||[])),t.push.apply(t,(0,r.Z)(n.warnings||[]))}),[e,t]},[B,Z.errors,Z.warnings]),q=function(){let{itemRef:e}=l.useContext(c.q3),t=l.useRef({});return function(n,r){let o=r&&"object"==typeof r&&r.ref,a=n.join("_");return(t.current.name!==a||t.current.originRef!==o)&&(t.current.name=a,t.current.originRef=o,t.current.ref=(0,et.sQ)(e(n),o)),t.current.ref}}();function Y(t,r,i){return n&&!v?l.createElement(eI,{prefixCls:R,hasFeedback:e.hasFeedback,validateStatus:e.validateStatus,meta:Z,errors:G,warnings:$,noStyle:!0},t):l.createElement(eN,Object.assign({key:"row"},e,{className:a()(o,D,N,L),prefixCls:R,fieldId:r,isRequired:i,errors:G,warnings:$,meta:Z,onSubItemMetaChange:H}),t)}if(!k&&!x&&!i)return _(Y(w));let K={};return"string"==typeof m?K.label=m:t&&(K.label=String(t)),g&&(K=Object.assign(Object.assign({},K),g)),_(l.createElement(P.gN,Object.assign({},e,{messageVariables:K,trigger:h,validateTrigger:T,onMetaChange:e=>{let t=null==j?void 0:j.getKey(e.name);if(z(e.destroy?eP():e,!0),n&&!1!==y&&O){let n=e.name;if(e.destroy)n=F.current||n;else if(void 0!==t){let[e,o]=t;n=[e].concat((0,r.Z)(o)),F.current=n}O(e,n)}}}),(n,o,a)=>{let s=W(t).length&&o?o.name:[],c=V(s,S),p=void 
0!==f?f:!!(d&&d.some(e=>{if(e&&"object"==typeof e&&e.required&&!e.warningOnly)return!0;if("function"==typeof e){let t=e(a);return t&&t.required&&!t.warningOnly}return!1})),m=Object.assign({},n),g=null;if(Array.isArray(w)&&k)g=w;else if(x&&(!(u||i)||k));else if(!i||x||k){if((0,en.l$)(w)){let t=Object.assign(Object.assign({},w.props),m);if(t.id||(t.id=c),y||G.length>0||$.length>0||e.extra){let n=[];(y||G.length>0)&&n.push("".concat(c,"_help")),e.extra&&n.push("".concat(c,"_extra")),t["aria-describedby"]=n.join(" ")}G.length>0&&(t["aria-invalid"]="true"),p&&(t["aria-required"]="true"),(0,et.Yr)(w)&&(t.ref=q(s,w)),new Set([].concat((0,r.Z)(W(h)),(0,r.Z)(W(T)))).forEach(e=>{t[e]=function(){for(var t,n,r,o=arguments.length,a=Array(o),i=0;it.indexOf(r)&&(n[r]=e[r]);if(null!=e&&"function"==typeof Object.getOwnPropertySymbols)for(var o=0,r=Object.getOwnPropertySymbols(e);ot.indexOf(r[o])&&Object.prototype.propertyIsEnumerable.call(e,r[o])&&(n[r[o]]=e[r[o]]);return n};J.Item=eM,J.List=e=>{var{prefixCls:t,children:n}=e,r=eL(e,["prefixCls","children"]);let{getPrefixCls:o}=l.useContext(M.E_),a=o("form",t),i=l.useMemo(()=>({prefixCls:a,status:"error"}),[a]);return l.createElement(P.aV,Object.assign({},r),(e,t,r)=>l.createElement(c.Rk.Provider,{value:i},n(e.map(e=>Object.assign(Object.assign({},e),{fieldKey:e.key})),t,{errors:r.errors,warnings:r.warnings})))},J.ErrorList=_,J.useForm=K,J.useFormInstance=function(){let{form:e}=(0,l.useContext)(c.q3);return e},J.useWatch=P.qo,J.Provider=c.RV,J.create=()=>{};var eD=J},12519:function(e,t,n){var r=n(64090);t.Z=(0,r.createContext)(void 0)},24305:function(e,t,n){let r=(0,n(64090).createContext)({});t.Z=r},90791:function(e,t,n){var r=n(64090),o=n(16480),a=n.n(o),i=n(57499),l=n(24305),s=n(37148),c=function(e,t){var n={};for(var r in e)Object.prototype.hasOwnProperty.call(e,r)&&0>t.indexOf(r)&&(n[r]=e[r]);if(null!=e&&"function"==typeof Object.getOwnPropertySymbols)for(var 
o=0,r=Object.getOwnPropertySymbols(e);ot.indexOf(r[o])&&Object.prototype.propertyIsEnumerable.call(e,r[o])&&(n[r[o]]=e[r[o]]);return n};let u=["xs","sm","md","lg","xl","xxl"],d=r.forwardRef((e,t)=>{let{getPrefixCls:n,direction:o}=r.useContext(i.E_),{gutter:d,wrap:p}=r.useContext(l.Z),{prefixCls:f,span:m,order:g,offset:h,push:b,pull:v,className:y,children:E,flex:S,style:w}=e,x=c(e,["prefixCls","span","order","offset","push","pull","className","children","flex","style"]),O=n("col",f),[C,T,k]=(0,s.cG)(O),A={};u.forEach(t=>{let n={},r=e[t];"number"==typeof r?n.span=r:"object"==typeof r&&(n=r||{}),delete x[t],A=Object.assign(Object.assign({},A),{["".concat(O,"-").concat(t,"-").concat(n.span)]:void 0!==n.span,["".concat(O,"-").concat(t,"-order-").concat(n.order)]:n.order||0===n.order,["".concat(O,"-").concat(t,"-offset-").concat(n.offset)]:n.offset||0===n.offset,["".concat(O,"-").concat(t,"-push-").concat(n.push)]:n.push||0===n.push,["".concat(O,"-").concat(t,"-pull-").concat(n.pull)]:n.pull||0===n.pull,["".concat(O,"-").concat(t,"-flex-").concat(n.flex)]:n.flex||"auto"===n.flex,["".concat(O,"-rtl")]:"rtl"===o})});let I=a()(O,{["".concat(O,"-").concat(m)]:void 0!==m,["".concat(O,"-order-").concat(g)]:g,["".concat(O,"-offset-").concat(h)]:h,["".concat(O,"-push-").concat(b)]:b,["".concat(O,"-pull-").concat(v)]:v},y,A,T,k),R={};if(d&&d[0]>0){let e=d[0]/2;R.paddingLeft=e,R.paddingRight=e}return S&&(R.flex="number"==typeof S?"".concat(S," ").concat(S," auto"):/^\d+(\.\d+)?(px|em|rem|%)$/.test(S)?"0 0 ".concat(S):S,!1!==p||R.minWidth||(R.minWidth=0)),C(r.createElement("div",Object.assign({},x,{style:Object.assign(Object.assign({},R),w),className:I,ref:t}),E))});t.Z=d},5056:function(e,t,n){n.d(t,{Z:function(){return g}});var r=n(64090),o=n(16480),a=n.n(o),i=n(24750);let l=["xxl","xl","lg","md","sm","xs"],s=e=>({xs:"(max-width: ".concat(e.screenXSMax,"px)"),sm:"(min-width: ".concat(e.screenSM,"px)"),md:"(min-width: ".concat(e.screenMD,"px)"),lg:"(min-width: 
".concat(e.screenLG,"px)"),xl:"(min-width: ".concat(e.screenXL,"px)"),xxl:"(min-width: ".concat(e.screenXXL,"px)")}),c=e=>{let t=[].concat(l).reverse();return t.forEach((n,r)=>{let o=n.toUpperCase(),a="screen".concat(o,"Min"),i="screen".concat(o);if(!(e[a]<=e[i]))throw Error("".concat(a,"<=").concat(i," fails : !(").concat(e[a],"<=").concat(e[i],")"));if(rt.indexOf(r)&&(n[r]=e[r]);if(null!=e&&"function"==typeof Object.getOwnPropertySymbols)for(var o=0,r=Object.getOwnPropertySymbols(e);ot.indexOf(r[o])&&Object.prototype.propertyIsEnumerable.call(e,r[o])&&(n[r[o]]=e[r[o]]);return n};function m(e,t){let[n,o]=r.useState("string"==typeof e?e:""),a=()=>{if("string"==typeof e&&o(e),"object"==typeof e)for(let n=0;n{a()},[JSON.stringify(e),t]),n}var g=r.forwardRef((e,t)=>{let{prefixCls:n,justify:o,align:g,className:h,style:b,children:v,gutter:y=0,wrap:E}=e,S=f(e,["prefixCls","justify","align","className","style","children","gutter","wrap"]),{getPrefixCls:w,direction:x}=r.useContext(u.E_),[O,C]=r.useState({xs:!0,sm:!0,md:!0,lg:!0,xl:!0,xxl:!0}),[T,k]=r.useState({xs:!1,sm:!1,md:!1,lg:!1,xl:!1,xxl:!1}),A=m(g,T),I=m(o,T),R=r.useRef(y),N=function(){let[,e]=(0,i.ZP)(),t=s(c(e));return r.useMemo(()=>{let e=new Map,n=-1,r={};return{matchHandlers:{},dispatch:t=>(r=t,e.forEach(e=>e(r)),e.size>=1),subscribe(t){return e.size||this.register(),n+=1,e.set(n,t),t(r),n},unsubscribe(t){e.delete(t),e.size||this.unregister()},unregister(){Object.keys(t).forEach(e=>{let n=t[e],r=this.matchHandlers[n];null==r||r.mql.removeListener(null==r?void 0:r.listener)}),e.clear()},register(){Object.keys(t).forEach(e=>{let n=t[e],o=t=>{let{matches:n}=t;this.dispatch(Object.assign(Object.assign({},r),{[e]:n}))},a=window.matchMedia(n);a.addListener(o),this.matchHandlers[n]={mql:a,listener:o},o(a)})},responsiveMap:t}},[e])}();r.useEffect(()=>{let e=N.subscribe(e=>{k(e);let t=R.current||0;(!Array.isArray(t)&&"object"==typeof t||Array.isArray(t)&&("object"==typeof t[0]||"object"==typeof 
t[1]))&&C(e)});return()=>N.unsubscribe(e)},[]);let _=w("row",n),[P,M,L]=(0,p.VM)(_),D=(()=>{let e=[void 0,void 0];return(Array.isArray(y)?y:[y,void 0]).forEach((t,n)=>{if("object"==typeof t)for(let r=0;r0?-(D[0]/2):void 0;B&&(F.marginLeft=B,F.marginRight=B),[,F.rowGap]=D;let[U,Z]=D,z=r.useMemo(()=>({gutter:[U,Z],wrap:E}),[U,Z,E]);return P(r.createElement(d.Z.Provider,{value:z},r.createElement("div",Object.assign({},S,{className:j,style:Object.assign(Object.assign({},F),b),ref:t}),v)))})},37148:function(e,t,n){n.d(t,{VM:function(){return u},cG:function(){return d}});var r=n(8985),o=n(76585),a=n(80316);let i=e=>{let{componentCls:t}=e;return{[t]:{position:"relative",maxWidth:"100%",minHeight:1}}},l=(e,t)=>{let{componentCls:n,gridColumns:r}=e,o={};for(let e=r;e>=0;e--)0===e?(o["".concat(n).concat(t,"-").concat(e)]={display:"none"},o["".concat(n,"-push-").concat(e)]={insetInlineStart:"auto"},o["".concat(n,"-pull-").concat(e)]={insetInlineEnd:"auto"},o["".concat(n).concat(t,"-push-").concat(e)]={insetInlineStart:"auto"},o["".concat(n).concat(t,"-pull-").concat(e)]={insetInlineEnd:"auto"},o["".concat(n).concat(t,"-offset-").concat(e)]={marginInlineStart:0},o["".concat(n).concat(t,"-order-").concat(e)]={order:0}):(o["".concat(n).concat(t,"-").concat(e)]=[{"--ant-display":"block",display:"block"},{display:"var(--ant-display)",flex:"0 0 ".concat(e/r*100,"%"),maxWidth:"".concat(e/r*100,"%")}],o["".concat(n).concat(t,"-push-").concat(e)]={insetInlineStart:"".concat(e/r*100,"%")},o["".concat(n).concat(t,"-pull-").concat(e)]={insetInlineEnd:"".concat(e/r*100,"%")},o["".concat(n).concat(t,"-offset-").concat(e)]={marginInlineStart:"".concat(e/r*100,"%")},o["".concat(n).concat(t,"-order-").concat(e)]={order:e});return o},s=(e,t)=>l(e,t),c=(e,t,n)=>({["@media (min-width: ".concat((0,r.bf)(t),")")]:Object.assign({},s(e,n))}),u=(0,o.I$)("Grid",e=>{let{componentCls:t}=e;return{[t]:{display:"flex",flexFlow:"row wrap",minWidth:0,"&::before, 
&::after":{display:"flex"},"&-no-wrap":{flexWrap:"nowrap"},"&-start":{justifyContent:"flex-start"},"&-center":{justifyContent:"center"},"&-end":{justifyContent:"flex-end"},"&-space-between":{justifyContent:"space-between"},"&-space-around":{justifyContent:"space-around"},"&-space-evenly":{justifyContent:"space-evenly"},"&-top":{alignItems:"flex-start"},"&-middle":{alignItems:"center"},"&-bottom":{alignItems:"flex-end"}}}},()=>({})),d=(0,o.I$)("Grid",e=>{let t=(0,a.TS)(e,{gridColumns:24}),n={"-sm":t.screenSMMin,"-md":t.screenMDMin,"-lg":t.screenLGMin,"-xl":t.screenXLMin,"-xxl":t.screenXXLMin};return[i(t),s(t,""),s(t,"-xs"),Object.keys(n).map(e=>c(t,n[e],e)).reduce((e,t)=>Object.assign(Object.assign({},e),t),{})]},()=>({}))},88707:function(e,t,n){n.d(t,{Z:function(){return em}});var r=n(64090),o=n(20383),a=n(14749),i={icon:{tag:"svg",attrs:{viewBox:"64 64 896 896",focusable:"false"},children:[{tag:"path",attrs:{d:"M890.5 755.3L537.9 269.2c-12.8-17.6-39-17.6-51.7 0L133.5 755.3A8 8 0 00140 768h75c5.1 0 9.9-2.5 12.9-6.6L512 369.8l284.1 391.6c3 4.1 7.8 6.6 12.9 6.6h75c6.5 0 10.3-7.4 6.5-12.7z"}}]},name:"up",theme:"outlined"},l=n(60688),s=r.forwardRef(function(e,t){return r.createElement(l.Z,(0,a.Z)({},e,{ref:t,icon:i}))}),c=n(16480),u=n.n(c),d=n(50833),p=n(6976),f=n(80406),m=n(6787),g=n(47365),h=n(65127);function b(){return"function"==typeof BigInt}function v(e){return!e&&0!==e&&!Number.isNaN(e)||!String(e).trim()}function y(e){var t=e.trim(),n=t.startsWith("-");n&&(t=t.slice(1)),(t=t.replace(/(\.\d*[^0])0*$/,"$1").replace(/\.0*$/,"").replace(/^0+/,"")).startsWith(".")&&(t="0".concat(t));var r=t||"0",o=r.split("."),a=o[0]||"0",i=o[1]||"0";"0"===a&&"0"===i&&(n=!1);var l=n?"-":"";return{negative:n,negativeStr:l,trimStr:r,integerStr:a,decimalStr:i,fullStr:"".concat(l).concat(r)}}function E(e){var t=String(e);return!Number.isNaN(Number(t))&&t.includes("e")}function S(e){var t=String(e);if(E(e)){var n=Number(t.slice(t.indexOf("e-")+2)),r=t.match(/\.(\d+)/);return 
null!=r&&r[1]&&(n+=r[1].length),n}return t.includes(".")&&x(t)?t.length-t.indexOf(".")-1:0}function w(e){var t=String(e);if(E(e)){if(e>Number.MAX_SAFE_INTEGER)return String(b()?BigInt(e).toString():Number.MAX_SAFE_INTEGER);if(e=this.add(e.negate().toString()).toNumber()}},{key:"toNumber",value:function(){return this.isNaN()?NaN:Number(this.toString())}},{key:"toString",value:function(){var e=!(arguments.length>0)||void 0===arguments[0]||arguments[0];return e?this.isInvalidate()?"":y("".concat(this.getMark()).concat(this.getIntegerStr(),".").concat(this.getDecimalStr())).fullStr:this.origin}}]),e}(),C=function(){function e(t){if((0,g.Z)(this,e),(0,d.Z)(this,"origin",""),(0,d.Z)(this,"number",void 0),(0,d.Z)(this,"empty",void 0),v(t)){this.empty=!0;return}this.origin=String(t),this.number=Number(t)}return(0,h.Z)(e,[{key:"negate",value:function(){return new e(-this.toNumber())}},{key:"add",value:function(t){if(this.isInvalidate())return new e(t);var n=Number(t);if(Number.isNaN(n))return this;var r=this.number+n;if(r>Number.MAX_SAFE_INTEGER)return new e(Number.MAX_SAFE_INTEGER);if(rNumber.MAX_SAFE_INTEGER)return new e(Number.MAX_SAFE_INTEGER);if(r=this.add(e.negate().toString()).toNumber()}},{key:"toNumber",value:function(){return this.number}},{key:"toString",value:function(){var e=!(arguments.length>0)||void 0===arguments[0]||arguments[0];return e?this.isInvalidate()?"":w(this.number):this.origin}}]),e}();function T(e){return b()?new O(e):new C(e)}function k(e,t,n){var r=arguments.length>3&&void 0!==arguments[3]&&arguments[3];if(""===e)return"";var o=y(e),a=o.negativeStr,i=o.integerStr,l=o.decimalStr,s="".concat(t).concat(l),c="".concat(a).concat(i);if(n>=0){var u=Number(l[n]);return u>=5&&!r?k(T(e).add("".concat(a,"0.").concat("0".repeat(n)).concat(10-u)).toString(),t,n,r):0===n?c:"".concat(c).concat(t).concat(l.padEnd(n,"0").slice(0,n))}return".0"===s?c:"".concat(c).concat(s)}var A=n(90089),I=n(24800),R=n(74084),N=n(53850),_=n(76158),P=function(){var 
e=(0,r.useState)(!1),t=(0,f.Z)(e,2),n=t[0],o=t[1];return(0,I.Z)(function(){o((0,_.Z)())},[]),n},M=n(19223);function L(e){var t=e.prefixCls,n=e.upNode,o=e.downNode,i=e.upDisabled,l=e.downDisabled,s=e.onStep,c=r.useRef(),p=r.useRef([]),f=r.useRef();f.current=s;var m=function(){clearTimeout(c.current)},g=function(e,t){e.preventDefault(),m(),f.current(t),c.current=setTimeout(function e(){f.current(t),c.current=setTimeout(e,200)},600)};if(r.useEffect(function(){return function(){m(),p.current.forEach(function(e){return M.Z.cancel(e)})}},[]),P())return null;var h="".concat(t,"-handler"),b=u()(h,"".concat(h,"-up"),(0,d.Z)({},"".concat(h,"-up-disabled"),i)),v=u()(h,"".concat(h,"-down"),(0,d.Z)({},"".concat(h,"-down-disabled"),l)),y=function(){return p.current.push((0,M.Z)(m))},E={unselectable:"on",role:"button",onMouseUp:y,onMouseLeave:y};return r.createElement("div",{className:"".concat(h,"-wrap")},r.createElement("span",(0,a.Z)({},E,{onMouseDown:function(e){g(e,!0)},"aria-label":"Increase Value","aria-disabled":i,className:b}),n||r.createElement("span",{unselectable:"on",className:"".concat(t,"-handler-up-inner")})),r.createElement("span",(0,a.Z)({},E,{onMouseDown:function(e){g(e,!1)},"aria-label":"Decrease Value","aria-disabled":l,className:v}),o||r.createElement("span",{unselectable:"on",className:"".concat(t,"-handler-down-inner")})))}function D(e){var t="number"==typeof e?w(e):y(e).fullStr;return t.includes(".")?y(t.replace(/(\d)\.(\d)/g,"$1$2.")).fullStr:e+"0"}var j=n(8002),F=["prefixCls","className","style","min","max","step","defaultValue","value","disabled","readOnly","upHandler","downHandler","keyboard","wheel","controls","classNames","stringMode","parser","formatter","precision","decimalSeparator","onChange","onInput","onPressEnter","onStep","changeOnBlur"],B=["disabled","style","prefixCls","value","prefix","suffix","addonBefore","addonAfter","className","classNames"],U=function(e,t){return e||t.isEmpty()?t.toString():t.toNumber()},Z=function(e){var 
t=T(e);return t.isInvalidate()?null:t},z=r.forwardRef(function(e,t){var n,o,i,l,s,c=e.prefixCls,g=void 0===c?"rc-input-number":c,h=e.className,b=e.style,v=e.min,y=e.max,E=e.step,O=void 0===E?1:E,C=e.defaultValue,A=e.value,_=e.disabled,P=e.readOnly,j=e.upHandler,B=e.downHandler,z=e.keyboard,H=e.wheel,G=e.controls,$=(e.classNames,e.stringMode),W=e.parser,V=e.formatter,q=e.precision,Y=e.decimalSeparator,K=e.onChange,X=e.onInput,Q=e.onPressEnter,J=e.onStep,ee=e.changeOnBlur,et=void 0===ee||ee,en=(0,m.Z)(e,F),er="".concat(g,"-input"),eo=r.useRef(null),ea=r.useState(!1),ei=(0,f.Z)(ea,2),el=ei[0],es=ei[1],ec=r.useRef(!1),eu=r.useRef(!1),ed=r.useRef(!1),ep=r.useState(function(){return T(null!=A?A:C)}),ef=(0,f.Z)(ep,2),em=ef[0],eg=ef[1],eh=r.useCallback(function(e,t){return t?void 0:q>=0?q:Math.max(S(e),S(O))},[q,O]),eb=r.useCallback(function(e){var t=String(e);if(W)return W(t);var n=t;return Y&&(n=n.replace(Y,".")),n.replace(/[^\w.-]+/g,"")},[W,Y]),ev=r.useRef(""),ey=r.useCallback(function(e,t){if(V)return V(e,{userTyping:t,input:String(ev.current)});var n="number"==typeof e?w(e):e;if(!t){var r=eh(n,t);x(n)&&(Y||r>=0)&&(n=k(n,Y||".",r))}return n},[V,eh,Y]),eE=r.useState(function(){var e=null!=C?C:A;return em.isInvalidate()&&["string","number"].includes((0,p.Z)(e))?Number.isNaN(e)?"":e:ey(em.toString(),!1)}),eS=(0,f.Z)(eE,2),ew=eS[0],ex=eS[1];function eO(e,t){ex(ey(e.isInvalidate()?e.toString(!1):e.toString(!t),t))}ev.current=ew;var eC=r.useMemo(function(){return Z(y)},[y,q]),eT=r.useMemo(function(){return Z(v)},[v,q]),ek=r.useMemo(function(){return!(!eC||!em||em.isInvalidate())&&eC.lessEquals(em)},[eC,em]),eA=r.useMemo(function(){return!(!eT||!em||em.isInvalidate())&&em.lessEquals(eT)},[eT,em]),eI=(n=eo.current,o=(0,r.useRef)(null),[function(){try{var e=n.selectionStart,t=n.selectionEnd,r=n.value,a=r.substring(0,e),i=r.substring(t);o.current={start:e,end:t,value:r,beforeTxt:a,afterTxt:i}}catch(e){}},function(){if(n&&o.current&&el)try{var 
e=n.value,t=o.current,r=t.beforeTxt,a=t.afterTxt,i=t.start,l=e.length;if(e.endsWith(a))l=e.length-o.current.afterTxt.length;else if(e.startsWith(r))l=r.length;else{var s=r[i-1],c=e.indexOf(s,i-1);-1!==c&&(l=c+1)}n.setSelectionRange(l,l)}catch(e){(0,N.ZP)(!1,"Something warning of cursor restore. Please fire issue about this: ".concat(e.message))}}]),eR=(0,f.Z)(eI,2),eN=eR[0],e_=eR[1],eP=function(e){return eC&&!e.lessEquals(eC)?eC:eT&&!eT.lessEquals(e)?eT:null},eM=function(e){return!eP(e)},eL=function(e,t){var n=e,r=eM(n)||n.isEmpty();if(n.isEmpty()||t||(n=eP(n)||n,r=!0),!P&&!_&&r){var o,a=n.toString(),i=eh(a,t);return i>=0&&!eM(n=T(k(a,".",i)))&&(n=T(k(a,".",i,!0))),n.equals(em)||(o=n,void 0===A&&eg(o),null==K||K(n.isEmpty()?null:U($,n)),void 0===A&&eO(n,t)),n}return em},eD=(i=(0,r.useRef)(0),l=function(){M.Z.cancel(i.current)},(0,r.useEffect)(function(){return l},[]),function(e){l(),i.current=(0,M.Z)(function(){e()})}),ej=function e(t){if(eN(),ev.current=t,ex(t),!eu.current){var n=T(eb(t));n.isNaN()||eL(n,!0)}null==X||X(t),eD(function(){var n=t;W||(n=t.replace(/。/g,".")),n!==t&&e(n)})},eF=function(e){if((!e||!ek)&&(e||!eA)){ec.current=!1;var t,n=T(ed.current?D(O):O);e||(n=n.negate());var r=eL((em||T(0)).add(n.toString()),!1);null==J||J(U($,r),{offset:ed.current?D(O):O,type:e?"up":"down"}),null===(t=eo.current)||void 0===t||t.focus()}},eB=function(e){var t=T(eb(ew)),n=t;n=t.isNaN()?eL(em,e):eL(t,e),void 0!==A?eO(em,!1):n.isNaN()||eO(n,!1)};return r.useEffect(function(){var e=function(e){!1!==H&&(eF(e.deltaY<0),e.preventDefault())},t=eo.current;if(t)return t.addEventListener("wheel",e),function(){return t.removeEventListener("wheel",e)}},[eF]),(0,I.o)(function(){em.isInvalidate()||eO(em,!1)},[q,V]),(0,I.o)(function(){var e=T(A);eg(e);var 
t=T(eb(ew));e.equals(t)&&ec.current&&!V||eO(e,ec.current)},[A]),(0,I.o)(function(){V&&e_()},[ew]),r.createElement("div",{className:u()(g,h,(s={},(0,d.Z)(s,"".concat(g,"-focused"),el),(0,d.Z)(s,"".concat(g,"-disabled"),_),(0,d.Z)(s,"".concat(g,"-readonly"),P),(0,d.Z)(s,"".concat(g,"-not-a-number"),em.isNaN()),(0,d.Z)(s,"".concat(g,"-out-of-range"),!em.isInvalidate()&&!eM(em)),s)),style:b,onFocus:function(){es(!0)},onBlur:function(){et&&eB(!1),es(!1),ec.current=!1},onKeyDown:function(e){var t=e.key,n=e.shiftKey;ec.current=!0,ed.current=n,"Enter"===t&&(eu.current||(ec.current=!1),eB(!1),null==Q||Q(e)),!1!==z&&!eu.current&&["Up","ArrowUp","Down","ArrowDown"].includes(t)&&(eF("Up"===t||"ArrowUp"===t),e.preventDefault())},onKeyUp:function(){ec.current=!1,ed.current=!1},onCompositionStart:function(){eu.current=!0},onCompositionEnd:function(){eu.current=!1,ej(eo.current.value)},onBeforeInput:function(){ec.current=!0}},(void 0===G||G)&&r.createElement(L,{prefixCls:g,upNode:j,downNode:B,upDisabled:ek,downDisabled:eA,onStep:eF}),r.createElement("div",{className:"".concat(er,"-wrap")},r.createElement("input",(0,a.Z)({autoComplete:"off",role:"spinbutton","aria-valuemin":v,"aria-valuemax":y,"aria-valuenow":em.isInvalidate()?null:em.toString(),step:O},en,{ref:(0,R.sQ)(eo,t),className:er,value:ew,onChange:function(e){ej(e.target.value)},disabled:_,readOnly:P}))))}),H=r.forwardRef(function(e,t){var n=e.disabled,o=e.style,i=e.prefixCls,l=e.value,s=e.prefix,c=e.suffix,u=e.addonBefore,d=e.addonAfter,p=e.className,f=e.classNames,g=(0,m.Z)(e,B),h=r.useRef(null);return r.createElement(A.Q,{className:p,triggerFocus:function(e){h.current&&(0,j.nH)(h.current,e)},prefixCls:i,value:l,disabled:n,style:o,prefix:s,suffix:c,addonAfter:d,addonBefore:u,classNames:f,components:{affixWrapper:"div",groupWrapper:"div",wrapper:"div",groupAddon:"div"}},r.createElement(z,(0,a.Z)({prefixCls:i,disabled:n,ref:(0,R.sQ)(h,t),className:null==f?void 0:f.input},g)))});H.displayName="InputNumber";var 
G=n(47794),$=n(57499),W=n(54165),V=n(17094),q=n(92935),Y=n(10693),K=n(47137),X=n(8443),Q=n(92801),J=n(8985),ee=n(94759),et=n(85980),en=n(61892),er=n(11303),eo=n(12288),ea=n(76585),ei=n(80316),el=n(6336);let es=(e,t)=>{let{componentCls:n,borderRadiusSM:r,borderRadiusLG:o}=e,a="lg"===t?o:r;return{["&-".concat(t)]:{["".concat(n,"-handler-wrap")]:{borderStartEndRadius:a,borderEndEndRadius:a},["".concat(n,"-handler-up")]:{borderStartEndRadius:a},["".concat(n,"-handler-down")]:{borderEndEndRadius:a}}}},ec=e=>{let{componentCls:t,lineWidth:n,lineType:r,borderRadius:o,fontSizeLG:a,controlHeightLG:i,controlHeightSM:l,colorError:s,paddingInlineSM:c,paddingBlockSM:u,paddingBlockLG:d,paddingInlineLG:p,colorTextDescription:f,motionDurationMid:m,handleHoverColor:g,paddingInline:h,paddingBlock:b,handleBg:v,handleActiveBg:y,colorTextDisabled:E,borderRadiusSM:S,borderRadiusLG:w,controlWidth:x,handleOpacity:O,handleBorderColor:C,filledHandleBg:T,lineHeightLG:k,calc:A}=e;return[{[t]:Object.assign(Object.assign(Object.assign(Object.assign(Object.assign(Object.assign(Object.assign({},(0,er.Wf)(e)),(0,ee.ik)(e)),{display:"inline-block",width:x,margin:0,padding:0,borderRadius:o}),(0,en.qG)(e,{["".concat(t,"-handler-wrap")]:{background:v,["".concat(t,"-handler-down")]:{borderBlockStart:"".concat((0,J.bf)(n)," ").concat(r," ").concat(C)}}})),(0,en.H8)(e,{["".concat(t,"-handler-wrap")]:{background:T,["".concat(t,"-handler-down")]:{borderBlockStart:"".concat((0,J.bf)(n)," ").concat(r," ").concat(C)}},"&:focus-within":{["".concat(t,"-handler-wrap")]:{background:v}}})),(0,en.Mu)(e)),{"&-rtl":{direction:"rtl",["".concat(t,"-input")]:{direction:"rtl"}},"&-lg":{padding:0,fontSize:a,lineHeight:k,borderRadius:w,["input".concat(t,"-input")]:{height:A(i).sub(A(n).mul(2)).equal(),padding:"".concat((0,J.bf)(d)," ").concat((0,J.bf)(p))}},"&-sm":{padding:0,borderRadius:S,["input".concat(t,"-input")]:{height:A(l).sub(A(n).mul(2)).equal(),padding:"".concat((0,J.bf)(u)," 
").concat((0,J.bf)(c))}},"&-out-of-range":{["".concat(t,"-input-wrap")]:{input:{color:s}}},"&-group":Object.assign(Object.assign(Object.assign({},(0,er.Wf)(e)),(0,ee.s7)(e)),{"&-wrapper":Object.assign(Object.assign(Object.assign({display:"inline-block",textAlign:"start",verticalAlign:"top",["".concat(t,"-affix-wrapper")]:{width:"100%"},"&-lg":{["".concat(t,"-group-addon")]:{borderRadius:w,fontSize:e.fontSizeLG}},"&-sm":{["".concat(t,"-group-addon")]:{borderRadius:S}}},(0,en.ir)(e)),(0,en.S5)(e)),{["&:not(".concat(t,"-compact-first-item):not(").concat(t,"-compact-last-item)").concat(t,"-compact-item")]:{["".concat(t,", ").concat(t,"-group-addon")]:{borderRadius:0}},["&:not(".concat(t,"-compact-last-item)").concat(t,"-compact-first-item")]:{["".concat(t,", ").concat(t,"-group-addon")]:{borderStartEndRadius:0,borderEndEndRadius:0}},["&:not(".concat(t,"-compact-first-item)").concat(t,"-compact-last-item")]:{["".concat(t,", ").concat(t,"-group-addon")]:{borderStartStartRadius:0,borderEndStartRadius:0}}})}),["&-disabled ".concat(t,"-input")]:{cursor:"not-allowed"},[t]:{"&-input":Object.assign(Object.assign(Object.assign(Object.assign({},(0,er.Wf)(e)),{width:"100%",padding:"".concat((0,J.bf)(b)," ").concat((0,J.bf)(h)),textAlign:"start",backgroundColor:"transparent",border:0,borderRadius:o,outline:0,transition:"all ".concat(m," linear"),appearance:"textfield",fontSize:"inherit"}),(0,ee.nz)(e.colorTextPlaceholder)),{'&[type="number"]::-webkit-inner-spin-button, &[type="number"]::-webkit-outer-spin-button':{margin:0,webkitAppearance:"none",appearance:"none"}})}})},{[t]:Object.assign(Object.assign(Object.assign({["&:hover ".concat(t,"-handler-wrap, &-focused 
").concat(t,"-handler-wrap")]:{opacity:1},["".concat(t,"-handler-wrap")]:{position:"absolute",insetBlockStart:0,insetInlineEnd:0,width:e.handleWidth,height:"100%",borderStartStartRadius:0,borderStartEndRadius:o,borderEndEndRadius:o,borderEndStartRadius:0,opacity:O,display:"flex",flexDirection:"column",alignItems:"stretch",transition:"opacity ".concat(m," linear ").concat(m),["".concat(t,"-handler")]:{display:"flex",alignItems:"center",justifyContent:"center",flex:"auto",height:"40%",["\n ".concat(t,"-handler-up-inner,\n ").concat(t,"-handler-down-inner\n ")]:{marginInlineEnd:0,fontSize:e.handleFontSize}}},["".concat(t,"-handler")]:{height:"50%",overflow:"hidden",color:f,fontWeight:"bold",lineHeight:0,textAlign:"center",cursor:"pointer",borderInlineStart:"".concat((0,J.bf)(n)," ").concat(r," ").concat(C),transition:"all ".concat(m," linear"),"&:active":{background:y},"&:hover":{height:"60%",["\n ".concat(t,"-handler-up-inner,\n ").concat(t,"-handler-down-inner\n ")]:{color:g}},"&-up-inner, &-down-inner":Object.assign(Object.assign({},(0,er.Ro)()),{color:f,transition:"all ".concat(m," linear"),userSelect:"none"})},["".concat(t,"-handler-up")]:{borderStartEndRadius:o},["".concat(t,"-handler-down")]:{borderEndEndRadius:o}},es(e,"lg")),es(e,"sm")),{"&-disabled, &-readonly":{["".concat(t,"-handler-wrap")]:{display:"none"},["".concat(t,"-input")]:{color:"inherit"}},["\n ".concat(t,"-handler-up-disabled,\n ").concat(t,"-handler-down-disabled\n ")]:{cursor:"not-allowed"},["\n ".concat(t,"-handler-up-disabled:hover &-handler-up-inner,\n ").concat(t,"-handler-down-disabled:hover &-handler-down-inner\n ")]:{color:E}})}]},eu=e=>{let{componentCls:t,paddingBlock:n,paddingInline:r,inputAffixPadding:o,controlWidth:a,borderRadiusLG:i,borderRadiusSM:l,paddingInlineLG:s,paddingInlineSM:c,paddingBlockLG:u,paddingBlockSM:d}=e;return{["".concat(t,"-affix-wrapper")]:Object.assign(Object.assign({["input".concat(t,"-input")]:{padding:"".concat((0,J.bf)(n)," 
0")}},(0,ee.ik)(e)),{position:"relative",display:"inline-flex",width:a,padding:0,paddingInlineStart:r,"&-lg":{borderRadius:i,paddingInlineStart:s,["input".concat(t,"-input")]:{padding:"".concat((0,J.bf)(u)," 0")}},"&-sm":{borderRadius:l,paddingInlineStart:c,["input".concat(t,"-input")]:{padding:"".concat((0,J.bf)(d)," 0")}},["&:not(".concat(t,"-disabled):hover")]:{zIndex:1},"&-focused, &:focus":{zIndex:1},["&-disabled > ".concat(t,"-disabled")]:{background:"transparent"},["> div".concat(t)]:{width:"100%",border:"none",outline:"none",["&".concat(t,"-focused")]:{boxShadow:"none !important"}},"&::before":{display:"inline-block",width:0,visibility:"hidden",content:'"\\a0"'},["".concat(t,"-handler-wrap")]:{zIndex:2},[t]:{color:"inherit","&-prefix, &-suffix":{display:"flex",flex:"none",alignItems:"center",pointerEvents:"none"},"&-prefix":{marginInlineEnd:o},"&-suffix":{position:"absolute",insetBlockStart:0,insetInlineEnd:0,zIndex:1,height:"100%",marginInlineEnd:r,marginInlineStart:o}}})}};var ed=(0,ea.I$)("InputNumber",e=>{let t=(0,ei.TS)(e,(0,et.e)(e));return[ec(t),eu(t),(0,eo.c)(t)]},e=>{var t;let n=null!==(t=e.handleVisible)&&void 0!==t?t:"auto";return Object.assign(Object.assign({},(0,et.T)(e)),{controlWidth:90,handleWidth:e.controlHeightSM-2*e.lineWidth,handleFontSize:e.fontSize/2,handleVisible:n,handleActiveBg:e.colorFillAlter,handleBg:e.colorBgContainer,filledHandleBg:new el.C(e.colorFillSecondary).onBackground(e.colorBgContainer).toHexString(),handleHoverColor:e.colorPrimary,handleBorderColor:e.colorBorder,handleOpacity:!0===n?1:0})},{unitless:{handleOpacity:!0}}),ep=function(e,t){var n={};for(var r in e)Object.prototype.hasOwnProperty.call(e,r)&&0>t.indexOf(r)&&(n[r]=e[r]);if(null!=e&&"function"==typeof Object.getOwnPropertySymbols)for(var o=0,r=Object.getOwnPropertySymbols(e);ot.indexOf(r[o])&&Object.prototype.propertyIsEnumerable.call(e,r[o])&&(n[r[o]]=e[r[o]]);return n};let 
ef=r.forwardRef((e,t)=>{let{getPrefixCls:n,direction:a}=r.useContext($.E_),i=r.useRef(null);r.useImperativeHandle(t,()=>i.current);let{className:l,rootClassName:c,size:d,disabled:p,prefixCls:f,addonBefore:m,addonAfter:g,prefix:h,bordered:b,readOnly:v,status:y,controls:E,variant:S}=e,w=ep(e,["className","rootClassName","size","disabled","prefixCls","addonBefore","addonAfter","prefix","bordered","readOnly","status","controls","variant"]),x=n("input-number",f),O=(0,q.Z)(x),[C,T,k]=ed(x,O),{compactSize:A,compactItemClassnames:I}=(0,Q.ri)(x,a),R=r.createElement(s,{className:"".concat(x,"-handler-up-inner")}),N=r.createElement(o.Z,{className:"".concat(x,"-handler-down-inner")});"object"==typeof E&&(R=void 0===E.upIcon?R:r.createElement("span",{className:"".concat(x,"-handler-up-inner")},E.upIcon),N=void 0===E.downIcon?N:r.createElement("span",{className:"".concat(x,"-handler-down-inner")},E.downIcon));let{hasFeedback:_,status:P,isFormItemInput:M,feedbackIcon:L}=r.useContext(K.aM),D=(0,G.F)(P,y),j=(0,Y.Z)(e=>{var t;return null!==(t=null!=d?d:A)&&void 0!==t?t:e}),F=r.useContext(V.Z),[B,U]=(0,X.Z)(S,b),Z=_&&r.createElement(r.Fragment,null,L),z=u()({["".concat(x,"-lg")]:"large"===j,["".concat(x,"-sm")]:"small"===j,["".concat(x,"-rtl")]:"rtl"===a,["".concat(x,"-in-form-item")]:M},T),W="".concat(x,"-group");return C(r.createElement(H,Object.assign({ref:i,disabled:null!=p?p:F,className:u()(k,O,l,c,I),upHandler:R,downHandler:N,prefixCls:x,readOnly:v,controls:"boolean"==typeof E?E:void 
0,prefix:h,suffix:Z,addonAfter:g&&r.createElement(Q.BR,null,r.createElement(K.Ux,{override:!0,status:!0},g)),addonBefore:m&&r.createElement(Q.BR,null,r.createElement(K.Ux,{override:!0,status:!0},m)),classNames:{input:z,variant:u()({["".concat(x,"-").concat(B)]:U},(0,G.Z)(x,D,_)),affixWrapper:u()({["".concat(x,"-affix-wrapper-sm")]:"small"===j,["".concat(x,"-affix-wrapper-lg")]:"large"===j,["".concat(x,"-affix-wrapper-rtl")]:"rtl"===a},T),wrapper:u()({["".concat(W,"-rtl")]:"rtl"===a},T),groupWrapper:u()({["".concat(x,"-group-wrapper-sm")]:"small"===j,["".concat(x,"-group-wrapper-lg")]:"large"===j,["".concat(x,"-group-wrapper-rtl")]:"rtl"===a,["".concat(x,"-group-wrapper-").concat(B)]:U},(0,G.Z)("".concat(x,"-group-wrapper"),D,_),T)}},w)))});ef._InternalPanelDoNotUseOrYouWillBeFired=e=>r.createElement(W.ZP,{theme:{components:{InputNumber:{handleVisible:!0}}}},r.createElement(ef,Object.assign({},e)));var em=ef},88921:function(e,t,n){n.d(t,{Z:function(){return w},n:function(){return S}});var r=n(64090),o=n(16480),a=n.n(o),i=n(90089),l=n(74084),s=n(47794),c=n(57499),u=n(17094),d=n(10693),p=n(47137),f=n(92801),m=n(52274),g=n(94759),h=n(92935),b=n(8443),v=n(77136),y=e=>{let t;return"object"==typeof e&&(null==e?void 0:e.clearIcon)?t=e:e&&(t={clearIcon:r.createElement(v.Z,null)}),t},E=function(e,t){var n={};for(var r in e)Object.prototype.hasOwnProperty.call(e,r)&&0>t.indexOf(r)&&(n[r]=e[r]);if(null!=e&&"function"==typeof Object.getOwnPropertySymbols)for(var o=0,r=Object.getOwnPropertySymbols(e);ot.indexOf(r[o])&&Object.prototype.propertyIsEnumerable.call(e,r[o])&&(n[r[o]]=e[r[o]]);return n};function S(e,t){if(!e)return;e.focus(t);let{cursor:n}=t||{};if(n){let t=e.value.length;switch(n){case"start":e.setSelectionRange(0,0);break;case"end":e.setSelectionRange(t,t);break;default:e.setSelectionRange(0,t)}}}var w=(0,r.forwardRef)((e,t)=>{var 
n;let{prefixCls:o,bordered:v=!0,status:S,size:w,disabled:x,onBlur:O,onFocus:C,suffix:T,allowClear:k,addonAfter:A,addonBefore:I,className:R,style:N,styles:_,rootClassName:P,onChange:M,classNames:L,variant:D}=e,j=E(e,["prefixCls","bordered","status","size","disabled","onBlur","onFocus","suffix","allowClear","addonAfter","addonBefore","className","style","styles","rootClassName","onChange","classNames","variant"]),{getPrefixCls:F,direction:B,input:U}=r.useContext(c.E_),Z=F("input",o),z=(0,r.useRef)(null),H=(0,h.Z)(Z),[G,$,W]=(0,g.ZP)(Z,H),{compactSize:V,compactItemClassnames:q}=(0,f.ri)(Z,B),Y=(0,d.Z)(e=>{var t;return null!==(t=null!=w?w:V)&&void 0!==t?t:e}),K=r.useContext(u.Z),{status:X,hasFeedback:Q,feedbackIcon:J}=(0,r.useContext)(p.aM),ee=(0,s.F)(X,S),et=!!(e.prefix||e.suffix||e.allowClear||e.showCount)||!!Q;(0,r.useRef)(et);let en=(0,m.Z)(z,!0),er=(Q||T)&&r.createElement(r.Fragment,null,T,Q&&J),eo=y(k),[ea,ei]=(0,b.Z)(D,v);return G(r.createElement(i.Z,Object.assign({ref:(0,l.sQ)(t,z),prefixCls:Z,autoComplete:null==U?void 0:U.autoComplete},j,{disabled:null!=x?x:K,onBlur:e=>{en(),null==O||O(e)},onFocus:e=>{en(),null==C||C(e)},style:Object.assign(Object.assign({},null==U?void 0:U.style),N),styles:Object.assign(Object.assign({},null==U?void 0:U.styles),_),suffix:er,allowClear:eo,className:a()(R,P,W,H,q,null==U?void 0:U.className),onChange:e=>{en(),null==M||M(e)},addonAfter:A&&r.createElement(f.BR,null,r.createElement(p.Ux,{override:!0,status:!0},A)),addonBefore:I&&r.createElement(f.BR,null,r.createElement(p.Ux,{override:!0,status:!0},I)),classNames:Object.assign(Object.assign(Object.assign({},L),null==U?void 0:U.classNames),{input:a()({["".concat(Z,"-sm")]:"small"===Y,["".concat(Z,"-lg")]:"large"===Y,["".concat(Z,"-rtl")]:"rtl"===B},null==L?void 0:L.input,null===(n=null==U?void 0:U.classNames)||void 0===n?void 
0:n.input,$),variant:a()({["".concat(Z,"-").concat(ea)]:ei},(0,s.Z)(Z,ee)),affixWrapper:a()({["".concat(Z,"-affix-wrapper-sm")]:"small"===Y,["".concat(Z,"-affix-wrapper-lg")]:"large"===Y,["".concat(Z,"-affix-wrapper-rtl")]:"rtl"===B},$),wrapper:a()({["".concat(Z,"-group-rtl")]:"rtl"===B},$),groupWrapper:a()({["".concat(Z,"-group-wrapper-sm")]:"small"===Y,["".concat(Z,"-group-wrapper-lg")]:"large"===Y,["".concat(Z,"-group-wrapper-rtl")]:"rtl"===B,["".concat(Z,"-group-wrapper-").concat(ea)]:ei},(0,s.Z)("".concat(Z,"-group-wrapper"),ee,Q),$)})})))})},78578:function(e,t,n){n.d(t,{Z:function(){return F}});var r,o=n(64090),a=n(77136),i=n(16480),l=n.n(i),s=n(14749),c=n(50833),u=n(5239),d=n(63787),p=n(80406),f=n(6787),m=n(90089),g=n(44607),h=n(8002),b=n(44329),v=n(6976),y=n(46505),E=n(24800),S=n(19223),w=["letter-spacing","line-height","padding-top","padding-bottom","font-family","font-weight","font-size","font-variant","text-rendering","text-transform","width","text-indent","padding-left","padding-right","border-width","box-sizing","word-break","white-space"],x={},O=["prefixCls","onPressEnter","defaultValue","value","autoSize","onResize","className","style","disabled","onChange","onInternalAutoSize"],C=o.forwardRef(function(e,t){var n=e.prefixCls,a=(e.onPressEnter,e.defaultValue),i=e.value,d=e.autoSize,m=e.onResize,g=e.className,h=e.style,C=e.disabled,T=e.onChange,k=(e.onInternalAutoSize,(0,f.Z)(e,O)),A=(0,b.Z)(a,{value:i,postState:function(e){return null!=e?e:""}}),I=(0,p.Z)(A,2),R=I[0],N=I[1],_=o.useRef();o.useImperativeHandle(t,function(){return{textArea:_.current}});var P=o.useMemo(function(){return d&&"object"===(0,v.Z)(d)?[d.minRows,d.maxRows]:[]},[d]),M=(0,p.Z)(P,2),L=M[0],D=M[1],j=!!d,F=function(){try{if(document.activeElement===_.current){var 
e=_.current,t=e.selectionStart,n=e.selectionEnd,r=e.scrollTop;_.current.setSelectionRange(t,n),_.current.scrollTop=r}}catch(e){}},B=o.useState(2),U=(0,p.Z)(B,2),Z=U[0],z=U[1],H=o.useState(),G=(0,p.Z)(H,2),$=G[0],W=G[1],V=function(){z(0)};(0,E.Z)(function(){j&&V()},[i,L,D,j]),(0,E.Z)(function(){if(0===Z)z(1);else if(1===Z){var e=function(e){var t,n=arguments.length>1&&void 0!==arguments[1]&&arguments[1],o=arguments.length>2&&void 0!==arguments[2]?arguments[2]:null,a=arguments.length>3&&void 0!==arguments[3]?arguments[3]:null;r||((r=document.createElement("textarea")).setAttribute("tab-index","-1"),r.setAttribute("aria-hidden","true"),document.body.appendChild(r)),e.getAttribute("wrap")?r.setAttribute("wrap",e.getAttribute("wrap")):r.removeAttribute("wrap");var i=function(e){var t=arguments.length>1&&void 0!==arguments[1]&&arguments[1],n=e.getAttribute("id")||e.getAttribute("data-reactid")||e.getAttribute("name");if(t&&x[n])return x[n];var r=window.getComputedStyle(e),o=r.getPropertyValue("box-sizing")||r.getPropertyValue("-moz-box-sizing")||r.getPropertyValue("-webkit-box-sizing"),a=parseFloat(r.getPropertyValue("padding-bottom"))+parseFloat(r.getPropertyValue("padding-top")),i=parseFloat(r.getPropertyValue("border-bottom-width"))+parseFloat(r.getPropertyValue("border-top-width")),l={sizingStyle:w.map(function(e){return"".concat(e,":").concat(r.getPropertyValue(e))}).join(";"),paddingSize:a,borderSize:i,boxSizing:o};return t&&n&&(x[n]=l),l}(e,n),l=i.paddingSize,s=i.borderSize,c=i.boxSizing,u=i.sizingStyle;r.setAttribute("style","".concat(u,";").concat("\n min-height:0 !important;\n max-height:none !important;\n height:0 !important;\n visibility:hidden !important;\n overflow:hidden !important;\n position:absolute !important;\n z-index:-1000 !important;\n top:0 !important;\n right:0 !important;\n pointer-events: none !important;\n")),r.value=e.value||e.placeholder||"";var d=void 0,p=void 
0,f=r.scrollHeight;if("border-box"===c?f+=s:"content-box"===c&&(f-=l),null!==o||null!==a){r.value=" ";var m=r.scrollHeight-l;null!==o&&(d=m*o,"border-box"===c&&(d=d+l+s),f=Math.max(d,f)),null!==a&&(p=m*a,"border-box"===c&&(p=p+l+s),t=f>p?"":"hidden",f=Math.min(p,f))}var g={height:f,overflowY:t,resize:"none"};return d&&(g.minHeight=d),p&&(g.maxHeight=p),g}(_.current,!1,L,D);z(2),W(e)}else F()},[Z]);var q=o.useRef(),Y=function(){S.Z.cancel(q.current)};o.useEffect(function(){return Y},[]);var K=(0,u.Z)((0,u.Z)({},h),j?$:null);return(0===Z||1===Z)&&(K.overflowY="hidden",K.overflowX="hidden"),o.createElement(y.Z,{onResize:function(e){2===Z&&(null==m||m(e),d&&(Y(),q.current=(0,S.Z)(function(){V()})))},disabled:!(d||m)},o.createElement("textarea",(0,s.Z)({},k,{ref:_,style:K,className:l()(n,g,(0,c.Z)({},"".concat(n,"-disabled"),C)),disabled:C,value:R,onChange:function(e){N(e.target.value),null==T||T(e)}})))}),T=["defaultValue","value","onFocus","onBlur","onChange","allowClear","maxLength","onCompositionStart","onCompositionEnd","suffix","prefixCls","showCount","count","className","style","disabled","hidden","classNames","styles","onResize"],k=o.forwardRef(function(e,t){var n,r,a,i=e.defaultValue,v=e.value,y=e.onFocus,E=e.onBlur,S=e.onChange,w=e.allowClear,x=e.maxLength,O=e.onCompositionStart,k=e.onCompositionEnd,A=e.suffix,I=e.prefixCls,R=void 0===I?"rc-textarea":I,N=e.showCount,_=e.count,P=e.className,M=e.style,L=e.disabled,D=e.hidden,j=e.classNames,F=e.styles,B=e.onResize,U=(0,f.Z)(e,T),Z=(0,b.Z)(i,{value:v,defaultValue:i}),z=(0,p.Z)(Z,2),H=z[0],G=z[1],$=null==H?"":String(H),W=o.useState(!1),V=(0,p.Z)(W,2),q=V[0],Y=V[1],K=o.useRef(!1),X=o.useState(null),Q=(0,p.Z)(X,2),J=Q[0],ee=Q[1],et=(0,o.useRef)(null),en=function(){var e;return null===(e=et.current)||void 0===e?void 
0:e.textArea},er=function(){en().focus()};(0,o.useImperativeHandle)(t,function(){return{resizableTextArea:et.current,focus:er,blur:function(){en().blur()}}}),(0,o.useEffect)(function(){Y(function(e){return!L&&e})},[L]);var eo=o.useState(null),ea=(0,p.Z)(eo,2),ei=ea[0],el=ea[1];o.useEffect(function(){if(ei){var e;(e=en()).setSelectionRange.apply(e,(0,d.Z)(ei))}},[ei]);var es=(0,g.Z)(_,N),ec=null!==(n=es.max)&&void 0!==n?n:x,eu=Number(ec)>0,ed=es.strategy($),ep=!!ec&&ed>ec,ef=function(e,t){var n=t;!K.current&&es.exceedFormatter&&es.max&&es.strategy(t)>es.max&&(n=es.exceedFormatter(t,{max:es.max}),t!==n&&el([en().selectionStart||0,en().selectionEnd||0])),G(n),(0,h.rJ)(e.currentTarget,e,S,n)},em=A;es.show&&(a=es.showFormatter?es.showFormatter({value:$,count:ed,maxLength:ec}):"".concat(ed).concat(eu?" / ".concat(ec):""),em=o.createElement(o.Fragment,null,em,o.createElement("span",{className:l()("".concat(R,"-data-count"),null==j?void 0:j.count),style:null==F?void 0:F.count},a)));var eg=!U.autoSize&&!N&&!w;return o.createElement(m.Q,{value:$,allowClear:w,handleReset:function(e){G(""),er(),(0,h.rJ)(en(),e,S)},suffix:em,prefixCls:R,classNames:(0,u.Z)((0,u.Z)({},j),{},{affixWrapper:l()(null==j?void 0:j.affixWrapper,(r={},(0,c.Z)(r,"".concat(R,"-show-count"),N),(0,c.Z)(r,"".concat(R,"-textarea-allow-clear"),w),r))}),disabled:L,focused:q,className:l()(P,ep&&"".concat(R,"-out-of-range")),style:(0,u.Z)((0,u.Z)({},M),J&&!eg?{height:"auto"}:{}),dataAttrs:{affixWrapper:{"data-count":"string"==typeof a?a:void 0}},hidden:D},o.createElement(C,(0,s.Z)({},U,{maxLength:x,onKeyDown:function(e){var t=U.onPressEnter,n=U.onKeyDown;"Enter"===e.key&&t&&t(e),null==n||n(e)},onChange:function(e){ef(e,e.target.value)},onFocus:function(e){Y(!0),null==y||y(e)},onBlur:function(e){Y(!1),null==E||E(e)},onCompositionStart:function(e){K.current=!0,null==O||O(e)},onCompositionEnd:function(e){K.current=!1,ef(e,e.currentTarget.value),null==k||k(e)},className:l()(null==j?void 
0:j.textarea),style:(0,u.Z)((0,u.Z)({},null==F?void 0:F.textarea),{},{resize:null==M?void 0:M.resize}),disabled:L,prefixCls:R,onResize:function(e){var t;null==B||B(e),null!==(t=en())&&void 0!==t&&t.style.height&&ee(!0)},ref:et})))}),A=n(47794),I=n(57499),R=n(17094),N=n(10693),_=n(47137),P=n(88921),M=n(94759),L=n(92935),D=n(8443),j=function(e,t){var n={};for(var r in e)Object.prototype.hasOwnProperty.call(e,r)&&0>t.indexOf(r)&&(n[r]=e[r]);if(null!=e&&"function"==typeof Object.getOwnPropertySymbols)for(var o=0,r=Object.getOwnPropertySymbols(e);ot.indexOf(r[o])&&Object.prototype.propertyIsEnumerable.call(e,r[o])&&(n[r[o]]=e[r[o]]);return n},F=(0,o.forwardRef)((e,t)=>{var n;let r;let{prefixCls:i,bordered:s=!0,size:c,disabled:u,status:d,allowClear:p,classNames:f,rootClassName:m,className:g,variant:h}=e,b=j(e,["prefixCls","bordered","size","disabled","status","allowClear","classNames","rootClassName","className","variant"]),{getPrefixCls:v,direction:y}=o.useContext(I.E_),E=(0,N.Z)(c),S=o.useContext(R.Z),{status:w,hasFeedback:x,feedbackIcon:O}=o.useContext(_.aM),C=(0,A.F)(w,d),T=o.useRef(null);o.useImperativeHandle(t,()=>{var e;return{resizableTextArea:null===(e=T.current)||void 0===e?void 0:e.resizableTextArea,focus:e=>{var t,n;(0,P.n)(null===(n=null===(t=T.current)||void 0===t?void 0:t.resizableTextArea)||void 0===n?void 0:n.textArea,e)},blur:()=>{var e;return null===(e=T.current)||void 0===e?void 0:e.blur()}}});let F=v("input",i);"object"==typeof p&&(null==p?void 0:p.clearIcon)?r=p:p&&(r={clearIcon:o.createElement(a.Z,null)});let B=(0,L.Z)(F),[U,Z,z]=(0,M.ZP)(F,B),[H,G]=(0,D.Z)(h,s);return U(o.createElement(k,Object.assign({},b,{disabled:null!=u?u:S,allowClear:r,className:l()(z,B,g,m),classNames:Object.assign(Object.assign({},f),{textarea:l()({["".concat(F,"-sm")]:"small"===E,["".concat(F,"-lg")]:"large"===E},Z,null==f?void 
0:f.textarea),variant:l()({["".concat(F,"-").concat(H)]:G},(0,A.Z)(F,C)),affixWrapper:l()("".concat(F,"-textarea-affix-wrapper"),{["".concat(F,"-affix-wrapper-rtl")]:"rtl"===y,["".concat(F,"-affix-wrapper-sm")]:"small"===E,["".concat(F,"-affix-wrapper-lg")]:"large"===E,["".concat(F,"-textarea-show-count")]:e.showCount||(null===(n=e.count)||void 0===n?void 0:n.show)},Z)}),prefixCls:F,suffix:x&&o.createElement("span",{className:"".concat(F,"-textarea-suffix")},O),ref:T})))})},52274:function(e,t,n){n.d(t,{Z:function(){return o}});var r=n(64090);function o(e,t){let n=(0,r.useRef)([]),o=()=>{n.current.push(setTimeout(()=>{var t,n,r,o;(null===(t=e.current)||void 0===t?void 0:t.input)&&(null===(n=e.current)||void 0===n?void 0:n.input.getAttribute("type"))==="password"&&(null===(r=e.current)||void 0===r?void 0:r.input.hasAttribute("value"))&&(null===(o=e.current)||void 0===o||o.input.removeAttribute("value"))}))};return(0,r.useEffect)(()=>(t&&o(),()=>n.current.forEach(e=>{e&&clearTimeout(e)})),[]),o}},42539:function(e,t,n){n.d(t,{Z:function(){return _}});var r=n(64090),o=n(16480),a=n.n(o),i=n(57499),l=n(47137),s=n(94759),c=n(88921),u=n(14749),d={icon:{tag:"svg",attrs:{viewBox:"64 64 896 896",focusable:"false"},children:[{tag:"path",attrs:{d:"M942.2 486.2Q889.47 375.11 816.7 305l-50.88 50.88C807.31 395.53 843.45 447.4 874.7 512 791.5 684.2 673.4 766 512 766q-72.67 0-133.87-22.38L323 798.75Q408 838 512 838q288.3 0 430.2-300.3a60.29 60.29 0 000-51.5zm-63.57-320.64L836 122.88a8 8 0 00-11.32 0L715.31 232.2Q624.86 186 512 186q-288.3 0-430.2 300.3a60.3 60.3 0 000 51.5q56.69 119.4 136.5 191.41L112.48 835a8 8 0 000 11.31L155.17 889a8 8 0 0011.31 0l712.15-712.12a8 8 0 000-11.32zM149.3 512C232.6 339.8 350.7 258 512 258c54.54 0 104.13 9.36 149.12 28.39l-70.3 70.3a176 176 0 00-238.13 238.13l-83.42 83.42C223.1 637.49 183.3 582.28 149.3 512zm246.7 0a112.11 112.11 0 01146.2-106.69L401.31 546.2A112 112 0 01396 512z"}},{tag:"path",attrs:{d:"M508 624c-3.46 0-6.87-.16-10.25-.47l-52.82 
52.82a176.09 176.09 0 00227.42-227.42l-52.82 52.82c.31 3.38.47 6.79.47 10.25a111.94 111.94 0 01-112 112z"}}]},name:"eye-invisible",theme:"outlined"},p=n(60688),f=r.forwardRef(function(e,t){return r.createElement(p.Z,(0,u.Z)({},e,{ref:t,icon:d}))}),m={icon:{tag:"svg",attrs:{viewBox:"64 64 896 896",focusable:"false"},children:[{tag:"path",attrs:{d:"M942.2 486.2C847.4 286.5 704.1 186 512 186c-192.2 0-335.4 100.5-430.2 300.3a60.3 60.3 0 000 51.5C176.6 737.5 319.9 838 512 838c192.2 0 335.4-100.5 430.2-300.3 7.7-16.2 7.7-35 0-51.5zM512 766c-161.3 0-279.4-81.8-362.7-254C232.6 339.8 350.7 258 512 258c161.3 0 279.4 81.8 362.7 254C791.5 684.2 673.4 766 512 766zm-4-430c-97.2 0-176 78.8-176 176s78.8 176 176 176 176-78.8 176-176-78.8-176-176-176zm0 288c-61.9 0-112-50.1-112-112s50.1-112 112-112 112 50.1 112 112-50.1 112-112 112z"}}]},name:"eye",theme:"outlined"},g=r.forwardRef(function(e,t){return r.createElement(p.Z,(0,u.Z)({},e,{ref:t,icon:m}))}),h=n(35704),b=n(74084),v=n(52274),y=function(e,t){var n={};for(var r in e)Object.prototype.hasOwnProperty.call(e,r)&&0>t.indexOf(r)&&(n[r]=e[r]);if(null!=e&&"function"==typeof Object.getOwnPropertySymbols)for(var o=0,r=Object.getOwnPropertySymbols(e);ot.indexOf(r[o])&&Object.prototype.propertyIsEnumerable.call(e,r[o])&&(n[r[o]]=e[r[o]]);return n};let E=e=>e?r.createElement(g,null):r.createElement(f,null),S={click:"onClick",hover:"onMouseOver"},w=r.forwardRef((e,t)=>{let{visibilityToggle:n=!0}=e,o="object"==typeof n&&void 0!==n.visible,[l,s]=(0,r.useState)(()=>!!o&&n.visible),u=(0,r.useRef)(null);r.useEffect(()=>{o&&s(n.visible)},[o,n]);let d=(0,v.Z)(u),p=()=>{let{disabled:t}=e;t||(l&&d(),s(e=>{var t;let r=!e;return"object"==typeof n&&(null===(t=n.onVisibleChange)||void 
0===t||t.call(n,r)),r}))},{className:f,prefixCls:m,inputPrefixCls:g,size:w}=e,x=y(e,["className","prefixCls","inputPrefixCls","size"]),{getPrefixCls:O}=r.useContext(i.E_),C=O("input",g),T=O("input-password",m),k=n&&(t=>{let{action:n="click",iconRender:o=E}=e,a=S[n]||"",i=o(l);return r.cloneElement(r.isValidElement(i)?i:r.createElement("span",null,i),{[a]:p,className:"".concat(t,"-icon"),key:"passwordIcon",onMouseDown:e=>{e.preventDefault()},onMouseUp:e=>{e.preventDefault()}})})(T),A=a()(T,f,{["".concat(T,"-").concat(w)]:!!w}),I=Object.assign(Object.assign({},(0,h.Z)(x,["suffix","iconRender","visibilityToggle"])),{type:l?"text":"password",className:A,prefixCls:C,suffix:k});return w&&(I.size=w),r.createElement(c.Z,Object.assign({ref:(0,b.sQ)(t,u)},I))});var x=n(96871),O=n(65823),C=n(1861),T=n(10693),k=n(92801),A=function(e,t){var n={};for(var r in e)Object.prototype.hasOwnProperty.call(e,r)&&0>t.indexOf(r)&&(n[r]=e[r]);if(null!=e&&"function"==typeof Object.getOwnPropertySymbols)for(var o=0,r=Object.getOwnPropertySymbols(e);ot.indexOf(r[o])&&Object.prototype.propertyIsEnumerable.call(e,r[o])&&(n[r[o]]=e[r[o]]);return n};let I=r.forwardRef((e,t)=>{let n;let{prefixCls:o,inputPrefixCls:l,className:s,size:u,suffix:d,enterButton:p=!1,addonAfter:f,loading:m,disabled:g,onSearch:h,onChange:v,onCompositionStart:y,onCompositionEnd:E}=e,S=A(e,["prefixCls","inputPrefixCls","className","size","suffix","enterButton","addonAfter","loading","disabled","onSearch","onChange","onCompositionStart","onCompositionEnd"]),{getPrefixCls:w,direction:I}=r.useContext(i.E_),R=r.useRef(!1),N=w("input-search",o),_=w("input",l),{compactSize:P}=(0,k.ri)(N,I),M=(0,T.Z)(e=>{var t;return null!==(t=null!=u?u:P)&&void 0!==t?t:e}),L=r.useRef(null),D=e=>{var t;document.activeElement===(null===(t=L.current)||void 0===t?void 0:t.input)&&e.preventDefault()},j=e=>{var t,n;h&&h(null===(n=null===(t=L.current)||void 0===t?void 0:t.input)||void 0===n?void 0:n.value,e,{source:"input"})},F="boolean"==typeof 
p?r.createElement(x.Z,null):null,B="".concat(N,"-button"),U=p||{},Z=U.type&&!0===U.type.__ANT_BUTTON;n=Z||"button"===U.type?(0,O.Tm)(U,Object.assign({onMouseDown:D,onClick:e=>{var t,n;null===(n=null===(t=null==U?void 0:U.props)||void 0===t?void 0:t.onClick)||void 0===n||n.call(t,e),j(e)},key:"enterButton"},Z?{className:B,size:M}:{})):r.createElement(C.ZP,{className:B,type:p?"primary":void 0,size:M,disabled:g,key:"enterButton",onMouseDown:D,onClick:j,loading:m,icon:F},p),f&&(n=[n,(0,O.Tm)(f,{key:"addonAfter"})]);let z=a()(N,{["".concat(N,"-rtl")]:"rtl"===I,["".concat(N,"-").concat(M)]:!!M,["".concat(N,"-with-button")]:!!p},s);return r.createElement(c.Z,Object.assign({ref:(0,b.sQ)(L,t),onPressEnter:e=>{R.current||m||j(e)}},S,{size:M,onCompositionStart:e=>{R.current=!0,null==y||y(e)},onCompositionEnd:e=>{R.current=!1,null==E||E(e)},prefixCls:_,addonAfter:n,suffix:d,onChange:e=>{e&&e.target&&"click"===e.type&&h&&h(e.target.value,e,{source:"clear"}),v&&v(e)},className:z,disabled:g}))});var R=n(78578);let N=c.Z;N.Group=e=>{let{getPrefixCls:t,direction:n}=(0,r.useContext)(i.E_),{prefixCls:o,className:c}=e,u=t("input-group",o),d=t("input"),[p,f]=(0,s.ZP)(d),m=a()(u,{["".concat(u,"-lg")]:"large"===e.size,["".concat(u,"-sm")]:"small"===e.size,["".concat(u,"-compact")]:e.compact,["".concat(u,"-rtl")]:"rtl"===n},f,c),g=(0,r.useContext)(l.aM),h=(0,r.useMemo)(()=>Object.assign(Object.assign({},g),{isFormItemInput:!1}),[g]);return p(r.createElement("span",{className:m,style:e.style,onMouseEnter:e.onMouseEnter,onMouseLeave:e.onMouseLeave,onFocus:e.onFocus,onBlur:e.onBlur},r.createElement(l.aM.Provider,{value:h},e.children)))},N.Search=I,N.TextArea=R.Z,N.Password=w;var _=N},94759:function(e,t,n){n.d(t,{ik:function(){return f},nz:function(){return u},s7:function(){return m}});var r=n(8985),o=n(11303),a=n(12288),i=n(76585),l=n(80316),s=n(85980),c=n(61892);let 
u=e=>({"&::-moz-placeholder":{opacity:1},"&::placeholder":{color:e,userSelect:"none"},"&:placeholder-shown":{textOverflow:"ellipsis"}}),d=e=>{let{paddingBlockLG:t,lineHeightLG:n,borderRadiusLG:o,paddingInlineLG:a}=e;return{padding:"".concat((0,r.bf)(t)," ").concat((0,r.bf)(a)),fontSize:e.inputFontSizeLG,lineHeight:n,borderRadius:o}},p=e=>({padding:"".concat((0,r.bf)(e.paddingBlockSM)," ").concat((0,r.bf)(e.paddingInlineSM)),fontSize:e.inputFontSizeSM,borderRadius:e.borderRadiusSM}),f=e=>Object.assign(Object.assign({position:"relative",display:"inline-block",width:"100%",minWidth:0,padding:"".concat((0,r.bf)(e.paddingBlock)," ").concat((0,r.bf)(e.paddingInline)),color:e.colorText,fontSize:e.inputFontSize,lineHeight:e.lineHeight,borderRadius:e.borderRadius,transition:"all ".concat(e.motionDurationMid)},u(e.colorTextPlaceholder)),{"textarea&":{maxWidth:"100%",height:"auto",minHeight:e.controlHeight,lineHeight:e.lineHeight,verticalAlign:"bottom",transition:"all ".concat(e.motionDurationSlow,", height 0s"),resize:"vertical"},"&-lg":Object.assign({},d(e)),"&-sm":Object.assign({},p(e)),"&-rtl":{direction:"rtl"},"&-textarea-rtl":{direction:"rtl"}}),m=e=>{let{componentCls:t,antCls:n}=e;return{position:"relative",display:"table",width:"100%",borderCollapse:"separate",borderSpacing:0,"&[class*='col-']":{paddingInlineEnd:e.paddingXS,"&:last-child":{paddingInlineEnd:0}},["&-lg ".concat(t,", &-lg > ").concat(t,"-group-addon")]:Object.assign({},d(e)),["&-sm ".concat(t,", &-sm > ").concat(t,"-group-addon")]:Object.assign({},p(e)),["&-lg ".concat(n,"-select-single ").concat(n,"-select-selector")]:{height:e.controlHeightLG},["&-sm ".concat(n,"-select-single ").concat(n,"-select-selector")]:{height:e.controlHeightSM},["> ".concat(t)]:{display:"table-cell","&:not(:first-child):not(:last-child)":{borderRadius:0}},["".concat(t,"-group")]:{"&-addon, 
&-wrap":{display:"table-cell",width:1,whiteSpace:"nowrap",verticalAlign:"middle","&:not(:first-child):not(:last-child)":{borderRadius:0}},"&-wrap > *":{display:"block !important"},"&-addon":{position:"relative",padding:"0 ".concat((0,r.bf)(e.paddingInline)),color:e.colorText,fontWeight:"normal",fontSize:e.inputFontSize,textAlign:"center",borderRadius:e.borderRadius,transition:"all ".concat(e.motionDurationSlow),lineHeight:1,["".concat(n,"-select")]:{margin:"".concat((0,r.bf)(e.calc(e.paddingBlock).add(1).mul(-1).equal())," ").concat((0,r.bf)(e.calc(e.paddingInline).mul(-1).equal())),["&".concat(n,"-select-single:not(").concat(n,"-select-customize-input):not(").concat(n,"-pagination-size-changer)")]:{["".concat(n,"-select-selector")]:{backgroundColor:"inherit",border:"".concat((0,r.bf)(e.lineWidth)," ").concat(e.lineType," transparent"),boxShadow:"none"}},"&-open, &-focused":{["".concat(n,"-select-selector")]:{color:e.colorPrimary}}},["".concat(n,"-cascader-picker")]:{margin:"-9px ".concat((0,r.bf)(e.calc(e.paddingInline).mul(-1).equal())),backgroundColor:"transparent",["".concat(n,"-cascader-input")]:{textAlign:"start",border:0,boxShadow:"none"}}}},["".concat(t)]:{width:"100%",marginBottom:0,textAlign:"inherit","&:focus":{zIndex:1,borderInlineEndWidth:1},"&:hover":{zIndex:1,borderInlineEndWidth:1,["".concat(t,"-search-with-button &")]:{zIndex:0}}},["> ".concat(t,":first-child, ").concat(t,"-group-addon:first-child")]:{borderStartEndRadius:0,borderEndEndRadius:0,["".concat(n,"-select ").concat(n,"-select-selector")]:{borderStartEndRadius:0,borderEndEndRadius:0}},["> ".concat(t,"-affix-wrapper")]:{["&:not(:first-child) ".concat(t)]:{borderStartStartRadius:0,borderEndStartRadius:0},["&:not(:last-child) ".concat(t)]:{borderStartEndRadius:0,borderEndEndRadius:0}},["> ".concat(t,":last-child, ").concat(t,"-group-addon:last-child")]:{borderStartStartRadius:0,borderEndStartRadius:0,["".concat(n,"-select 
").concat(n,"-select-selector")]:{borderStartStartRadius:0,borderEndStartRadius:0}},["".concat(t,"-affix-wrapper")]:{"&:not(:last-child)":{borderStartEndRadius:0,borderEndEndRadius:0,["".concat(t,"-search &")]:{borderStartStartRadius:e.borderRadius,borderEndStartRadius:e.borderRadius}},["&:not(:first-child), ".concat(t,"-search &:not(:first-child)")]:{borderStartStartRadius:0,borderEndStartRadius:0}},["&".concat(t,"-group-compact")]:Object.assign(Object.assign({display:"block"},(0,o.dF)()),{["".concat(t,"-group-addon, ").concat(t,"-group-wrap, > ").concat(t)]:{"&:not(:first-child):not(:last-child)":{borderInlineEndWidth:e.lineWidth,"&:hover":{zIndex:1},"&:focus":{zIndex:1}}},"& > *":{display:"inline-block",float:"none",verticalAlign:"top",borderRadius:0},["\n & > ".concat(t,"-affix-wrapper,\n & > ").concat(t,"-number-affix-wrapper,\n & > ").concat(n,"-picker-range\n ")]:{display:"inline-flex"},"& > *:not(:last-child)":{marginInlineEnd:e.calc(e.lineWidth).mul(-1).equal(),borderInlineEndWidth:e.lineWidth},["".concat(t)]:{float:"none"},["& > ".concat(n,"-select > ").concat(n,"-select-selector,\n & > ").concat(n,"-select-auto-complete ").concat(t,",\n & > ").concat(n,"-cascader-picker ").concat(t,",\n & > ").concat(t,"-group-wrapper ").concat(t)]:{borderInlineEndWidth:e.lineWidth,borderRadius:0,"&:hover":{zIndex:1},"&:focus":{zIndex:1}},["& > ".concat(n,"-select-focused")]:{zIndex:1},["& > ".concat(n,"-select > ").concat(n,"-select-arrow")]:{zIndex:1},["& > *:first-child,\n & > ".concat(n,"-select:first-child > ").concat(n,"-select-selector,\n & > ").concat(n,"-select-auto-complete:first-child ").concat(t,",\n & > ").concat(n,"-cascader-picker:first-child ").concat(t)]:{borderStartStartRadius:e.borderRadius,borderEndStartRadius:e.borderRadius},["& > *:last-child,\n & > ".concat(n,"-select:last-child > ").concat(n,"-select-selector,\n & > ").concat(n,"-cascader-picker:last-child ").concat(t,",\n & > ").concat(n,"-cascader-picker-focused:last-child 
").concat(t)]:{borderInlineEndWidth:e.lineWidth,borderStartEndRadius:e.borderRadius,borderEndEndRadius:e.borderRadius},["& > ".concat(n,"-select-auto-complete ").concat(t)]:{verticalAlign:"top"},["".concat(t,"-group-wrapper + ").concat(t,"-group-wrapper")]:{marginInlineStart:e.calc(e.lineWidth).mul(-1).equal(),["".concat(t,"-affix-wrapper")]:{borderRadius:0}},["".concat(t,"-group-wrapper:not(:last-child)")]:{["&".concat(t,"-search > ").concat(t,"-group")]:{["& > ".concat(t,"-group-addon > ").concat(t,"-search-button")]:{borderRadius:0},["& > ".concat(t)]:{borderStartStartRadius:e.borderRadius,borderStartEndRadius:0,borderEndEndRadius:0,borderEndStartRadius:e.borderRadius}}}})}},g=e=>{let{componentCls:t,controlHeightSM:n,lineWidth:r,calc:a}=e,i=a(n).sub(a(r).mul(2)).sub(16).div(2).equal();return{[t]:Object.assign(Object.assign(Object.assign(Object.assign(Object.assign(Object.assign({},(0,o.Wf)(e)),f(e)),(0,c.qG)(e)),(0,c.H8)(e)),(0,c.Mu)(e)),{'&[type="color"]':{height:e.controlHeight,["&".concat(t,"-lg")]:{height:e.controlHeightLG},["&".concat(t,"-sm")]:{height:n,paddingTop:i,paddingBottom:i}},'&[type="search"]::-webkit-search-cancel-button, &[type="search"]::-webkit-search-decoration':{"-webkit-appearance":"none"}})}},h=e=>{let{componentCls:t}=e;return{["".concat(t,"-clear-icon")]:{margin:0,color:e.colorTextQuaternary,fontSize:e.fontSizeIcon,verticalAlign:-1,cursor:"pointer",transition:"color ".concat(e.motionDurationSlow),"&:hover":{color:e.colorTextTertiary},"&:active":{color:e.colorText},"&-hidden":{visibility:"hidden"},"&-has-suffix":{margin:"0 ".concat((0,r.bf)(e.inputAffixPadding))}}}},b=e=>{let{componentCls:t,inputAffixPadding:n,colorTextDescription:r,motionDurationSlow:o,colorIcon:a,colorIconHover:i,iconCls:l}=e;return{["".concat(t,"-affix-wrapper")]:Object.assign(Object.assign(Object.assign(Object.assign({},f(e)),{display:"inline-flex",["&:not(".concat(t,"-disabled):hover")]:{zIndex:1,["".concat(t,"-search-with-button &")]:{zIndex:0}},"&-focused, 
&:focus":{zIndex:1},["> input".concat(t)]:{padding:0,fontSize:"inherit",border:"none",borderRadius:0,outline:"none",background:"transparent",color:"inherit","&::-ms-reveal":{display:"none"},"&:focus":{boxShadow:"none !important"}},"&::before":{display:"inline-block",width:0,visibility:"hidden",content:'"\\a0"'},["".concat(t)]:{"&-prefix, &-suffix":{display:"flex",flex:"none",alignItems:"center","> *:not(:last-child)":{marginInlineEnd:e.paddingXS}},"&-show-count-suffix":{color:r},"&-show-count-has-suffix":{marginInlineEnd:e.paddingXXS},"&-prefix":{marginInlineEnd:n},"&-suffix":{marginInlineStart:n}}}),h(e)),{["".concat(l).concat(t,"-password-icon")]:{color:a,cursor:"pointer",transition:"all ".concat(o),"&:hover":{color:i}}})}},v=e=>{let{componentCls:t,borderRadiusLG:n,borderRadiusSM:r}=e;return{["".concat(t,"-group")]:Object.assign(Object.assign(Object.assign({},(0,o.Wf)(e)),m(e)),{"&-rtl":{direction:"rtl"},"&-wrapper":Object.assign(Object.assign(Object.assign({display:"inline-block",width:"100%",textAlign:"start",verticalAlign:"top","&-rtl":{direction:"rtl"},"&-lg":{["".concat(t,"-group-addon")]:{borderRadius:n,fontSize:e.inputFontSizeLG}},"&-sm":{["".concat(t,"-group-addon")]:{borderRadius:r}}},(0,c.ir)(e)),(0,c.S5)(e)),{["&:not(".concat(t,"-compact-first-item):not(").concat(t,"-compact-last-item)").concat(t,"-compact-item")]:{["".concat(t,", ").concat(t,"-group-addon")]:{borderRadius:0}},["&:not(".concat(t,"-compact-last-item)").concat(t,"-compact-first-item")]:{["".concat(t,", ").concat(t,"-group-addon")]:{borderStartEndRadius:0,borderEndEndRadius:0}},["&:not(".concat(t,"-compact-first-item)").concat(t,"-compact-last-item")]:{["".concat(t,", ").concat(t,"-group-addon")]:{borderStartStartRadius:0,borderEndStartRadius:0}}})})}},y=e=>{let{componentCls:t,antCls:n}=e,r="".concat(t,"-search");return{[r]:{["".concat(t)]:{"&:hover, &:focus":{borderColor:e.colorPrimaryHover,["+ ".concat(t,"-group-addon 
").concat(r,"-button:not(").concat(n,"-btn-primary)")]:{borderInlineStartColor:e.colorPrimaryHover}}},["".concat(t,"-affix-wrapper")]:{borderRadius:0},["".concat(t,"-lg")]:{lineHeight:e.calc(e.lineHeightLG).sub(2e-4).equal({unit:!1})},["> ".concat(t,"-group")]:{["> ".concat(t,"-group-addon:last-child")]:{insetInlineStart:-1,padding:0,border:0,["".concat(r,"-button")]:{paddingTop:0,paddingBottom:0,borderStartStartRadius:0,borderStartEndRadius:e.borderRadius,borderEndEndRadius:e.borderRadius,borderEndStartRadius:0,boxShadow:"none"},["".concat(r,"-button:not(").concat(n,"-btn-primary)")]:{color:e.colorTextDescription,"&:hover":{color:e.colorPrimaryHover},"&:active":{color:e.colorPrimaryActive},["&".concat(n,"-btn-loading::before")]:{insetInlineStart:0,insetInlineEnd:0,insetBlockStart:0,insetBlockEnd:0}}}},["".concat(r,"-button")]:{height:e.controlHeight,"&:hover, &:focus":{zIndex:1}},["&-large ".concat(r,"-button")]:{height:e.controlHeightLG},["&-small ".concat(r,"-button")]:{height:e.controlHeightSM},"&-rtl":{direction:"rtl"},["&".concat(t,"-compact-item")]:{["&:not(".concat(t,"-compact-last-item)")]:{["".concat(t,"-group-addon")]:{["".concat(t,"-search-button")]:{marginInlineEnd:e.calc(e.lineWidth).mul(-1).equal(),borderRadius:0}}},["&:not(".concat(t,"-compact-first-item)")]:{["".concat(t,",").concat(t,"-affix-wrapper")]:{borderRadius:0}},["> ".concat(t,"-group-addon ").concat(t,"-search-button,\n > ").concat(t,",\n ").concat(t,"-affix-wrapper")]:{"&:hover,&:focus,&:active":{zIndex:2}},["> ".concat(t,"-affix-wrapper-focused")]:{zIndex:2}}}}},E=e=>{let{componentCls:t,paddingLG:n}=e,r="".concat(t,"-textarea");return{[r]:{position:"relative","&-show-count":{["> ".concat(t)]:{height:"100%"},["".concat(t,"-data-count")]:{position:"absolute",bottom:e.calc(e.fontSize).mul(e.lineHeight).mul(-1).equal(),insetInlineEnd:0,color:e.colorTextDescription,whiteSpace:"nowrap",pointerEvents:"none"}},"&-allow-clear":{["> 
".concat(t)]:{paddingInlineEnd:n}},["&-affix-wrapper".concat(r,"-has-feedback")]:{["".concat(t)]:{paddingInlineEnd:n}},["&-affix-wrapper".concat(t,"-affix-wrapper")]:{padding:0,["> textarea".concat(t)]:{fontSize:"inherit",border:"none",outline:"none",background:"transparent","&:focus":{boxShadow:"none !important"}},["".concat(t,"-suffix")]:{margin:0,"> *:not(:last-child)":{marginInline:0},["".concat(t,"-clear-icon")]:{position:"absolute",insetInlineEnd:e.paddingXS,insetBlockStart:e.paddingXS},["".concat(r,"-suffix")]:{position:"absolute",top:0,insetInlineEnd:e.paddingInline,bottom:0,zIndex:1,display:"inline-flex",alignItems:"center",margin:"auto",pointerEvents:"none"}}}}}},S=e=>{let{componentCls:t}=e;return{["".concat(t,"-out-of-range")]:{["&, & input, & textarea, ".concat(t,"-show-count-suffix, ").concat(t,"-data-count")]:{color:e.colorError}}}};t.ZP=(0,i.I$)("Input",e=>{let t=(0,l.TS)(e,(0,s.e)(e));return[g(t),E(t),b(t),v(t),y(t),S(t),(0,a.c)(t)]},s.T)},85980:function(e,t,n){n.d(t,{T:function(){return a},e:function(){return o}});var r=n(80316);function o(e){return(0,r.TS)(e,{inputAffixPadding:e.paddingXXS})}let a=e=>{let{controlHeight:t,fontSize:n,lineHeight:r,lineWidth:o,controlHeightSM:a,controlHeightLG:i,fontSizeLG:l,lineHeightLG:s,paddingSM:c,controlPaddingHorizontalSM:u,controlPaddingHorizontal:d,colorFillAlter:p,colorPrimaryHover:f,colorPrimary:m,controlOutlineWidth:g,controlOutline:h,colorErrorOutline:b,colorWarningOutline:v,colorBgContainer:y}=e;return{paddingBlock:Math.max(Math.round((t-n*r)/2*10)/10-o,0),paddingBlockSM:Math.max(Math.round((a-n*r)/2*10)/10-o,0),paddingBlockLG:Math.ceil((i-l*s)/2*10)/10-o,paddingInline:c-o,paddingInlineSM:u-o,paddingInlineLG:d-o,addonBg:p,activeBorderColor:m,hoverBorderColor:f,activeShadow:"0 0 0 ".concat(g,"px ").concat(h),errorActiveShadow:"0 0 0 ".concat(g,"px ").concat(b),warningActiveShadow:"0 0 0 ".concat(g,"px 
").concat(v),hoverBg:y,activeBg:y,inputFontSize:n,inputFontSizeLG:l,inputFontSizeSM:n}}},61892:function(e,t,n){n.d(t,{H8:function(){return g},Mu:function(){return p},S5:function(){return b},ir:function(){return d},qG:function(){return c}});var r=n(8985),o=n(80316);let a=e=>({borderColor:e.hoverBorderColor,backgroundColor:e.hoverBg}),i=e=>({color:e.colorTextDisabled,backgroundColor:e.colorBgContainerDisabled,borderColor:e.colorBorder,boxShadow:"none",cursor:"not-allowed",opacity:1,"&:hover:not([disabled])":Object.assign({},a((0,o.TS)(e,{hoverBorderColor:e.colorBorder,hoverBg:e.colorBgContainerDisabled})))}),l=(e,t)=>({background:e.colorBgContainer,borderWidth:e.lineWidth,borderStyle:e.lineType,borderColor:t.borderColor,"&:hover":{borderColor:t.hoverBorderColor,backgroundColor:e.hoverBg},"&:focus, &:focus-within":{borderColor:t.activeBorderColor,boxShadow:t.activeShadow,outline:0,backgroundColor:e.activeBg}}),s=(e,t)=>({["&".concat(e.componentCls,"-status-").concat(t.status,":not(").concat(e.componentCls,"-disabled)")]:Object.assign(Object.assign({},l(e,t)),{["".concat(e.componentCls,"-prefix, ").concat(e.componentCls,"-suffix")]:{color:t.affixColor}})}),c=(e,t)=>({"&-outlined":Object.assign(Object.assign(Object.assign(Object.assign(Object.assign({},l(e,{borderColor:e.colorBorder,hoverBorderColor:e.colorPrimaryHover,activeBorderColor:e.colorPrimary,activeShadow:e.activeShadow})),{["&".concat(e.componentCls,"-disabled, 
&[disabled]")]:Object.assign({},i(e))}),s(e,{status:"error",borderColor:e.colorError,hoverBorderColor:e.colorErrorBorderHover,activeBorderColor:e.colorError,activeShadow:e.errorActiveShadow,affixColor:e.colorError})),s(e,{status:"warning",borderColor:e.colorWarning,hoverBorderColor:e.colorWarningBorderHover,activeBorderColor:e.colorWarning,activeShadow:e.warningActiveShadow,affixColor:e.colorWarning})),t)}),u=(e,t)=>({["&".concat(e.componentCls,"-group-wrapper-status-").concat(t.status)]:{["".concat(e.componentCls,"-group-addon")]:{borderColor:t.addonBorderColor,color:t.addonColor}}}),d=e=>({"&-outlined":Object.assign(Object.assign(Object.assign({["".concat(e.componentCls,"-group")]:{"&-addon":{background:e.addonBg,border:"".concat((0,r.bf)(e.lineWidth)," ").concat(e.lineType," ").concat(e.colorBorder)},"&-addon:first-child":{borderInlineEnd:0},"&-addon:last-child":{borderInlineStart:0}}},u(e,{status:"error",addonBorderColor:e.colorError,addonColor:e.colorErrorText})),u(e,{status:"warning",addonBorderColor:e.colorWarning,addonColor:e.colorWarningText})),{["&".concat(e.componentCls,"-group-wrapper-disabled")]:{["".concat(e.componentCls,"-group-addon")]:Object.assign({},i(e))}})}),p=(e,t)=>({"&-borderless":Object.assign({background:"transparent",border:"none","&:focus, &:focus-within":{outline:"none"},["&".concat(e.componentCls,"-disabled, &[disabled]")]:{color:e.colorTextDisabled}},t)}),f=(e,t)=>({background:t.bg,borderWidth:e.lineWidth,borderStyle:e.lineType,borderColor:"transparent","input&, & input, textarea&, & textarea":{color:null==t?void 0:t.inputColor},"&:hover":{background:t.hoverBg},"&:focus, &:focus-within":{outline:0,borderColor:t.activeBorderColor,backgroundColor:e.activeBg}}),m=(e,t)=>({["&".concat(e.componentCls,"-status-").concat(t.status,":not(").concat(e.componentCls,"-disabled)")]:Object.assign(Object.assign({},f(e,t)),{["".concat(e.componentCls,"-prefix, 
").concat(e.componentCls,"-suffix")]:{color:t.affixColor}})}),g=(e,t)=>({"&-filled":Object.assign(Object.assign(Object.assign(Object.assign(Object.assign({},f(e,{bg:e.colorFillTertiary,hoverBg:e.colorFillSecondary,activeBorderColor:e.colorPrimary})),{["&".concat(e.componentCls,"-disabled, &[disabled]")]:Object.assign({},i(e))}),m(e,{status:"error",bg:e.colorErrorBg,hoverBg:e.colorErrorBgHover,activeBorderColor:e.colorError,inputColor:e.colorErrorText,affixColor:e.colorError})),m(e,{status:"warning",bg:e.colorWarningBg,hoverBg:e.colorWarningBgHover,activeBorderColor:e.colorWarning,inputColor:e.colorWarningText,affixColor:e.colorWarning})),t)}),h=(e,t)=>({["&".concat(e.componentCls,"-group-wrapper-status-").concat(t.status)]:{["".concat(e.componentCls,"-group-addon")]:{background:t.addonBg,color:t.addonColor}}}),b=e=>({"&-filled":Object.assign(Object.assign(Object.assign({["".concat(e.componentCls,"-group")]:{"&-addon":{background:e.colorFillTertiary},["".concat(e.componentCls,"-filled:not(:focus):not(:focus-within)")]:{"&:not(:first-child)":{borderInlineStart:"".concat((0,r.bf)(e.lineWidth)," ").concat(e.lineType," ").concat(e.colorSplit)},"&:not(:last-child)":{borderInlineEnd:"".concat((0,r.bf)(e.lineWidth)," ").concat(e.lineType," ").concat(e.colorSplit)}}}},h(e,{status:"error",addonBg:e.colorErrorBg,addonColor:e.colorErrorText})),h(e,{status:"warning",addonBg:e.colorWarningBg,addonColor:e.colorWarningText})),{["&".concat(e.componentCls,"-group-wrapper-disabled")]:{["".concat(e.componentCls,"-group")]:{"&-addon":{background:e.colorFillTertiary,color:e.colorTextDisabled},"&-addon:first-child":{borderInlineStart:"".concat((0,r.bf)(e.lineWidth)," ").concat(e.lineType," ").concat(e.colorBorder),borderTop:"".concat((0,r.bf)(e.lineWidth)," ").concat(e.lineType," ").concat(e.colorBorder),borderBottom:"".concat((0,r.bf)(e.lineWidth)," ").concat(e.lineType," ").concat(e.colorBorder)},"&-addon:last-child":{borderInlineEnd:"".concat((0,r.bf)(e.lineWidth)," 
").concat(e.lineType," ").concat(e.colorBorder),borderTop:"".concat((0,r.bf)(e.lineWidth)," ").concat(e.lineType," ").concat(e.colorBorder),borderBottom:"".concat((0,r.bf)(e.lineWidth)," ").concat(e.lineType," ").concat(e.colorBorder)}}}})})},8407:function(e,t,n){let r;n.d(t,{D:function(){return E},Z:function(){return w}});var o=n(64090),a=n(14749),i={icon:{tag:"svg",attrs:{viewBox:"0 0 1024 1024",focusable:"false"},children:[{tag:"path",attrs:{d:"M912 192H328c-4.4 0-8 3.6-8 8v56c0 4.4 3.6 8 8 8h584c4.4 0 8-3.6 8-8v-56c0-4.4-3.6-8-8-8zm0 284H328c-4.4 0-8 3.6-8 8v56c0 4.4 3.6 8 8 8h584c4.4 0 8-3.6 8-8v-56c0-4.4-3.6-8-8-8zm0 284H328c-4.4 0-8 3.6-8 8v56c0 4.4 3.6 8 8 8h584c4.4 0 8-3.6 8-8v-56c0-4.4-3.6-8-8-8zM104 228a56 56 0 10112 0 56 56 0 10-112 0zm0 284a56 56 0 10112 0 56 56 0 10-112 0zm0 284a56 56 0 10112 0 56 56 0 10-112 0z"}}]},name:"bars",theme:"outlined"},l=n(60688),s=o.forwardRef(function(e,t){return o.createElement(l.Z,(0,a.Z)({},e,{ref:t,icon:i}))}),c={icon:{tag:"svg",attrs:{viewBox:"64 64 896 896",focusable:"false"},children:[{tag:"path",attrs:{d:"M724 218.3V141c0-6.7-7.7-10.4-12.9-6.3L260.3 486.8a31.86 31.86 0 000 50.3l450.8 352.1c5.3 4.1 12.9.4 12.9-6.3v-77.3c0-4.9-2.3-9.6-6.1-12.6l-360-281 360-281.1c3.8-3 6.1-7.7 6.1-12.6z"}}]},name:"left",theme:"outlined"},u=o.forwardRef(function(e,t){return o.createElement(l.Z,(0,a.Z)({},e,{ref:t,icon:c}))}),d=n(50459),p=n(16480),f=n.n(p),m=n(35704),g=e=>!isNaN(parseFloat(e))&&isFinite(e),h=n(57499),b=n(31747),v=function(e,t){var n={};for(var r in e)Object.prototype.hasOwnProperty.call(e,r)&&0>t.indexOf(r)&&(n[r]=e[r]);if(null!=e&&"function"==typeof Object.getOwnPropertySymbols)for(var o=0,r=Object.getOwnPropertySymbols(e);ot.indexOf(r[o])&&Object.prototype.propertyIsEnumerable.call(e,r[o])&&(n[r[o]]=e[r[o]]);return n};let y={xs:"479.98px",sm:"575.98px",md:"767.98px",lg:"991.98px",xl:"1199.98px",xxl:"1599.98px"},E=o.createContext({}),S=(r=0,function(){let e=arguments.length>0&&void 
0!==arguments[0]?arguments[0]:"";return r+=1,"".concat(e).concat(r)});var w=o.forwardRef((e,t)=>{let{prefixCls:n,className:r,trigger:a,children:i,defaultCollapsed:l=!1,theme:c="dark",style:p={},collapsible:w=!1,reverseArrow:x=!1,width:O=200,collapsedWidth:C=80,zeroWidthTriggerStyle:T,breakpoint:k,onCollapse:A,onBreakpoint:I}=e,R=v(e,["prefixCls","className","trigger","children","defaultCollapsed","theme","style","collapsible","reverseArrow","width","collapsedWidth","zeroWidthTriggerStyle","breakpoint","onCollapse","onBreakpoint"]),{siderHook:N}=(0,o.useContext)(b.V),[_,P]=(0,o.useState)("collapsed"in e?e.collapsed:l),[M,L]=(0,o.useState)(!1);(0,o.useEffect)(()=>{"collapsed"in e&&P(e.collapsed)},[e.collapsed]);let D=(t,n)=>{"collapsed"in e||P(t),null==A||A(t,n)},j=(0,o.useRef)();j.current=e=>{L(e.matches),null==I||I(e.matches),_!==e.matches&&D(e.matches,"responsive")},(0,o.useEffect)(()=>{let e;function t(e){return j.current(e)}{let{matchMedia:n}=window;if(n&&k&&k in y){e=n("screen and (max-width: ".concat(y[k],")"));try{e.addEventListener("change",t)}catch(n){e.addListener(t)}t(e)}}return()=>{try{null==e||e.removeEventListener("change",t)}catch(n){null==e||e.removeListener(t)}}},[k]),(0,o.useEffect)(()=>{let e=S("ant-sider-");return N.addSider(e),()=>N.removeSider(e)},[]);let F=()=>{D(!_,"clickTrigger")},{getPrefixCls:B}=(0,o.useContext)(h.E_),U=o.useMemo(()=>({siderCollapsed:_}),[_]);return o.createElement(E.Provider,{value:U},(()=>{let 
e=B("layout-sider",n),l=(0,m.Z)(R,["collapsed"]),h=_?C:O,b=g(h)?"".concat(h,"px"):String(h),v=0===parseFloat(String(C||0))?o.createElement("span",{onClick:F,className:f()("".concat(e,"-zero-width-trigger"),"".concat(e,"-zero-width-trigger-").concat(x?"right":"left")),style:T},a||o.createElement(s,null)):null,y={expanded:x?o.createElement(d.Z,null):o.createElement(u,null),collapsed:x?o.createElement(u,null):o.createElement(d.Z,null)}[_?"collapsed":"expanded"],E=null!==a?v||o.createElement("div",{className:"".concat(e,"-trigger"),onClick:F,style:{width:b}},a||y):null,S=Object.assign(Object.assign({},p),{flex:"0 0 ".concat(b),maxWidth:b,minWidth:b,width:b}),k=f()(e,"".concat(e,"-").concat(c),{["".concat(e,"-collapsed")]:!!_,["".concat(e,"-has-trigger")]:w&&null!==a&&!v,["".concat(e,"-below")]:!!M,["".concat(e,"-zero-width")]:0===parseFloat(b)},r);return o.createElement("aside",Object.assign({className:k},l,{style:S,ref:t}),o.createElement("div",{className:"".concat(e,"-children")},i),w||M&&v?E:null)})())})},31747:function(e,t,n){n.d(t,{V:function(){return r}});let r=n(64090).createContext({siderHook:{addSider:()=>null,removeSider:()=>null}})},33509:function(e,t,n){n.d(t,{default:function(){return C}});var r=n(63787),o=n(64090),a=n(16480),i=n.n(a),l=n(35704),s=n(57499),c=n(31747),u=n(33054),d=n(8407),p=n(8985),f=n(76585),m=e=>{let{componentCls:t,bodyBg:n,lightSiderBg:r,lightTriggerBg:o,lightTriggerColor:a}=e;return{["".concat(t,"-sider-light")]:{background:r,["".concat(t,"-sider-trigger")]:{color:a,background:o},["".concat(t,"-sider-zero-width-trigger")]:{color:a,background:o,border:"1px solid ".concat(n),borderInlineStart:0}}}};let 
g=e=>{let{antCls:t,componentCls:n,colorText:r,triggerColor:o,footerBg:a,triggerBg:i,headerHeight:l,headerPadding:s,headerColor:c,footerPadding:u,triggerHeight:d,zeroTriggerHeight:f,zeroTriggerWidth:g,motionDurationMid:h,motionDurationSlow:b,fontSize:v,borderRadius:y,bodyBg:E,headerBg:S,siderBg:w}=e;return{[n]:Object.assign(Object.assign({display:"flex",flex:"auto",flexDirection:"column",minHeight:0,background:E,"&, *":{boxSizing:"border-box"},["&".concat(n,"-has-sider")]:{flexDirection:"row",["> ".concat(n,", > ").concat(n,"-content")]:{width:0}},["".concat(n,"-header, &").concat(n,"-footer")]:{flex:"0 0 auto"},["".concat(n,"-sider")]:{position:"relative",minWidth:0,background:w,transition:"all ".concat(h,", background 0s"),"&-children":{height:"100%",marginTop:-.1,paddingTop:.1,["".concat(t,"-menu").concat(t,"-menu-inline-collapsed")]:{width:"auto"}},"&-has-trigger":{paddingBottom:d},"&-right":{order:1},"&-trigger":{position:"fixed",bottom:0,zIndex:1,height:d,color:o,lineHeight:(0,p.bf)(d),textAlign:"center",background:i,cursor:"pointer",transition:"all ".concat(h)},"&-zero-width":{"> *":{overflow:"hidden"},"&-trigger":{position:"absolute",top:l,insetInlineEnd:e.calc(g).mul(-1).equal(),zIndex:1,width:g,height:f,color:o,fontSize:e.fontSizeXL,display:"flex",alignItems:"center",justifyContent:"center",background:w,borderStartStartRadius:0,borderStartEndRadius:y,borderEndEndRadius:y,borderEndStartRadius:0,cursor:"pointer",transition:"background ".concat(b," ease"),"&::after":{position:"absolute",inset:0,background:"transparent",transition:"all ".concat(b),content:'""'},"&:hover::after":{background:"rgba(255, 255, 255, 
0.2)"},"&-right":{insetInlineStart:e.calc(g).mul(-1).equal(),borderStartStartRadius:y,borderStartEndRadius:0,borderEndEndRadius:0,borderEndStartRadius:y}}}}},m(e)),{"&-rtl":{direction:"rtl"}}),["".concat(n,"-header")]:{height:l,padding:s,color:c,lineHeight:(0,p.bf)(l),background:S,["".concat(t,"-menu")]:{lineHeight:"inherit"}},["".concat(n,"-footer")]:{padding:u,color:r,fontSize:v,background:a},["".concat(n,"-content")]:{flex:"auto",minHeight:0}}};var h=(0,f.I$)("Layout",e=>[g(e)],e=>{let{colorBgLayout:t,controlHeight:n,controlHeightLG:r,colorText:o,controlHeightSM:a,marginXXS:i,colorTextLightSolid:l,colorBgContainer:s}=e,c=1.25*r;return{colorBgHeader:"#001529",colorBgBody:t,colorBgTrigger:"#002140",bodyBg:t,headerBg:"#001529",headerHeight:2*n,headerPadding:"0 ".concat(c,"px"),headerColor:o,footerPadding:"".concat(a,"px ").concat(c,"px"),footerBg:t,siderBg:"#001529",triggerHeight:r+2*i,triggerBg:"#002140",triggerColor:l,zeroTriggerWidth:r,zeroTriggerHeight:r,lightSiderBg:s,lightTriggerBg:s,lightTriggerColor:o}},{deprecatedTokens:[["colorBgBody","bodyBg"],["colorBgHeader","headerBg"],["colorBgTrigger","triggerBg"]]}),b=function(e,t){var n={};for(var r in e)Object.prototype.hasOwnProperty.call(e,r)&&0>t.indexOf(r)&&(n[r]=e[r]);if(null!=e&&"function"==typeof Object.getOwnPropertySymbols)for(var o=0,r=Object.getOwnPropertySymbols(e);ot.indexOf(r[o])&&Object.prototype.propertyIsEnumerable.call(e,r[o])&&(n[r[o]]=e[r[o]]);return n};function v(e){let{suffixCls:t,tagName:n,displayName:r}=e;return e=>o.forwardRef((r,a)=>o.createElement(e,Object.assign({ref:a,suffixCls:t,tagName:n},r)))}let y=o.forwardRef((e,t)=>{let{prefixCls:n,suffixCls:r,className:a,tagName:l}=e,c=b(e,["prefixCls","suffixCls","className","tagName"]),{getPrefixCls:u}=o.useContext(s.E_),d=u("layout",n),[p,f,m]=h(d),g=r?"".concat(d,"-").concat(r):d;return 
p(o.createElement(l,Object.assign({className:i()(n||g,a,f,m),ref:t},c)))}),E=o.forwardRef((e,t)=>{let{direction:n}=o.useContext(s.E_),[a,p]=o.useState([]),{prefixCls:f,className:m,rootClassName:g,children:v,hasSider:y,tagName:E,style:S}=e,w=b(e,["prefixCls","className","rootClassName","children","hasSider","tagName","style"]),x=(0,l.Z)(w,["suffixCls"]),{getPrefixCls:O,layout:C}=o.useContext(s.E_),T=O("layout",f),k="boolean"==typeof y?y:!!a.length||(0,u.Z)(v).some(e=>e.type===d.Z),[A,I,R]=h(T),N=i()(T,{["".concat(T,"-has-sider")]:k,["".concat(T,"-rtl")]:"rtl"===n},null==C?void 0:C.className,m,g,I,R),_=o.useMemo(()=>({siderHook:{addSider:e=>{p(t=>[].concat((0,r.Z)(t),[e]))},removeSider:e=>{p(t=>t.filter(t=>t!==e))}}}),[]);return A(o.createElement(c.V.Provider,{value:_},o.createElement(E,Object.assign({ref:t,className:N,style:Object.assign(Object.assign({},null==C?void 0:C.style),S)},x),v)))}),S=v({tagName:"div",displayName:"Layout"})(E),w=v({suffixCls:"header",tagName:"header",displayName:"Header"})(y),x=v({suffixCls:"footer",tagName:"footer",displayName:"Footer"})(y),O=v({suffixCls:"content",tagName:"main",displayName:"Content"})(y);S.Header=w,S.Footer=x,S.Content=O,S.Sider=d.Z,S._InternalSiderContext=d.D;var C=S},33302:function(e,t,n){let r=(0,n(64090).createContext)(void 0);t.Z=r},79474:function(e,t,n){n.d(t,{Z:function(){return i}});var r={placeholder:"Select time",rangePlaceholder:["Start time","End time"]};let o={lang:Object.assign({placeholder:"Select date",yearPlaceholder:"Select year",quarterPlaceholder:"Select quarter",monthPlaceholder:"Select month",weekPlaceholder:"Select week",rangePlaceholder:["Start date","End date"],rangeYearPlaceholder:["Start year","End year"],rangeQuarterPlaceholder:["Start quarter","End quarter"],rangeMonthPlaceholder:["Start month","End month"],rangeWeekPlaceholder:["Start week","End week"]},{locale:"en_US",today:"Today",now:"Now",backToToday:"Back to today",ok:"OK",clear:"Clear",month:"Month",year:"Year",timeSelect:"select 
time",dateSelect:"select date",weekSelect:"Choose a week",monthSelect:"Choose a month",yearSelect:"Choose a year",decadeSelect:"Choose a decade",yearFormat:"YYYY",dateFormat:"M/D/YYYY",dayFormat:"D",dateTimeFormat:"M/D/YYYY HH:mm:ss",monthBeforeYear:!0,previousMonth:"Previous month (PageUp)",nextMonth:"Next month (PageDown)",previousYear:"Last year (Control + left)",nextYear:"Next year (Control + right)",previousDecade:"Last decade",nextDecade:"Next decade",previousCentury:"Last century",nextCentury:"Next century"}),timePickerLocale:Object.assign({},r)},a="${label} is not a valid ${type}";var i={locale:"en",Pagination:{items_per_page:"/ page",jump_to:"Go to",jump_to_confirm:"confirm",page:"Page",prev_page:"Previous Page",next_page:"Next Page",prev_5:"Previous 5 Pages",next_5:"Next 5 Pages",prev_3:"Previous 3 Pages",next_3:"Next 3 Pages",page_size:"Page Size"},DatePicker:o,TimePicker:r,Calendar:o,global:{placeholder:"Please select"},Table:{filterTitle:"Filter menu",filterConfirm:"OK",filterReset:"Reset",filterEmptyText:"No filters",filterCheckall:"Select all items",filterSearchPlaceholder:"Search in filters",emptyText:"No data",selectAll:"Select current page",selectInvert:"Invert current page",selectNone:"Clear all data",selectionAll:"Select all data",sortTitle:"Sort",expand:"Expand row",collapse:"Collapse row",triggerDesc:"Click to sort descending",triggerAsc:"Click to sort ascending",cancelSort:"Click to cancel sorting"},Tour:{Next:"Next",Previous:"Previous",Finish:"Finish"},Modal:{okText:"OK",cancelText:"Cancel",justOkText:"OK"},Popconfirm:{okText:"OK",cancelText:"Cancel"},Transfer:{titles:["",""],searchPlaceholder:"Search here",itemUnit:"item",itemsUnit:"items",remove:"Remove",selectCurrent:"Select current page",removeCurrent:"Remove current page",selectAll:"Select all data",removeAll:"Remove all data",selectInvert:"Invert current page"},Upload:{uploading:"Uploading...",removeFile:"Remove file",uploadError:"Upload error",previewFile:"Preview 
file",downloadFile:"Download file"},Empty:{description:"No data"},Icon:{icon:"icon"},Text:{edit:"Edit",copy:"Copy",copied:"Copied",expand:"Expand"},PageHeader:{back:"Back"},Form:{optional:"(optional)",defaultValidateMessages:{default:"Field validation error for ${label}",required:"Please enter ${label}",enum:"${label} must be one of [${enum}]",whitespace:"${label} cannot be a blank character",date:{format:"${label} date format is invalid",parse:"${label} cannot be converted to a date",invalid:"${label} is an invalid date"},types:{string:a,method:a,array:a,object:a,number:a,date:a,boolean:a,integer:a,float:a,regexp:a,email:a,url:a,hex:a},string:{len:"${label} must be ${len} characters",min:"${label} must be at least ${min} characters",max:"${label} must be up to ${max} characters",range:"${label} must be between ${min}-${max} characters"},number:{len:"${label} must be equal to ${len}",min:"${label} must be minimum ${min}",max:"${label} must be maximum ${max}",range:"${label} must be between ${min}-${max}"},array:{len:"Must be ${len} ${label}",min:"At least ${min} ${label}",max:"At most ${max} ${label}",range:"The amount of ${label} must be between ${min}-${max}"},pattern:{mismatch:"${label} does not match the pattern ${pattern}"}}},Image:{preview:"Preview"},QRCode:{expired:"QR code expired",refresh:"Refresh",scanned:"Scanned"},ColorPicker:{presetEmpty:"Empty"}}},70595:function(e,t,n){var r=n(64090),o=n(33302),a=n(79474);t.Z=(e,t)=>{let n=r.useContext(o.Z);return[r.useMemo(()=>{var r;let o=t||a.Z[e],i=null!==(r=null==n?void 0:n[e])&&void 0!==r?r:{};return Object.assign(Object.assign({},"function"==typeof o?o():o),i||{})},[e,t,n]),r.useMemo(()=>{let e=null==n?void 0:n.locale;return(null==n?void 0:n.exist)&&!e?a.Z.locale:e},[n])]}},81642:function(e,t,n){n.d(t,{J:function(){return s}});var r=n(64090),o=n(48563),a=n(92801),i=function(e,t){var n={};for(var r in e)Object.prototype.hasOwnProperty.call(e,r)&&0>t.indexOf(r)&&(n[r]=e[r]);if(null!=e&&"function"==typeof 
Object.getOwnPropertySymbols)for(var o=0,r=Object.getOwnPropertySymbols(e);ot.indexOf(r[o])&&Object.prototype.propertyIsEnumerable.call(e,r[o])&&(n[r[o]]=e[r[o]]);return n};let l=r.createContext(null),s=r.forwardRef((e,t)=>{let{children:n}=e,s=i(e,["children"]),c=r.useContext(l),u=r.useMemo(()=>Object.assign(Object.assign({},c),s),[c,s.prefixCls,s.mode,s.selectable,s.rootClassName]),d=(0,o.t4)(n),p=(0,o.x1)(t,d?n.ref:null);return r.createElement(l.Provider,{value:u},r.createElement(a.BR,null,d?r.cloneElement(n,{ref:p}):n))});t.Z=l},95781:function(e,t,n){n.d(t,{Z:function(){return tg}});var r=n(64090),o=n(14749),a=n(50833),i=n(5239),l=n(63787),s=n(80406),c=n(6787),u=n(16480),d=n.n(u),p=n(54739),f=n(44329),m=n(92536),g=n(53850),h=n(89542),b=r.createContext(null);function v(e,t){return void 0===e?null:"".concat(e,"-").concat(t)}function y(e){return v(r.useContext(b),e)}var E=n(61475),S=["children","locked"],w=r.createContext(null);function x(e){var t=e.children,n=e.locked,o=(0,c.Z)(e,S),a=r.useContext(w),l=(0,E.Z)(function(){var e;return e=(0,i.Z)({},a),Object.keys(o).forEach(function(t){var n=o[t];void 0!==n&&(e[t]=n)}),e},[a,o],function(e,t){return!n&&(e[0]!==t[0]||!(0,m.Z)(e[1],t[1],!0))});return r.createElement(w.Provider,{value:l},t)}var O=r.createContext(null);function C(){return r.useContext(O)}var T=r.createContext([]);function k(e){var t=r.useContext(T);return r.useMemo(function(){return void 0!==e?[].concat((0,l.Z)(t),[e]):t},[t,e])}var A=r.createContext(null),I=r.createContext({}),R=n(73193);function N(e){var t=arguments.length>1&&void 0!==arguments[1]&&arguments[1];if((0,R.Z)(e)){var n=e.nodeName.toLowerCase(),r=["input","select","textarea","button"].includes(n)||e.isContentEditable||"a"===n&&!!e.getAttribute("href"),o=e.getAttribute("tabindex"),a=Number(o),i=null;return o&&!Number.isNaN(a)?i=a:r&&null===i&&(i=0),r&&e.disabled&&(i=null),null!==i&&(i>=0||t&&i<0)}return!1}var 
_=n(4295),P=n(19223),M=_.Z.LEFT,L=_.Z.RIGHT,D=_.Z.UP,j=_.Z.DOWN,F=_.Z.ENTER,B=_.Z.ESC,U=_.Z.HOME,Z=_.Z.END,z=[D,j,M,L];function H(e,t){return(function(e){var t=arguments.length>1&&void 0!==arguments[1]&&arguments[1],n=(0,l.Z)(e.querySelectorAll("*")).filter(function(e){return N(e,t)});return N(e,t)&&n.unshift(e),n})(e,!0).filter(function(e){return t.has(e)})}function G(e,t,n){var r=arguments.length>3&&void 0!==arguments[3]?arguments[3]:1;if(!e)return null;var o=H(e,t),a=o.length,i=o.findIndex(function(e){return n===e});return r<0?-1===i?i=a-1:i-=1:r>0&&(i+=1),o[i=(i+a)%a]}var $=function(e,t){var n=new Set,r=new Map,o=new Map;return e.forEach(function(e){var a=document.querySelector("[data-menu-id='".concat(v(t,e),"']"));a&&(n.add(a),o.set(a,e),r.set(e,a))}),{elements:n,key2element:r,element2key:o}},W="__RC_UTIL_PATH_SPLIT__",V=function(e){return e.join(W)},q="rc-menu-more";function Y(e){var t=r.useRef(e);t.current=e;var n=r.useCallback(function(){for(var e,n=arguments.length,r=Array(n),o=0;o1&&(O.motionAppear=!1);var C=O.onVisibleChanged;return(O.onVisibleChanged=function(e){return h.current||e||E(!0),null==C?void 0:C(e)},y)?null:r.createElement(x,{mode:c,locked:!h.current},r.createElement(eT.ZP,(0,o.Z)({visible:S},O,{forceRender:p,removeOnLeave:!1,leavedClassName:"".concat(d,"-hidden")}),function(e){var n=e.className,o=e.style;return r.createElement(eh,{id:t,className:n,style:o},l)}))}var eA=["style","className","title","eventKey","warnKey","disabled","internalPopupClose","children","itemIcon","expandIcon","popupClassName","popupOffset","popupStyle","onClick","onMouseEnter","onMouseLeave","onTitleClick","onTitleMouseEnter","onTitleMouseLeave"],eI=["active"],eR=function(e){var 
t,n=e.style,l=e.className,u=e.title,f=e.eventKey,m=(e.warnKey,e.disabled),g=e.internalPopupClose,h=e.children,b=e.itemIcon,v=e.expandIcon,E=e.popupClassName,S=e.popupOffset,O=e.popupStyle,C=e.onClick,T=e.onMouseEnter,R=e.onMouseLeave,N=e.onTitleClick,_=e.onTitleMouseEnter,P=e.onTitleMouseLeave,M=(0,c.Z)(e,eA),L=y(f),D=r.useContext(w),j=D.prefixCls,F=D.mode,B=D.openKeys,U=D.disabled,Z=D.overflowDisabled,z=D.activeKey,H=D.selectedKeys,G=D.itemIcon,$=D.expandIcon,W=D.onItemClick,V=D.onOpenChange,q=D.onActive,K=r.useContext(I)._internalRenderSubMenuItem,X=r.useContext(A).isSubPathKey,Q=k(),J="".concat(j,"-submenu"),ee=U||m,et=r.useRef(),en=r.useRef(),er=null!=v?v:$,el=B.includes(f),ec=!Z&&el,eu=X(H,f),ed=eo(f,ee,_,P),ep=ed.active,ef=(0,c.Z)(ed,eI),em=r.useState(!1),eg=(0,s.Z)(em,2),eb=eg[0],ev=eg[1],ey=function(e){ee||ev(e)},eE=r.useMemo(function(){return ep||"inline"!==F&&(eb||X([z],f))},[F,ep,z,eb,f,X]),eS=ea(Q.length),ew=Y(function(e){null==C||C(es(e)),W(e)}),ex=L&&"".concat(L,"-popup"),eO=r.createElement("div",(0,o.Z)({role:"menuitem",style:eS,className:"".concat(J,"-title"),tabIndex:ee?null:-1,ref:et,title:"string"==typeof u?u:null,"data-menu-id":Z&&L?null:L,"aria-expanded":ec,"aria-haspopup":!0,"aria-controls":ex,"aria-disabled":ee,onClick:function(e){ee||(null==N||N({key:f,domEvent:e}),"inline"===F&&V(f,!el))},onFocus:function(){q(f)}},ef),u,r.createElement(ei,{icon:"horizontal"!==F?er:void 0,props:(0,i.Z)((0,i.Z)({},e),{},{isOpen:ec,isSubMenu:!0})},r.createElement("i",{className:"".concat(J,"-arrow")}))),eT=r.useRef(F);if("inline"!==F&&Q.length>1?eT.current="vertical":eT.current=F,!Z){var eR=eT.current;eO=r.createElement(eC,{mode:eR,prefixCls:J,visible:!g&&ec&&"inline"!==F,popupClassName:E,popupOffset:S,popupStyle:O,popup:r.createElement(x,{mode:"horizontal"===eR?"vertical":eR},r.createElement(eh,{id:ex,ref:en},h)),disabled:ee,onVisibleChange:function(e){"inline"!==F&&V(f,e)}},eO)}var 
eN=r.createElement(p.Z.Item,(0,o.Z)({role:"none"},M,{component:"li",style:n,className:d()(J,"".concat(J,"-").concat(F),l,(t={},(0,a.Z)(t,"".concat(J,"-open"),ec),(0,a.Z)(t,"".concat(J,"-active"),eE),(0,a.Z)(t,"".concat(J,"-selected"),eu),(0,a.Z)(t,"".concat(J,"-disabled"),ee),t)),onMouseEnter:function(e){ey(!0),null==T||T({key:f,domEvent:e})},onMouseLeave:function(e){ey(!1),null==R||R({key:f,domEvent:e})}}),eO,!Z&&r.createElement(ek,{id:ex,open:ec,keyPath:Q},h));return K&&(eN=K(eN,e,{selected:eu,active:eE,open:ec,disabled:ee})),r.createElement(x,{onItemClick:ew,mode:"horizontal"===F?"vertical":F,itemIcon:null!=b?b:G,expandIcon:er},eN)};function eN(e){var t,n=e.eventKey,o=e.children,a=k(n),i=ev(o,a),l=C();return r.useEffect(function(){if(l)return l.registerPath(n,a),function(){l.unregisterPath(n,a)}},[a]),t=l?i:r.createElement(eR,e,i),r.createElement(T.Provider,{value:a},t)}var e_=n(6976),eP=["className","title","eventKey","children"],eM=["children"],eL=function(e){var t=e.className,n=e.title,a=(e.eventKey,e.children),i=(0,c.Z)(e,eP),l=r.useContext(w).prefixCls,s="".concat(l,"-item-group");return r.createElement("li",(0,o.Z)({role:"presentation"},i,{onClick:function(e){return e.stopPropagation()},className:d()(s,t)}),r.createElement("div",{role:"presentation",className:"".concat(s,"-title"),title:"string"==typeof n?n:void 0},n),r.createElement("ul",{role:"group",className:"".concat(s,"-list")},a))};function eD(e){var t=e.children,n=(0,c.Z)(e,eM),o=ev(t,k(n.eventKey));return C()?o:r.createElement(eL,(0,en.Z)(n,["warnKey"]),o)}function ej(e){var t=e.className,n=e.style,o=r.useContext(w).prefixCls;return C()?null:r.createElement("li",{role:"separator",className:d()("".concat(o,"-item-divider"),t),style:n})}var 
eF=["label","children","key","type"],eB=["prefixCls","rootClassName","style","className","tabIndex","items","children","direction","id","mode","inlineCollapsed","disabled","disabledOverflow","subMenuOpenDelay","subMenuCloseDelay","forceSubMenuRender","defaultOpenKeys","openKeys","activeKey","defaultActiveFirst","selectable","multiple","defaultSelectedKeys","selectedKeys","onSelect","onDeselect","inlineIndent","motion","defaultMotions","triggerSubMenuAction","builtinPlacements","itemIcon","expandIcon","overflowedIndicator","overflowedIndicatorPopupClassName","getPopupContainer","onClick","onOpenChange","onKeyDown","openAnimation","openTransitionName","_internalRenderMenuItem","_internalRenderSubMenuItem"],eU=[],eZ=r.forwardRef(function(e,t){var n,u,g,v,y,E,S,w,C,T,k,R,N,_,Q,J,ee,et,en,er,eo,ea,ei,el,ec,eu,ed,ep=e.prefixCls,ef=void 0===ep?"rc-menu":ep,eg=e.rootClassName,eh=e.style,eb=e.className,ey=e.tabIndex,eE=e.items,eS=e.children,ew=e.direction,ex=e.id,eO=e.mode,eC=void 0===eO?"vertical":eO,eT=e.inlineCollapsed,ek=e.disabled,eA=e.disabledOverflow,eI=e.subMenuOpenDelay,eR=e.subMenuCloseDelay,eP=e.forceSubMenuRender,eM=e.defaultOpenKeys,eL=e.openKeys,eZ=e.activeKey,ez=e.defaultActiveFirst,eH=e.selectable,eG=void 0===eH||eH,e$=e.multiple,eW=void 0!==e$&&e$,eV=e.defaultSelectedKeys,eq=e.selectedKeys,eY=e.onSelect,eK=e.onDeselect,eX=e.inlineIndent,eQ=e.motion,eJ=e.defaultMotions,e0=e.triggerSubMenuAction,e1=e.builtinPlacements,e2=e.itemIcon,e4=e.expandIcon,e3=e.overflowedIndicator,e6=void 0===e3?"...":e3,e5=e.overflowedIndicatorPopupClassName,e8=e.getPopupContainer,e9=e.onClick,e7=e.onOpenChange,te=e.onKeyDown,tt=(e.openAnimation,e.openTransitionName,e._internalRenderMenuItem),tn=e._internalRenderSubMenuItem,tr=(0,c.Z)(e,eB),to=r.useMemo(function(){var e;return e=eS,eE&&(e=function e(t){return(t||[]).map(function(t,n){if(t&&"object"===(0,e_.Z)(t)){var a=t.label,i=t.children,l=t.key,s=t.type,u=(0,c.Z)(t,eF),d=null!=l?l:"tmp-".concat(n);return 
i||"group"===s?"group"===s?r.createElement(eD,(0,o.Z)({key:d},u,{title:a}),e(i)):r.createElement(eN,(0,o.Z)({key:d},u,{title:a}),e(i)):"divider"===s?r.createElement(ej,(0,o.Z)({key:d},u)):r.createElement(em,(0,o.Z)({key:d},u),a)}return null}).filter(function(e){return e})}(eE)),ev(e,eU)},[eS,eE]),ta=r.useState(!1),ti=(0,s.Z)(ta,2),tl=ti[0],ts=ti[1],tc=r.useRef(),tu=(n=(0,f.Z)(ex,{value:ex}),g=(u=(0,s.Z)(n,2))[0],v=u[1],r.useEffect(function(){X+=1;var e="".concat(K,"-").concat(X);v("rc-menu-uuid-".concat(e))},[]),g),td="rtl"===ew,tp=(0,f.Z)(eM,{value:eL,postState:function(e){return e||eU}}),tf=(0,s.Z)(tp,2),tm=tf[0],tg=tf[1],th=function(e){var t=arguments.length>1&&void 0!==arguments[1]&&arguments[1];function n(){tg(e),null==e7||e7(e)}t?(0,h.flushSync)(n):n()},tb=r.useState(tm),tv=(0,s.Z)(tb,2),ty=tv[0],tE=tv[1],tS=r.useRef(!1),tw=r.useMemo(function(){return("inline"===eC||"vertical"===eC)&&eT?["vertical",eT]:[eC,!1]},[eC,eT]),tx=(0,s.Z)(tw,2),tO=tx[0],tC=tx[1],tT="inline"===tO,tk=r.useState(tO),tA=(0,s.Z)(tk,2),tI=tA[0],tR=tA[1],tN=r.useState(tC),t_=(0,s.Z)(tN,2),tP=t_[0],tM=t_[1];r.useEffect(function(){tR(tO),tM(tC),tS.current&&(tT?tg(ty):th(eU))},[tO,tC]);var tL=r.useState(0),tD=(0,s.Z)(tL,2),tj=tD[0],tF=tD[1],tB=tj>=to.length-1||"horizontal"!==tI||eA;r.useEffect(function(){tT&&tE(tm)},[tm]),r.useEffect(function(){return tS.current=!0,function(){tS.current=!1}},[]);var tU=(y=r.useState({}),E=(0,s.Z)(y,2)[1],S=(0,r.useRef)(new Map),w=(0,r.useRef)(new Map),C=r.useState([]),k=(T=(0,s.Z)(C,2))[0],R=T[1],N=(0,r.useRef)(0),_=(0,r.useRef)(!1),Q=function(){_.current||E({})},J=(0,r.useCallback)(function(e,t){var n=V(t);w.current.set(n,e),S.current.set(e,n),N.current+=1;var r=N.current;Promise.resolve().then(function(){r===N.current&&Q()})},[]),ee=(0,r.useCallback)(function(e,t){var n=V(t);w.current.delete(n),S.current.delete(e)},[]),et=(0,r.useCallback)(function(e){R(e)},[]),en=(0,r.useCallback)(function(e,t){var n=(S.current.get(e)||"").split(W);return 
t&&k.includes(n[0])&&n.unshift(q),n},[k]),er=(0,r.useCallback)(function(e,t){return e.some(function(e){return en(e,!0).includes(t)})},[en]),eo=(0,r.useCallback)(function(e){var t="".concat(S.current.get(e)).concat(W),n=new Set;return(0,l.Z)(w.current.keys()).forEach(function(e){e.startsWith(t)&&n.add(w.current.get(e))}),n},[]),r.useEffect(function(){return function(){_.current=!0}},[]),{registerPath:J,unregisterPath:ee,refreshOverflowKeys:et,isSubPathKey:er,getKeyPath:en,getKeys:function(){var e=(0,l.Z)(S.current.keys());return k.length&&e.push(q),e},getSubPathKeys:eo}),tZ=tU.registerPath,tz=tU.unregisterPath,tH=tU.refreshOverflowKeys,tG=tU.isSubPathKey,t$=tU.getKeyPath,tW=tU.getKeys,tV=tU.getSubPathKeys,tq=r.useMemo(function(){return{registerPath:tZ,unregisterPath:tz}},[tZ,tz]),tY=r.useMemo(function(){return{isSubPathKey:tG}},[tG]);r.useEffect(function(){tH(tB?eU:to.slice(tj+1).map(function(e){return e.key}))},[tj,tB]);var tK=(0,f.Z)(eZ||ez&&(null===(eu=to[0])||void 0===eu?void 0:eu.key),{value:eZ}),tX=(0,s.Z)(tK,2),tQ=tX[0],tJ=tX[1],t0=Y(function(e){tJ(e)}),t1=Y(function(){tJ(void 0)});(0,r.useImperativeHandle)(t,function(){return{list:tc.current,focus:function(e){var t,n,r=$(tW(),tu),o=r.elements,a=r.key2element,i=r.element2key,l=H(tc.current,o),s=null!=tQ?tQ:l[0]?i.get(l[0]):null===(t=to.find(function(e){return!e.props.disabled}))||void 0===t?void 0:t.key,c=a.get(s);s&&c&&(null==c||null===(n=c.focus)||void 0===n||n.call(c,e))}}});var t2=(0,f.Z)(eV||[],{value:eq,postState:function(e){return Array.isArray(e)?e:null==e?eU:[e]}}),t4=(0,s.Z)(t2,2),t3=t4[0],t6=t4[1],t5=function(e){if(eG){var t,n=e.key,r=t3.includes(n);t6(t=eW?r?t3.filter(function(e){return e!==n}):[].concat((0,l.Z)(t3),[n]):[n]);var o=(0,i.Z)((0,i.Z)({},e),{},{selectedKeys:t});r?null==eK||eK(o):null==eY||eY(o)}!eW&&tm.length&&"inline"!==tI&&th(eU)},t8=Y(function(e){null==e9||e9(es(e)),t5(e)}),t9=Y(function(e,t){var n=tm.filter(function(t){return t!==e});if(t)n.push(e);else if("inline"!==tI){var 
r=tV(e);n=n.filter(function(e){return!r.has(e)})}(0,m.Z)(tm,n,!0)||th(n,!0)}),t7=(ea=function(e,t){var n=null!=t?t:!tm.includes(e);t9(e,n)},ei=r.useRef(),(el=r.useRef()).current=tQ,ec=function(){P.Z.cancel(ei.current)},r.useEffect(function(){return function(){ec()}},[]),function(e){var t=e.which;if([].concat(z,[F,B,U,Z]).includes(t)){var n=tW(),r=$(n,tu),o=r,i=o.elements,l=o.key2element,s=o.element2key,c=function(e,t){for(var n=e||document.activeElement;n;){if(t.has(n))return n;n=n.parentElement}return null}(l.get(tQ),i),u=s.get(c),d=function(e,t,n,r){var o,i,l,s,c="prev",u="next",d="children",p="parent";if("inline"===e&&r===F)return{inlineTrigger:!0};var f=(o={},(0,a.Z)(o,D,c),(0,a.Z)(o,j,u),o),m=(i={},(0,a.Z)(i,M,n?u:c),(0,a.Z)(i,L,n?c:u),(0,a.Z)(i,j,d),(0,a.Z)(i,F,d),i),g=(l={},(0,a.Z)(l,D,c),(0,a.Z)(l,j,u),(0,a.Z)(l,F,d),(0,a.Z)(l,B,p),(0,a.Z)(l,M,n?d:p),(0,a.Z)(l,L,n?p:d),l);switch(null===(s=({inline:f,horizontal:m,vertical:g,inlineSub:f,horizontalSub:g,verticalSub:g})["".concat(e).concat(t?"":"Sub")])||void 0===s?void 0:s[r]){case c:return{offset:-1,sibling:!0};case u:return{offset:1,sibling:!0};case p:return{offset:-1,sibling:!1};case d:return{offset:1,sibling:!1};default:return null}}(tI,1===t$(u,!0).length,td,t);if(!d&&t!==U&&t!==Z)return;(z.includes(t)||[U,Z].includes(t))&&e.preventDefault();var p=function(e){if(e){var t=e,n=e.querySelector("a");null!=n&&n.getAttribute("href")&&(t=n);var r=s.get(e);tJ(r),ec(),ei.current=(0,P.Z)(function(){el.current===r&&t.focus()})}};if([U,Z].includes(t)||d.sibling||!c){var f,m=H(f=c&&"inline"!==tI?function(e){for(var t=e;t;){if(t.getAttribute("data-menu-list"))return t;t=t.parentElement}return null}(c):tc.current,i);p(t===U?m[0]:t===Z?m[m.length-1]:G(f,i,c,d.offset))}else if(d.inlineTrigger)ea(u);else if(d.offset>0)ea(u,!0),ec(),ei.current=(0,P.Z)(function(){r=$(n,tu);var e=c.getAttribute("aria-controls");p(G(document.getElementById(e),r.elements))},5);else if(d.offset<0){var 
g=t$(u,!0),h=g[g.length-2],b=l.get(h);ea(h,!1),p(b)}}null==te||te(e)});r.useEffect(function(){ts(!0)},[]);var ne=r.useMemo(function(){return{_internalRenderMenuItem:tt,_internalRenderSubMenuItem:tn}},[tt,tn]),nt="horizontal"!==tI||eA?to:to.map(function(e,t){return r.createElement(x,{key:e.key,overflowDisabled:t>tj},e)}),nn=r.createElement(p.Z,(0,o.Z)({id:ex,ref:tc,prefixCls:"".concat(ef,"-overflow"),component:"ul",itemComponent:em,className:d()(ef,"".concat(ef,"-root"),"".concat(ef,"-").concat(tI),eb,(ed={},(0,a.Z)(ed,"".concat(ef,"-inline-collapsed"),tP),(0,a.Z)(ed,"".concat(ef,"-rtl"),td),ed),eg),dir:ew,style:eh,role:"menu",tabIndex:void 0===ey?0:ey,data:nt,renderRawItem:function(e){return e},renderRawRest:function(e){var t=e.length,n=t?to.slice(-t):null;return r.createElement(eN,{eventKey:q,title:e6,disabled:tB,internalPopupClose:0===t,popupClassName:e5},n)},maxCount:"horizontal"!==tI||eA?p.Z.INVALIDATE:p.Z.RESPONSIVE,ssr:"full","data-menu-list":!0,onVisibleChange:function(e){tF(e)},onKeyDown:t7},tr));return r.createElement(I.Provider,{value:ne},r.createElement(b.Provider,{value:tu},r.createElement(x,{prefixCls:ef,rootClassName:eg,mode:tI,openKeys:tm,rtl:td,disabled:ek,motion:tl?eQ:null,defaultMotions:tl?eJ:null,activeKey:tQ,onActive:t0,onInactive:t1,selectedKeys:t3,inlineIndent:void 0===eX?24:eX,subMenuOpenDelay:void 0===eI?.1:eI,subMenuCloseDelay:void 0===eR?.1:eR,forceSubMenuRender:eP,builtinPlacements:e1,triggerSubMenuAction:void 0===e0?"hover":e0,getPopupContainer:e8,itemIcon:e2,expandIcon:e4,onItemClick:t8,onOpenChange:t9},r.createElement(A.Provider,{value:tY},nn),r.createElement("div",{style:{display:"none"},"aria-hidden":!0},r.createElement(O.Provider,{value:tq},to)))))});eZ.Item=em,eZ.SubMenu=eN,eZ.ItemGroup=eD,eZ.Divider=ej;var ez=n(8407),eH=n(31413),eG=n(48563),e$=n(47387),eW=n(65823),eV=n(57499),eq=function(e,t){var n={};for(var r in e)Object.prototype.hasOwnProperty.call(e,r)&&0>t.indexOf(r)&&(n[r]=e[r]);if(null!=e&&"function"==typeof 
Object.getOwnPropertySymbols)for(var o=0,r=Object.getOwnPropertySymbols(e);ot.indexOf(r[o])&&Object.prototype.propertyIsEnumerable.call(e,r[o])&&(n[r[o]]=e[r[o]]);return n},eY=e=>{let{prefixCls:t,className:n,dashed:o}=e,a=eq(e,["prefixCls","className","dashed"]),{getPrefixCls:i}=r.useContext(eV.E_),l=i("menu",t),s=d()({["".concat(l,"-item-divider-dashed")]:!!o},n);return r.createElement(ej,Object.assign({className:s},a))},eK=n(1460);let eX=(0,r.createContext)({prefixCls:"",firstLevel:!0,inlineCollapsed:!1});var eQ=e=>{var t;let{className:n,children:o,icon:a,title:i,danger:l}=e,{prefixCls:s,firstLevel:c,direction:u,disableMenuItemTitleTooltip:p,inlineCollapsed:f}=r.useContext(eX),{siderCollapsed:m}=r.useContext(ez.D),g=i;void 0===i?g=c?o:"":!1===i&&(g="");let h={title:g};m||f||(h.title=null,h.open=!1);let b=(0,eb.Z)(o).length,v=r.createElement(em,Object.assign({},(0,en.Z)(e,["title","icon","danger"]),{className:d()({["".concat(s,"-item-danger")]:l,["".concat(s,"-item-only-child")]:(a?b+1:b)===1},n),title:"string"==typeof i?i:void 0}),(0,eW.Tm)(a,{className:d()((0,eW.l$)(a)?null===(t=a.props)||void 0===t?void 0:t.className:"","".concat(s,"-item-icon"))}),(e=>{let t=r.createElement("span",{className:"".concat(s,"-title-content")},o);return(!a||(0,eW.l$)(o)&&"span"===o.type)&&o&&e&&c&&"string"==typeof o?r.createElement("div",{className:"".concat(s,"-inline-collapsed-noicon")},o.charAt(0)):t})(f));return p||(v=r.createElement(eK.Z,Object.assign({},h,{placement:"rtl"===u?"left":"right",overlayClassName:"".concat(s,"-inline-collapsed-tooltip")}),v)),v},eJ=n(51761),e0=e=>{var t;let n;let{popupClassName:o,icon:a,title:i,theme:l}=e,s=r.useContext(eX),{prefixCls:c,inlineCollapsed:u,theme:p}=s,f=k();if(a){let e=(0,eW.l$)(i)&&"span"===i.type;n=r.createElement(r.Fragment,null,(0,eW.Tm)(a,{className:d()((0,eW.l$)(a)?null===(t=a.props)||void 0===t?void 0:t.className:"","".concat(c,"-item-icon"))}),e?i:r.createElement("span",{className:"".concat(c,"-title-content")},i))}else 
n=u&&!f.length&&i&&"string"==typeof i?r.createElement("div",{className:"".concat(c,"-inline-collapsed-noicon")},i.charAt(0)):r.createElement("span",{className:"".concat(c,"-title-content")},i);let m=r.useMemo(()=>Object.assign(Object.assign({},s),{firstLevel:!1}),[s]),[g]=(0,eJ.Cn)("Menu");return r.createElement(eX.Provider,{value:m},r.createElement(eN,Object.assign({},(0,en.Z)(e,["icon"]),{title:n,popupClassName:d()(c,o,"".concat(c,"-").concat(l||p)),popupStyle:{zIndex:g}})))},e1=function(e,t){var n={};for(var r in e)Object.prototype.hasOwnProperty.call(e,r)&&0>t.indexOf(r)&&(n[r]=e[r]);if(null!=e&&"function"==typeof Object.getOwnPropertySymbols)for(var o=0,r=Object.getOwnPropertySymbols(e);ot.indexOf(r[o])&&Object.prototype.propertyIsEnumerable.call(e,r[o])&&(n[r[o]]=e[r[o]]);return n},e2=n(81642),e4=n(8985),e3=n(6336),e6=n(11303),e5=n(46154),e8=n(202),e9=n(58854),e7=n(76585),te=n(80316),tt=e=>{let{componentCls:t,motionDurationSlow:n,horizontalLineHeight:r,colorSplit:o,lineWidth:a,lineType:i,itemPaddingInline:l}=e;return{["".concat(t,"-horizontal")]:{lineHeight:r,border:0,borderBottom:"".concat((0,e4.bf)(a)," ").concat(i," ").concat(o),boxShadow:"none","&::after":{display:"block",clear:"both",height:0,content:'"\\20"'},["".concat(t,"-item, ").concat(t,"-submenu")]:{position:"relative",display:"inline-block",verticalAlign:"bottom",paddingInline:l},["> ".concat(t,"-item:hover,\n > ").concat(t,"-item-active,\n > ").concat(t,"-submenu ").concat(t,"-submenu-title:hover")]:{backgroundColor:"transparent"},["".concat(t,"-item, ").concat(t,"-submenu-title")]:{transition:["border-color ".concat(n),"background ".concat(n)].join(",")},["".concat(t,"-submenu-arrow")]:{display:"none"}}}},tn=e=>{let{componentCls:t,menuArrowOffset:n,calc:r}=e;return{["".concat(t,"-rtl")]:{direction:"rtl"},["".concat(t,"-submenu-rtl")]:{transformOrigin:"100% 0"},["".concat(t,"-rtl").concat(t,"-vertical,\n ").concat(t,"-submenu-rtl 
").concat(t,"-vertical")]:{["".concat(t,"-submenu-arrow")]:{"&::before":{transform:"rotate(-45deg) translateY(".concat((0,e4.bf)(r(n).mul(-1).equal()),")")},"&::after":{transform:"rotate(45deg) translateY(".concat((0,e4.bf)(n),")")}}}}};let tr=e=>Object.assign({},(0,e6.oN)(e));var to=(e,t)=>{let{componentCls:n,itemColor:r,itemSelectedColor:o,groupTitleColor:a,itemBg:i,subMenuItemBg:l,itemSelectedBg:s,activeBarHeight:c,activeBarWidth:u,activeBarBorderWidth:d,motionDurationSlow:p,motionEaseInOut:f,motionEaseOut:m,itemPaddingInline:g,motionDurationMid:h,itemHoverColor:b,lineType:v,colorSplit:y,itemDisabledColor:E,dangerItemColor:S,dangerItemHoverColor:w,dangerItemSelectedColor:x,dangerItemActiveBg:O,dangerItemSelectedBg:C,popupBg:T,itemHoverBg:k,itemActiveBg:A,menuSubMenuBg:I,horizontalItemSelectedColor:R,horizontalItemSelectedBg:N,horizontalItemBorderRadius:_,horizontalItemHoverBg:P}=e;return{["".concat(n,"-").concat(t,", ").concat(n,"-").concat(t," > ").concat(n)]:{color:r,background:i,["&".concat(n,"-root:focus-visible")]:Object.assign({},tr(e)),["".concat(n,"-item-group-title")]:{color:a},["".concat(n,"-submenu-selected")]:{["> ".concat(n,"-submenu-title")]:{color:o}},["".concat(n,"-item-disabled, ").concat(n,"-submenu-disabled")]:{color:"".concat(E," !important")},["".concat(n,"-item:not(").concat(n,"-item-selected):not(").concat(n,"-submenu-selected)")]:{["&:hover, > ".concat(n,"-submenu-title:hover")]:{color:b}},["&:not(".concat(n,"-horizontal)")]:{["".concat(n,"-item:not(").concat(n,"-item-selected)")]:{"&:hover":{backgroundColor:k},"&:active":{backgroundColor:A}},["".concat(n,"-submenu-title")]:{"&:hover":{backgroundColor:k},"&:active":{backgroundColor:A}}},["".concat(n,"-item-danger")]:{color:S,["&".concat(n,"-item:hover")]:{["&:not(".concat(n,"-item-selected):not(").concat(n,"-submenu-selected)")]:{color:w}},["&".concat(n,"-item:active")]:{background:O}},["".concat(n,"-item a")]:{"&, 
&:hover":{color:"inherit"}},["".concat(n,"-item-selected")]:{color:o,["&".concat(n,"-item-danger")]:{color:x},"a, a:hover":{color:"inherit"}},["& ".concat(n,"-item-selected")]:{backgroundColor:s,["&".concat(n,"-item-danger")]:{backgroundColor:C}},["".concat(n,"-item, ").concat(n,"-submenu-title")]:{["&:not(".concat(n,"-item-disabled):focus-visible")]:Object.assign({},tr(e))},["&".concat(n,"-submenu > ").concat(n)]:{backgroundColor:I},["&".concat(n,"-popup > ").concat(n)]:{backgroundColor:T},["&".concat(n,"-submenu-popup > ").concat(n)]:{backgroundColor:T},["&".concat(n,"-horizontal")]:Object.assign(Object.assign({},"dark"===t?{borderBottom:0}:{}),{["> ".concat(n,"-item, > ").concat(n,"-submenu")]:{top:d,marginTop:e.calc(d).mul(-1).equal(),marginBottom:0,borderRadius:_,"&::after":{position:"absolute",insetInline:g,bottom:0,borderBottom:"".concat((0,e4.bf)(c)," solid transparent"),transition:"border-color ".concat(p," ").concat(f),content:'""'},"&:hover, &-active, &-open":{background:P,"&::after":{borderBottomWidth:c,borderBottomColor:R}},"&-selected":{color:R,backgroundColor:N,"&:hover":{backgroundColor:N},"&::after":{borderBottomWidth:c,borderBottomColor:R}}}}),["&".concat(n,"-root")]:{["&".concat(n,"-inline, &").concat(n,"-vertical")]:{borderInlineEnd:"".concat((0,e4.bf)(d)," ").concat(v," ").concat(y)}},["&".concat(n,"-inline")]:{["".concat(n,"-sub").concat(n,"-inline")]:{background:l},["".concat(n,"-item")]:{position:"relative","&::after":{position:"absolute",insetBlock:0,insetInlineEnd:0,borderInlineEnd:"".concat((0,e4.bf)(u)," solid ").concat(o),transform:"scaleY(0.0001)",opacity:0,transition:["transform ".concat(h," ").concat(m),"opacity ".concat(h," ").concat(m)].join(","),content:'""'},["&".concat(n,"-item-danger")]:{"&::after":{borderInlineEndColor:x}}},["".concat(n,"-selected, ").concat(n,"-item-selected")]:{"&::after":{transform:"scaleY(1)",opacity:1,transition:["transform ".concat(h," ").concat(f),"opacity ".concat(h," ").concat(f)].join(",")}}}}}};let 
ta=e=>{let{componentCls:t,itemHeight:n,itemMarginInline:r,padding:o,menuArrowSize:a,marginXS:i,itemMarginBlock:l,itemWidth:s}=e,c=e.calc(a).add(o).add(i).equal();return{["".concat(t,"-item")]:{position:"relative",overflow:"hidden"},["".concat(t,"-item, ").concat(t,"-submenu-title")]:{height:n,lineHeight:(0,e4.bf)(n),paddingInline:o,overflow:"hidden",textOverflow:"ellipsis",marginInline:r,marginBlock:l,width:s},["> ".concat(t,"-item,\n > ").concat(t,"-submenu > ").concat(t,"-submenu-title")]:{height:n,lineHeight:(0,e4.bf)(n)},["".concat(t,"-item-group-list ").concat(t,"-submenu-title,\n ").concat(t,"-submenu-title")]:{paddingInlineEnd:c}}};var ti=e=>{let{componentCls:t,iconCls:n,itemHeight:r,colorTextLightSolid:o,dropdownWidth:a,controlHeightLG:i,motionDurationMid:l,motionEaseOut:s,paddingXL:c,itemMarginInline:u,fontSizeLG:d,motionDurationSlow:p,paddingXS:f,boxShadowSecondary:m,collapsedWidth:g,collapsedIconSize:h}=e,b={height:r,lineHeight:(0,e4.bf)(r),listStylePosition:"inside",listStyleType:"disc"};return[{[t]:{"&-inline, &-vertical":Object.assign({["&".concat(t,"-root")]:{boxShadow:"none"}},ta(e))},["".concat(t,"-submenu-popup")]:{["".concat(t,"-vertical")]:Object.assign(Object.assign({},ta(e)),{boxShadow:m})}},{["".concat(t,"-submenu-popup ").concat(t,"-vertical").concat(t,"-sub")]:{minWidth:a,maxHeight:"calc(100vh - ".concat((0,e4.bf)(e.calc(i).mul(2.5).equal()),")"),padding:"0",overflow:"hidden",borderInlineEnd:0,"&:not([class*='-active'])":{overflowX:"hidden",overflowY:"auto"}}},{["".concat(t,"-inline")]:{width:"100%",["&".concat(t,"-root")]:{["".concat(t,"-item, ").concat(t,"-submenu-title")]:{display:"flex",alignItems:"center",transition:["border-color ".concat(p),"background ".concat(p),"padding ".concat(l," ").concat(s)].join(","),["> ".concat(t,"-title-content")]:{flex:"auto",minWidth:0,overflow:"hidden",textOverflow:"ellipsis"},"> *":{flex:"none"}}},["".concat(t,"-sub").concat(t,"-inline")]:{padding:0,border:0,borderRadius:0,boxShadow:"none",["& > 
".concat(t,"-submenu > ").concat(t,"-submenu-title")]:b,["& ".concat(t,"-item-group-title")]:{paddingInlineStart:c}},["".concat(t,"-item")]:b}},{["".concat(t,"-inline-collapsed")]:{width:g,["&".concat(t,"-root")]:{["".concat(t,"-item, ").concat(t,"-submenu ").concat(t,"-submenu-title")]:{["> ".concat(t,"-inline-collapsed-noicon")]:{fontSize:d,textAlign:"center"}}},["> ".concat(t,"-item,\n > ").concat(t,"-item-group > ").concat(t,"-item-group-list > ").concat(t,"-item,\n > ").concat(t,"-item-group > ").concat(t,"-item-group-list > ").concat(t,"-submenu > ").concat(t,"-submenu-title,\n > ").concat(t,"-submenu > ").concat(t,"-submenu-title")]:{insetInlineStart:0,paddingInline:"calc(50% - ".concat((0,e4.bf)(e.calc(d).div(2).equal())," - ").concat((0,e4.bf)(u),")"),textOverflow:"clip",["\n ".concat(t,"-submenu-arrow,\n ").concat(t,"-submenu-expand-icon\n ")]:{opacity:0},["".concat(t,"-item-icon, ").concat(n)]:{margin:0,fontSize:h,lineHeight:(0,e4.bf)(r),"+ span":{display:"inline-block",opacity:0}}},["".concat(t,"-item-icon, ").concat(n)]:{display:"inline-block"},"&-tooltip":{pointerEvents:"none",["".concat(t,"-item-icon, ").concat(n)]:{display:"none"},"a, a:hover":{color:o}},["".concat(t,"-item-group-title")]:Object.assign(Object.assign({},e6.vS),{paddingInline:f})}}]};let tl=e=>{let{componentCls:t,motionDurationSlow:n,motionDurationMid:r,motionEaseInOut:o,motionEaseOut:a,iconCls:i,iconSize:l,iconMarginInlineEnd:s}=e;return{["".concat(t,"-item, ").concat(t,"-submenu-title")]:{position:"relative",display:"block",margin:0,whiteSpace:"nowrap",cursor:"pointer",transition:["border-color ".concat(n),"background ".concat(n),"padding ".concat(n," ").concat(o)].join(","),["".concat(t,"-item-icon, ").concat(i)]:{minWidth:l,fontSize:l,transition:["font-size ".concat(r," ").concat(a),"margin ".concat(n," ").concat(o),"color ".concat(n)].join(","),"+ span":{marginInlineStart:s,opacity:1,transition:["opacity ".concat(n," ").concat(o),"margin ".concat(n),"color 
".concat(n)].join(",")}},["".concat(t,"-item-icon")]:Object.assign({},(0,e6.Ro)()),["&".concat(t,"-item-only-child")]:{["> ".concat(i,", > ").concat(t,"-item-icon")]:{marginInlineEnd:0}}},["".concat(t,"-item-disabled, ").concat(t,"-submenu-disabled")]:{background:"none !important",cursor:"not-allowed","&::after":{borderColor:"transparent !important"},a:{color:"inherit !important"},["> ".concat(t,"-submenu-title")]:{color:"inherit !important",cursor:"not-allowed"}}}},ts=e=>{let{componentCls:t,motionDurationSlow:n,motionEaseInOut:r,borderRadius:o,menuArrowSize:a,menuArrowOffset:i}=e;return{["".concat(t,"-submenu")]:{"&-expand-icon, &-arrow":{position:"absolute",top:"50%",insetInlineEnd:e.margin,width:a,color:"currentcolor",transform:"translateY(-50%)",transition:"transform ".concat(n," ").concat(r,", opacity ").concat(n)},"&-arrow":{"&::before, &::after":{position:"absolute",width:e.calc(a).mul(.6).equal(),height:e.calc(a).mul(.15).equal(),backgroundColor:"currentcolor",borderRadius:o,transition:["background ".concat(n," ").concat(r),"transform ".concat(n," ").concat(r),"top ".concat(n," ").concat(r),"color ".concat(n," ").concat(r)].join(","),content:'""'},"&::before":{transform:"rotate(45deg) translateY(".concat((0,e4.bf)(e.calc(i).mul(-1).equal()),")")},"&::after":{transform:"rotate(-45deg) 
translateY(".concat((0,e4.bf)(i),")")}}}}},tc=e=>{let{antCls:t,componentCls:n,fontSize:r,motionDurationSlow:o,motionDurationMid:a,motionEaseInOut:i,paddingXS:l,padding:s,colorSplit:c,lineWidth:u,zIndexPopup:d,borderRadiusLG:p,subMenuItemBorderRadius:f,menuArrowSize:m,menuArrowOffset:g,lineType:h,menuPanelMaskInset:b,groupTitleLineHeight:v,groupTitleFontSize:y}=e;return[{"":{["".concat(n)]:Object.assign(Object.assign({},(0,e6.dF)()),{"&-hidden":{display:"none"}})},["".concat(n,"-submenu-hidden")]:{display:"none"}},{[n]:Object.assign(Object.assign(Object.assign(Object.assign(Object.assign(Object.assign(Object.assign({},(0,e6.Wf)(e)),(0,e6.dF)()),{marginBottom:0,paddingInlineStart:0,fontSize:r,lineHeight:0,listStyle:"none",outline:"none",transition:"width ".concat(o," cubic-bezier(0.2, 0, 0, 1) 0s"),"ul, ol":{margin:0,padding:0,listStyle:"none"},"&-overflow":{display:"flex",["".concat(n,"-item")]:{flex:"none"}},["".concat(n,"-item, ").concat(n,"-submenu, ").concat(n,"-submenu-title")]:{borderRadius:e.itemBorderRadius},["".concat(n,"-item-group-title")]:{padding:"".concat((0,e4.bf)(l)," ").concat((0,e4.bf)(s)),fontSize:y,lineHeight:v,transition:"all ".concat(o)},["&-horizontal ".concat(n,"-submenu")]:{transition:["border-color ".concat(o," ").concat(i),"background ".concat(o," ").concat(i)].join(",")},["".concat(n,"-submenu, ").concat(n,"-submenu-inline")]:{transition:["border-color ".concat(o," ").concat(i),"background ".concat(o," ").concat(i),"padding ".concat(a," ").concat(i)].join(",")},["".concat(n,"-submenu ").concat(n,"-sub")]:{cursor:"initial",transition:["background ".concat(o," ").concat(i),"padding ".concat(o," ").concat(i)].join(",")},["".concat(n,"-title-content")]:{transition:"color ".concat(o),["> ".concat(t,"-typography-ellipsis-single-line")]:{display:"inline",verticalAlign:"unset"}},["".concat(n,"-item 
a")]:{"&::before":{position:"absolute",inset:0,backgroundColor:"transparent",content:'""'}},["".concat(n,"-item-divider")]:{overflow:"hidden",lineHeight:0,borderColor:c,borderStyle:h,borderWidth:0,borderTopWidth:u,marginBlock:u,padding:0,"&-dashed":{borderStyle:"dashed"}}}),tl(e)),{["".concat(n,"-item-group")]:{["".concat(n,"-item-group-list")]:{margin:0,padding:0,["".concat(n,"-item, ").concat(n,"-submenu-title")]:{paddingInline:"".concat((0,e4.bf)(e.calc(r).mul(2).equal())," ").concat((0,e4.bf)(s))}}},"&-submenu":{"&-popup":{position:"absolute",zIndex:d,borderRadius:p,boxShadow:"none",transformOrigin:"0 0",["&".concat(n,"-submenu")]:{background:"transparent"},"&::before":{position:"absolute",inset:"".concat((0,e4.bf)(b)," 0 0"),zIndex:-1,width:"100%",height:"100%",opacity:0,content:'""'}},"&-placement-rightTop::before":{top:0,insetInlineStart:b},"\n &-placement-leftTop,\n &-placement-bottomRight,\n ":{transformOrigin:"100% 0"},"\n &-placement-leftBottom,\n &-placement-topRight,\n ":{transformOrigin:"100% 100%"},"\n &-placement-rightBottom,\n &-placement-topLeft,\n ":{transformOrigin:"0 100%"},"\n &-placement-bottomLeft,\n &-placement-rightTop,\n ":{transformOrigin:"0 0"},"\n &-placement-leftTop,\n &-placement-leftBottom\n ":{paddingInlineEnd:e.paddingXS},"\n &-placement-rightTop,\n &-placement-rightBottom\n ":{paddingInlineStart:e.paddingXS},"\n &-placement-topRight,\n &-placement-topLeft\n ":{paddingBottom:e.paddingXS},"\n &-placement-bottomRight,\n &-placement-bottomLeft\n ":{paddingTop:e.paddingXS},["> ".concat(n)]:Object.assign(Object.assign(Object.assign({borderRadius:p},tl(e)),ts(e)),{["".concat(n,"-item, ").concat(n,"-submenu > ").concat(n,"-submenu-title")]:{borderRadius:f},["".concat(n,"-submenu-title::after")]:{transition:"transform ".concat(o," ").concat(i)}})}}),ts(e)),{["&-inline-collapsed ".concat(n,"-submenu-arrow,\n &-inline ").concat(n,"-submenu-arrow")]:{"&::before":{transform:"rotate(-45deg) 
translateX(".concat((0,e4.bf)(g),")")},"&::after":{transform:"rotate(45deg) translateX(".concat((0,e4.bf)(e.calc(g).mul(-1).equal()),")")}},["".concat(n,"-submenu-open").concat(n,"-submenu-inline > ").concat(n,"-submenu-title > ").concat(n,"-submenu-arrow")]:{transform:"translateY(".concat((0,e4.bf)(e.calc(m).mul(.2).mul(-1).equal()),")"),"&::after":{transform:"rotate(-45deg) translateX(".concat((0,e4.bf)(e.calc(g).mul(-1).equal()),")")},"&::before":{transform:"rotate(45deg) translateX(".concat((0,e4.bf)(g),")")}}})},{["".concat(t,"-layout-header")]:{[n]:{lineHeight:"inherit"}}}]},tu=e=>{var t,n,r;let{colorPrimary:o,colorError:a,colorTextDisabled:i,colorErrorBg:l,colorText:s,colorTextDescription:c,colorBgContainer:u,colorFillAlter:d,colorFillContent:p,lineWidth:f,lineWidthBold:m,controlItemBgActive:g,colorBgTextHover:h,controlHeightLG:b,lineHeight:v,colorBgElevated:y,marginXXS:E,padding:S,fontSize:w,controlHeightSM:x,fontSizeLG:O,colorTextLightSolid:C,colorErrorHover:T}=e,k=null!==(t=e.activeBarWidth)&&void 0!==t?t:0,A=null!==(n=e.activeBarBorderWidth)&&void 0!==n?n:f,I=null!==(r=e.itemMarginInline)&&void 0!==r?r:e.marginXXS,R=new 
e3.C(C).setAlpha(.65).toRgbString();return{dropdownWidth:160,zIndexPopup:e.zIndexPopupBase+50,radiusItem:e.borderRadiusLG,itemBorderRadius:e.borderRadiusLG,radiusSubMenuItem:e.borderRadiusSM,subMenuItemBorderRadius:e.borderRadiusSM,colorItemText:s,itemColor:s,colorItemTextHover:s,itemHoverColor:s,colorItemTextHoverHorizontal:o,horizontalItemHoverColor:o,colorGroupTitle:c,groupTitleColor:c,colorItemTextSelected:o,itemSelectedColor:o,colorItemTextSelectedHorizontal:o,horizontalItemSelectedColor:o,colorItemBg:u,itemBg:u,colorItemBgHover:h,itemHoverBg:h,colorItemBgActive:p,itemActiveBg:g,colorSubItemBg:d,subMenuItemBg:d,colorItemBgSelected:g,itemSelectedBg:g,colorItemBgSelectedHorizontal:"transparent",horizontalItemSelectedBg:"transparent",colorActiveBarWidth:0,activeBarWidth:k,colorActiveBarHeight:m,activeBarHeight:m,colorActiveBarBorderSize:f,activeBarBorderWidth:A,colorItemTextDisabled:i,itemDisabledColor:i,colorDangerItemText:a,dangerItemColor:a,colorDangerItemTextHover:a,dangerItemHoverColor:a,colorDangerItemTextSelected:a,dangerItemSelectedColor:a,colorDangerItemBgActive:l,dangerItemActiveBg:l,colorDangerItemBgSelected:l,dangerItemSelectedBg:l,itemMarginInline:I,horizontalItemBorderRadius:0,horizontalItemHoverBg:"transparent",itemHeight:b,groupTitleLineHeight:v,collapsedWidth:2*b,popupBg:y,itemMarginBlock:E,itemPaddingInline:S,horizontalLineHeight:"".concat(1.15*b,"px"),iconSize:w,iconMarginInlineEnd:x-w,collapsedIconSize:O,groupTitleFontSize:w,darkItemDisabledColor:new e3.C(C).setAlpha(.25).toRgbString(),darkItemColor:R,darkDangerItemColor:a,darkItemBg:"#001529",darkPopupBg:"#001529",darkSubMenuItemBg:"#000c17",darkItemSelectedColor:C,darkItemSelectedBg:o,darkDangerItemSelectedBg:a,darkItemHoverBg:"transparent",darkGroupTitleColor:R,darkItemHoverColor:C,darkDangerItemHoverColor:T,darkDangerItemSelectedColor:C,darkDangerItemActiveBg:a,itemWidth:k?"calc(100% + ".concat(A,"px)"):"calc(100% - ".concat(2*I,"px)")}};var td=n(92935),tp=function(e,t){var n={};for(var r 
in e)Object.prototype.hasOwnProperty.call(e,r)&&0>t.indexOf(r)&&(n[r]=e[r]);if(null!=e&&"function"==typeof Object.getOwnPropertySymbols)for(var o=0,r=Object.getOwnPropertySymbols(e);ot.indexOf(r[o])&&Object.prototype.propertyIsEnumerable.call(e,r[o])&&(n[r[o]]=e[r[o]]);return n};let tf=(0,r.forwardRef)((e,t)=>{var n,o;let a;let i=r.useContext(e2.Z),l=i||{},{getPrefixCls:s,getPopupContainer:c,direction:u,menu:p}=r.useContext(eV.E_),f=s(),{prefixCls:m,className:g,style:h,theme:b="light",expandIcon:v,_internalDisableMenuItemTitleTooltip:y,inlineCollapsed:E,siderCollapsed:S,items:w,children:x,rootClassName:O,mode:C,selectable:T,onClick:k,overflowedIndicatorPopupClassName:A}=e,I=tp(e,["prefixCls","className","style","theme","expandIcon","_internalDisableMenuItemTitleTooltip","inlineCollapsed","siderCollapsed","items","children","rootClassName","mode","selectable","onClick","overflowedIndicatorPopupClassName"]),R=(0,en.Z)(I,["collapsedWidth"]),N=r.useMemo(()=>w?function e(t){return(t||[]).map((t,n)=>{if(t&&"object"==typeof t){let{label:o,children:a,key:i,type:l}=t,s=e1(t,["label","children","key","type"]),c=null!=i?i:"tmp-".concat(n);return a||"group"===l?"group"===l?r.createElement(eD,Object.assign({key:c},s,{title:o}),e(a)):r.createElement(e0,Object.assign({key:c},s,{title:o}),e(a)):"divider"===l?r.createElement(eY,Object.assign({key:c},s)):r.createElement(eQ,Object.assign({key:c},s),o)}return null}).filter(e=>e)}(w):w,[w])||x;null===(n=l.validator)||void 0===n||n.call(l,{mode:C});let _=(0,eG.zX)(function(){var e;null==k||k.apply(void 0,arguments),null===(e=l.onClick)||void 0===e||e.call(l)}),P=l.mode||C,M=null!=T?T:l.selectable,L=r.useMemo(()=>void 0!==S?S:E,[E,S]),D={horizontal:{motionName:"".concat(f,"-slide-up")},inline:(0,e$.Z)(f),other:{motionName:"".concat(f,"-zoom-big")}},j=s("menu",m||l.prefixCls),F=(0,td.Z)(j),[B,U,Z]=function(e){let t=arguments.length>1&&void 0!==arguments[1]?arguments[1]:e,n=!(arguments.length>2)||void 
0===arguments[2]||arguments[2];return(0,e7.I$)("Menu",e=>{let{colorBgElevated:t,colorPrimary:n,colorTextLightSolid:r,controlHeightLG:o,fontSize:a,darkItemColor:i,darkDangerItemColor:l,darkItemBg:s,darkSubMenuItemBg:c,darkItemSelectedColor:u,darkItemSelectedBg:d,darkDangerItemSelectedBg:p,darkItemHoverBg:f,darkGroupTitleColor:m,darkItemHoverColor:g,darkItemDisabledColor:h,darkDangerItemHoverColor:b,darkDangerItemSelectedColor:v,darkDangerItemActiveBg:y,popupBg:E,darkPopupBg:S}=e,w=e.calc(a).div(7).mul(5).equal(),x=(0,te.TS)(e,{menuArrowSize:w,menuHorizontalHeight:e.calc(o).mul(1.15).equal(),menuArrowOffset:e.calc(w).mul(.25).equal(),menuPanelMaskInset:-7,menuSubMenuBg:t,calc:e.calc,popupBg:E}),O=(0,te.TS)(x,{itemColor:i,itemHoverColor:g,groupTitleColor:m,itemSelectedColor:u,itemBg:s,popupBg:S,subMenuItemBg:c,itemActiveBg:"transparent",itemSelectedBg:d,activeBarHeight:0,activeBarBorderWidth:0,itemHoverBg:f,itemDisabledColor:h,dangerItemColor:l,dangerItemHoverColor:b,dangerItemSelectedColor:v,dangerItemActiveBg:y,dangerItemSelectedBg:p,menuSubMenuBg:c,horizontalItemSelectedColor:r,horizontalItemSelectedBg:n});return[tc(x),tt(x),ti(x),to(x,"light"),to(O,"dark"),tn(x),(0,e5.Z)(x),(0,e8.oN)(x,"slide-up"),(0,e8.oN)(x,"slide-down"),(0,e9._y)(x,"zoom-big")]},tu,{deprecatedTokens:[["colorGroupTitle","groupTitleColor"],["radiusItem","itemBorderRadius"],["radiusSubMenuItem","subMenuItemBorderRadius"],["colorItemText","itemColor"],["colorItemTextHover","itemHoverColor"],["colorItemTextHoverHorizontal","horizontalItemHoverColor"],["colorItemTextSelected","itemSelectedColor"],["colorItemTextSelectedHorizontal","horizontalItemSelectedColor"],["colorItemTextDisabled","itemDisabledColor"],["colorDangerItemText","dangerItemColor"],["colorDangerItemTextHover","dangerItemHoverColor"],["colorDangerItemTextSelected","dangerItemSelectedColor"],["colorDangerItemBgActive","dangerItemActiveBg"],["colorDangerItemBgSelected","dangerItemSelectedBg"],["colorItemBg","itemBg"],["colorItemBgHover","
itemHoverBg"],["colorSubItemBg","subMenuItemBg"],["colorItemBgActive","itemActiveBg"],["colorItemBgSelectedHorizontal","horizontalItemSelectedBg"],["colorActiveBarWidth","activeBarWidth"],["colorActiveBarHeight","activeBarHeight"],["colorActiveBarBorderSize","activeBarBorderWidth"],["colorItemBgSelected","itemSelectedBg"]],injectStyle:n,unitless:{groupTitleLineHeight:!0}})(e,t)}(j,F,!i),z=d()("".concat(j,"-").concat(b),null==p?void 0:p.className,g);if("function"==typeof v)a=v;else if(null===v||!1===v)a=null;else if(null===l.expandIcon||!1===l.expandIcon)a=null;else{let e=null!=v?v:l.expandIcon;a=(0,eW.Tm)(e,{className:d()("".concat(j,"-submenu-expand-icon"),(0,eW.l$)(e)?null===(o=e.props)||void 0===o?void 0:o.className:"")})}let H=r.useMemo(()=>({prefixCls:j,inlineCollapsed:L||!1,direction:u,firstLevel:!0,theme:b,mode:P,disableMenuItemTitleTooltip:y}),[j,L,u,y,b]);return B(r.createElement(e2.Z.Provider,{value:null},r.createElement(eX.Provider,{value:H},r.createElement(eZ,Object.assign({getPopupContainer:c,overflowedIndicator:r.createElement(eH.Z,null),overflowedIndicatorPopupClassName:d()(j,"".concat(j,"-").concat(b),A),mode:P,selectable:M,onClick:_},R,{inlineCollapsed:L,style:Object.assign(Object.assign({},null==p?void 0:p.style),h),className:z,prefixCls:j,direction:u,defaultMotions:D,expandIcon:a,ref:t,rootClassName:d()(O,U,l.rootClassName,Z,F)}),N))))}),tm=(0,r.forwardRef)((e,t)=>{let n=(0,r.useRef)(null),o=r.useContext(ez.D);return(0,r.useImperativeHandle)(t,()=>({menu:n.current,focus:e=>{var t;null===(t=n.current)||void 0===t||t.focus(e)}})),r.createElement(tf,Object.assign({ref:n},e,o))});tm.Item=eQ,tm.SubMenu=e0,tm.Divider=eY,tm.ItemGroup=eD;var tg=tm},80588:function(e,t,n){n.d(t,{ZP:function(){return eu}});var r=n(63787),o=n(64090),a=n(37274);let i=o.createContext({});var 
l=n(57499),s=n(54165),c=n(99537),u=n(77136),d=n(20653),p=n(40388),f=n(66155),m=n(16480),g=n.n(m),h=n(80406),b=n(6787),v=n(5239),y=n(89542),E=n(14749),S=n(50833),w=n(49367),x=n(4295),O=o.forwardRef(function(e,t){var n=e.prefixCls,r=e.style,a=e.className,i=e.duration,l=void 0===i?4.5:i,s=e.eventKey,c=e.content,u=e.closable,d=e.closeIcon,p=e.props,f=e.onClick,m=e.onNoticeClose,b=e.times,v=e.hovering,y=o.useState(!1),w=(0,h.Z)(y,2),O=w[0],C=w[1],T=v||O,k=function(){m(s)};o.useEffect(function(){if(!T&&l>0){var e=setTimeout(function(){k()},1e3*l);return function(){clearTimeout(e)}}},[l,T,b]);var A="".concat(n,"-notice");return o.createElement("div",(0,E.Z)({},p,{ref:t,className:g()(A,a,(0,S.Z)({},"".concat(A,"-closable"),u)),style:r,onMouseEnter:function(e){var t;C(!0),null==p||null===(t=p.onMouseEnter)||void 0===t||t.call(p,e)},onMouseLeave:function(e){var t;C(!1),null==p||null===(t=p.onMouseLeave)||void 0===t||t.call(p,e)},onClick:f}),o.createElement("div",{className:"".concat(A,"-content")},c),u&&o.createElement("a",{tabIndex:0,className:"".concat(A,"-close"),onKeyDown:function(e){("Enter"===e.key||"Enter"===e.code||e.keyCode===x.Z.ENTER)&&k()},onClick:function(e){e.preventDefault(),e.stopPropagation(),k()}},void 0===d?"x":d))}),C=o.createContext({}),T=function(e){var t=e.children,n=e.classNames;return o.createElement(C.Provider,{value:{classNames:n}},t)},k=n(6976),A=function(e){var t,n,r,o={offset:8,threshold:3,gap:16};return e&&"object"===(0,k.Z)(e)&&(o.offset=null!==(t=e.offset)&&void 0!==t?t:8,o.threshold=null!==(n=e.threshold)&&void 0!==n?n:3,o.gap=null!==(r=e.gap)&&void 0!==r?r:16),[!!e,o]},I=["className","style","classNames","styles"],R=function(e){var 
t,n=e.configList,a=e.placement,i=e.prefixCls,l=e.className,s=e.style,c=e.motion,u=e.onAllNoticeRemoved,d=e.onNoticeClose,p=e.stack,f=(0,o.useContext)(C).classNames,m=(0,o.useRef)({}),y=(0,o.useState)(null),x=(0,h.Z)(y,2),T=x[0],k=x[1],R=(0,o.useState)([]),N=(0,h.Z)(R,2),_=N[0],P=N[1],M=n.map(function(e){return{config:e,key:String(e.key)}}),L=A(p),D=(0,h.Z)(L,2),j=D[0],F=D[1],B=F.offset,U=F.threshold,Z=F.gap,z=j&&(_.length>0||M.length<=U),H="function"==typeof c?c(a):c;return(0,o.useEffect)(function(){j&&_.length>1&&P(function(e){return e.filter(function(e){return M.some(function(t){return e===t.key})})})},[_,M,j]),(0,o.useEffect)(function(){var e,t;j&&m.current[null===(e=M[M.length-1])||void 0===e?void 0:e.key]&&k(m.current[null===(t=M[M.length-1])||void 0===t?void 0:t.key])},[M,j]),o.createElement(w.V4,(0,E.Z)({key:a,className:g()(i,"".concat(i,"-").concat(a),null==f?void 0:f.list,l,(t={},(0,S.Z)(t,"".concat(i,"-stack"),!!j),(0,S.Z)(t,"".concat(i,"-stack-expanded"),z),t)),style:s,keys:M,motionAppear:!0},H,{onAllRemoved:function(){u(a)}}),function(e,t){var n=e.config,l=e.className,s=e.style,c=e.index,u=n.key,p=n.times,h=String(u),y=n.className,S=n.style,w=n.classNames,x=n.styles,C=(0,b.Z)(n,I),k=M.findIndex(function(e){return e.key===h}),A={};if(j){var R=M.length-1-(k>-1?k:c-1),N="top"===a||"bottom"===a?"-50%":"0";if(R>0){A.height=z?null===(L=m.current[h])||void 0===L?void 0:L.offsetHeight:null==T?void 0:T.offsetHeight;for(var L,D,F,U,H=0,G=0;G-1?m.current[h]=e:delete m.current[h]},prefixCls:i,classNames:w,styles:x,className:g()(y,null==f?void 0:f.notice),style:S,times:p,key:u,eventKey:u,onNoticeClose:d,hovering:j&&_.length>0})))})},N=o.forwardRef(function(e,t){var n=e.prefixCls,a=void 0===n?"rc-notification":n,i=e.container,l=e.motion,s=e.maxCount,c=e.className,u=e.style,d=e.onAllRemoved,p=e.stack,f=e.renderNotifications,m=o.useState([]),g=(0,h.Z)(m,2),b=g[0],E=g[1],S=function(e){var t,n=b.find(function(t){return t.key===e});null==n||null===(t=n.onClose)||void 
0===t||t.call(n),E(function(t){return t.filter(function(t){return t.key!==e})})};o.useImperativeHandle(t,function(){return{open:function(e){E(function(t){var n,o=(0,r.Z)(t),a=o.findIndex(function(t){return t.key===e.key}),i=(0,v.Z)({},e);return a>=0?(i.times=((null===(n=t[a])||void 0===n?void 0:n.times)||0)+1,o[a]=i):(i.times=0,o.push(i)),s>0&&o.length>s&&(o=o.slice(-s)),o})},close:function(e){S(e)},destroy:function(){E([])}}});var w=o.useState({}),x=(0,h.Z)(w,2),O=x[0],C=x[1];o.useEffect(function(){var e={};b.forEach(function(t){var n=t.placement,r=void 0===n?"topRight":n;r&&(e[r]=e[r]||[],e[r].push(t))}),Object.keys(O).forEach(function(t){e[t]=e[t]||[]}),C(e)},[b]);var T=function(e){C(function(t){var n=(0,v.Z)({},t);return(n[e]||[]).length||delete n[e],n})},k=o.useRef(!1);if(o.useEffect(function(){Object.keys(O).length>0?k.current=!0:k.current&&(null==d||d(),k.current=!1)},[O]),!i)return null;var A=Object.keys(O);return(0,y.createPortal)(o.createElement(o.Fragment,null,A.map(function(e){var t=O[e],n=o.createElement(R,{key:e,configList:t,placement:e,prefixCls:a,className:null==c?void 0:c(e),style:null==u?void 0:u(e),motion:l,onNoticeClose:S,onAllNoticeRemoved:T,stack:p});return f?f(n,{prefixCls:a,key:e}):n})),i)}),_=["getContainer","motion","prefixCls","maxCount","className","style","onAllRemoved","stack","renderNotifications"],P=function(){return document.body},M=0,L=n(8985),D=n(51761),j=n(11303),F=n(76585),B=n(80316);let U=e=>{let{componentCls:t,iconCls:n,boxShadow:r,colorText:o,colorSuccess:a,colorError:i,colorWarning:l,colorInfo:s,fontSizeLG:c,motionEaseInOutCirc:u,motionDurationSlow:d,marginXS:p,paddingXS:f,borderRadiusLG:m,zIndexPopup:g,contentPadding:h,contentBg:b}=e,v="".concat(t,"-notice"),y=new L.E4("MessageMoveIn",{"0%":{padding:0,transform:"translateY(-100%)",opacity:0},"100%":{padding:f,transform:"translateY(0)",opacity:1}}),E=new 
L.E4("MessageMoveOut",{"0%":{maxHeight:e.height,padding:f,opacity:1},"100%":{maxHeight:0,padding:0,opacity:0}}),S={padding:f,textAlign:"center",["".concat(t,"-custom-content > ").concat(n)]:{verticalAlign:"text-bottom",marginInlineEnd:p,fontSize:c},["".concat(v,"-content")]:{display:"inline-block",padding:h,background:b,borderRadius:m,boxShadow:r,pointerEvents:"all"},["".concat(t,"-success > ").concat(n)]:{color:a},["".concat(t,"-error > ").concat(n)]:{color:i},["".concat(t,"-warning > ").concat(n)]:{color:l},["".concat(t,"-info > ").concat(n,",\n ").concat(t,"-loading > ").concat(n)]:{color:s}};return[{[t]:Object.assign(Object.assign({},(0,j.Wf)(e)),{color:o,position:"fixed",top:p,width:"100%",pointerEvents:"none",zIndex:g,["".concat(t,"-move-up")]:{animationFillMode:"forwards"},["\n ".concat(t,"-move-up-appear,\n ").concat(t,"-move-up-enter\n ")]:{animationName:y,animationDuration:d,animationPlayState:"paused",animationTimingFunction:u},["\n ".concat(t,"-move-up-appear").concat(t,"-move-up-appear-active,\n ").concat(t,"-move-up-enter").concat(t,"-move-up-enter-active\n ")]:{animationPlayState:"running"},["".concat(t,"-move-up-leave")]:{animationName:E,animationDuration:d,animationPlayState:"paused",animationTimingFunction:u},["".concat(t,"-move-up-leave").concat(t,"-move-up-leave-active")]:{animationPlayState:"running"},"&-rtl":{direction:"rtl",span:{direction:"rtl"}}})},{[t]:{["".concat(v,"-wrapper")]:Object.assign({},S)}},{["".concat(t,"-notice-pure-panel")]:Object.assign(Object.assign({},S),{padding:0,textAlign:"start"})}]};var Z=(0,F.I$)("Message",e=>[U((0,B.TS)(e,{height:150}))],e=>({zIndexPopup:e.zIndexPopupBase+D.u6+10,contentBg:e.colorBgElevated,contentPadding:"".concat((e.controlHeightLG-e.fontSize*e.lineHeight)/2,"px ").concat(e.paddingSM,"px")})),z=n(92935),H=function(e,t){var n={};for(var r in e)Object.prototype.hasOwnProperty.call(e,r)&&0>t.indexOf(r)&&(n[r]=e[r]);if(null!=e&&"function"==typeof Object.getOwnPropertySymbols)for(var 
o=0,r=Object.getOwnPropertySymbols(e);ot.indexOf(r[o])&&Object.prototype.propertyIsEnumerable.call(e,r[o])&&(n[r[o]]=e[r[o]]);return n};let G={info:o.createElement(p.Z,null),success:o.createElement(c.Z,null),error:o.createElement(u.Z,null),warning:o.createElement(d.Z,null),loading:o.createElement(f.Z,null)},$=e=>{let{prefixCls:t,type:n,icon:r,children:a}=e;return o.createElement("div",{className:g()("".concat(t,"-custom-content"),"".concat(t,"-").concat(n))},r||G[n],o.createElement("span",null,a))};var W=n(81303),V=n(76564);function q(e){let t;let n=new Promise(n=>{t=e(()=>{n(!0)})}),r=()=>{null==t||t()};return r.then=(e,t)=>n.then(e,t),r.promise=n,r}var Y=function(e,t){var n={};for(var r in e)Object.prototype.hasOwnProperty.call(e,r)&&0>t.indexOf(r)&&(n[r]=e[r]);if(null!=e&&"function"==typeof Object.getOwnPropertySymbols)for(var o=0,r=Object.getOwnPropertySymbols(e);ot.indexOf(r[o])&&Object.prototype.propertyIsEnumerable.call(e,r[o])&&(n[r[o]]=e[r[o]]);return n};let K=e=>{let{children:t,prefixCls:n}=e,r=(0,z.Z)(n),[a,i,l]=Z(n,r);return a(o.createElement(T,{classNames:{list:g()(i,l,r)}},t))},X=(e,t)=>{let{prefixCls:n,key:r}=t;return o.createElement(K,{prefixCls:n,key:r},e)},Q=o.forwardRef((e,t)=>{let{top:n,prefixCls:a,getContainer:i,maxCount:s,duration:c=3,rtl:u,transitionName:d,onAllRemoved:p}=e,{getPrefixCls:f,getPopupContainer:m,message:v,direction:y}=o.useContext(l.E_),E=a||f("message"),S=o.createElement("span",{className:"".concat(E,"-close-x")},o.createElement(W.Z,{className:"".concat(E,"-close-icon")})),[w,x]=function(){var e=arguments.length>0&&void 0!==arguments[0]?arguments[0]:{},t=e.getContainer,n=void 
0===t?P:t,a=e.motion,i=e.prefixCls,l=e.maxCount,s=e.className,c=e.style,u=e.onAllRemoved,d=e.stack,p=e.renderNotifications,f=(0,b.Z)(e,_),m=o.useState(),g=(0,h.Z)(m,2),v=g[0],y=g[1],E=o.useRef(),S=o.createElement(N,{container:v,ref:E,prefixCls:i,motion:a,maxCount:l,className:s,style:c,onAllRemoved:u,stack:d,renderNotifications:p}),w=o.useState([]),x=(0,h.Z)(w,2),O=x[0],C=x[1],T=o.useMemo(function(){return{open:function(e){var t=function(){for(var e={},t=arguments.length,n=Array(t),r=0;r({left:"50%",transform:"translateX(-50%)",top:null!=n?n:8}),className:()=>g()({["".concat(E,"-rtl")]:null!=u?u:"rtl"===y}),motion:()=>({motionName:null!=d?d:"".concat(E,"-move-up")}),closable:!1,closeIcon:S,duration:c,getContainer:()=>(null==i?void 0:i())||(null==m?void 0:m())||document.body,maxCount:s,onAllRemoved:p,renderNotifications:X});return o.useImperativeHandle(t,()=>Object.assign(Object.assign({},w),{prefixCls:E,message:v})),x}),J=0;function ee(e){let t=o.useRef(null);return(0,V.ln)("Message"),[o.useMemo(()=>{let e=e=>{var n;null===(n=t.current)||void 0===n||n.close(e)},n=n=>{if(!t.current){let e=()=>{};return e.then=()=>{},e}let{open:r,prefixCls:a,message:i}=t.current,l="".concat(a,"-notice"),{content:s,icon:c,type:u,key:d,className:p,style:f,onClose:m}=n,h=Y(n,["content","icon","type","key","className","style","onClose"]),b=d;return null==b&&(J+=1,b="antd-message-".concat(J)),q(t=>(r(Object.assign(Object.assign({},h),{key:b,content:o.createElement($,{prefixCls:a,type:u,icon:c},s),placement:"top",className:g()(u&&"".concat(l,"-").concat(u),p,null==i?void 0:i.className),style:Object.assign(Object.assign({},null==i?void 0:i.style),f),onClose:()=>{null==m||m(),t()}})),()=>{e(b)}))},r={open:n,destroy:n=>{var r;void 0!==n?e(n):null===(r=t.current)||void 0===r||r.destroy()}};return["info","success","warning","error","loading"].forEach(e=>{r[e]=(t,r,o)=>{let a,i;return"function"==typeof r?i=r:(a=r,i=o),n(Object.assign(Object.assign({onClose:i,duration:a},t&&"object"==typeof 
t&&"content"in t?t:{content:t}),{type:e}))}}),r},[]),o.createElement(Q,Object.assign({key:"message-holder"},e,{ref:t}))]}let et=null,en=e=>e(),er=[],eo={};function ea(){let{getContainer:e,duration:t,rtl:n,maxCount:r,top:o}=eo,a=(null==e?void 0:e())||document.body;return{getContainer:()=>a,duration:t,rtl:n,maxCount:r,top:o}}let ei=o.forwardRef((e,t)=>{let{messageConfig:n,sync:r}=e,{getPrefixCls:a}=(0,o.useContext)(l.E_),s=eo.prefixCls||a("message"),c=(0,o.useContext)(i),[u,d]=ee(Object.assign(Object.assign(Object.assign({},n),{prefixCls:s}),c.message));return o.useImperativeHandle(t,()=>{let e=Object.assign({},u);return Object.keys(e).forEach(t=>{e[t]=function(){return r(),u[t].apply(u,arguments)}}),{instance:e,sync:r}}),d}),el=o.forwardRef((e,t)=>{let[n,r]=o.useState(ea),a=()=>{r(ea)};o.useEffect(a,[]);let i=(0,s.w6)(),l=i.getRootPrefixCls(),c=i.getIconPrefixCls(),u=i.getTheme(),d=o.createElement(ei,{ref:t,sync:a,messageConfig:n});return o.createElement(s.ZP,{prefixCls:l,iconPrefixCls:c,theme:u},i.holderRender?i.holderRender(d):d)});function es(){if(!et){let e=document.createDocumentFragment(),t={fragment:e};et=t,en(()=>{(0,a.s)(o.createElement(el,{ref:e=>{let{instance:n,sync:r}=e||{};Promise.resolve().then(()=>{!t.instance&&n&&(t.instance=n,t.sync=r,es())})}}),e)});return}et.instance&&(er.forEach(e=>{let{type:t,skipped:n}=e;if(!n)switch(t){case"open":en(()=>{let t=et.instance.open(Object.assign(Object.assign({},eo),e.config));null==t||t.then(e.resolve),e.setCloseFn(t)});break;case"destroy":en(()=>{null==et||et.instance.destroy(e.key)});break;default:en(()=>{var n;let o=(n=et.instance)[t].apply(n,(0,r.Z)(e.args));null==o||o.then(e.resolve),e.setCloseFn(o)})}}),er=[])}let ec={open:function(e){let t=q(t=>{let n;let r={type:"open",config:e,resolve:t,setCloseFn:e=>{n=e}};return er.push(r),()=>{n?en(()=>{n()}):r.skipped=!0}});return es(),t},destroy:function(e){er.push({type:"destroy",key:e}),es()},config:function(e){eo=Object.assign(Object.assign({},eo),e),en(()=>{var 
e;null===(e=null==et?void 0:et.sync)||void 0===e||e.call(et)})},useMessage:function(e){return ee(e)},_InternalPanelDoNotUseOrYouWillBeFired:e=>{let{prefixCls:t,className:n,type:r,icon:a,content:i}=e,s=H(e,["prefixCls","className","type","icon","content"]),{getPrefixCls:c}=o.useContext(l.E_),u=t||c("message"),d=(0,z.Z)(u),[p,f,m]=Z(u,d);return p(o.createElement(O,Object.assign({},s,{prefixCls:u,className:g()(n,f,"".concat(u,"-notice-pure-panel"),m,d),eventKey:"pure",duration:null,content:o.createElement($,{prefixCls:u,type:r,icon:a},i)})))}};["success","info","warning","error","loading"].forEach(e=>{ec[e]=function(){for(var t=arguments.length,n=Array(t),r=0;r{let r;let o={type:e,args:t,resolve:n,setCloseFn:e=>{r=e}};return er.push(o),()=>{r?en(()=>{r()}):o.skipped=!0}});return es(),n}(e,n)}});var eu=ec},77171:function(e,t,n){let r;n.d(t,{Z:function(){return eX}});var o=n(63787),a=n(64090),i=n(37274),l=n(57499),s=n(54165),c=n(99537),u=n(77136),d=n(20653),p=n(40388),f=n(16480),m=n.n(f),g=n(51761),h=n(47387),b=n(70595),v=n(24750),y=n(89211),E=n(1861),S=n(51350),w=e=>{let{type:t,children:n,prefixCls:r,buttonProps:o,close:i,autoFocus:l,emitEvent:s,isSilent:c,quitOnNullishReturnValue:u,actionFn:d}=e,p=a.useRef(!1),f=a.useRef(null),[m,g]=(0,y.Z)(!1),h=function(){null==i||i.apply(void 0,arguments)};a.useEffect(()=>{let e=null;return l&&(e=setTimeout(()=>{var e;null===(e=f.current)||void 0===e||e.focus()})),()=>{e&&clearTimeout(e)}},[]);let b=e=>{e&&e.then&&(g(!0),e.then(function(){g(!1,!0),h.apply(void 0,arguments),p.current=!1},e=>{if(g(!1,!0),p.current=!1,null==c||!c())return Promise.reject(e)}))};return a.createElement(E.ZP,Object.assign({},(0,S.nx)(t),{onClick:e=>{let t;if(!p.current){if(p.current=!0,!d){h();return}if(s){var n;if(t=d(e),u&&!((n=t)&&n.then)){p.current=!1,h(e);return}}else if(d.length)t=d(i),p.current=!1;else if(!(t=d())){h();return}b(t)}},loading:m,prefixCls:r},o,{ref:f}),n)};let x=a.createContext({}),{Provider:O}=x;var 
C=()=>{let{autoFocusButton:e,cancelButtonProps:t,cancelTextLocale:n,isSilent:r,mergedOkCancel:o,rootPrefixCls:i,close:l,onCancel:s,onConfirm:c}=(0,a.useContext)(x);return o?a.createElement(w,{isSilent:r,actionFn:s,close:function(){null==l||l.apply(void 0,arguments),null==c||c(!1)},autoFocus:"cancel"===e,buttonProps:t,prefixCls:"".concat(i,"-btn")},n):null},T=()=>{let{autoFocusButton:e,close:t,isSilent:n,okButtonProps:r,rootPrefixCls:o,okTextLocale:i,okType:l,onConfirm:s,onOk:c}=(0,a.useContext)(x);return a.createElement(w,{isSilent:n,type:l||"primary",actionFn:c,close:function(){null==t||t.apply(void 0,arguments),null==s||s(!0)},autoFocus:"ok"===e,buttonProps:r,prefixCls:"".concat(o,"-btn")},i)},k=n(81303),A=n(14749),I=n(80406),R=n(88804),N=a.createContext({}),_=n(5239),P=n(31506),M=n(91010),L=n(4295),D=n(72480);function j(e,t,n){var r=t;return!r&&n&&(r="".concat(e,"-").concat(n)),r}function F(e,t){var n=e["page".concat(t?"Y":"X","Offset")],r="scroll".concat(t?"Top":"Left");if("number"!=typeof n){var o=e.document;"number"!=typeof(n=o.documentElement[r])&&(n=o.body[r])}return n}var B=n(49367),U=n(74084),Z=a.memo(function(e){return e.children},function(e,t){return!t.shouldUpdate}),z={width:0,height:0,overflow:"hidden",outline:"none"},H=a.forwardRef(function(e,t){var n,r,o,i=e.prefixCls,l=e.className,s=e.style,c=e.title,u=e.ariaId,d=e.footer,p=e.closable,f=e.closeIcon,g=e.onClose,h=e.children,b=e.bodyStyle,v=e.bodyProps,y=e.modalRender,E=e.onMouseDown,S=e.onMouseUp,w=e.holderRef,x=e.visible,O=e.forceRender,C=e.width,T=e.height,k=e.classNames,I=e.styles,R=a.useContext(N).panel,P=(0,U.x1)(w,R),M=(0,a.useRef)(),L=(0,a.useRef)();a.useImperativeHandle(t,function(){return{focus:function(){var e;null===(e=M.current)||void 0===e||e.focus()},changeActive:function(e){var t=document.activeElement;e&&t===L.current?M.current.focus():e||t!==M.current||L.current.focus()}}});var D={};void 0!==C&&(D.width=C),void 
0!==T&&(D.height=T),d&&(n=a.createElement("div",{className:m()("".concat(i,"-footer"),null==k?void 0:k.footer),style:(0,_.Z)({},null==I?void 0:I.footer)},d)),c&&(r=a.createElement("div",{className:m()("".concat(i,"-header"),null==k?void 0:k.header),style:(0,_.Z)({},null==I?void 0:I.header)},a.createElement("div",{className:"".concat(i,"-title"),id:u},c))),p&&(o=a.createElement("button",{type:"button",onClick:g,"aria-label":"Close",className:"".concat(i,"-close")},f||a.createElement("span",{className:"".concat(i,"-close-x")})));var j=a.createElement("div",{className:m()("".concat(i,"-content"),null==k?void 0:k.content),style:null==I?void 0:I.content},o,r,a.createElement("div",(0,A.Z)({className:m()("".concat(i,"-body"),null==k?void 0:k.body),style:(0,_.Z)((0,_.Z)({},b),null==I?void 0:I.body)},v),h),n);return a.createElement("div",{key:"dialog-element",role:"dialog","aria-labelledby":c?u:null,"aria-modal":"true",ref:P,style:(0,_.Z)((0,_.Z)({},s),D),className:m()(i,l),onMouseDown:E,onMouseUp:S},a.createElement("div",{tabIndex:0,ref:M,style:z,"aria-hidden":"true"}),a.createElement(Z,{shouldUpdate:x||O},y?y(j):j),a.createElement("div",{tabIndex:0,ref:L,style:z,"aria-hidden":"true"}))}),G=a.forwardRef(function(e,t){var n=e.prefixCls,r=e.title,o=e.style,i=e.className,l=e.visible,s=e.forceRender,c=e.destroyOnClose,u=e.motionName,d=e.ariaId,p=e.onVisibleChanged,f=e.mousePosition,g=(0,a.useRef)(),h=a.useState(),b=(0,I.Z)(h,2),v=b[0],y=b[1],E={};function S(){var e,t,n,r,o,a=(n={left:(t=(e=g.current).getBoundingClientRect()).left,top:t.top},o=(r=e.ownerDocument).defaultView||r.parentWindow,n.left+=F(o),n.top+=F(o,!0),n);y(f?"".concat(f.x-a.left,"px ").concat(f.y-a.top,"px"):"")}return v&&(E.transformOrigin=v),a.createElement(B.ZP,{visible:l,onVisibleChanged:p,onAppearPrepare:S,onEnterPrepare:S,forceRender:s,motionName:u,removeOnLeave:c,ref:g},function(l,s){var c=l.className,u=l.style;return 
a.createElement(H,(0,A.Z)({},e,{ref:t,title:r,ariaId:d,prefixCls:n,holderRef:s,style:(0,_.Z)((0,_.Z)((0,_.Z)({},u),o),E),className:m()(i,c)}))})});function $(e){var t=e.prefixCls,n=e.style,r=e.visible,o=e.maskProps,i=e.motionName,l=e.className;return a.createElement(B.ZP,{key:"mask",visible:r,motionName:i,leavedClassName:"".concat(t,"-mask-hidden")},function(e,r){var i=e.className,s=e.style;return a.createElement("div",(0,A.Z)({ref:r,style:(0,_.Z)((0,_.Z)({},s),n),className:m()("".concat(t,"-mask"),i,l)},o))})}function W(e){var t=e.prefixCls,n=void 0===t?"rc-dialog":t,r=e.zIndex,o=e.visible,i=void 0!==o&&o,l=e.keyboard,s=void 0===l||l,c=e.focusTriggerAfterClose,u=void 0===c||c,d=e.wrapStyle,p=e.wrapClassName,f=e.wrapProps,g=e.onClose,h=e.afterOpenChange,b=e.afterClose,v=e.transitionName,y=e.animation,E=e.closable,S=e.mask,w=void 0===S||S,x=e.maskTransitionName,O=e.maskAnimation,C=e.maskClosable,T=e.maskStyle,k=e.maskProps,R=e.rootClassName,N=e.classNames,F=e.styles,B=(0,a.useRef)(),U=(0,a.useRef)(),Z=(0,a.useRef)(),z=a.useState(i),H=(0,I.Z)(z,2),W=H[0],V=H[1],q=(0,M.Z)();function Y(e){null==g||g(e)}var K=(0,a.useRef)(!1),X=(0,a.useRef)(),Q=null;return(void 0===C||C)&&(Q=function(e){K.current?K.current=!1:U.current===e.target&&Y(e)}),(0,a.useEffect)(function(){i&&(V(!0),(0,P.Z)(U.current,document.activeElement)||(B.current=document.activeElement))},[i]),(0,a.useEffect)(function(){return function(){clearTimeout(X.current)}},[]),a.createElement("div",(0,A.Z)({className:m()("".concat(n,"-root"),R)},(0,D.Z)(e,{data:!0})),a.createElement($,{prefixCls:n,visible:w&&i,motionName:j(n,x,O),style:(0,_.Z)((0,_.Z)({zIndex:r},T),null==F?void 0:F.mask),maskProps:k,className:null==N?void 0:N.mask}),a.createElement("div",(0,A.Z)({tabIndex:-1,onKeyDown:function(e){if(s&&e.keyCode===L.Z.ESC){e.stopPropagation(),Y(e);return}i&&e.keyCode===L.Z.TAB&&Z.current.changeActive(!e.shiftKey)},className:m()("".concat(n,"-wrap"),p,null==N?void 
0:N.wrapper),ref:U,onClick:Q,style:(0,_.Z)((0,_.Z)((0,_.Z)({zIndex:r},d),null==F?void 0:F.wrapper),{},{display:W?null:"none"})},f),a.createElement(G,(0,A.Z)({},e,{onMouseDown:function(){clearTimeout(X.current),K.current=!0},onMouseUp:function(){X.current=setTimeout(function(){K.current=!1})},ref:Z,closable:void 0===E||E,ariaId:q,prefixCls:n,visible:i&&W,onClose:Y,onVisibleChanged:function(e){if(e)!function(){if(!(0,P.Z)(U.current,document.activeElement)){var e;null===(e=Z.current)||void 0===e||e.focus()}}();else{if(V(!1),w&&B.current&&u){try{B.current.focus({preventScroll:!0})}catch(e){}B.current=null}W&&(null==b||b())}null==h||h(e)},motionName:j(n,v,y)}))))}G.displayName="Content",n(53850);var V=function(e){var t=e.visible,n=e.getContainer,r=e.forceRender,o=e.destroyOnClose,i=void 0!==o&&o,l=e.afterClose,s=e.panelRef,c=a.useState(t),u=(0,I.Z)(c,2),d=u[0],p=u[1],f=a.useMemo(function(){return{panel:s}},[s]);return(a.useEffect(function(){t&&p(!0)},[t]),r||!i||d)?a.createElement(N.Provider,{value:f},a.createElement(R.Z,{open:t||r||d,autoDestroy:!1,getContainer:n,autoLock:t||d},a.createElement(W,(0,A.Z)({},e,{destroyOnClose:i,afterClose:function(){null==l||l(),p(!1)}})))):null};V.displayName="Dialog";var q=function(e,t,n){let r=arguments.length>3&&void 0!==arguments[3]?arguments[3]:a.createElement(k.Z,null),o=arguments.length>4&&void 0!==arguments[4]&&arguments[4];if("boolean"==typeof e?!e:void 0===t?!o:!1===t||null===t)return[!1,null];let i="boolean"==typeof t||null==t?r:t;return[!0,n?n(i):i]},Y=n(22127),K=n(86718),X=n(47137),Q=n(92801),J=n(48563);function ee(){}let et=a.createContext({add:ee,remove:ee});var en=n(17094),er=()=>{let{cancelButtonProps:e,cancelTextLocale:t,onCancel:n}=(0,a.useContext)(x);return a.createElement(E.ZP,Object.assign({onClick:n},e),t)},eo=()=>{let{confirmLoading:e,okButtonProps:t,okType:n,okTextLocale:r,onOk:o}=(0,a.useContext)(x);return a.createElement(E.ZP,Object.assign({},(0,S.nx)(n),{loading:e,onClick:o},t),r)},ea=n(4678);function 
ei(e,t){return a.createElement("span",{className:"".concat(e,"-close-x")},t||a.createElement(k.Z,{className:"".concat(e,"-close-icon")}))}let el=e=>{let t;let{okText:n,okType:r="primary",cancelText:i,confirmLoading:l,onOk:s,onCancel:c,okButtonProps:u,cancelButtonProps:d,footer:p}=e,[f]=(0,b.Z)("Modal",(0,ea.A)()),m={confirmLoading:l,okButtonProps:u,cancelButtonProps:d,okTextLocale:n||(null==f?void 0:f.okText),cancelTextLocale:i||(null==f?void 0:f.cancelText),okType:r,onOk:s,onCancel:c},g=a.useMemo(()=>m,(0,o.Z)(Object.values(m)));return"function"==typeof p||void 0===p?(t=a.createElement(a.Fragment,null,a.createElement(er,null),a.createElement(eo,null)),"function"==typeof p&&(t=p(t,{OkBtn:eo,CancelBtn:er})),t=a.createElement(O,{value:g},t)):t=p,a.createElement(en.n,{disabled:!1},t)};var es=n(11303),ec=n(8985),eu=n(59353);let ed=new ec.E4("antFadeIn",{"0%":{opacity:0},"100%":{opacity:1}}),ep=new ec.E4("antFadeOut",{"0%":{opacity:1},"100%":{opacity:0}}),ef=function(e){let t=arguments.length>1&&void 0!==arguments[1]&&arguments[1],{antCls:n}=e,r="".concat(n,"-fade"),o=t?"&":"";return[(0,eu.R)(r,ed,ep,e.motionDurationMid,t),{["\n ".concat(o).concat(r,"-enter,\n ").concat(o).concat(r,"-appear\n ")]:{opacity:0,animationTimingFunction:"linear"},["".concat(o).concat(r,"-leave")]:{animationTimingFunction:"linear"}}]};var em=n(58854),eg=n(80316),eh=n(76585);function eb(e){return{position:e,inset:0}}let ev=e=>{let{componentCls:t,antCls:n}=e;return[{["".concat(t,"-root")]:{["".concat(t).concat(n,"-zoom-enter, ").concat(t).concat(n,"-zoom-appear")]:{transform:"none",opacity:0,animationDuration:e.motionDurationSlow,userSelect:"none"},["".concat(t).concat(n,"-zoom-leave 
").concat(t,"-content")]:{pointerEvents:"none"},["".concat(t,"-mask")]:Object.assign(Object.assign({},eb("fixed")),{zIndex:e.zIndexPopupBase,height:"100%",backgroundColor:e.colorBgMask,pointerEvents:"none",["".concat(t,"-hidden")]:{display:"none"}}),["".concat(t,"-wrap")]:Object.assign(Object.assign({},eb("fixed")),{zIndex:e.zIndexPopupBase,overflow:"auto",outline:0,WebkitOverflowScrolling:"touch",["&:has(".concat(t).concat(n,"-zoom-enter), &:has(").concat(t).concat(n,"-zoom-appear)")]:{pointerEvents:"none"}})}},{["".concat(t,"-root")]:ef(e)}]},ey=e=>{let{componentCls:t}=e;return[{["".concat(t,"-root")]:{["".concat(t,"-wrap-rtl")]:{direction:"rtl"},["".concat(t,"-centered")]:{textAlign:"center","&::before":{display:"inline-block",width:0,height:"100%",verticalAlign:"middle",content:'""'},[t]:{top:0,display:"inline-block",paddingBottom:0,textAlign:"start",verticalAlign:"middle"}},["@media (max-width: ".concat(e.screenSMMax,"px)")]:{[t]:{maxWidth:"calc(100vw - 16px)",margin:"".concat((0,ec.bf)(e.marginXS)," auto")},["".concat(t,"-centered")]:{[t]:{flex:1}}}}},{[t]:Object.assign(Object.assign({},(0,es.Wf)(e)),{pointerEvents:"none",position:"relative",top:100,width:"auto",maxWidth:"calc(100vw - ".concat((0,ec.bf)(e.calc(e.margin).mul(2).equal()),")"),margin:"0 
auto",paddingBottom:e.paddingLG,["".concat(t,"-title")]:{margin:0,color:e.titleColor,fontWeight:e.fontWeightStrong,fontSize:e.titleFontSize,lineHeight:e.titleLineHeight,wordWrap:"break-word"},["".concat(t,"-content")]:{position:"relative",backgroundColor:e.contentBg,backgroundClip:"padding-box",border:0,borderRadius:e.borderRadiusLG,boxShadow:e.boxShadow,pointerEvents:"auto",padding:e.contentPadding},["".concat(t,"-close")]:Object.assign({position:"absolute",top:e.calc(e.modalHeaderHeight).sub(e.modalCloseBtnSize).div(2).equal(),insetInlineEnd:e.calc(e.modalHeaderHeight).sub(e.modalCloseBtnSize).div(2).equal(),zIndex:e.calc(e.zIndexPopupBase).add(10).equal(),padding:0,color:e.modalCloseIconColor,fontWeight:e.fontWeightStrong,lineHeight:1,textDecoration:"none",background:"transparent",borderRadius:e.borderRadiusSM,width:e.modalCloseBtnSize,height:e.modalCloseBtnSize,border:0,outline:0,cursor:"pointer",transition:"color ".concat(e.motionDurationMid,", background-color ").concat(e.motionDurationMid),"&-x":{display:"flex",fontSize:e.fontSizeLG,fontStyle:"normal",lineHeight:"".concat((0,ec.bf)(e.modalCloseBtnSize)),justifyContent:"center",textTransform:"none",textRendering:"auto"},"&:hover":{color:e.modalIconHoverColor,backgroundColor:e.closeBtnHoverBg,textDecoration:"none"},"&:active":{backgroundColor:e.closeBtnActiveBg}},(0,es.Qy)(e)),["".concat(t,"-header")]:{color:e.colorText,background:e.headerBg,borderRadius:"".concat((0,ec.bf)(e.borderRadiusLG)," ").concat((0,ec.bf)(e.borderRadiusLG)," 0 0"),marginBottom:e.headerMarginBottom,padding:e.headerPadding,borderBottom:e.headerBorderBottom},["".concat(t,"-body")]:{fontSize:e.fontSize,lineHeight:e.lineHeight,wordWrap:"break-word",padding:e.bodyPadding},["".concat(t,"-footer")]:{textAlign:"end",background:e.footerBg,marginTop:e.footerMarginTop,padding:e.footerPadding,borderTop:e.footerBorderTop,borderRadius:e.footerBorderRadius,["> ".concat(e.antCls,"-btn + 
").concat(e.antCls,"-btn")]:{marginInlineStart:e.marginXS}},["".concat(t,"-open")]:{overflow:"hidden"}})},{["".concat(t,"-pure-panel")]:{top:"auto",padding:0,display:"flex",flexDirection:"column",["".concat(t,"-content,\n ").concat(t,"-body,\n ").concat(t,"-confirm-body-wrapper")]:{display:"flex",flexDirection:"column",flex:"auto"},["".concat(t,"-confirm-body")]:{marginBottom:"auto"}}}]},eE=e=>{let{componentCls:t}=e;return{["".concat(t,"-root")]:{["".concat(t,"-wrap-rtl")]:{direction:"rtl",["".concat(t,"-confirm-body")]:{direction:"rtl"}}}}},eS=e=>{let t=e.padding,n=e.fontSizeHeading5,r=e.lineHeightHeading5;return(0,eg.TS)(e,{modalHeaderHeight:e.calc(e.calc(r).mul(n).equal()).add(e.calc(t).mul(2).equal()).equal(),modalFooterBorderColorSplit:e.colorSplit,modalFooterBorderStyle:e.lineType,modalFooterBorderWidth:e.lineWidth,modalIconHoverColor:e.colorIconHover,modalCloseIconColor:e.colorIcon,modalCloseBtnSize:e.fontHeight,modalConfirmIconSize:e.fontHeight,modalTitleHeight:e.calc(e.titleFontSize).mul(e.titleLineHeight).equal()})},ew=e=>({footerBg:"transparent",headerBg:e.colorBgElevated,titleLineHeight:e.lineHeightHeading5,titleFontSize:e.fontSizeHeading5,contentBg:e.colorBgElevated,titleColor:e.colorTextHeading,closeBtnHoverBg:e.wireframe?"transparent":e.colorFillContent,closeBtnActiveBg:e.wireframe?"transparent":e.colorFillContentHover,contentPadding:e.wireframe?0:"".concat((0,ec.bf)(e.paddingMD)," ").concat((0,ec.bf)(e.paddingContentHorizontalLG)),headerPadding:e.wireframe?"".concat((0,ec.bf)(e.padding)," ").concat((0,ec.bf)(e.paddingLG)):0,headerBorderBottom:e.wireframe?"".concat((0,ec.bf)(e.lineWidth)," ").concat(e.lineType," ").concat(e.colorSplit):"none",headerMarginBottom:e.wireframe?0:e.marginXS,bodyPadding:e.wireframe?e.paddingLG:0,footerPadding:e.wireframe?"".concat((0,ec.bf)(e.paddingXS)," ").concat((0,ec.bf)(e.padding)):0,footerBorderTop:e.wireframe?"".concat((0,ec.bf)(e.lineWidth)," ").concat(e.lineType," 
").concat(e.colorSplit):"none",footerBorderRadius:e.wireframe?"0 0 ".concat((0,ec.bf)(e.borderRadiusLG)," ").concat((0,ec.bf)(e.borderRadiusLG)):0,footerMarginTop:e.wireframe?0:e.marginSM,confirmBodyPadding:e.wireframe?"".concat((0,ec.bf)(2*e.padding)," ").concat((0,ec.bf)(2*e.padding)," ").concat((0,ec.bf)(e.paddingLG)):0,confirmIconMarginInlineEnd:e.wireframe?e.margin:e.marginSM,confirmBtnsMarginTop:e.wireframe?e.marginLG:e.marginSM});var ex=(0,eh.I$)("Modal",e=>{let t=eS(e);return[ey(t),eE(t),ev(t),(0,em._y)(t,"zoom")]},ew,{unitless:{titleLineHeight:!0}}),eO=n(92935),eC=function(e,t){var n={};for(var r in e)Object.prototype.hasOwnProperty.call(e,r)&&0>t.indexOf(r)&&(n[r]=e[r]);if(null!=e&&"function"==typeof Object.getOwnPropertySymbols)for(var o=0,r=Object.getOwnPropertySymbols(e);ot.indexOf(r[o])&&Object.prototype.propertyIsEnumerable.call(e,r[o])&&(n[r[o]]=e[r[o]]);return n};(0,Y.Z)()&&window.document.documentElement&&document.documentElement.addEventListener("click",e=>{r={x:e.pageX,y:e.pageY},setTimeout(()=>{r=null},100)},!0);var eT=e=>{var t;let{getPopupContainer:n,getPrefixCls:o,direction:i,modal:s}=a.useContext(l.E_),c=t=>{let{onCancel:n}=e;null==n||n(t)},{prefixCls:u,className:d,rootClassName:p,open:f,wrapClassName:b,centered:v,getContainer:y,closeIcon:E,closable:S,focusTriggerAfterClose:w=!0,style:x,visible:O,width:C=520,footer:T,classNames:A,styles:I}=e,R=eC(e,["prefixCls","className","rootClassName","open","wrapClassName","centered","getContainer","closeIcon","closable","focusTriggerAfterClose","style","visible","width","footer","classNames","styles"]),N=o("modal",u),_=o(),P=(0,eO.Z)(N),[M,L,D]=ex(N,P),j=m()(b,{["".concat(N,"-centered")]:!!v,["".concat(N,"-wrap-rtl")]:"rtl"===i}),F=null!==T&&a.createElement(el,Object.assign({},e,{onOk:t=>{let{onOk:n}=e;null==n||n(t)},onCancel:c})),[B,U]=q(S,E,e=>ei(N,e),a.createElement(k.Z,{className:"".concat(N,"-close-icon")}),!0),Z=function(e){let t=a.useContext(et),n=a.useRef();return(0,J.zX)(r=>{if(r){let 
o=e?r.querySelector(e):r;t.add(o),n.current=o}else t.remove(n.current)})}(".".concat(N,"-content")),[z,H]=(0,g.Cn)("Modal",R.zIndex);return M(a.createElement(Q.BR,null,a.createElement(X.Ux,{status:!0,override:!0},a.createElement(K.Z.Provider,{value:H},a.createElement(V,Object.assign({width:C},R,{zIndex:z,getContainer:void 0===y?n:y,prefixCls:N,rootClassName:m()(L,p,D,P),footer:F,visible:null!=f?f:O,mousePosition:null!==(t=R.mousePosition)&&void 0!==t?t:r,onClose:c,closable:B,closeIcon:U,focusTriggerAfterClose:w,transitionName:(0,h.m)(_,"zoom",e.transitionName),maskTransitionName:(0,h.m)(_,"fade",e.maskTransitionName),className:m()(L,d,null==s?void 0:s.className),style:Object.assign(Object.assign({},null==s?void 0:s.style),x),classNames:Object.assign(Object.assign({wrapper:j},null==s?void 0:s.classNames),A),styles:Object.assign(Object.assign({},null==s?void 0:s.styles),I),panelRef:Z}))))))};let ek=e=>{let{componentCls:t,titleFontSize:n,titleLineHeight:r,modalConfirmIconSize:o,fontSize:a,lineHeight:i,modalTitleHeight:l,fontHeight:s,confirmBodyPadding:c}=e,u="".concat(t,"-confirm");return{[u]:{"&-rtl":{direction:"rtl"},["".concat(e.antCls,"-modal-header")]:{display:"none"},["".concat(u,"-body-wrapper")]:Object.assign({},(0,es.dF)()),["&".concat(t," ").concat(t,"-body")]:{padding:c},["".concat(u,"-body")]:{display:"flex",flexWrap:"nowrap",alignItems:"start",["> ".concat(e.iconCls)]:{flex:"none",fontSize:o,marginInlineEnd:e.confirmIconMarginInlineEnd,marginTop:e.calc(e.calc(s).sub(o).equal()).div(2).equal()},["&-has-title > ".concat(e.iconCls)]:{marginTop:e.calc(e.calc(l).sub(o).equal()).div(2).equal()}},["".concat(u,"-paragraph")]:{display:"flex",flexDirection:"column",flex:"auto",rowGap:e.marginXS,maxWidth:"calc(100% - 
".concat((0,ec.bf)(e.calc(e.modalConfirmIconSize).add(e.marginSM).equal()),")")},["".concat(u,"-title")]:{color:e.colorTextHeading,fontWeight:e.fontWeightStrong,fontSize:n,lineHeight:r},["".concat(u,"-content")]:{color:e.colorText,fontSize:a,lineHeight:i},["".concat(u,"-btns")]:{textAlign:"end",marginTop:e.confirmBtnsMarginTop,["".concat(e.antCls,"-btn + ").concat(e.antCls,"-btn")]:{marginBottom:0,marginInlineStart:e.marginXS}}},["".concat(u,"-error ").concat(u,"-body > ").concat(e.iconCls)]:{color:e.colorError},["".concat(u,"-warning ").concat(u,"-body > ").concat(e.iconCls,",\n ").concat(u,"-confirm ").concat(u,"-body > ").concat(e.iconCls)]:{color:e.colorWarning},["".concat(u,"-info ").concat(u,"-body > ").concat(e.iconCls)]:{color:e.colorInfo},["".concat(u,"-success ").concat(u,"-body > ").concat(e.iconCls)]:{color:e.colorSuccess}}};var eA=(0,eh.bk)(["Modal","confirm"],e=>[ek(eS(e))],ew,{order:-1e3}),eI=function(e,t){var n={};for(var r in e)Object.prototype.hasOwnProperty.call(e,r)&&0>t.indexOf(r)&&(n[r]=e[r]);if(null!=e&&"function"==typeof Object.getOwnPropertySymbols)for(var o=0,r=Object.getOwnPropertySymbols(e);ot.indexOf(r[o])&&Object.prototype.propertyIsEnumerable.call(e,r[o])&&(n[r[o]]=e[r[o]]);return n};function eR(e){let{prefixCls:t,icon:n,okText:r,cancelText:i,confirmPrefixCls:l,type:s,okCancel:f,footer:g,locale:h}=e,v=eI(e,["prefixCls","icon","okText","cancelText","confirmPrefixCls","type","okCancel","footer","locale"]),y=n;if(!n&&null!==n)switch(s){case"info":y=a.createElement(p.Z,null);break;case"success":y=a.createElement(c.Z,null);break;case"error":y=a.createElement(u.Z,null);break;default:y=a.createElement(d.Z,null)}let E=null!=f?f:"confirm"===s,S=null!==e.autoFocusButton&&(e.autoFocusButton||"ok"),[w]=(0,b.Z)("Modal"),x=h||w,k=r||(E?null==x?void 0:x.okText:null==x?void 0:x.justOkText),A=Object.assign({autoFocusButton:S,cancelTextLocale:i||(null==x?void 
0:x.cancelText),okTextLocale:k,mergedOkCancel:E},v),I=a.useMemo(()=>A,(0,o.Z)(Object.values(A))),R=a.createElement(a.Fragment,null,a.createElement(C,null),a.createElement(T,null)),N=void 0!==e.title&&null!==e.title,_="".concat(l,"-body");return a.createElement("div",{className:"".concat(l,"-body-wrapper")},a.createElement("div",{className:m()(_,{["".concat(_,"-has-title")]:N})},y,a.createElement("div",{className:"".concat(l,"-paragraph")},N&&a.createElement("span",{className:"".concat(l,"-title")},e.title),a.createElement("div",{className:"".concat(l,"-content")},e.content))),void 0===g||"function"==typeof g?a.createElement(O,{value:I},a.createElement("div",{className:"".concat(l,"-btns")},"function"==typeof g?g(R,{OkBtn:T,CancelBtn:C}):R)):g,a.createElement(eA,{prefixCls:t}))}let eN=e=>{let{close:t,zIndex:n,afterClose:r,open:o,keyboard:i,centered:l,getContainer:s,maskStyle:c,direction:u,prefixCls:d,wrapClassName:p,rootPrefixCls:f,bodyStyle:b,closable:y=!1,closeIcon:E,modalRender:S,focusTriggerAfterClose:w,onConfirm:x,styles:O}=e,C="".concat(d,"-confirm"),T=e.width||416,k=e.style||{},A=void 0===e.mask||e.mask,I=void 0!==e.maskClosable&&e.maskClosable,R=m()(C,"".concat(C,"-").concat(e.type),{["".concat(C,"-rtl")]:"rtl"===u},e.className),[,N]=(0,v.ZP)(),_=a.useMemo(()=>void 0!==n?n:N.zIndexPopupBase+g.u6,[n,N]);return a.createElement(eT,{prefixCls:d,className:R,wrapClassName:m()({["".concat(C,"-centered")]:!!e.centered},p),onCancel:()=>{null==t||t({triggerCancel:!0}),null==x||x(!1)},open:o,title:"",footer:null,transitionName:(0,h.m)(f||"","zoom",e.transitionName),maskTransitionName:(0,h.m)(f||"","fade",e.maskTransitionName),mask:A,maskClosable:I,style:k,styles:Object.assign({body:b,mask:c},O),width:T,zIndex:_,afterClose:r,keyboard:i,centered:l,getContainer:s,closable:y,closeIcon:E,modalRender:S,focusTriggerAfterClose:w},a.createElement(eR,Object.assign({},e,{confirmPrefixCls:C})))};var e_=e=>{let{rootPrefixCls:t,iconPrefixCls:n,direction:r,theme:o}=e;return 
// Helpers that tag a modal config object with its dialog flavour.
// Each returns a shallow copy of the caller's config with `type` forced
// to the matching value; the input object is never mutated.
function ej(config) {
  return Object.assign({}, config, { type: "warning" });
}
function eF(config) {
  return Object.assign({}, config, { type: "info" });
}
function eB(config) {
  return Object.assign({}, config, { type: "success" });
}
function eU(config) {
  return Object.assign({}, config, { type: "error" });
}
function eZ(config) {
  return Object.assign({}, config, { type: "confirm" });
}
o=0,r=Object.getOwnPropertySymbols(e);ot.indexOf(r[o])&&Object.prototype.propertyIsEnumerable.call(e,r[o])&&(n[r[o]]=e[r[o]]);return n},eG=(0,ez.i)(e=>{let{prefixCls:t,className:n,closeIcon:r,closable:o,type:i,title:s,children:c,footer:u}=e,d=eH(e,["prefixCls","className","closeIcon","closable","type","title","children","footer"]),{getPrefixCls:p}=a.useContext(l.E_),f=p(),g=t||p("modal"),h=(0,eO.Z)(f),[b,v,y]=ex(g,h),E="".concat(g,"-confirm"),S={};return S=i?{closable:null!=o&&o,title:"",footer:"",children:a.createElement(eR,Object.assign({},e,{prefixCls:g,confirmPrefixCls:E,rootPrefixCls:f,content:c}))}:{closable:null==o||o,title:s,footer:null!==u&&a.createElement(el,Object.assign({},e)),children:c},b(a.createElement(H,Object.assign({prefixCls:g,className:m()(v,"".concat(g,"-pure-panel"),i&&E,i&&"".concat(E,"-").concat(i),n,y,h)},d,{closeIcon:ei(g,r),closable:o},S)))}),e$=n(79474),eW=function(e,t){var n={};for(var r in e)Object.prototype.hasOwnProperty.call(e,r)&&0>t.indexOf(r)&&(n[r]=e[r]);if(null!=e&&"function"==typeof Object.getOwnPropertySymbols)for(var o=0,r=Object.getOwnPropertySymbols(e);ot.indexOf(r[o])&&Object.prototype.propertyIsEnumerable.call(e,r[o])&&(n[r[o]]=e[r[o]]);return n},eV=a.forwardRef((e,t)=>{var n,{afterClose:r,config:i}=e,s=eW(e,["afterClose","config"]);let[c,u]=a.useState(!0),[d,p]=a.useState(i),{direction:f,getPrefixCls:m}=a.useContext(l.E_),g=m("modal"),h=m(),v=function(){u(!1);for(var e=arguments.length,t=Array(e),n=0;ne&&e.triggerCancel);d.onCancel&&r&&d.onCancel.apply(d,[()=>{}].concat((0,o.Z)(t.slice(1))))};a.useImperativeHandle(t,()=>({destroy:v,update:e=>{p(t=>Object.assign(Object.assign({},t),e))}}));let y=null!==(n=d.okCancel)&&void 0!==n?n:"confirm"===d.type,[E]=(0,b.Z)("Modal",e$.Z.Modal);return a.createElement(e_,Object.assign({prefixCls:g,rootPrefixCls:h},d,{close:v,open:c,afterClose:()=>{var e;r(),null===(e=d.afterClose)||void 0===e||e.call(d)},okText:d.okText||(y?null==E?void 0:E.okText:null==E?void 
0:E.justOkText),direction:d.direction||f,cancelText:d.cancelText||(null==E?void 0:E.cancelText)},s))});let eq=0,eY=a.memo(a.forwardRef((e,t)=>{let[n,r]=function(){let[e,t]=a.useState([]);return[e,a.useCallback(e=>(t(t=>[].concat((0,o.Z)(t),[e])),()=>{t(t=>t.filter(t=>t!==e))}),[])]}();return a.useImperativeHandle(t,()=>({patchElement:r}),[]),a.createElement(a.Fragment,null,n)}));function eK(e){return eD(ej(e))}eT.useModal=function(){let e=a.useRef(null),[t,n]=a.useState([]);a.useEffect(()=>{t.length&&((0,o.Z)(t).forEach(e=>{e()}),n([]))},[t]);let r=a.useCallback(t=>function(r){var i;let l,s;eq+=1;let c=a.createRef(),u=new Promise(e=>{l=e}),d=!1,p=a.createElement(eV,{key:"modal-".concat(eq),config:t(r),ref:c,afterClose:()=>{null==s||s()},isSilent:()=>d,onConfirm:e=>{l(e)}});return(s=null===(i=e.current)||void 0===i?void 0:i.patchElement(p))&&eP.push(s),{destroy:()=>{function e(){var e;null===(e=c.current)||void 0===e||e.destroy()}c.current?e():n(t=>[].concat((0,o.Z)(t),[e]))},update:e=>{function t(){var t;null===(t=c.current)||void 0===t||t.update(e)}c.current?t():n(e=>[].concat((0,o.Z)(e),[t]))},then:e=>(d=!0,u.then(e))}},[]);return[a.useMemo(()=>({info:r(eF),success:r(eB),error:r(eU),warning:r(ej),confirm:r(eZ)}),[]),a.createElement(eY,{key:"modal-holder",ref:e})]},eT.info=function(e){return eD(eF(e))},eT.success=function(e){return eD(eB(e))},eT.error=function(e){return eD(eU(e))},eT.warning=eK,eT.warn=eK,eT.confirm=function(e){return eD(eZ(e))},eT.destroyAll=function(){for(;eP.length;){let e=eP.pop();e&&e()}},eT.config=function(e){let{rootPrefixCls:t}=e;eM=t},eT._InternalPanelDoNotUseOrYouWillBeFired=eG;var eX=eT},4678:function(e,t,n){n.d(t,{A:function(){return s},f:function(){return l}});var r=n(79474);let o=Object.assign({},r.Z.Modal),a=[],i=()=>a.reduce((e,t)=>Object.assign(Object.assign({},e),t),r.Z.Modal);function l(e){if(e){let t=Object.assign({},e);return a.push(t),o=i(),()=>{a=a.filter(e=>e!==t),o=i()}}o=Object.assign({},r.Z.Modal)}function s(){return 
o}},38302:function(e,t,n){var r=n(5056);t.Z=r.Z},50670:function(e,t,n){n.d(t,{default:function(){return tk}});var r=n(64090),o=n(16480),a=n.n(o),i=n(14749),l=n(63787),s=n(50833),c=n(5239),u=n(80406),d=n(6787),p=n(6976),f=n(44329),m=n(53850),g=n(24800),h=n(76158),b=n(4295),v=n(74084),y=function(e){var t=e.className,n=e.customizeIcon,o=e.customizeIconProps,i=e.children,l=e.onMouseDown,s=e.onClick,c="function"==typeof n?n(o):n;return r.createElement("span",{className:t,onMouseDown:function(e){e.preventDefault(),null==l||l(e)},style:{userSelect:"none",WebkitUserSelect:"none"},unselectable:"on",onClick:s,"aria-hidden":!0},void 0!==c?c:r.createElement("span",{className:a()(t.split(/\s+/).map(function(e){return"".concat(e,"-icon")}))},i))},E=function(e,t,n,o,a){var i=arguments.length>5&&void 0!==arguments[5]&&arguments[5],l=arguments.length>6?arguments[6]:void 0,s=arguments.length>7?arguments[7]:void 0,c=r.useMemo(function(){return"object"===(0,p.Z)(o)?o.clearIcon:a||void 0},[o,a]);return{allowClear:r.useMemo(function(){return!i&&!!o&&(!!n.length||!!l)&&!("combobox"===s&&""===l)},[o,i,n.length,l,s]),clearIcon:r.createElement(y,{className:"".concat(e,"-clear"),onMouseDown:t,customizeIcon:c},"\xd7")}},S=r.createContext(null);function w(){var e=arguments.length>0&&void 0!==arguments[0]?arguments[0]:250,t=r.useRef(null),n=r.useRef(null);return r.useEffect(function(){return function(){window.clearTimeout(n.current)}},[]),[function(){return t.current},function(r){(r||null===t.current)&&(t.current=r),window.clearTimeout(n.current),n.current=window.setTimeout(function(){t.current=null},e)}]}var x=n(72480),O=n(54739),C=r.forwardRef(function(e,t){var 
// Normalise a prop that may be a single value, an array, or undefined
// into an array (undefined -> empty array; anything else is wrapped).
function T(value) {
  if (Array.isArray(value)) return value;
  return value !== undefined ? [value] : [];
}
// True when the project `typeof` helper (p.Z) classifies `value` as a
// plain string or number — i.e. it can be rendered as a hover title.
function A(value) {
  var kind = (0, p.Z)(value);
  return kind === "string" || kind === "number";
}
// Derive a hover title for an option item: prefer `title`, fall back to
// `label`, and only when either is a string/number (stringified).
function I(item) {
  var title;
  if (item) {
    if (A(item.title)) {
      title = item.title.toString();
    } else if (A(item.label)) {
      title = item.label.toString();
    }
  }
  return title;
}
// Key of a display value: an explicit non-null `key` wins, otherwise
// fall back to `value`.
function R(item) {
  var key = item.key;
  return key !== null && key !== undefined ? key : item.value;
}
// Swallow an event entirely (used e.g. on the remove-icon mousedown so
// the selector does not gain focus / toggle open).
var N = function (event) {
  event.preventDefault();
  event.stopPropagation();
};
t,n,o=e.id,i=e.prefixCls,l=e.values,c=e.open,d=e.searchValue,p=e.autoClearSearchValue,f=e.inputRef,m=e.placeholder,g=e.disabled,h=e.mode,b=e.showSearch,v=e.autoFocus,E=e.autoComplete,S=e.activeDescendantId,w=e.tabIndex,T=e.removeIcon,A=e.maxTagCount,_=e.maxTagTextLength,P=e.maxTagPlaceholder,M=void 0===P?function(e){return"+ ".concat(e.length," ...")}:P,L=e.tagRender,D=e.onToggleOpen,j=e.onRemove,F=e.onInputChange,B=e.onInputPaste,U=e.onInputKeyDown,Z=e.onInputMouseDown,z=e.onInputCompositionStart,H=e.onInputCompositionEnd,G=r.useRef(null),$=(0,r.useState)(0),W=(0,u.Z)($,2),V=W[0],q=W[1],Y=(0,r.useState)(!1),K=(0,u.Z)(Y,2),X=K[0],Q=K[1],J="".concat(i,"-selection"),ee=c||"multiple"===h&&!1===p||"tags"===h?d:"",et="tags"===h||"multiple"===h&&!1===p||b&&(c||X);t=function(){q(G.current.scrollWidth)},n=[ee],k?r.useLayoutEffect(t,n):r.useEffect(t,n);var en=function(e,t,n,o,i){return r.createElement("span",{title:I(e),className:a()("".concat(J,"-item"),(0,s.Z)({},"".concat(J,"-item-disabled"),n))},r.createElement("span",{className:"".concat(J,"-item-content")},t),o&&r.createElement(y,{className:"".concat(J,"-item-remove"),onMouseDown:N,onClick:i,customizeIcon:T},"\xd7"))},er=r.createElement("div",{className:"".concat(J,"-search"),style:{width:V},onFocus:function(){Q(!0)},onBlur:function(){Q(!1)}},r.createElement(C,{ref:f,open:c,prefixCls:i,id:o,inputElement:null,disabled:g,autoFocus:v,autoComplete:E,editable:et,activeDescendantId:S,value:ee,onKeyDown:U,onMouseDown:Z,onChange:F,onPaste:B,onCompositionStart:z,onCompositionEnd:H,tabIndex:w,attrs:(0,x.Z)(e,!0)}),r.createElement("span",{ref:G,className:"".concat(J,"-search-mirror"),"aria-hidden":!0},ee,"\xa0")),eo=r.createElement(O.Z,{prefixCls:"".concat(J,"-overflow"),data:l,renderItem:function(e){var t,n=e.disabled,o=e.label,a=e.value,i=!g&&!n,l=o;if("number"==typeof _&&("string"==typeof o||"number"==typeof o)){var s=String(l);s.length>_&&(l="".concat(s.slice(0,_),"..."))}var 
u=function(t){t&&t.stopPropagation(),j(e)};return"function"==typeof L?(t=l,r.createElement("span",{onMouseDown:function(e){N(e),D(!c)}},L({label:t,value:a,disabled:n,closable:i,onClose:u}))):en(e,l,n,i,u)},renderRest:function(e){var t="function"==typeof M?M(e):M;return en({title:t},t,!1)},suffix:er,itemKey:R,maxCount:A});return r.createElement(r.Fragment,null,eo,!l.length&&!ee&&r.createElement("span",{className:"".concat(J,"-placeholder")},m))},P=function(e){var t=e.inputElement,n=e.prefixCls,o=e.id,a=e.inputRef,i=e.disabled,l=e.autoFocus,s=e.autoComplete,c=e.activeDescendantId,d=e.mode,p=e.open,f=e.values,m=e.placeholder,g=e.tabIndex,h=e.showSearch,b=e.searchValue,v=e.activeValue,y=e.maxLength,E=e.onInputKeyDown,S=e.onInputMouseDown,w=e.onInputChange,O=e.onInputPaste,T=e.onInputCompositionStart,k=e.onInputCompositionEnd,A=e.title,R=r.useState(!1),N=(0,u.Z)(R,2),_=N[0],P=N[1],M="combobox"===d,L=M||h,D=f[0],j=b||"";M&&v&&!_&&(j=v),r.useEffect(function(){M&&P(!1)},[M,v]);var F=("combobox"===d||!!p||!!h)&&!!j,B=void 0===A?I(D):A,U=r.useMemo(function(){return D?null:r.createElement("span",{className:"".concat(n,"-selection-placeholder"),style:F?{visibility:"hidden"}:void 0},m)},[D,F,m,n]);return r.createElement(r.Fragment,null,r.createElement("span",{className:"".concat(n,"-selection-search")},r.createElement(C,{ref:a,prefixCls:n,id:o,open:p,inputElement:t,disabled:i,autoFocus:l,autoComplete:s,editable:L,activeDescendantId:c,value:j,onKeyDown:E,onMouseDown:S,onChange:function(e){P(!0),w(e)},onPaste:O,onCompositionStart:T,onCompositionEnd:k,tabIndex:g,attrs:(0,x.Z)(e,!0),maxLength:M?y:void 0})),!M&&D?r.createElement("span",{className:"".concat(n,"-selection-item"),title:B,style:F?{visibility:"hidden"}:void 0},D.label):null,U)},M=r.forwardRef(function(e,t){var 
n=(0,r.useRef)(null),o=(0,r.useRef)(!1),a=e.prefixCls,l=e.open,s=e.mode,c=e.showSearch,d=e.tokenWithEnter,p=e.autoClearSearchValue,f=e.onSearch,m=e.onSearchSubmit,g=e.onToggleOpen,h=e.onInputKeyDown,v=e.domRef;r.useImperativeHandle(t,function(){return{focus:function(){n.current.focus()},blur:function(){n.current.blur()}}});var y=w(0),E=(0,u.Z)(y,2),S=E[0],x=E[1],O=(0,r.useRef)(null),C=function(e){!1!==f(e,!0,o.current)&&g(!0)},T={inputRef:n,onInputKeyDown:function(e){var t=e.which;(t===b.Z.UP||t===b.Z.DOWN)&&e.preventDefault(),h&&h(e),t!==b.Z.ENTER||"tags"!==s||o.current||l||null==m||m(e.target.value),[b.Z.ESC,b.Z.SHIFT,b.Z.BACKSPACE,b.Z.TAB,b.Z.WIN_KEY,b.Z.ALT,b.Z.META,b.Z.WIN_KEY_RIGHT,b.Z.CTRL,b.Z.SEMICOLON,b.Z.EQUALS,b.Z.CAPS_LOCK,b.Z.CONTEXT_MENU,b.Z.F1,b.Z.F2,b.Z.F3,b.Z.F4,b.Z.F5,b.Z.F6,b.Z.F7,b.Z.F8,b.Z.F9,b.Z.F10,b.Z.F11,b.Z.F12].includes(t)||g(!0)},onInputMouseDown:function(){x(!0)},onInputChange:function(e){var t=e.target.value;if(d&&O.current&&/[\r\n]/.test(O.current)){var n=O.current.replace(/[\r\n]+$/,"").replace(/\r\n/g," ").replace(/[\r\n]/g," ");t=t.replace(n,O.current)}O.current=null,C(t)},onInputPaste:function(e){var t=e.clipboardData,n=null==t?void 0:t.getData("text");O.current=n||""},onInputCompositionStart:function(){o.current=!0},onInputCompositionEnd:function(e){o.current=!1,"combobox"!==s&&C(e.target.value)}},k="multiple"===s||"tags"===s?r.createElement(_,(0,i.Z)({},e,T)):r.createElement(P,(0,i.Z)({},e,T));return r.createElement("div",{ref:v,className:"".concat(a,"-selector"),onClick:function(e){e.target!==n.current&&(void 0!==document.body.style.msTouchAction?setTimeout(function(){n.current.focus()}):n.current.focus())},onMouseDown:function(e){var 
t=S();e.target===n.current||t||"combobox"===s||e.preventDefault(),("combobox"===s||c&&t)&&l||(l&&!1!==p&&f("",!0,!1),g())}},k)}),L=n(44101),D=["prefixCls","disabled","visible","children","popupElement","animation","transitionName","dropdownStyle","dropdownClassName","direction","placement","builtinPlacements","dropdownMatchSelectWidth","dropdownRender","dropdownAlign","getPopupContainer","empty","getTriggerDOMNode","onPopupVisibleChange","onPopupMouseEnter"],j=function(e){var t=!0===e?0:1;return{bottomLeft:{points:["tl","bl"],offset:[0,4],overflow:{adjustX:t,adjustY:1},htmlRegion:"scroll"},bottomRight:{points:["tr","br"],offset:[0,4],overflow:{adjustX:t,adjustY:1},htmlRegion:"scroll"},topLeft:{points:["bl","tl"],offset:[0,-4],overflow:{adjustX:t,adjustY:1},htmlRegion:"scroll"},topRight:{points:["br","tr"],offset:[0,-4],overflow:{adjustX:t,adjustY:1},htmlRegion:"scroll"}}},F=r.forwardRef(function(e,t){var n=e.prefixCls,o=(e.disabled,e.visible),l=e.children,u=e.popupElement,p=e.animation,f=e.transitionName,m=e.dropdownStyle,g=e.dropdownClassName,h=e.direction,b=e.placement,v=e.builtinPlacements,y=e.dropdownMatchSelectWidth,E=e.dropdownRender,S=e.dropdownAlign,w=e.getPopupContainer,x=e.empty,O=e.getTriggerDOMNode,C=e.onPopupVisibleChange,T=e.onPopupMouseEnter,k=(0,d.Z)(e,D),A="".concat(n,"-dropdown"),I=u;E&&(I=E(u));var R=r.useMemo(function(){return v||j(y)},[v,y]),N=p?"".concat(A,"-").concat(p):f,_="number"==typeof y,P=r.useMemo(function(){return _?null:!1===y?"minWidth":"width"},[y,_]),M=m;_&&(M=(0,c.Z)((0,c.Z)({},M),{},{width:y}));var F=r.useRef(null);return r.useImperativeHandle(t,function(){return{getPopupElement:function(){return F.current}}}),r.createElement(L.Z,(0,i.Z)({},k,{showAction:C?["click"]:[],hideAction:C?["click"]:[],popupPlacement:b||("rtl"===(void 
// Normalise the key used to identify an option: explicit non-null `key`
// first, then a defined `value`, finally a positional fallback key.
function U(option, index) {
  var explicitValue;
  if ("value" in option) {
    explicitValue = option.value;
  }
  var key = option.key;
  if (key !== null && key !== undefined) return key;
  if (explicitValue !== undefined) return explicitValue;
  return "rc-index-key-".concat(index);
}
// Fill in the default fieldNames mapping. When `childrenAsData` is true
// the label field defaults to "children" (JSX <Option> children mode);
// `groupLabel` falls back to whatever label field was resolved.
function Z(fieldNames, childrenAsData) {
  var source = fieldNames || {};
  var label = source.label || (childrenAsData ? "children" : "label");
  return {
    label: label,
    value: source.value || "value",
    options: source.options || "options",
    groupLabel: source.groupLabel || label,
  };
}
// Clone an option and, if it has no own `props`, attach a lazy `props`
// getter that warns about legacy `option.props` access and returns the
// clone itself so old call sites keep working.
function z(option) {
  var cloned = (0, c.Z)({}, option);
  if (!("props" in cloned)) {
    Object.defineProperty(cloned, "props", {
      get: function () {
        (0, m.ZP)(!1, "Return type is option instead of Option instance. Please read value directly instead of reading from `props`.");
        return cloned;
      },
    });
  }
  return cloned;
}
0!==n?o.slice(0,n):o:null},G=r.createContext(null),$=["id","prefixCls","className","showSearch","tagRender","direction","omitDomProps","displayValues","onDisplayValuesChange","emptyOptions","notFoundContent","onClear","mode","disabled","loading","getInputElement","getRawInputElement","open","defaultOpen","onDropdownVisibleChange","activeValue","onActiveValueChange","activeDescendantId","searchValue","autoClearSearchValue","onSearch","onSearchSplit","tokenSeparators","allowClear","suffixIcon","clearIcon","OptionList","animation","transitionName","dropdownStyle","dropdownClassName","dropdownMatchSelectWidth","dropdownRender","dropdownAlign","placement","builtinPlacements","getPopupContainer","showAction","onFocus","onBlur","onKeyUp","onKeyDown","onMouseDown"],W=["value","onChange","removeIcon","placeholder","autoFocus","maxTagCount","maxTagTextLength","maxTagPlaceholder","choiceTransitionName","onInputKeyDown","onPopupScroll","tabIndex"],V=function(e){return"tags"===e||"multiple"===e},q=r.forwardRef(function(e,t){var n,o,m,x,O,C,T,k,A=e.id,I=e.prefixCls,R=e.className,N=e.showSearch,_=e.tagRender,P=e.direction,L=e.omitDomProps,D=e.displayValues,j=e.onDisplayValuesChange,B=e.emptyOptions,U=e.notFoundContent,Z=void 0===U?"Not Found":U,z=e.onClear,q=e.mode,Y=e.disabled,K=e.loading,X=e.getInputElement,Q=e.getRawInputElement,J=e.open,ee=e.defaultOpen,et=e.onDropdownVisibleChange,en=e.activeValue,er=e.onActiveValueChange,eo=e.activeDescendantId,ea=e.searchValue,ei=e.autoClearSearchValue,el=e.onSearch,es=e.onSearchSplit,ec=e.tokenSeparators,eu=e.allowClear,ed=e.suffixIcon,ep=e.clearIcon,ef=e.OptionList,em=e.animation,eg=e.transitionName,eh=e.dropdownStyle,eb=e.dropdownClassName,ev=e.dropdownMatchSelectWidth,ey=e.dropdownRender,eE=e.dropdownAlign,eS=e.placement,ew=e.builtinPlacements,ex=e.getPopupContainer,eO=e.showAction,eC=void 0===eO?[]:eO,eT=e.onFocus,ek=e.onBlur,eA=e.onKeyUp,eI=e.onKeyDown,eR=e.onMouseDown,eN=(0,d.Z)(e,$),e_=V(q),eP=(void 
0!==N?N:e_)||"combobox"===q,eM=(0,c.Z)({},eN);W.forEach(function(e){delete eM[e]}),null==L||L.forEach(function(e){delete eM[e]});var eL=r.useState(!1),eD=(0,u.Z)(eL,2),ej=eD[0],eF=eD[1];r.useEffect(function(){eF((0,h.Z)())},[]);var eB=r.useRef(null),eU=r.useRef(null),eZ=r.useRef(null),ez=r.useRef(null),eH=r.useRef(null),eG=r.useRef(!1),e$=function(){var e=arguments.length>0&&void 0!==arguments[0]?arguments[0]:10,t=r.useState(!1),n=(0,u.Z)(t,2),o=n[0],a=n[1],i=r.useRef(null),l=function(){window.clearTimeout(i.current)};return r.useEffect(function(){return l},[]),[o,function(t,n){l(),i.current=window.setTimeout(function(){a(t),n&&n()},e)},l]}(),eW=(0,u.Z)(e$,3),eV=eW[0],eq=eW[1],eY=eW[2];r.useImperativeHandle(t,function(){var e,t;return{focus:null===(e=ez.current)||void 0===e?void 0:e.focus,blur:null===(t=ez.current)||void 0===t?void 0:t.blur,scrollTo:function(e){var t;return null===(t=eH.current)||void 0===t?void 0:t.scrollTo(e)}}});var eK=r.useMemo(function(){if("combobox"!==q)return ea;var e,t=null===(e=D[0])||void 0===e?void 0:e.value;return"string"==typeof t||"number"==typeof t?String(t):""},[ea,q,D]),eX="combobox"===q&&"function"==typeof X&&X()||null,eQ="function"==typeof Q&&Q(),eJ=(0,v.x1)(eU,null==eQ||null===(x=eQ.props)||void 0===x?void 0:x.ref),e0=r.useState(!1),e1=(0,u.Z)(e0,2),e2=e1[0],e4=e1[1];(0,g.Z)(function(){e4(!0)},[]);var e3=(0,f.Z)(!1,{defaultValue:ee,value:J}),e6=(0,u.Z)(e3,2),e5=e6[0],e8=e6[1],e9=!!e2&&e5,e7=!Z&&B;(Y||e7&&e9&&"combobox"===q)&&(e9=!1);var te=!e7&&e9,tt=r.useCallback(function(e){var t=void 0!==e?e:!e9;Y||(e8(t),e9!==t&&(null==et||et(t)))},[Y,e9,e8,et]),tn=r.useMemo(function(){return(ec||[]).some(function(e){return["\n","\r\n"].includes(e)})},[ec]),tr=r.useContext(G)||{},to=tr.maxCount,ta=tr.rawValues,ti=function(e,t,n){if(!((null==ta?void 0:ta.size)>=to)){var r=!0,o=e;null==er||er(null);var 
a=H(e,ec,to&&to-ta.size),i=n?null:a;return"combobox"!==q&&i&&(o="",null==es||es(i),tt(!1),r=!1),el&&eK!==o&&el(o,{source:t?"typing":"effect"}),r}};r.useEffect(function(){e9||e_||"combobox"===q||ti("",!1,!1)},[e9]),r.useEffect(function(){e5&&Y&&e8(!1),Y&&!eG.current&&eq(!1)},[Y]);var tl=w(),ts=(0,u.Z)(tl,2),tc=ts[0],tu=ts[1],td=r.useRef(!1),tp=[];r.useEffect(function(){return function(){tp.forEach(function(e){return clearTimeout(e)}),tp.splice(0,tp.length)}},[]);var tf=r.useState({}),tm=(0,u.Z)(tf,2)[1];eQ&&(C=function(e){tt(e)}),n=function(){var e;return[eB.current,null===(e=eZ.current)||void 0===e?void 0:e.getPopupElement()]},o=!!eQ,(m=r.useRef(null)).current={open:te,triggerOpen:tt,customizedTrigger:o},r.useEffect(function(){function e(e){if(null===(t=m.current)||void 0===t||!t.customizedTrigger){var t,r=e.target;r.shadowRoot&&e.composed&&(r=e.composedPath()[0]||r),m.current.open&&n().filter(function(e){return e}).every(function(e){return!e.contains(r)&&e!==r})&&m.current.triggerOpen(!1)}}return window.addEventListener("mousedown",e),function(){return window.removeEventListener("mousedown",e)}},[]);var tg=r.useMemo(function(){return(0,c.Z)((0,c.Z)({},e),{},{notFoundContent:Z,open:e9,triggerOpen:te,id:A,showSearch:eP,multiple:e_,toggleOpen:tt})},[e,Z,te,e9,A,eP,e_,tt]),th=!!ed||K;th&&(T=r.createElement(y,{className:a()("".concat(I,"-arrow"),(0,s.Z)({},"".concat(I,"-arrow-loading"),K)),customizeIcon:ed,customizeIconProps:{loading:K,searchValue:eK,open:e9,focused:eV,showSearch:eP}}));var tb=E(I,function(){var e;null==z||z(),null===(e=ez.current)||void 
0===e||e.focus(),j([],{type:"clear",values:D}),ti("",!1,!1)},D,eu,ep,Y,eK,q),tv=tb.allowClear,ty=tb.clearIcon,tE=r.createElement(ef,{ref:eH}),tS=a()(I,R,(O={},(0,s.Z)(O,"".concat(I,"-focused"),eV),(0,s.Z)(O,"".concat(I,"-multiple"),e_),(0,s.Z)(O,"".concat(I,"-single"),!e_),(0,s.Z)(O,"".concat(I,"-allow-clear"),eu),(0,s.Z)(O,"".concat(I,"-show-arrow"),th),(0,s.Z)(O,"".concat(I,"-disabled"),Y),(0,s.Z)(O,"".concat(I,"-loading"),K),(0,s.Z)(O,"".concat(I,"-open"),e9),(0,s.Z)(O,"".concat(I,"-customize-input"),eX),(0,s.Z)(O,"".concat(I,"-show-search"),eP),O)),tw=r.createElement(F,{ref:eZ,disabled:Y,prefixCls:I,visible:te,popupElement:tE,animation:em,transitionName:eg,dropdownStyle:eh,dropdownClassName:eb,direction:P,dropdownMatchSelectWidth:ev,dropdownRender:ey,dropdownAlign:eE,placement:eS,builtinPlacements:ew,getPopupContainer:ex,empty:B,getTriggerDOMNode:function(){return eU.current},onPopupVisibleChange:C,onPopupMouseEnter:function(){tm({})}},eQ?r.cloneElement(eQ,{ref:eJ}):r.createElement(M,(0,i.Z)({},e,{domRef:eU,prefixCls:I,inputElement:eX,ref:ez,id:A,showSearch:eP,autoClearSearchValue:ei,mode:q,activeDescendantId:eo,tagRender:_,values:D,open:e9,onToggleOpen:tt,activeValue:en,searchValue:eK,onSearch:ti,onSearchSubmit:function(e){e&&e.trim()&&el(e,{source:"submit"})},onRemove:function(e){j(D.filter(function(t){return t!==e}),{type:"remove",values:[e]})},tokenWithEnter:tn})));return k=eQ?tw:r.createElement("div",(0,i.Z)({className:tS},eM,{ref:eB,onMouseDown:function(e){var t,n=e.target,r=null===(t=eZ.current)||void 0===t?void 0:t.getPopupElement();if(r&&r.contains(n)){var o=setTimeout(function(){var e,t=tp.indexOf(o);-1!==t&&tp.splice(t,1),eY(),ej||r.contains(document.activeElement)||null===(e=ez.current)||void 0===e||e.focus()});tp.push(o)}for(var a=arguments.length,i=Array(a>1?a-1:0),l=1;l=0;i-=1){var s=o[i];if(!s.disabled){o.splice(i,1),a=s;break}}a&&j(o,{type:"remove",values:[a]})}for(var 
// Compute the scrollbar thumb ("spin") size in px for the virtual list.
// `containerSize` is the visible track length and `scrollRange` the full
// scrollable content length. The thumb is proportional to their ratio
// but clamped to at least 20px and at most half the container.
function em(containerSize, scrollRange) {
  if (containerSize === undefined) containerSize = 0;
  if (scrollRange === undefined) scrollRange = 0;
  var size = (containerSize / scrollRange) * 100;
  if (isNaN(size)) size = 0; // 0/0 case: no content, no thumb
  size = Math.max(size, 20);
  size = Math.min(size, containerSize / 2);
  return Math.floor(size);
}
eg=["prefixCls","className","height","itemHeight","fullHeight","style","data","children","itemKey","virtual","direction","scrollWidth","component","onScroll","onVirtualScroll","onVisibleChange","innerProps","extraRender","styles"],eh=[],eb={overflowY:"auto",overflowAnchor:"none"},ev=r.forwardRef(function(e,t){var n,o,l,f,m,h,b,v,y,E,S,w,x,O,C,T,k,A,I,R,N,_,P,M,L,D,j,F,B,U,Z,z,H,G,$,W=e.prefixCls,V=void 0===W?"rc-virtual-list":W,q=e.className,Y=e.height,K=e.itemHeight,X=e.fullHeight,Q=e.style,er=e.data,el=e.children,es=e.itemKey,ev=e.virtual,ey=e.direction,eE=e.scrollWidth,eS=e.component,ew=e.onScroll,ex=e.onVirtualScroll,eO=e.onVisibleChange,eC=e.innerProps,eT=e.extraRender,ek=e.styles,eA=(0,d.Z)(e,eg),eI=!!(!1!==ev&&Y&&K),eR=eI&&er&&(K*er.length>Y||!!eE),eN="rtl"===ey,e_=a()(V,(0,s.Z)({},"".concat(V,"-rtl"),eN),q),eP=er||eh,eM=(0,r.useRef)(),eL=(0,r.useRef)(),eD=(0,r.useState)(0),ej=(0,u.Z)(eD,2),eF=ej[0],eB=ej[1],eU=(0,r.useState)(0),eZ=(0,u.Z)(eU,2),ez=eZ[0],eH=eZ[1],eG=(0,r.useState)(!1),e$=(0,u.Z)(eG,2),eW=e$[0],eV=e$[1],eq=function(){eV(!0)},eY=function(){eV(!1)},eK=r.useCallback(function(e){return"function"==typeof es?es(e):null==e?void 0:e[es]},[es]);function eX(e){eB(function(t){var n,r=(n="function"==typeof e?e(t):e,Number.isNaN(tp.current)||(n=Math.min(n,tp.current)),n=Math.max(n,0));return eM.current.scrollTop=r,r})}var eQ=(0,r.useRef)({start:0,end:eP.length}),eJ=(0,r.useRef)(),e0=(o=r.useState(eP),f=(l=(0,u.Z)(o,2))[0],m=l[1],h=r.useState(null),v=(b=(0,u.Z)(h,2))[0],y=b[1],r.useEffect(function(){var e=function(e,t,n){var r,o,a=e.length,i=t.length;if(0===a&&0===i)return null;a0&&void 0!==arguments[0]&&arguments[0];p();var t=function(){s.current.forEach(function(e,t){if(e&&e.offsetParent){var n=(0,ei.Z)(e),r=n.offsetHeight;c.current.get(t)!==r&&c.current.set(t,n.offsetHeight)}}),l(function(e){return e+1})};e?t():d.current=(0,en.Z)(t)}return(0,r.useEffect)(function(){return p},[]),[function(r,o){var 
a=e(r),i=s.current.get(a);o?(s.current.set(a,o),f()):s.current.delete(a),!i!=!o&&(o?null==t||t(r):null==n||n(r))},f,c.current,i]}(eK,null,null),e4=(0,u.Z)(e2,4),e3=e4[0],e6=e4[1],e5=e4[2],e8=e4[3],e9=r.useMemo(function(){if(!eI)return{scrollHeight:void 0,start:0,end:eP.length-1,offset:void 0};if(!eR)return{scrollHeight:(null===(e=eL.current)||void 0===e?void 0:e.offsetHeight)||0,start:0,end:eP.length-1,offset:void 0};for(var e,t,n,r,o=0,a=eP.length,i=0;i=eF&&void 0===t&&(t=i,n=o),c>eF+Y&&void 0===r&&(r=i),o=c}return void 0===t&&(t=0,n=0,r=Math.ceil(Y/K)),void 0===r&&(r=eP.length-1),{scrollHeight:o,start:t,end:r=Math.min(r+1,eP.length-1),offset:n}},[eR,eI,eF,eP,e8,Y]),e7=e9.scrollHeight,te=e9.start,tt=e9.end,tn=e9.offset;eQ.current.start=te,eQ.current.end=tt;var tr=r.useState({width:0,height:Y}),to=(0,u.Z)(tr,2),ta=to[0],ti=to[1],tl=(0,r.useRef)(),ts=(0,r.useRef)(),tc=r.useMemo(function(){return em(ta.width,eE)},[ta.width,eE]),tu=r.useMemo(function(){return em(ta.height,e7)},[ta.height,e7]),td=e7-Y,tp=(0,r.useRef)(td);tp.current=td;var tf=eF<=0,tm=eF>=td,tg=ep(tf,tm),th=function(){return{x:eN?-ez:ez,y:eF}},tb=(0,r.useRef)(th()),tv=(0,eu.zX)(function(){if(ex){var e=th();(tb.current.x!==e.x||tb.current.y!==e.y)&&(ex(e),tb.current=e)}});function ty(e,t){t?((0,J.flushSync)(function(){eH(e)}),tv()):eX(e)}var tE=function(e){var t=e,n=eE-ta.width;return Math.min(t=Math.max(t,0),n)},tS=(0,eu.zX)(function(e,t){t?((0,J.flushSync)(function(){eH(function(t){return tE(t+(eN?-e:e))})}),tv()):eX(function(t){return t+e})}),tw=(E=!!eE,S=(0,r.useRef)(0),w=(0,r.useRef)(null),x=(0,r.useRef)(null),O=(0,r.useRef)(!1),C=ep(tf,tm),T=(0,r.useRef)(null),k=(0,r.useRef)(null),[function(e){if(eI){en.Z.cancel(k.current),k.current=(0,en.Z)(function(){T.current=null},2);var t,n=e.deltaX,r=e.deltaY,o=e.shiftKey,a=n,i=r;("sx"===T.current||!T.current&&o&&r&&!n)&&(a=r,i=0,T.current="sx");var 
l=Math.abs(a),s=Math.abs(i);(null===T.current&&(T.current=E&&l>s?"x":"y"),"y"===T.current)?(t=i,en.Z.cancel(w.current),S.current+=t,x.current=t,C(t)||(ed||e.preventDefault(),w.current=(0,en.Z)(function(){var e=O.current?10:1;tS(S.current*e),S.current=0}))):(tS(a,!0),ed||e.preventDefault())}},function(e){eI&&(O.current=e.detail===x.current)}]),tx=(0,u.Z)(tw,2),tO=tx[0],tC=tx[1];A=function(e,t){return!tg(e,t)&&(tO({preventDefault:function(){},deltaY:e}),!0)},R=(0,r.useRef)(!1),N=(0,r.useRef)(0),_=(0,r.useRef)(null),P=(0,r.useRef)(null),M=function(e){if(R.current){var t=Math.ceil(e.touches[0].pageY),n=N.current-t;N.current=t,A(n)&&e.preventDefault(),clearInterval(P.current),P.current=setInterval(function(){(!A(n*=ef,!0)||.1>=Math.abs(n))&&clearInterval(P.current)},16)}},L=function(){R.current=!1,I()},D=function(e){I(),1!==e.touches.length||R.current||(R.current=!0,N.current=Math.ceil(e.touches[0].pageY),_.current=e.target,_.current.addEventListener("touchmove",M),_.current.addEventListener("touchend",L))},I=function(){_.current&&(_.current.removeEventListener("touchmove",M),_.current.removeEventListener("touchend",L))},(0,g.Z)(function(){return eI&&eM.current.addEventListener("touchstart",D),function(){var e;null===(e=eM.current)||void 0===e||e.removeEventListener("touchstart",D),I(),clearInterval(P.current)}},[eI]),(0,g.Z)(function(){function e(e){eI&&e.preventDefault()}var t=eM.current;return t.addEventListener("wheel",tO),t.addEventListener("DOMMouseScroll",tC),t.addEventListener("MozMousePixelScroll",e),function(){t.removeEventListener("wheel",tO),t.removeEventListener("DOMMouseScroll",tC),t.removeEventListener("MozMousePixelScroll",e)}},[eI]),(0,g.Z)(function(){eE&&eH(function(e){return tE(e)})},[ta.width,eE]);var tT=function(){var e,t;null===(e=tl.current)||void 0===e||e.delayHidden(),null===(t=ts.current)||void 
0===t||t.delayHidden()},tk=(j=r.useRef(),F=r.useState(null),U=(B=(0,u.Z)(F,2))[0],Z=B[1],(0,g.Z)(function(){if(U&&U.times<10){if(!eM.current){Z(function(e){return(0,c.Z)({},e)});return}e6(!0);var e=U.targetAlign,t=U.originAlign,n=U.index,r=U.offset,o=eM.current.clientHeight,a=!1,i=e,l=null;if(o){for(var s=e||t,u=0,d=0,p=0,f=Math.min(eP.length-1,n),m=0;m<=f;m+=1){var g=eK(eP[m]);d=u;var h=e5.get(g);u=p=d+(void 0===h?K:h)}for(var b="top"===s?r:o-r,v=f;v>=0;v-=1){var y=eK(eP[v]),E=e5.get(y);if(void 0===E){a=!0;break}if((b-=E)<=0)break}switch(s){case"top":l=d-r;break;case"bottom":l=p-o+r;break;default:var S=eM.current.scrollTop;dS+o&&(i="bottom")}null!==l&&eX(l),l!==U.lastTop&&(a=!0)}a&&Z((0,c.Z)((0,c.Z)({},U),{},{times:U.times+1,targetAlign:i,lastTop:l}))}},[U,eM.current]),function(e){if(null==e){tT();return}if(en.Z.cancel(j.current),"number"==typeof e)eX(e);else if(e&&"object"===(0,p.Z)(e)){var t,n=e.align;t="index"in e?e.index:eP.findIndex(function(t){return eK(t)===e.key});var r=e.offset;Z({times:0,index:t,offset:void 0===r?0:r,originAlign:n})}});r.useImperativeHandle(t,function(){return{getScrollInfo:th,scrollTo:function(e){e&&"object"===(0,p.Z)(e)&&("left"in e||"top"in e)?(void 0!==e.left&&eH(tE(e.left)),tk(e.top)):tk(e)}}}),(0,g.Z)(function(){eO&&eO(eP.slice(te,tt+1),eP)},[te,tt,eP]);var tA=(z=r.useMemo(function(){return[new Map,[]]},[eP,e5.id,K]),G=(H=(0,u.Z)(z,2))[0],$=H[1],function(e){var t=arguments.length>1&&void 0!==arguments[1]?arguments[1]:e,n=G.get(e),r=G.get(t);if(void 0===n||void 0===r)for(var o=eP.length,a=$.length;aY&&r.createElement(eo,{ref:tl,prefixCls:V,scrollOffset:eF,scrollRange:e7,rtl:eN,onScroll:ty,onStartMove:eq,onStopMove:eY,spinSize:tu,containerSize:ta.height,style:null==ek?void 0:ek.verticalScrollBar,thumbStyle:null==ek?void 
0:ek.verticalScrollBarThumb}),eR&&eE&&r.createElement(eo,{ref:ts,prefixCls:V,scrollOffset:ez,scrollRange:eE,rtl:eN,onScroll:ty,onStartMove:eq,onStopMove:eY,spinSize:tc,containerSize:ta.width,horizontal:!0,style:null==ek?void 0:ek.horizontalScrollBar,thumbStyle:null==ek?void 0:ek.horizontalScrollBarThumb}))});ev.displayName="List";var ey=["disabled","title","children","style","className"];function eE(e){return"string"==typeof e||"number"==typeof e}var eS=r.forwardRef(function(e,t){var n=r.useContext(S),o=n.prefixCls,c=n.id,p=n.open,f=n.multiple,m=n.mode,g=n.searchValue,h=n.toggleOpen,v=n.notFoundContent,E=n.onPopupScroll,w=r.useContext(G),O=w.maxCount,C=w.flattenOptions,T=w.onActiveValue,k=w.defaultActiveFirstOption,A=w.onSelect,I=w.menuItemSelectedIcon,R=w.rawValues,N=w.fieldNames,_=w.virtual,P=w.direction,M=w.listHeight,L=w.listItemHeight,D=w.optionRender,j="".concat(o,"-item"),F=(0,X.Z)(function(){return C},[p,C],function(e,t){return t[0]&&e[1]!==t[1]}),B=r.useRef(null),U=r.useMemo(function(){return f&&void 0!==O&&(null==R?void 0:R.size)>=O},[f,O,null==R?void 0:R.size]),Z=function(e){e.preventDefault()},z=function(e){var t;null===(t=B.current)||void 0===t||t.scrollTo("number"==typeof e?{index:e}:e)},H=function(e){for(var t=arguments.length>1&&void 0!==arguments[1]?arguments[1]:1,n=F.length,r=0;r1&&void 0!==arguments[1]&&arguments[1];q(e);var n={source:t?"keyboard":"mouse"},r=F[e];if(!r){T(null,-1,n);return}T(r.value,e,n)};(0,r.useEffect)(function(){Y(!1!==k?H(0):-1)},[F.length,g]);var K=r.useCallback(function(e){return R.has(e)&&"combobox"!==m},[m,(0,l.Z)(R).toString(),R.size]);(0,r.useEffect)(function(){var e,t=setTimeout(function(){if(!f&&p&&1===R.size){var e=Array.from(R)[0],t=F.findIndex(function(t){return t.data.value===e});-1!==t&&(Y(t),z(t))}});return p&&(null===(e=B.current)||void 0===e||e.scrollTo(void 0)),function(){return clearTimeout(t)}},[p,g]);var J=function(e){void 
0!==e&&A(e,{selected:!R.has(e)}),f||h(!1)};if(r.useImperativeHandle(t,function(){return{onKeyDown:function(e){var t=e.which,n=e.ctrlKey;switch(t){case b.Z.N:case b.Z.P:case b.Z.UP:case b.Z.DOWN:var r=0;if(t===b.Z.UP?r=-1:t===b.Z.DOWN?r=1:/(mac\sos|macintosh)/i.test(navigator.appVersion)&&n&&(t===b.Z.N?r=1:t===b.Z.P&&(r=-1)),0!==r){var o=H(V+r,r);z(o),Y(o,!0)}break;case b.Z.ENTER:var a,i=F[V];!i||null!=i&&null!==(a=i.data)&&void 0!==a&&a.disabled||U?J(void 0):J(i.value),p&&e.preventDefault();break;case b.Z.ESC:h(!1),p&&e.stopPropagation()}},onKeyUp:function(){},scrollTo:function(e){z(e)}}}),0===F.length)return r.createElement("div",{role:"listbox",id:"".concat(c,"_list"),className:"".concat(j,"-empty"),onMouseDown:Z},v);var ee=Object.keys(N).map(function(e){return N[e]}),et=function(e){return e.label};function en(e,t){return{role:e.group?"presentation":"option",id:"".concat(c,"_list_").concat(t)}}var er=function(e){var t=F[e];if(!t)return null;var n=t.data||{},o=n.value,a=t.group,l=(0,x.Z)(n,!0),s=et(t);return t?r.createElement("div",(0,i.Z)({"aria-label":"string"!=typeof s||a?null:s},l,{key:e},en(t,e),{"aria-selected":K(o)}),o):null},eo={role:"listbox",id:"".concat(c,"_list")};return r.createElement(r.Fragment,null,_&&r.createElement("div",(0,i.Z)({},eo,{style:{height:0,width:0,overflow:"hidden"}}),er(V-1),er(V),er(V+1)),r.createElement(ev,{itemKey:"key",ref:B,data:F,height:M,itemHeight:L,fullHeight:!1,onMouseDown:Z,onScroll:E,virtual:_,direction:P,innerProps:_?null:eo},function(e,t){var n=e.group,o=e.groupOption,l=e.data,c=e.label,u=e.value,p=l.key;if(n){var f,m,g=null!==(m=l.title)&&void 0!==m?m:eE(c)?c.toString():void 0;return r.createElement("div",{className:a()(j,"".concat(j,"-group")),title:g},void 0!==c?c:p)}var 
h=l.disabled,b=l.title,v=(l.children,l.style),E=l.className,S=(0,d.Z)(l,ey),w=(0,Q.Z)(S,ee),O=K(u),C=h||!O&&U,T="".concat(j,"-option"),k=a()(j,T,E,(f={},(0,s.Z)(f,"".concat(T,"-grouped"),o),(0,s.Z)(f,"".concat(T,"-active"),V===t&&!C),(0,s.Z)(f,"".concat(T,"-disabled"),C),(0,s.Z)(f,"".concat(T,"-selected"),O),f)),A=et(e),R=!I||"function"==typeof I||O,N="number"==typeof A?A:A||u,P=eE(N)?N.toString():void 0;return void 0!==b&&(P=b),r.createElement("div",(0,i.Z)({},(0,x.Z)(w),_?{}:en(e,t),{"aria-selected":O,className:k,title:P,onMouseMove:function(){V===t||C||Y(t)},onClick:function(){C||J(u)},style:v}),r.createElement("div",{className:"".concat(T,"-content")},"function"==typeof D?D(e,{index:t}):N),r.isValidElement(I)||O,R&&r.createElement(y,{className:"".concat(j,"-option-state"),customizeIcon:I,customizeIconProps:{value:u,disabled:C,isSelected:O}},O?"✓":null))}))});function ew(e,t){return T(e).join("").toUpperCase().includes(t)}var ex=n(22127),eO=0,eC=(0,ex.Z)(),eT=n(33054),ek=["children","value"],eA=["children"];function eI(e){var t=r.useRef();return t.current=e,r.useCallback(function(){return t.current.apply(t,arguments)},[])}var eR=["id","mode","prefixCls","backfill","fieldNames","inputValue","searchValue","onSearch","autoClearSearchValue","onSelect","onDeselect","dropdownMatchSelectWidth","filterOption","filterSort","optionFilterProp","optionLabelProp","options","optionRender","children","defaultActiveFirstOption","menuItemSelectedIcon","virtual","direction","listHeight","listItemHeight","value","defaultValue","labelInValue","onChange","maxCount"],eN=["inputValue"],e_=r.forwardRef(function(e,t){var n,o,a,m,g,h,b,v=e.id,y=e.mode,E=e.prefixCls,S=e.backfill,w=e.fieldNames,x=e.inputValue,O=e.searchValue,C=e.onSearch,k=e.autoClearSearchValue,A=void 0===k||k,I=e.onSelect,R=e.onDeselect,N=e.dropdownMatchSelectWidth,_=void 
0===N||N,P=e.filterOption,M=e.filterSort,L=e.optionFilterProp,D=e.optionLabelProp,j=e.options,F=e.optionRender,B=e.children,H=e.defaultActiveFirstOption,$=e.menuItemSelectedIcon,W=e.virtual,Y=e.direction,K=e.listHeight,X=void 0===K?200:K,Q=e.listItemHeight,J=void 0===Q?20:Q,ee=e.value,et=e.defaultValue,en=e.labelInValue,er=e.onChange,eo=e.maxCount,ea=(0,d.Z)(e,eR),ei=(n=r.useState(),a=(o=(0,u.Z)(n,2))[0],m=o[1],r.useEffect(function(){var e;m("rc_select_".concat((eC?(e=eO,eO+=1):e="TEST_OR_SSR",e)))},[]),v||a),el=V(y),es=!!(!j&&B),ec=r.useMemo(function(){return(void 0!==P||"combobox"!==y)&&P},[P,y]),eu=r.useMemo(function(){return Z(w,es)},[JSON.stringify(w),es]),ed=(0,f.Z)("",{value:void 0!==O?O:x,postState:function(e){return e||""}}),ep=(0,u.Z)(ed,2),ef=ep[0],em=ep[1],eg=r.useMemo(function(){var e=j;j||(e=function e(t){var n=arguments.length>1&&void 0!==arguments[1]&&arguments[1];return(0,eT.Z)(t).map(function(t,o){if(!r.isValidElement(t)||!t.type)return null;var a,i,l,s,u,p=t.type.isSelectOptGroup,f=t.key,m=t.props,g=m.children,h=(0,d.Z)(m,eA);return n||!p?(a=t.key,l=(i=t.props).children,s=i.value,u=(0,d.Z)(i,ek),(0,c.Z)({key:a,value:void 0!==s?s:a,children:l},u)):(0,c.Z)((0,c.Z)({key:"__RC_SELECT_GRP__".concat(null===f?o:f,"__"),label:f},h),{},{options:e(g)})}).filter(function(e){return e})}(B));var t=new Map,n=new Map,o=function(e,t,n){n&&"string"==typeof n&&e.set(t[n],t)};return function e(r){for(var a=arguments.length>1&&void 0!==arguments[1]&&arguments[1],i=0;i1&&void 0!==arguments[1]?arguments[1]:{},n=t.fieldNames,r=t.childrenAsData,o=[],a=Z(n,!1),i=a.label,l=a.value,s=a.options,c=a.groupLabel;return!function e(t,n){Array.isArray(t)&&t.forEach(function(t){if(!n&&s in t){var a=t[c];void 0===a&&r&&(a=t.label),o.push({key:U(t,o.length),group:!0,data:t,label:a}),e(t[s],!0)}else{var u=t[l];o.push({key:U(t,o.length),groupOption:n,data:t,label:t[i],value:u})}})}(e,!1),o}(eH,{fieldNames:eu,childrenAsData:es})},[eH,eu,es]),e$=function(e){var 
t=ey(e);if(eP(t),er&&(t.length!==eD.length||t.some(function(e,t){var n;return(null===(n=eD[t])||void 0===n?void 0:n.value)!==(null==e?void 0:e.value)}))){var n=en?t:t.map(function(e){return e.value}),r=t.map(function(e){return z(ej(e.value))});er(el?n:n[0],el?r:r[0])}},eW=r.useState(null),eV=(0,u.Z)(eW,2),eq=eV[0],eY=eV[1],eK=r.useState(0),eX=(0,u.Z)(eK,2),eQ=eX[0],eJ=eX[1],e0=void 0!==H?H:"combobox"!==y,e1=r.useCallback(function(e,t){var n=arguments.length>2&&void 0!==arguments[2]?arguments[2]:{},r=n.source;eJ(t),S&&"combobox"===y&&null!==e&&"keyboard"===(void 0===r?"keyboard":r)&&eY(String(e))},[S,y]),e2=function(e,t,n){var r=function(){var t,n=ej(e);return[en?{label:null==n?void 0:n[eu.label],value:e,key:null!==(t=null==n?void 0:n.key)&&void 0!==t?t:e}:e,z(n)]};if(t&&I){var o=r(),a=(0,u.Z)(o,2);I(a[0],a[1])}else if(!t&&R&&"clear"!==n){var i=r(),l=(0,u.Z)(i,2);R(l[0],l[1])}},e4=eI(function(e,t){var n=!el||t.selected;e$(n?el?[].concat((0,l.Z)(eD),[e]):[e]:eD.filter(function(t){return t.value!==e})),e2(e,n),"combobox"===y?eY(""):(!V||A)&&(em(""),eY(""))}),e3=r.useMemo(function(){var e=!1!==W&&!1!==_;return(0,c.Z)((0,c.Z)({},eg),{},{flattenOptions:eG,onActiveValue:e1,defaultActiveFirstOption:e0,onSelect:e4,menuItemSelectedIcon:$,rawValues:eB,fieldNames:eu,virtual:e,direction:Y,listHeight:X,listItemHeight:J,childrenAsData:es,maxCount:eo,optionRender:F})},[eo,eg,eG,e1,e0,e4,$,eB,eu,W,_,Y,X,J,es,F]);return r.createElement(G.Provider,{value:e3},r.createElement(q,(0,i.Z)({},ea,{id:ei,prefixCls:void 0===E?"rc-select":E,ref:t,omitDomProps:eN,mode:y,displayValues:eF,onDisplayValuesChange:function(e,t){e$(e);var n=t.type,r=t.values;("remove"===n||"clear"===n)&&r.forEach(function(e){e2(e.value,!1,n)})},direction:Y,searchValue:ef,onSearch:function(e,t){if(em(e),eY(null),"submit"===t.source){var n=(e||"").trim();n&&(e$(Array.from(new 
Set([].concat((0,l.Z)(eB),[n])))),e2(n,!0),em(""));return}"blur"!==t.source&&("combobox"===y&&e$(e),null==C||C(e))},autoClearSearchValue:A,onSearchSplit:function(e){var t=e;"tags"!==y&&(t=e.map(function(e){var t=eb.get(e);return null==t?void 0:t.value}).filter(function(e){return void 0!==e}));var n=Array.from(new Set([].concat((0,l.Z)(eB),(0,l.Z)(t))));e$(n),n.forEach(function(e){e2(e,!0)})},dropdownMatchSelectWidth:_,OptionList:eS,emptyOptions:!eG.length,activeValue:eq,activeDescendantId:"".concat(ei,"_list_").concat(eQ)})))});e_.Option=K,e_.OptGroup=Y;var eP=n(51761),eM=n(47387),eL=n(21467),eD=n(47794),ej=n(57499),eF=n(70595),eB=n(6336),eU=n(24750),eZ=n(76585),ez=n(80316);let eH=e=>{let{componentCls:t,margin:n,marginXS:r,marginXL:o,fontSize:a,lineHeight:i}=e;return{[t]:{marginInline:r,fontSize:a,lineHeight:i,textAlign:"center",["".concat(t,"-image")]:{height:e.emptyImgHeight,marginBottom:r,opacity:e.opacityImage,img:{height:"100%"},svg:{maxWidth:"100%",height:"100%",margin:"auto"}},["".concat(t,"-description")]:{color:e.colorText},["".concat(t,"-footer")]:{marginTop:n},"&-normal":{marginBlock:o,color:e.colorTextDisabled,["".concat(t,"-description")]:{color:e.colorTextDisabled},["".concat(t,"-image")]:{height:e.emptyImgHeightMD}},"&-small":{marginBlock:r,color:e.colorTextDisabled,["".concat(t,"-image")]:{height:e.emptyImgHeightSM}}}}};var eG=(0,eZ.I$)("Empty",e=>{let{componentCls:t,controlHeightLG:n,calc:r}=e;return[eH((0,ez.TS)(e,{emptyImgCls:"".concat(t,"-img"),emptyImgHeight:r(n).mul(2.5).equal(),emptyImgHeightMD:n,emptyImgHeightSM:r(n).mul(.875).equal()}))]}),e$=function(e,t){var n={};for(var r in e)Object.prototype.hasOwnProperty.call(e,r)&&0>t.indexOf(r)&&(n[r]=e[r]);if(null!=e&&"function"==typeof Object.getOwnPropertySymbols)for(var o=0,r=Object.getOwnPropertySymbols(e);ot.indexOf(r[o])&&Object.prototype.propertyIsEnumerable.call(e,r[o])&&(n[r[o]]=e[r[o]]);return n};let eW=r.createElement(()=>{let[,e]=(0,eU.ZP)(),t=new 
eB.C(e.colorBgBase).toHsl().l<.5?{opacity:.65}:{};return r.createElement("svg",{style:t,width:"184",height:"152",viewBox:"0 0 184 152",xmlns:"http://www.w3.org/2000/svg"},r.createElement("g",{fill:"none",fillRule:"evenodd"},r.createElement("g",{transform:"translate(24 31.67)"},r.createElement("ellipse",{fillOpacity:".8",fill:"#F5F5F7",cx:"67.797",cy:"106.89",rx:"67.797",ry:"12.668"}),r.createElement("path",{d:"M122.034 69.674L98.109 40.229c-1.148-1.386-2.826-2.225-4.593-2.225h-51.44c-1.766 0-3.444.839-4.592 2.225L13.56 69.674v15.383h108.475V69.674z",fill:"#AEB8C2"}),r.createElement("path",{d:"M101.537 86.214L80.63 61.102c-1.001-1.207-2.507-1.867-4.048-1.867H31.724c-1.54 0-3.047.66-4.048 1.867L6.769 86.214v13.792h94.768V86.214z",fill:"url(#linearGradient-1)",transform:"translate(13.56)"}),r.createElement("path",{d:"M33.83 0h67.933a4 4 0 0 1 4 4v93.344a4 4 0 0 1-4 4H33.83a4 4 0 0 1-4-4V4a4 4 0 0 1 4-4z",fill:"#F5F5F7"}),r.createElement("path",{d:"M42.678 9.953h50.237a2 2 0 0 1 2 2V36.91a2 2 0 0 1-2 2H42.678a2 2 0 0 1-2-2V11.953a2 2 0 0 1 2-2zM42.94 49.767h49.713a2.262 2.262 0 1 1 0 4.524H42.94a2.262 2.262 0 0 1 0-4.524zM42.94 61.53h49.713a2.262 2.262 0 1 1 0 4.525H42.94a2.262 2.262 0 0 1 0-4.525zM121.813 105.032c-.775 3.071-3.497 5.36-6.735 5.36H20.515c-3.238 0-5.96-2.29-6.734-5.36a7.309 7.309 0 0 1-.222-1.79V69.675h26.318c2.907 0 5.25 2.448 5.25 5.42v.04c0 2.971 2.37 5.37 5.277 5.37h34.785c2.907 0 5.277-2.421 5.277-5.393V75.1c0-2.972 2.343-5.426 5.25-5.426h26.318v33.569c0 .617-.077 1.216-.221 1.789z",fill:"#DCE0E6"})),r.createElement("path",{d:"M149.121 33.292l-6.83 2.65a1 1 0 0 1-1.317-1.23l1.937-6.207c-2.589-2.944-4.109-6.534-4.109-10.408C138.802 8.102 148.92 0 161.402 0 173.881 0 184 8.102 184 18.097c0 9.995-10.118 18.097-22.599 18.097-4.528 0-8.744-1.066-12.28-2.902z",fill:"#DCE0E6"}),r.createElement("g",{transform:"translate(149.65 15.383)",fill:"#FFF"},r.createElement("ellipse",{cx:"20.654",cy:"3.167",rx:"2.849",ry:"2.815"}),r.createElement("path",{d:"M5.698 
5.63H0L2.898.704zM9.259.704h4.985V5.63H9.259z"}))))},null),eV=r.createElement(()=>{let[,e]=(0,eU.ZP)(),{colorFill:t,colorFillTertiary:n,colorFillQuaternary:o,colorBgContainer:a}=e,{borderColor:i,shadowColor:l,contentColor:s}=(0,r.useMemo)(()=>({borderColor:new eB.C(t).onBackground(a).toHexShortString(),shadowColor:new eB.C(n).onBackground(a).toHexShortString(),contentColor:new eB.C(o).onBackground(a).toHexShortString()}),[t,n,o,a]);return r.createElement("svg",{width:"64",height:"41",viewBox:"0 0 64 41",xmlns:"http://www.w3.org/2000/svg"},r.createElement("g",{transform:"translate(0 1)",fill:"none",fillRule:"evenodd"},r.createElement("ellipse",{fill:l,cx:"32",cy:"33",rx:"32",ry:"7"}),r.createElement("g",{fillRule:"nonzero",stroke:i},r.createElement("path",{d:"M55 12.76L44.854 1.258C44.367.474 43.656 0 42.907 0H21.093c-.749 0-1.46.474-1.947 1.257L9 12.761V22h46v-9.24z"}),r.createElement("path",{d:"M41.613 15.931c0-1.605.994-2.93 2.227-2.931H55v18.137C55 33.26 53.68 35 52.05 35h-40.1C10.32 35 9 33.259 9 31.137V13h11.16c1.233 0 2.227 1.323 2.227 2.928v.022c0 1.605 1.005 2.901 2.237 2.901h14.752c1.232 0 2.237-1.308 2.237-2.913v-.007z",fill:s}))))},null),eq=e=>{var{className:t,rootClassName:n,prefixCls:o,image:i=eW,description:l,children:s,imageStyle:c,style:u}=e,d=e$(e,["className","rootClassName","prefixCls","image","description","children","imageStyle","style"]);let{getPrefixCls:p,direction:f,empty:m}=r.useContext(ej.E_),g=p("empty",o),[h,b,v]=eG(g),[y]=(0,eF.Z)("Empty"),E=void 0!==l?l:null==y?void 0:y.description,S=null;return S="string"==typeof i?r.createElement("img",{alt:"string"==typeof E?E:"empty",src:i}):i,h(r.createElement("div",Object.assign({className:a()(b,v,g,null==m?void 0:m.className,{["".concat(g,"-normal")]:i===eV,["".concat(g,"-rtl")]:"rtl"===f},t,n),style:Object.assign(Object.assign({},null==m?void 
0:m.style),u)},d),r.createElement("div",{className:"".concat(g,"-image"),style:c},S),E&&r.createElement("div",{className:"".concat(g,"-description")},E),s&&r.createElement("div",{className:"".concat(g,"-footer")},s)))};eq.PRESENTED_IMAGE_DEFAULT=eW,eq.PRESENTED_IMAGE_SIMPLE=eV;var eY=e=>{let{componentName:t}=e,{getPrefixCls:n}=(0,r.useContext)(ej.E_),o=n("empty");switch(t){case"Table":case"List":return r.createElement(eq,{image:eq.PRESENTED_IMAGE_SIMPLE});case"Select":case"TreeSelect":case"Cascader":case"Transfer":case"Mentions":return r.createElement(eq,{image:eq.PRESENTED_IMAGE_SIMPLE,className:"".concat(o,"-small")});default:return r.createElement(eq,null)}},eK=n(17094),eX=n(92935),eQ=n(10693),eJ=n(47137),e0=n(8443),e1=n(92801);let e2=e=>{let t={overflow:{adjustX:!0,adjustY:!0,shiftY:!0},htmlRegion:"scroll"===e?"scroll":"visible",dynamicInset:!0};return{bottomLeft:Object.assign(Object.assign({},t),{points:["tl","bl"],offset:[0,4]}),bottomRight:Object.assign(Object.assign({},t),{points:["tr","br"],offset:[0,4]}),topLeft:Object.assign(Object.assign({},t),{points:["bl","tl"],offset:[0,-4]}),topRight:Object.assign(Object.assign({},t),{points:["br","tr"],offset:[0,-4]})}};var e4=n(11303),e3=n(12288),e6=n(202),e5=n(25926);let e8=e=>{let{optionHeight:t,optionFontSize:n,optionLineHeight:r,optionPadding:o}=e;return{position:"relative",display:"block",minHeight:t,padding:o,color:e.colorText,fontWeight:"normal",fontSize:n,lineHeight:r,boxSizing:"border-box"}};var 
e9=e=>{let{antCls:t,componentCls:n}=e,r="".concat(n,"-item"),o="&".concat(t,"-slide-up-enter").concat(t,"-slide-up-enter-active"),a="&".concat(t,"-slide-up-appear").concat(t,"-slide-up-appear-active"),i="&".concat(t,"-slide-up-leave").concat(t,"-slide-up-leave-active"),l="".concat(n,"-dropdown-placement-");return[{["".concat(n,"-dropdown")]:Object.assign(Object.assign({},(0,e4.Wf)(e)),{position:"absolute",top:-9999,zIndex:e.zIndexPopup,boxSizing:"border-box",padding:e.paddingXXS,overflow:"hidden",fontSize:e.fontSize,fontVariant:"initial",backgroundColor:e.colorBgElevated,borderRadius:e.borderRadiusLG,outline:"none",boxShadow:e.boxShadowSecondary,["\n ".concat(o).concat(l,"bottomLeft,\n ").concat(a).concat(l,"bottomLeft\n ")]:{animationName:e6.fJ},["\n ".concat(o).concat(l,"topLeft,\n ").concat(a).concat(l,"topLeft,\n ").concat(o).concat(l,"topRight,\n ").concat(a).concat(l,"topRight\n ")]:{animationName:e6.Qt},["".concat(i).concat(l,"bottomLeft")]:{animationName:e6.Uw},["\n ".concat(i).concat(l,"topLeft,\n ").concat(i).concat(l,"topRight\n ")]:{animationName:e6.ly},"&-hidden":{display:"none"},["".concat(r)]:Object.assign(Object.assign({},e8(e)),{cursor:"pointer",transition:"background ".concat(e.motionDurationSlow," ease"),borderRadius:e.borderRadiusSM,"&-group":{color:e.colorTextDescription,fontSize:e.fontSizeSM,cursor:"default"},"&-option":{display:"flex","&-content":Object.assign({flex:"auto"},e4.vS),"&-state":{flex:"none",display:"flex",alignItems:"center"},["&-active:not(".concat(r,"-option-disabled)")]:{backgroundColor:e.optionActiveBg},["&-selected:not(".concat(r,"-option-disabled)")]:{color:e.optionSelectedColor,fontWeight:e.optionSelectedFontWeight,backgroundColor:e.optionSelectedBg,["".concat(r,"-option-state")]:{color:e.colorPrimary},["&:has(+ ".concat(r,"-option-selected:not(").concat(r,"-option-disabled))")]:{borderEndStartRadius:0,borderEndEndRadius:0,["& + 
".concat(r,"-option-selected:not(").concat(r,"-option-disabled)")]:{borderStartStartRadius:0,borderStartEndRadius:0}}},"&-disabled":{["&".concat(r,"-option-selected")]:{backgroundColor:e.colorBgContainerDisabled},color:e.colorTextDisabled,cursor:"not-allowed"},"&-grouped":{paddingInlineStart:e.calc(e.controlPaddingHorizontal).mul(2).equal()}}}),"&-rtl":{direction:"rtl"}})},(0,e6.oN)(e,"slide-up"),(0,e6.oN)(e,"slide-down"),(0,e5.Fm)(e,"move-up"),(0,e5.Fm)(e,"move-down")]},e7=n(8985);let te=e=>{let{multipleSelectItemHeight:t,selectHeight:n,lineWidth:r}=e;return e.calc(n).sub(t).div(2).sub(r).equal()};function tt(e,t){let{componentCls:n,iconCls:r}=e,o="".concat(n,"-selection-overflow"),a=e.multipleSelectItemHeight,i=te(e),l=t?"".concat(n,"-").concat(t):"";return{["".concat(n,"-multiple").concat(l)]:{fontSize:e.fontSize,[o]:{position:"relative",display:"flex",flex:"auto",flexWrap:"wrap",maxWidth:"100%","&-item":{flex:"none",alignSelf:"center",maxWidth:"100%",display:"inline-flex"}},["".concat(n,"-selector")]:{display:"flex",flexWrap:"wrap",alignItems:"center",height:"100%",paddingInline:e.calc(2).mul(2).equal(),paddingBlock:e.calc(i).sub(2).equal(),borderRadius:e.borderRadius,["".concat(n,"-show-search&")]:{cursor:"text"},["".concat(n,"-disabled&")]:{background:e.multipleSelectorBgDisabled,cursor:"not-allowed"},"&:after":{display:"inline-block",width:0,margin:"".concat((0,e7.bf)(2)," 0"),lineHeight:(0,e7.bf)(a),visibility:"hidden",content:'"\\a0"'}},["\n &".concat(n,"-show-arrow ").concat(n,"-selector,\n &").concat(n,"-allow-clear ").concat(n,"-selector\n ")]:{paddingInlineEnd:e.calc(e.fontSizeIcon).add(e.controlPaddingHorizontal).equal()},["".concat(n,"-selection-item")]:{display:"flex",alignSelf:"center",flex:"none",boxSizing:"border-box",maxWidth:"100%",height:a,marginTop:2,marginBottom:2,lineHeight:(0,e7.bf)(e.calc(a).sub(e.calc(e.lineWidth).mul(2)).equal()),borderRadius:e.borderRadiusSM,cursor:"default",transition:"font-size ".concat(e.motionDurationSlow,", 
line-height ").concat(e.motionDurationSlow,", height ").concat(e.motionDurationSlow),marginInlineEnd:e.calc(2).mul(2).equal(),paddingInlineStart:e.paddingXS,paddingInlineEnd:e.calc(e.paddingXS).div(2).equal(),["".concat(n,"-disabled&")]:{color:e.multipleItemColorDisabled,borderColor:e.multipleItemBorderColorDisabled,cursor:"not-allowed"},"&-content":{display:"inline-block",marginInlineEnd:e.calc(e.paddingXS).div(2).equal(),overflow:"hidden",whiteSpace:"pre",textOverflow:"ellipsis"},"&-remove":Object.assign(Object.assign({},(0,e4.Ro)()),{display:"inline-flex",alignItems:"center",color:e.colorIcon,fontWeight:"bold",fontSize:10,lineHeight:"inherit",cursor:"pointer",["> ".concat(r)]:{verticalAlign:"-0.2em"},"&:hover":{color:e.colorIconHover}})},["".concat(o,"-item + ").concat(o,"-item")]:{["".concat(n,"-selection-search")]:{marginInlineStart:0}},["".concat(o,"-item-suffix")]:{height:"100%"},["".concat(n,"-selection-search")]:{display:"inline-flex",position:"relative",maxWidth:"100%",marginInlineStart:e.calc(e.inputPaddingHorizontalBase).sub(i).equal(),"\n &-input,\n &-mirror\n ":{height:a,fontFamily:e.fontFamily,lineHeight:(0,e7.bf)(a),transition:"all ".concat(e.motionDurationSlow)},"&-input":{width:"100%",minWidth:4.1},"&-mirror":{position:"absolute",top:0,insetInlineStart:0,insetInlineEnd:"auto",zIndex:999,whiteSpace:"pre",visibility:"hidden"}},["".concat(n,"-selection-placeholder")]:{position:"absolute",top:"50%",insetInlineStart:e.inputPaddingHorizontalBase,insetInlineEnd:e.inputPaddingHorizontalBase,transform:"translateY(-50%)",transition:"all ".concat(e.motionDurationSlow)}}}}var 
tn=e=>{let{componentCls:t}=e,n=(0,ez.TS)(e,{selectHeight:e.controlHeightSM,multipleSelectItemHeight:e.controlHeightXS,borderRadius:e.borderRadiusSM,borderRadiusSM:e.borderRadiusXS}),r=(0,ez.TS)(e,{fontSize:e.fontSizeLG,selectHeight:e.controlHeightLG,multipleSelectItemHeight:e.multipleItemHeightLG,borderRadius:e.borderRadiusLG,borderRadiusSM:e.borderRadius});return[tt(e),tt(n,"sm"),{["".concat(t,"-multiple").concat(t,"-sm")]:{["".concat(t,"-selection-placeholder")]:{insetInline:e.calc(e.controlPaddingHorizontalSM).sub(e.lineWidth).equal()},["".concat(t,"-selection-search")]:{marginInlineStart:2}}},tt(r,"lg")]};function tr(e,t){let{componentCls:n,inputPaddingHorizontalBase:r,borderRadius:o}=e,a=e.calc(e.controlHeight).sub(e.calc(e.lineWidth).mul(2)).equal(),i=t?"".concat(n,"-").concat(t):"";return{["".concat(n,"-single").concat(i)]:{fontSize:e.fontSize,height:e.controlHeight,["".concat(n,"-selector")]:Object.assign(Object.assign({},(0,e4.Wf)(e,!0)),{display:"flex",borderRadius:o,["".concat(n,"-selection-search")]:{position:"absolute",top:0,insetInlineStart:r,insetInlineEnd:r,bottom:0,"&-input":{width:"100%",WebkitAppearance:"textfield"}},["\n ".concat(n,"-selection-item,\n ").concat(n,"-selection-placeholder\n ")]:{padding:0,lineHeight:(0,e7.bf)(a),transition:"all ".concat(e.motionDurationSlow,", visibility 0s"),alignSelf:"center"},["".concat(n,"-selection-placeholder")]:{transition:"none",pointerEvents:"none"},[["&:after","".concat(n,"-selection-item:empty:after"),"".concat(n,"-selection-placeholder:empty:after")].join(",")]:{display:"inline-block",width:0,visibility:"hidden",content:'"\\a0"'}}),["\n &".concat(n,"-show-arrow ").concat(n,"-selection-item,\n &").concat(n,"-show-arrow ").concat(n,"-selection-placeholder\n ")]:{paddingInlineEnd:e.showArrowPaddingInlineEnd},["&".concat(n,"-open ").concat(n,"-selection-item")]:{color:e.colorTextPlaceholder},["&:not(".concat(n,"-customize-input)")]:{["".concat(n,"-selector")]:{width:"100%",height:"100%",padding:"0 
".concat((0,e7.bf)(r)),["".concat(n,"-selection-search-input")]:{height:a},"&:after":{lineHeight:(0,e7.bf)(a)}}},["&".concat(n,"-customize-input")]:{["".concat(n,"-selector")]:{"&:after":{display:"none"},["".concat(n,"-selection-search")]:{position:"static",width:"100%"},["".concat(n,"-selection-placeholder")]:{position:"absolute",insetInlineStart:0,insetInlineEnd:0,padding:"0 ".concat((0,e7.bf)(r)),"&:after":{display:"none"}}}}}}}let to=(e,t)=>{let{componentCls:n,antCls:r,controlOutlineWidth:o}=e;return{["&:not(".concat(n,"-customize-input) ").concat(n,"-selector")]:{border:"".concat((0,e7.bf)(e.lineWidth)," ").concat(e.lineType," ").concat(t.borderColor),background:e.selectorBg},["&:not(".concat(n,"-disabled):not(").concat(n,"-customize-input):not(").concat(r,"-pagination-size-changer)")]:{["&:hover ".concat(n,"-selector")]:{borderColor:t.hoverBorderHover},["".concat(n,"-focused& ").concat(n,"-selector")]:{borderColor:t.activeBorderColor,boxShadow:"0 0 0 ".concat((0,e7.bf)(o)," ").concat(t.activeShadowColor),outline:0}}}},ta=(e,t)=>({["&".concat(e.componentCls,"-status-").concat(t.status)]:Object.assign({},to(e,t))}),ti=e=>({"&-outlined":Object.assign(Object.assign(Object.assign(Object.assign({},to(e,{borderColor:e.colorBorder,hoverBorderHover:e.colorPrimaryHover,activeBorderColor:e.colorPrimary,activeShadowColor:e.controlOutline})),ta(e,{status:"error",borderColor:e.colorError,hoverBorderHover:e.colorErrorHover,activeBorderColor:e.colorError,activeShadowColor:e.colorErrorOutline})),ta(e,{status:"warning",borderColor:e.colorWarning,hoverBorderHover:e.colorWarningHover,activeBorderColor:e.colorWarning,activeShadowColor:e.colorWarningOutline})),{["&".concat(e.componentCls,"-disabled")]:{["&:not(".concat(e.componentCls,"-customize-input) ").concat(e.componentCls,"-selector")]:{background:e.colorBgContainerDisabled,color:e.colorTextDisabled}},["&".concat(e.componentCls,"-multiple 
").concat(e.componentCls,"-selection-item")]:{background:e.multipleItemBg,border:"".concat((0,e7.bf)(e.lineWidth)," ").concat(e.lineType," ").concat(e.multipleItemBorderColor)}})}),tl=(e,t)=>{let{componentCls:n,antCls:r}=e;return{["&:not(".concat(n,"-customize-input) ").concat(n,"-selector")]:{background:t.bg,border:"".concat((0,e7.bf)(e.lineWidth)," ").concat(e.lineType," transparent"),color:t.color},["&:not(".concat(n,"-disabled):not(").concat(n,"-customize-input):not(").concat(r,"-pagination-size-changer)")]:{["&:hover ".concat(n,"-selector")]:{background:t.hoverBg},["".concat(n,"-focused& ").concat(n,"-selector")]:{background:e.selectorBg,borderColor:t.activeBorderColor,outline:0}}}},ts=(e,t)=>({["&".concat(e.componentCls,"-status-").concat(t.status)]:Object.assign({},tl(e,t))}),tc=e=>({"&-filled":Object.assign(Object.assign(Object.assign(Object.assign({},tl(e,{bg:e.colorFillTertiary,hoverBg:e.colorFillSecondary,activeBorderColor:e.colorPrimary,color:e.colorText})),ts(e,{status:"error",bg:e.colorErrorBg,hoverBg:e.colorErrorBgHover,activeBorderColor:e.colorError,color:e.colorError})),ts(e,{status:"warning",bg:e.colorWarningBg,hoverBg:e.colorWarningBgHover,activeBorderColor:e.colorWarning,color:e.colorWarning})),{["&".concat(e.componentCls,"-disabled")]:{["&:not(".concat(e.componentCls,"-customize-input) ").concat(e.componentCls,"-selector")]:{borderColor:e.colorBorder,background:e.colorBgContainerDisabled,color:e.colorTextDisabled}},["&".concat(e.componentCls,"-multiple ").concat(e.componentCls,"-selection-item")]:{background:e.colorBgContainer,border:"".concat((0,e7.bf)(e.lineWidth)," ").concat(e.lineType," ").concat(e.colorSplit)}})}),tu=e=>({"&-borderless":{["".concat(e.componentCls,"-selector")]:{background:"transparent",borderColor:"transparent"},["&".concat(e.componentCls,"-disabled")]:{["&:not(".concat(e.componentCls,"-customize-input) ").concat(e.componentCls,"-selector")]:{color:e.colorTextDisabled}},["&".concat(e.componentCls,"-multiple 
").concat(e.componentCls,"-selection-item")]:{background:e.multipleItemBg,border:"".concat((0,e7.bf)(e.lineWidth)," ").concat(e.lineType," ").concat(e.multipleItemBorderColor)}}});var td=e=>({[e.componentCls]:Object.assign(Object.assign(Object.assign({},ti(e)),tc(e)),tu(e))});let tp=e=>{let{componentCls:t}=e;return{position:"relative",transition:"all ".concat(e.motionDurationMid," ").concat(e.motionEaseInOut),input:{cursor:"pointer"},["".concat(t,"-show-search&")]:{cursor:"text",input:{cursor:"auto",color:"inherit",height:"100%"}},["".concat(t,"-disabled&")]:{cursor:"not-allowed",input:{cursor:"not-allowed"}}}},tf=e=>{let{componentCls:t}=e;return{["".concat(t,"-selection-search-input")]:{margin:0,padding:0,background:"transparent",border:"none",outline:"none",appearance:"none",fontFamily:"inherit","&::-webkit-search-cancel-button":{display:"none","-webkit-appearance":"none"}}}},tm=e=>{let{antCls:t,componentCls:n,inputPaddingHorizontalBase:r,iconCls:o}=e;return{[n]:Object.assign(Object.assign({},(0,e4.Wf)(e)),{position:"relative",display:"inline-block",cursor:"pointer",["&:not(".concat(n,"-customize-input) ").concat(n,"-selector")]:Object.assign(Object.assign({},tp(e)),tf(e)),["".concat(n,"-selection-item")]:Object.assign(Object.assign({flex:1,fontWeight:"normal",position:"relative",userSelect:"none"},e4.vS),{["> ".concat(t,"-typography")]:{display:"inline"}}),["".concat(n,"-selection-placeholder")]:Object.assign(Object.assign({},e4.vS),{flex:1,color:e.colorTextPlaceholder,pointerEvents:"none"}),["".concat(n,"-arrow")]:Object.assign(Object.assign({},(0,e4.Ro)()),{position:"absolute",top:"50%",insetInlineStart:"auto",insetInlineEnd:r,height:e.fontSizeIcon,marginTop:e.calc(e.fontSizeIcon).mul(-1).div(2).equal(),color:e.colorTextQuaternary,fontSize:e.fontSizeIcon,lineHeight:1,textAlign:"center",pointerEvents:"none",display:"flex",alignItems:"center",transition:"opacity ".concat(e.motionDurationSlow," ease"),[o]:{verticalAlign:"top",transition:"transform 
".concat(e.motionDurationSlow),"> svg":{verticalAlign:"top"},["&:not(".concat(n,"-suffix)")]:{pointerEvents:"auto"}},["".concat(n,"-disabled &")]:{cursor:"not-allowed"},"> *:not(:last-child)":{marginInlineEnd:8}}),["".concat(n,"-clear")]:{position:"absolute",top:"50%",insetInlineStart:"auto",insetInlineEnd:r,zIndex:1,display:"inline-block",width:e.fontSizeIcon,height:e.fontSizeIcon,marginTop:e.calc(e.fontSizeIcon).mul(-1).div(2).equal(),color:e.colorTextQuaternary,fontSize:e.fontSizeIcon,fontStyle:"normal",lineHeight:1,textAlign:"center",textTransform:"none",cursor:"pointer",opacity:0,transition:"color ".concat(e.motionDurationMid," ease, opacity ").concat(e.motionDurationSlow," ease"),textRendering:"auto","&:before":{display:"block"},"&:hover":{color:e.colorTextTertiary}},"&:hover":{["".concat(n,"-clear")]:{opacity:1},["".concat(n,"-arrow:not(:last-child)")]:{opacity:0}}}),["".concat(n,"-has-feedback")]:{["".concat(n,"-clear")]:{insetInlineEnd:e.calc(r).add(e.fontSize).add(e.paddingXS).equal()}}}},tg=e=>{let{componentCls:t}=e;return[{[t]:{["&".concat(t,"-in-form-item")]:{width:"100%"}}},tm(e),function(e){let{componentCls:t}=e,n=e.calc(e.controlPaddingHorizontalSM).sub(e.lineWidth).equal();return[tr(e),tr((0,ez.TS)(e,{controlHeight:e.controlHeightSM,borderRadius:e.borderRadiusSM}),"sm"),{["".concat(t,"-single").concat(t,"-sm")]:{["&:not(".concat(t,"-customize-input)")]:{["".concat(t,"-selection-search")]:{insetInlineStart:n,insetInlineEnd:n},["".concat(t,"-selector")]:{padding:"0 ".concat((0,e7.bf)(n))},["&".concat(t,"-show-arrow ").concat(t,"-selection-search")]:{insetInlineEnd:e.calc(n).add(e.calc(e.fontSize).mul(1.5)).equal()},["\n &".concat(t,"-show-arrow ").concat(t,"-selection-item,\n &").concat(t,"-show-arrow ").concat(t,"-selection-placeholder\n 
")]:{paddingInlineEnd:e.calc(e.fontSize).mul(1.5).equal()}}}},tr((0,ez.TS)(e,{controlHeight:e.singleItemHeightLG,fontSize:e.fontSizeLG,borderRadius:e.borderRadiusLG}),"lg")]}(e),tn(e),e9(e),{["".concat(t,"-rtl")]:{direction:"rtl"}},(0,e3.c)(e,{borderElCls:"".concat(t,"-selector"),focusElCls:"".concat(t,"-focused")})]};var th=(0,eZ.I$)("Select",(e,t)=>{let{rootPrefixCls:n}=t,r=(0,ez.TS)(e,{rootPrefixCls:n,inputPaddingHorizontalBase:e.calc(e.paddingSM).sub(1).equal(),multipleSelectItemHeight:e.multipleItemHeight,selectHeight:e.controlHeight});return[tg(r),td(r)]},e=>{let{fontSize:t,lineHeight:n,controlHeight:r,controlPaddingHorizontal:o,zIndexPopupBase:a,colorText:i,fontWeightStrong:l,controlItemBgActive:s,controlItemBgHover:c,colorBgContainer:u,colorFillSecondary:d,controlHeightLG:p,controlHeightSM:f,colorBgContainerDisabled:m,colorTextDisabled:g}=e;return{zIndexPopup:a+50,optionSelectedColor:i,optionSelectedFontWeight:l,optionSelectedBg:s,optionActiveBg:c,optionPadding:"".concat((r-t*n)/2,"px ").concat(o,"px"),optionFontSize:t,optionLineHeight:n,optionHeight:r,selectorBg:u,clearBg:u,singleItemHeightLG:p,multipleItemBg:d,multipleItemBorderColor:"transparent",multipleItemHeight:f,multipleItemHeightLG:r,multipleSelectorBgDisabled:m,multipleItemColorDisabled:g,multipleItemBorderColorDisabled:"transparent",showArrowPaddingInlineEnd:Math.ceil(1.25*e.fontSize)}},{unitless:{optionLineHeight:!0,optionSelectedFontWeight:!0}}),tb=n(90507),tv=n(77136),ty=n(81303),tE=n(20383),tS=n(66155),tw=n(96871),tx=function(e,t){var n={};for(var r in e)Object.prototype.hasOwnProperty.call(e,r)&&0>t.indexOf(r)&&(n[r]=e[r]);if(null!=e&&"function"==typeof Object.getOwnPropertySymbols)for(var o=0,r=Object.getOwnPropertySymbols(e);ot.indexOf(r[o])&&Object.prototype.propertyIsEnumerable.call(e,r[o])&&(n[r[o]]=e[r[o]]);return n};let tO="SECRET_COMBOBOX_MODE_DO_NOT_USE",tC=r.forwardRef((e,t)=>{var n,o,i;let 
l;let{prefixCls:s,bordered:c,className:u,rootClassName:d,getPopupContainer:p,popupClassName:f,dropdownClassName:m,listHeight:g=256,placement:h,listItemHeight:b,size:v,disabled:y,notFoundContent:E,status:S,builtinPlacements:w,dropdownMatchSelectWidth:x,popupMatchSelectWidth:O,direction:C,style:T,allowClear:k,variant:A,dropdownStyle:I,transitionName:R,tagRender:N,maxCount:_}=e,P=tx(e,["prefixCls","bordered","className","rootClassName","getPopupContainer","popupClassName","dropdownClassName","listHeight","placement","listItemHeight","size","disabled","notFoundContent","status","builtinPlacements","dropdownMatchSelectWidth","popupMatchSelectWidth","direction","style","allowClear","variant","dropdownStyle","transitionName","tagRender","maxCount"]),{getPopupContainer:M,getPrefixCls:L,renderEmpty:D,direction:j,virtual:F,popupMatchSelectWidth:B,popupOverflow:U,select:Z}=r.useContext(ej.E_),[,z]=(0,eU.ZP)(),H=null!=b?b:null==z?void 0:z.controlHeight,G=L("select",s),$=L(),W=null!=C?C:j,{compactSize:V,compactItemClassnames:q}=(0,e1.ri)(G,W),[Y,K]=(0,e0.Z)(A,c),X=(0,eX.Z)(G),[J,ee,et]=th(G,X),en=r.useMemo(()=>{let{mode:t}=e;return"combobox"===t?void 0:t===tO?"combobox":t},[e.mode]),er="multiple"===en||"tags"===en,eo=(o=e.suffixIcon,void 0!==(i=e.showArrow)?i:null!==o),ea=null!==(n=null!=O?O:x)&&void 0!==n?n:B,{status:ei,hasFeedback:el,isFormItemInput:es,feedbackIcon:ec}=r.useContext(eJ.aM),eu=(0,eD.F)(ei,S);l=void 0!==E?E:"combobox"===en?null:(null==D?void 0:D("Select"))||r.createElement(eY,{componentName:"Select"});let{suffixIcon:ed,itemIcon:ep,removeIcon:ef,clearIcon:em}=function(e){let{suffixIcon:t,clearIcon:n,menuItemSelectedIcon:o,removeIcon:a,loading:i,multiple:l,hasFeedback:s,prefixCls:c,showSuffixIcon:u,feedbackIcon:d,showArrow:p,componentName:f}=e,m=null!=n?n:r.createElement(tv.Z,null),g=e=>null!==t||s||p?r.createElement(r.Fragment,null,!1!==u&&e,s&&d):null,h=null;if(void 0!==t)h=g(t);else if(i)h=g(r.createElement(tS.Z,{spin:!0}));else{let 
e="".concat(c,"-suffix");h=t=>{let{open:n,showSearch:o}=t;return n&&o?g(r.createElement(tw.Z,{className:e})):g(r.createElement(tE.Z,{className:e}))}}let b=null;return b=void 0!==o?o:l?r.createElement(tb.Z,null):null,{clearIcon:m,suffixIcon:h,itemIcon:b,removeIcon:void 0!==a?a:r.createElement(ty.Z,null)}}(Object.assign(Object.assign({},P),{multiple:er,hasFeedback:el,feedbackIcon:ec,showSuffixIcon:eo,prefixCls:G,componentName:"Select"})),eg=(0,Q.Z)(P,["suffixIcon","itemIcon"]),eh=a()(f||m,{["".concat(G,"-dropdown-").concat(W)]:"rtl"===W},d,et,X,ee),eb=(0,eQ.Z)(e=>{var t;return null!==(t=null!=v?v:V)&&void 0!==t?t:e}),ev=r.useContext(eK.Z),ey=a()({["".concat(G,"-lg")]:"large"===eb,["".concat(G,"-sm")]:"small"===eb,["".concat(G,"-rtl")]:"rtl"===W,["".concat(G,"-").concat(Y)]:K,["".concat(G,"-in-form-item")]:es},(0,eD.Z)(G,eu,el),q,null==Z?void 0:Z.className,u,d,et,X,ee),eE=r.useMemo(()=>void 0!==h?h:"rtl"===W?"bottomRight":"bottomLeft",[h,W]),[eS]=(0,eP.Cn)("SelectLike",null==I?void 0:I.zIndex);return J(r.createElement(e_,Object.assign({ref:t,virtual:F,showSearch:null==Z?void 0:Z.showSearch},eg,{style:Object.assign(Object.assign({},null==Z?void 0:Z.style),T),dropdownMatchSelectWidth:ea,transitionName:(0,eM.m)($,"slide-up",R),builtinPlacements:w||e2(U),listHeight:g,listItemHeight:H,mode:en,prefixCls:G,placement:eE,direction:W,suffixIcon:ed,menuItemSelectedIcon:ep,removeIcon:ef,allowClear:!0===k?{clearIcon:em}:k,notFoundContent:l,className:ey,getPopupContainer:p||M,dropdownClassName:eh,disabled:null!=y?y:ev,dropdownStyle:Object.assign(Object.assign({},I),{zIndex:eS}),maxCount:er?_:void 0,tagRender:er?N:void 0})))}),tT=(0,eL.Z)(tC);tC.SECRET_COMBOBOX_MODE_DO_NOT_USE=tO,tC.Option=K,tC.OptGroup=Y,tC._InternalPanelDoNotUseOrYouWillBeFired=tT;var tk=tC},92801:function(e,t,n){n.d(t,{BR:function(){return f},ri:function(){return p}});var r=n(16480),o=n.n(r),a=n(33054),i=n(64090),l=n(57499),s=n(10693),c=n(86682),u=function(e,t){var n={};for(var r in 
e)Object.prototype.hasOwnProperty.call(e,r)&&0>t.indexOf(r)&&(n[r]=e[r]);if(null!=e&&"function"==typeof Object.getOwnPropertySymbols)for(var o=0,r=Object.getOwnPropertySymbols(e);ot.indexOf(r[o])&&Object.prototype.propertyIsEnumerable.call(e,r[o])&&(n[r[o]]=e[r[o]]);return n};let d=i.createContext(null),p=(e,t)=>{let n=i.useContext(d),r=i.useMemo(()=>{if(!n)return"";let{compactDirection:r,isFirstItem:a,isLastItem:i}=n,l="vertical"===r?"-vertical-":"-";return o()("".concat(e,"-compact").concat(l,"item"),{["".concat(e,"-compact").concat(l,"first-item")]:a,["".concat(e,"-compact").concat(l,"last-item")]:i,["".concat(e,"-compact").concat(l,"item-rtl")]:"rtl"===t})},[e,t,n]);return{compactSize:null==n?void 0:n.compactSize,compactDirection:null==n?void 0:n.compactDirection,compactItemClassnames:r}},f=e=>{let{children:t}=e;return i.createElement(d.Provider,{value:null},t)},m=e=>{var{children:t}=e,n=u(e,["children"]);return i.createElement(d.Provider,{value:n},t)};t.ZP=e=>{let{getPrefixCls:t,direction:n}=i.useContext(l.E_),{size:r,direction:p,block:f,prefixCls:g,className:h,rootClassName:b,children:v}=e,y=u(e,["size","direction","block","prefixCls","className","rootClassName","children"]),E=(0,s.Z)(e=>null!=r?r:e),S=t("space-compact",g),[w,x]=(0,c.Z)(S),O=o()(S,x,{["".concat(S,"-rtl")]:"rtl"===n,["".concat(S,"-block")]:f,["".concat(S,"-vertical")]:"vertical"===p},h,b),C=i.useContext(d),T=(0,a.Z)(v),k=i.useMemo(()=>T.map((e,t)=>{let n=e&&e.key||"".concat(S,"-item-").concat(t);return i.createElement(m,{key:n,compactSize:E,compactDirection:p,isFirstItem:0===t&&(!C||(null==C?void 0:C.isFirstItem)),isLastItem:t===T.length-1&&(!C||(null==C?void 0:C.isLastItem))},e)}),[r,T,C]);return 0===T.length?null:w(i.createElement("div",Object.assign({className:O},y),k))}},65270:function(e,t,n){n.d(t,{Z:function(){return b}});var r=n(64090),o=n(16480),a=n.n(o),i=n(33054);function l(e){return["small","middle","large"].includes(e)}function s(e){return!!e&&"number"==typeof 
e&&!Number.isNaN(e)}var c=n(57499),u=n(92801);let d=r.createContext({latestIndex:0}),p=d.Provider;var f=e=>{let{className:t,index:n,children:o,split:a,style:i}=e,{latestIndex:l}=r.useContext(d);return null==o?null:r.createElement(r.Fragment,null,r.createElement("div",{className:t,style:i},o),nt.indexOf(r)&&(n[r]=e[r]);if(null!=e&&"function"==typeof Object.getOwnPropertySymbols)for(var o=0,r=Object.getOwnPropertySymbols(e);ot.indexOf(r[o])&&Object.prototype.propertyIsEnumerable.call(e,r[o])&&(n[r[o]]=e[r[o]]);return n};let h=r.forwardRef((e,t)=>{var n,o;let{getPrefixCls:u,space:d,direction:h}=r.useContext(c.E_),{size:b=(null==d?void 0:d.size)||"small",align:v,className:y,rootClassName:E,children:S,direction:w="horizontal",prefixCls:x,split:O,style:C,wrap:T=!1,classNames:k,styles:A}=e,I=g(e,["size","align","className","rootClassName","children","direction","prefixCls","split","style","wrap","classNames","styles"]),[R,N]=Array.isArray(b)?b:[b,b],_=l(N),P=l(R),M=s(N),L=s(R),D=(0,i.Z)(S,{keepEmpty:!0}),j=void 0===v&&"horizontal"===w?"center":v,F=u("space",x),[B,U,Z]=(0,m.Z)(F),z=a()(F,null==d?void 0:d.className,U,"".concat(F,"-").concat(w),{["".concat(F,"-rtl")]:"rtl"===h,["".concat(F,"-align-").concat(j)]:j,["".concat(F,"-gap-row-").concat(N)]:_,["".concat(F,"-gap-col-").concat(R)]:P},y,E,Z),H=a()("".concat(F,"-item"),null!==(n=null==k?void 0:k.item)&&void 0!==n?n:null===(o=null==d?void 0:d.classNames)||void 0===o?void 0:o.item),G=0,$=D.map((e,t)=>{var n,o;null!=e&&(G=t);let a=e&&e.key||"".concat(H,"-").concat(t);return r.createElement(f,{className:H,key:a,index:t,split:O,style:null!==(n=null==A?void 0:A.item)&&void 0!==n?n:null===(o=null==d?void 0:d.styles)||void 0===o?void 0:o.item},e)}),W=r.useMemo(()=>({latestIndex:G}),[G]);if(0===D.length)return null;let V={};return T&&(V.flexWrap="wrap"),!P&&L&&(V.columnGap=R),!_&&M&&(V.rowGap=N),B(r.createElement("div",Object.assign({ref:t,className:z,style:Object.assign(Object.assign(Object.assign({},V),null==d?void 
0:d.style),C)},I),r.createElement(p,{value:W},$)))});h.Compact=u.ZP;var b=h},86682:function(e,t,n){n.d(t,{Z:function(){return s}});var r=n(76585),o=n(80316),a=e=>{let{componentCls:t}=e;return{[t]:{"&-block":{display:"flex",width:"100%"},"&-vertical":{flexDirection:"column"}}}};let i=e=>{let{componentCls:t}=e;return{[t]:{display:"inline-flex","&-rtl":{direction:"rtl"},"&-vertical":{flexDirection:"column"},"&-align":{flexDirection:"column","&-center":{alignItems:"center"},"&-start":{alignItems:"flex-start"},"&-end":{alignItems:"flex-end"},"&-baseline":{alignItems:"baseline"}},["".concat(t,"-item:empty")]:{display:"none"}}}},l=e=>{let{componentCls:t}=e;return{[t]:{"&-gap-row-small":{rowGap:e.spaceGapSmallSize},"&-gap-row-middle":{rowGap:e.spaceGapMiddleSize},"&-gap-row-large":{rowGap:e.spaceGapLargeSize},"&-gap-col-small":{columnGap:e.spaceGapSmallSize},"&-gap-col-middle":{columnGap:e.spaceGapMiddleSize},"&-gap-col-large":{columnGap:e.spaceGapLargeSize}}}};var s=(0,r.I$)("Space",e=>{let t=(0,o.TS)(e,{spaceGapSmallSize:e.paddingXS,spaceGapMiddleSize:e.padding,spaceGapLargeSize:e.paddingLG});return[i(t),l(t),a(t)]},()=>({}),{resetStyle:!1})},12288:function(e,t,n){n.d(t,{c:function(){return r}});function r(e){let t=arguments.length>1&&void 0!==arguments[1]?arguments[1]:{focus:!0},{componentCls:n}=e,r="".concat(n,"-compact");return{[r]:Object.assign(Object.assign({},function(e,t,n){let{focusElCls:r,focus:o,borderElCls:a}=n,i=a?"> *":"",l=["hover",o?"focus":null,"active"].filter(Boolean).map(e=>"&:".concat(e," ").concat(i)).join(",");return{["&-item:not(".concat(t,"-last-item)")]:{marginInlineEnd:e.calc(e.lineWidth).mul(-1).equal()},"&-item":Object.assign(Object.assign({[l]:{zIndex:2}},r?{["&".concat(r)]:{zIndex:2}}:{}),{["&[disabled] ".concat(i)]:{zIndex:0}})}}(e,r,t)),function(e,t,n){let{borderElCls:r}=n,o=r?"> ".concat(r):"";return{["&-item:not(".concat(t,"-first-item):not(").concat(t,"-last-item) 
").concat(o)]:{borderRadius:0},["&-item:not(".concat(t,"-last-item)").concat(t,"-first-item")]:{["& ".concat(o,", &").concat(e,"-sm ").concat(o,", &").concat(e,"-lg ").concat(o)]:{borderStartEndRadius:0,borderEndEndRadius:0}},["&-item:not(".concat(t,"-first-item)").concat(t,"-last-item")]:{["& ".concat(o,", &").concat(e,"-sm ").concat(o,", &").concat(e,"-lg ").concat(o)]:{borderStartStartRadius:0,borderEndStartRadius:0}}}}(n,r,t))}}},11303:function(e,t,n){n.d(t,{Lx:function(){return s},Qy:function(){return d},Ro:function(){return i},Wf:function(){return a},dF:function(){return l},du:function(){return c},oN:function(){return u},vS:function(){return o}});var r=n(8985);let o={overflow:"hidden",whiteSpace:"nowrap",textOverflow:"ellipsis"},a=function(e){let t=arguments.length>1&&void 0!==arguments[1]&&arguments[1];return{boxSizing:"border-box",margin:0,padding:0,color:e.colorText,fontSize:e.fontSize,lineHeight:e.lineHeight,listStyle:"none",fontFamily:t?"inherit":e.fontFamily}},i=()=>({display:"inline-flex",alignItems:"center",color:"inherit",fontStyle:"normal",lineHeight:0,textAlign:"center",textTransform:"none",verticalAlign:"-0.125em",textRendering:"optimizeLegibility","-webkit-font-smoothing":"antialiased","-moz-osx-font-smoothing":"grayscale","> *":{lineHeight:1},svg:{display:"inline-block"}}),l=()=>({"&::before":{display:"table",content:'""'},"&::after":{display:"table",clear:"both",content:'""'}}),s=e=>({a:{color:e.colorLink,textDecoration:e.linkDecoration,backgroundColor:"transparent",outline:"none",cursor:"pointer",transition:"color ".concat(e.motionDurationSlow),"-webkit-text-decoration-skip":"objects","&:hover":{color:e.colorLinkHover},"&:active":{color:e.colorLinkActive},"&:active,\n &:hover":{textDecoration:e.linkHoverDecoration,outline:0},"&:focus":{textDecoration:e.linkFocusDecoration,outline:0},"&[disabled]":{color:e.colorTextDisabled,cursor:"not-allowed"}}}),c=(e,t)=>{let{fontFamily:n,fontSize:r}=e,o='[class^="'.concat(t,'"], [class*=" 
').concat(t,'"]');return{[o]:{fontFamily:n,fontSize:r,boxSizing:"border-box","&::before, &::after":{boxSizing:"border-box"},[o]:{boxSizing:"border-box","&::before, &::after":{boxSizing:"border-box"}}}}},u=e=>({outline:"".concat((0,r.bf)(e.lineWidthFocus)," solid ").concat(e.colorPrimaryBorder),outlineOffset:1,transition:"outline-offset 0s, outline 0s"}),d=e=>({"&:focus-visible":Object.assign({},u(e))})},46154:function(e,t){t.Z=e=>({[e.componentCls]:{["".concat(e.antCls,"-motion-collapse-legacy")]:{overflow:"hidden","&-active":{transition:"height ".concat(e.motionDurationMid," ").concat(e.motionEaseInOut,",\n opacity ").concat(e.motionDurationMid," ").concat(e.motionEaseInOut," !important")}},["".concat(e.antCls,"-motion-collapse")]:{overflow:"hidden",transition:"height ".concat(e.motionDurationMid," ").concat(e.motionEaseInOut,",\n opacity ").concat(e.motionDurationMid," ").concat(e.motionEaseInOut," !important")}}})},59353:function(e,t,n){n.d(t,{R:function(){return a}});let r=e=>({animationDuration:e,animationFillMode:"both"}),o=e=>({animationDuration:e,animationFillMode:"both"}),a=function(e,t,n,a){let i=arguments.length>4&&void 0!==arguments[4]&&arguments[4],l=i?"&":"";return{["\n ".concat(l).concat(e,"-enter,\n ").concat(l).concat(e,"-appear\n ")]:Object.assign(Object.assign({},r(a)),{animationPlayState:"paused"}),["".concat(l).concat(e,"-leave")]:Object.assign(Object.assign({},o(a)),{animationPlayState:"paused"}),["\n ".concat(l).concat(e,"-enter").concat(e,"-enter-active,\n ").concat(l).concat(e,"-appear").concat(e,"-appear-active\n ")]:{animationName:t,animationPlayState:"running"},["".concat(l).concat(e,"-leave").concat(e,"-leave-active")]:{animationName:n,animationPlayState:"running",pointerEvents:"none"}}}},25926:function(e,t,n){n.d(t,{Fm:function(){return p}});var r=n(8985),o=n(59353);let a=new r.E4("antMoveDownIn",{"0%":{transform:"translate3d(0, 100%, 0)",transformOrigin:"0 0",opacity:0},"100%":{transform:"translate3d(0, 0, 0)",transformOrigin:"0 
0",opacity:1}}),i=new r.E4("antMoveDownOut",{"0%":{transform:"translate3d(0, 0, 0)",transformOrigin:"0 0",opacity:1},"100%":{transform:"translate3d(0, 100%, 0)",transformOrigin:"0 0",opacity:0}}),l=new r.E4("antMoveLeftIn",{"0%":{transform:"translate3d(-100%, 0, 0)",transformOrigin:"0 0",opacity:0},"100%":{transform:"translate3d(0, 0, 0)",transformOrigin:"0 0",opacity:1}}),s=new r.E4("antMoveLeftOut",{"0%":{transform:"translate3d(0, 0, 0)",transformOrigin:"0 0",opacity:1},"100%":{transform:"translate3d(-100%, 0, 0)",transformOrigin:"0 0",opacity:0}}),c=new r.E4("antMoveRightIn",{"0%":{transform:"translate3d(100%, 0, 0)",transformOrigin:"0 0",opacity:0},"100%":{transform:"translate3d(0, 0, 0)",transformOrigin:"0 0",opacity:1}}),u=new r.E4("antMoveRightOut",{"0%":{transform:"translate3d(0, 0, 0)",transformOrigin:"0 0",opacity:1},"100%":{transform:"translate3d(100%, 0, 0)",transformOrigin:"0 0",opacity:0}}),d={"move-up":{inKeyframes:new r.E4("antMoveUpIn",{"0%":{transform:"translate3d(0, -100%, 0)",transformOrigin:"0 0",opacity:0},"100%":{transform:"translate3d(0, 0, 0)",transformOrigin:"0 0",opacity:1}}),outKeyframes:new r.E4("antMoveUpOut",{"0%":{transform:"translate3d(0, 0, 0)",transformOrigin:"0 0",opacity:1},"100%":{transform:"translate3d(0, -100%, 0)",transformOrigin:"0 0",opacity:0}})},"move-down":{inKeyframes:a,outKeyframes:i},"move-left":{inKeyframes:l,outKeyframes:s},"move-right":{inKeyframes:c,outKeyframes:u}},p=(e,t)=>{let{antCls:n}=e,r="".concat(n,"-").concat(t),{inKeyframes:a,outKeyframes:i}=d[t];return[(0,o.R)(r,a,i,e.motionDurationMid),{["\n ".concat(r,"-enter,\n ").concat(r,"-appear\n ")]:{opacity:0,animationTimingFunction:e.motionEaseOutCirc},["".concat(r,"-leave")]:{animationTimingFunction:e.motionEaseInOutCirc}}]}},202:function(e,t,n){n.d(t,{Qt:function(){return l},Uw:function(){return i},fJ:function(){return a},ly:function(){return s},oN:function(){return d}});var r=n(8985),o=n(59353);let a=new 
r.E4("antSlideUpIn",{"0%":{transform:"scaleY(0.8)",transformOrigin:"0% 0%",opacity:0},"100%":{transform:"scaleY(1)",transformOrigin:"0% 0%",opacity:1}}),i=new r.E4("antSlideUpOut",{"0%":{transform:"scaleY(1)",transformOrigin:"0% 0%",opacity:1},"100%":{transform:"scaleY(0.8)",transformOrigin:"0% 0%",opacity:0}}),l=new r.E4("antSlideDownIn",{"0%":{transform:"scaleY(0.8)",transformOrigin:"100% 100%",opacity:0},"100%":{transform:"scaleY(1)",transformOrigin:"100% 100%",opacity:1}}),s=new r.E4("antSlideDownOut",{"0%":{transform:"scaleY(1)",transformOrigin:"100% 100%",opacity:1},"100%":{transform:"scaleY(0.8)",transformOrigin:"100% 100%",opacity:0}}),c=new r.E4("antSlideLeftIn",{"0%":{transform:"scaleX(0.8)",transformOrigin:"0% 0%",opacity:0},"100%":{transform:"scaleX(1)",transformOrigin:"0% 0%",opacity:1}}),u={"slide-up":{inKeyframes:a,outKeyframes:i},"slide-down":{inKeyframes:l,outKeyframes:s},"slide-left":{inKeyframes:c,outKeyframes:new r.E4("antSlideLeftOut",{"0%":{transform:"scaleX(1)",transformOrigin:"0% 0%",opacity:1},"100%":{transform:"scaleX(0.8)",transformOrigin:"0% 0%",opacity:0}})},"slide-right":{inKeyframes:new r.E4("antSlideRightIn",{"0%":{transform:"scaleX(0.8)",transformOrigin:"100% 0%",opacity:0},"100%":{transform:"scaleX(1)",transformOrigin:"100% 0%",opacity:1}}),outKeyframes:new r.E4("antSlideRightOut",{"0%":{transform:"scaleX(1)",transformOrigin:"100% 0%",opacity:1},"100%":{transform:"scaleX(0.8)",transformOrigin:"100% 0%",opacity:0}})}},d=(e,t)=>{let{antCls:n}=e,r="".concat(n,"-").concat(t),{inKeyframes:a,outKeyframes:i}=u[t];return[(0,o.R)(r,a,i,e.motionDurationMid),{["\n ".concat(r,"-enter,\n ").concat(r,"-appear\n ")]:{transform:"scale(0)",transformOrigin:"0% 0%",opacity:0,animationTimingFunction:e.motionEaseOutQuint,"&-prepare":{transform:"scale(1)"}},["".concat(r,"-leave")]:{animationTimingFunction:e.motionEaseInQuint}}]}},58854:function(e,t,n){n.d(t,{_y:function(){return g},kr:function(){return a}});var r=n(8985),o=n(59353);let a=new 
r.E4("antZoomIn",{"0%":{transform:"scale(0.2)",opacity:0},"100%":{transform:"scale(1)",opacity:1}}),i=new r.E4("antZoomOut",{"0%":{transform:"scale(1)"},"100%":{transform:"scale(0.2)",opacity:0}}),l=new r.E4("antZoomBigIn",{"0%":{transform:"scale(0.8)",opacity:0},"100%":{transform:"scale(1)",opacity:1}}),s=new r.E4("antZoomBigOut",{"0%":{transform:"scale(1)"},"100%":{transform:"scale(0.8)",opacity:0}}),c=new r.E4("antZoomUpIn",{"0%":{transform:"scale(0.8)",transformOrigin:"50% 0%",opacity:0},"100%":{transform:"scale(1)",transformOrigin:"50% 0%"}}),u=new r.E4("antZoomUpOut",{"0%":{transform:"scale(1)",transformOrigin:"50% 0%"},"100%":{transform:"scale(0.8)",transformOrigin:"50% 0%",opacity:0}}),d=new r.E4("antZoomLeftIn",{"0%":{transform:"scale(0.8)",transformOrigin:"0% 50%",opacity:0},"100%":{transform:"scale(1)",transformOrigin:"0% 50%"}}),p=new r.E4("antZoomLeftOut",{"0%":{transform:"scale(1)",transformOrigin:"0% 50%"},"100%":{transform:"scale(0.8)",transformOrigin:"0% 50%",opacity:0}}),f=new r.E4("antZoomRightIn",{"0%":{transform:"scale(0.8)",transformOrigin:"100% 50%",opacity:0},"100%":{transform:"scale(1)",transformOrigin:"100% 50%"}}),m={zoom:{inKeyframes:a,outKeyframes:i},"zoom-big":{inKeyframes:l,outKeyframes:s},"zoom-big-fast":{inKeyframes:l,outKeyframes:s},"zoom-left":{inKeyframes:d,outKeyframes:p},"zoom-right":{inKeyframes:f,outKeyframes:new r.E4("antZoomRightOut",{"0%":{transform:"scale(1)",transformOrigin:"100% 50%"},"100%":{transform:"scale(0.8)",transformOrigin:"100% 50%",opacity:0}})},"zoom-up":{inKeyframes:c,outKeyframes:u},"zoom-down":{inKeyframes:new r.E4("antZoomDownIn",{"0%":{transform:"scale(0.8)",transformOrigin:"50% 100%",opacity:0},"100%":{transform:"scale(1)",transformOrigin:"50% 100%"}}),outKeyframes:new r.E4("antZoomDownOut",{"0%":{transform:"scale(1)",transformOrigin:"50% 100%"},"100%":{transform:"scale(0.8)",transformOrigin:"50% 
100%",opacity:0}})}},g=(e,t)=>{let{antCls:n}=e,r="".concat(n,"-").concat(t),{inKeyframes:a,outKeyframes:i}=m[t];return[(0,o.R)(r,a,i,"zoom-big-fast"===t?e.motionDurationFast:e.motionDurationMid),{["\n ".concat(r,"-enter,\n ").concat(r,"-appear\n ")]:{transform:"scale(0)",opacity:0,animationTimingFunction:e.motionEaseOutCirc,"&-prepare":{transform:"none"}},["".concat(r,"-leave")]:{animationTimingFunction:e.motionEaseInOutCirc}}]}},89869:function(e,t,n){n.d(t,{ZP:function(){return i},qN:function(){return o},wZ:function(){return a}});var r=n(2638);let o=8;function a(e){let{contentRadius:t,limitVerticalRadius:n}=e,r=t>12?t+2:12;return{arrowOffsetHorizontal:r,arrowOffsetVertical:n?o:r}}function i(e,t,n){var o,a,i,l,s,c,u,d;let{componentCls:p,boxShadowPopoverArrow:f,arrowOffsetVertical:m,arrowOffsetHorizontal:g}=e,{arrowDistance:h=0,arrowPlacement:b={left:!0,right:!0,top:!0,bottom:!0}}=n||{};return{[p]:Object.assign(Object.assign(Object.assign(Object.assign({["".concat(p,"-arrow")]:[Object.assign(Object.assign({position:"absolute",zIndex:1,display:"block"},(0,r.W)(e,t,f)),{"&:before":{background:t}})]},(o=!!b.top,a={[["&-placement-top > ".concat(p,"-arrow"),"&-placement-topLeft > ".concat(p,"-arrow"),"&-placement-topRight > ".concat(p,"-arrow")].join(",")]:{bottom:h,transform:"translateY(100%) rotate(180deg)"},["&-placement-top > ".concat(p,"-arrow")]:{left:{_skip_check_:!0,value:"50%"},transform:"translateX(-50%) translateY(100%) rotate(180deg)"},["&-placement-topLeft > ".concat(p,"-arrow")]:{left:{_skip_check_:!0,value:g}},["&-placement-topRight > ".concat(p,"-arrow")]:{right:{_skip_check_:!0,value:g}}},o?a:{})),(i=!!b.bottom,l={[["&-placement-bottom > ".concat(p,"-arrow"),"&-placement-bottomLeft > ".concat(p,"-arrow"),"&-placement-bottomRight > ".concat(p,"-arrow")].join(",")]:{top:h,transform:"translateY(-100%)"},["&-placement-bottom > ".concat(p,"-arrow")]:{left:{_skip_check_:!0,value:"50%"},transform:"translateX(-50%) translateY(-100%)"},["&-placement-bottomLeft > 
".concat(p,"-arrow")]:{left:{_skip_check_:!0,value:g}},["&-placement-bottomRight > ".concat(p,"-arrow")]:{right:{_skip_check_:!0,value:g}}},i?l:{})),(s=!!b.left,c={[["&-placement-left > ".concat(p,"-arrow"),"&-placement-leftTop > ".concat(p,"-arrow"),"&-placement-leftBottom > ".concat(p,"-arrow")].join(",")]:{right:{_skip_check_:!0,value:h},transform:"translateX(100%) rotate(90deg)"},["&-placement-left > ".concat(p,"-arrow")]:{top:{_skip_check_:!0,value:"50%"},transform:"translateY(-50%) translateX(100%) rotate(90deg)"},["&-placement-leftTop > ".concat(p,"-arrow")]:{top:m},["&-placement-leftBottom > ".concat(p,"-arrow")]:{bottom:m}},s?c:{})),(u=!!b.right,d={[["&-placement-right > ".concat(p,"-arrow"),"&-placement-rightTop > ".concat(p,"-arrow"),"&-placement-rightBottom > ".concat(p,"-arrow")].join(",")]:{left:{_skip_check_:!0,value:h},transform:"translateX(-100%) rotate(-90deg)"},["&-placement-right > ".concat(p,"-arrow")]:{top:{_skip_check_:!0,value:"50%"},transform:"translateY(-50%) translateX(-100%) rotate(-90deg)"},["&-placement-rightTop > ".concat(p,"-arrow")]:{top:m},["&-placement-rightBottom > ".concat(p,"-arrow")]:{bottom:m}},u?d:{}))}}},2638:function(e,t,n){n.d(t,{W:function(){return a},w:function(){return o}});var r=n(8985);function o(e){let{sizePopupArrow:t,borderRadiusXS:n,borderRadiusOuter:r}=e,o=t/2,a=1*r/Math.sqrt(2),i=o-r*(1-1/Math.sqrt(2)),l=o-1/Math.sqrt(2)*n,s=r*(Math.sqrt(2)-1)+1/Math.sqrt(2)*n,c=2*o-l,u=2*o-a,d=2*o-0,p=o*Math.sqrt(2)+r*(Math.sqrt(2)-2),f=r*(Math.sqrt(2)-1),m="polygon(".concat(f,"px 100%, 50% ").concat(f,"px, ").concat(2*o-f,"px 100%, ").concat(f,"px 100%)");return{arrowShadowWidth:p,arrowPath:"path('M ".concat(0," ").concat(o," A ").concat(r," ").concat(r," 0 0 0 ").concat(a," ").concat(i," L ").concat(l," ").concat(s," A ").concat(n," ").concat(n," 0 0 1 ").concat(c," ").concat(s," L ").concat(u," ").concat(i," A ").concat(r," ").concat(r," 0 0 0 ").concat(d," ").concat(o," Z')"),arrowPolygon:m}}let 
a=(e,t,n)=>{let{sizePopupArrow:o,arrowPolygon:a,arrowPath:i,arrowShadowWidth:l,borderRadiusXS:s,calc:c}=e;return{pointerEvents:"none",width:o,height:o,overflow:"hidden","&::before":{position:"absolute",bottom:0,insetInlineStart:0,width:o,height:c(o).div(2).equal(),background:t,clipPath:{_multi_value_:!0,value:[a,i]},content:'""'},"&::after":{content:'""',position:"absolute",width:l,height:l,bottom:0,insetInline:0,margin:"auto",borderRadius:{_skip_check_:!0,value:"0 0 ".concat((0,r.bf)(s)," 0")},transform:"translateY(50%) rotate(-135deg)",boxShadow:n,zIndex:0,background:"transparent"}}}},43345:function(e,t,n){n.d(t,{Mj:function(){return v},u_:function(){return b},uH:function(){return h}});var r=n(64090),o=n(8985),a=n(12215),i=e=>{let{controlHeight:t}=e;return{controlHeightSM:.75*t,controlHeightXS:.5*t,controlHeightLG:1.25*t}},l=n(46864),s=n(6336),c=e=>{let t=e,n=e,r=e,o=e;return e<6&&e>=5?t=e+1:e<16&&e>=6?t=e+2:e>=16&&(t=16),e<7&&e>=5?n=4:e<8&&e>=7?n=5:e<14&&e>=8?n=6:e<16&&e>=14?n=7:e>=16&&(n=8),e<6&&e>=2?r=1:e>=6&&(r=2),e>4&&e<8?o=4:e>=8&&(o=6),{borderRadius:e,borderRadiusXS:r,borderRadiusSM:n,borderRadiusLG:t,borderRadiusOuter:o}};let u=(e,t)=>new s.C(e).setAlpha(t).toRgbString(),d=(e,t)=>new s.C(e).darken(t).toHexString(),p=e=>{let t=(0,a.R_)(e);return{1:t[0],2:t[1],3:t[2],4:t[3],5:t[4],6:t[5],7:t[6],8:t[4],9:t[5],10:t[6]}},f=(e,t)=>{let n=e||"#fff",r=t||"#000";return{colorBgBase:n,colorTextBase:r,colorText:u(r,.88),colorTextSecondary:u(r,.65),colorTextTertiary:u(r,.45),colorTextQuaternary:u(r,.25),colorFill:u(r,.15),colorFillSecondary:u(r,.06),colorFillTertiary:u(r,.04),colorFillQuaternary:u(r,.02),colorBgLayout:d(n,4),colorBgContainer:d(n,0),colorBgElevated:d(n,0),colorBgSpotlight:u(r,.85),colorBgBlur:"transparent",colorBorder:d(n,15),colorBorderSecondary:d(n,6)}};var m=n(49202),g=e=>{let 
t=(0,m.Z)(e),n=t.map(e=>e.size),r=t.map(e=>e.lineHeight),o=n[1],a=n[0],i=n[2],l=r[1],s=r[0],c=r[2];return{fontSizeSM:a,fontSize:o,fontSizeLG:i,fontSizeXL:n[3],fontSizeHeading1:n[6],fontSizeHeading2:n[5],fontSizeHeading3:n[4],fontSizeHeading4:n[3],fontSizeHeading5:n[2],lineHeight:l,lineHeightLG:c,lineHeightSM:s,fontHeight:Math.round(l*o),fontHeightLG:Math.round(c*i),fontHeightSM:Math.round(s*a),lineHeightHeading1:r[6],lineHeightHeading2:r[5],lineHeightHeading3:r[4],lineHeightHeading4:r[3],lineHeightHeading5:r[2]}};let h=(0,o.jG)(function(e){let t=Object.keys(l.M).map(t=>{let n=(0,a.R_)(e[t]);return Array(10).fill(1).reduce((e,r,o)=>(e["".concat(t,"-").concat(o+1)]=n[o],e["".concat(t).concat(o+1)]=n[o],e),{})}).reduce((e,t)=>e=Object.assign(Object.assign({},e),t),{});return Object.assign(Object.assign(Object.assign(Object.assign(Object.assign(Object.assign(Object.assign({},e),t),function(e,t){let{generateColorPalettes:n,generateNeutralColorPalettes:r}=t,{colorSuccess:o,colorWarning:a,colorError:i,colorInfo:l,colorPrimary:c,colorBgBase:u,colorTextBase:d}=e,p=n(c),f=n(o),m=n(a),g=n(i),h=n(l),b=r(u,d),v=n(e.colorLink||e.colorInfo);return 
Object.assign(Object.assign({},b),{colorPrimaryBg:p[1],colorPrimaryBgHover:p[2],colorPrimaryBorder:p[3],colorPrimaryBorderHover:p[4],colorPrimaryHover:p[5],colorPrimary:p[6],colorPrimaryActive:p[7],colorPrimaryTextHover:p[8],colorPrimaryText:p[9],colorPrimaryTextActive:p[10],colorSuccessBg:f[1],colorSuccessBgHover:f[2],colorSuccessBorder:f[3],colorSuccessBorderHover:f[4],colorSuccessHover:f[4],colorSuccess:f[6],colorSuccessActive:f[7],colorSuccessTextHover:f[8],colorSuccessText:f[9],colorSuccessTextActive:f[10],colorErrorBg:g[1],colorErrorBgHover:g[2],colorErrorBorder:g[3],colorErrorBorderHover:g[4],colorErrorHover:g[5],colorError:g[6],colorErrorActive:g[7],colorErrorTextHover:g[8],colorErrorText:g[9],colorErrorTextActive:g[10],colorWarningBg:m[1],colorWarningBgHover:m[2],colorWarningBorder:m[3],colorWarningBorderHover:m[4],colorWarningHover:m[4],colorWarning:m[6],colorWarningActive:m[7],colorWarningTextHover:m[8],colorWarningText:m[9],colorWarningTextActive:m[10],colorInfoBg:h[1],colorInfoBgHover:h[2],colorInfoBorder:h[3],colorInfoBorderHover:h[4],colorInfoHover:h[4],colorInfo:h[6],colorInfoActive:h[7],colorInfoTextHover:h[8],colorInfoText:h[9],colorInfoTextActive:h[10],colorLinkHover:v[4],colorLink:v[6],colorLinkActive:v[7],colorBgMask:new s.C("#000").setAlpha(.45).toRgbString(),colorWhite:"#fff"})}(e,{generateColorPalettes:p,generateNeutralColorPalettes:f})),g(e.fontSize)),function(e){let{sizeUnit:t,sizeStep:n}=e;return{sizeXXL:t*(n+8),sizeXL:t*(n+4),sizeLG:t*(n+2),sizeMD:t*(n+1),sizeMS:t*n,size:t*n,sizeSM:t*(n-1),sizeXS:t*(n-2),sizeXXS:t*(n-3)}}(e)),i(e)),function(e){let{motionUnit:t,motionBase:n,borderRadius:r,lineWidth:o}=e;return Object.assign({motionDurationFast:"".concat((n+t).toFixed(1),"s"),motionDurationMid:"".concat((n+2*t).toFixed(1),"s"),motionDurationSlow:"".concat((n+3*t).toFixed(1),"s"),lineWidthBold:o+1},c(r))}(e))}),b={token:l.Z,override:{override:l.Z},hashed:!0},v=r.createContext(b)},46864:function(e,t,n){n.d(t,{M:function(){return r}});let 
r={blue:"#1677ff",purple:"#722ED1",cyan:"#13C2C2",green:"#52C41A",magenta:"#EB2F96",pink:"#eb2f96",red:"#F5222D",orange:"#FA8C16",yellow:"#FADB14",volcano:"#FA541C",geekblue:"#2F54EB",gold:"#FAAD14",lime:"#A0D911"},o=Object.assign(Object.assign({},r),{colorPrimary:"#1677ff",colorSuccess:"#52c41a",colorWarning:"#faad14",colorError:"#ff4d4f",colorInfo:"#1677ff",colorLink:"",colorTextBase:"",colorBgBase:"",fontFamily:"-apple-system, BlinkMacSystemFont, 'Segoe UI', Roboto, 'Helvetica Neue', Arial,\n'Noto Sans', sans-serif, 'Apple Color Emoji', 'Segoe UI Emoji', 'Segoe UI Symbol',\n'Noto Color Emoji'",fontFamilyCode:"'SFMono-Regular', Consolas, 'Liberation Mono', Menlo, Courier, monospace",fontSize:14,lineWidth:1,lineType:"solid",motionUnit:.1,motionBase:0,motionEaseOutCirc:"cubic-bezier(0.08, 0.82, 0.17, 1)",motionEaseInOutCirc:"cubic-bezier(0.78, 0.14, 0.15, 0.86)",motionEaseOut:"cubic-bezier(0.215, 0.61, 0.355, 1)",motionEaseInOut:"cubic-bezier(0.645, 0.045, 0.355, 1)",motionEaseOutBack:"cubic-bezier(0.12, 0.4, 0.29, 1.46)",motionEaseInBack:"cubic-bezier(0.71, -0.46, 0.88, 0.6)",motionEaseInQuint:"cubic-bezier(0.755, 0.05, 0.855, 0.06)",motionEaseOutQuint:"cubic-bezier(0.23, 1, 0.32, 1)",borderRadius:6,sizeUnit:4,sizeStep:4,sizePopupArrow:16,controlHeight:32,zIndexBase:0,zIndexPopupBase:1e3,opacityImage:1,wireframe:!1,motion:!0});t.Z=o},49202:function(e,t,n){function r(e){return(e+8)/e}function o(e){let t=Array(10).fill(null).map((t,n)=>{let r=e*Math.pow(2.71828,(n-1)/5);return 2*Math.floor((n>1?Math.floor(r):Math.ceil(r))/2)});return t[1]=e,t.map(e=>({size:e,lineHeight:r(e)}))}n.d(t,{D:function(){return r},Z:function(){return o}})},24750:function(e,t,n){n.d(t,{ZP:function(){return b},ID:function(){return m},NJ:function(){return f}});var r=n(64090),o=n(8985),a=n(43345),i=n(46864),l=n(6336);function s(e){return e>=0&&e<=255}var c=function(e,t){let{r:n,g:r,b:o,a:a}=new l.C(e).toRgb();if(a<1)return e;let{r:i,g:c,b:u}=new l.C(t).toRgb();for(let e=.01;e<=1;e+=.01){let 
t=Math.round((n-i*(1-e))/e),a=Math.round((r-c*(1-e))/e),d=Math.round((o-u*(1-e))/e);if(s(t)&&s(a)&&s(d))return new l.C({r:t,g:a,b:d,a:Math.round(100*e)/100}).toRgbString()}return new l.C({r:n,g:r,b:o,a:1}).toRgbString()},u=function(e,t){var n={};for(var r in e)Object.prototype.hasOwnProperty.call(e,r)&&0>t.indexOf(r)&&(n[r]=e[r]);if(null!=e&&"function"==typeof Object.getOwnPropertySymbols)for(var o=0,r=Object.getOwnPropertySymbols(e);ot.indexOf(r[o])&&Object.prototype.propertyIsEnumerable.call(e,r[o])&&(n[r[o]]=e[r[o]]);return n};function d(e){let{override:t}=e,n=u(e,["override"]),r=Object.assign({},t);Object.keys(i.Z).forEach(e=>{delete r[e]});let o=Object.assign(Object.assign({},n),r);return!1===o.motion&&(o.motionDurationFast="0s",o.motionDurationMid="0s",o.motionDurationSlow="0s"),Object.assign(Object.assign(Object.assign({},o),{colorFillContent:o.colorFillSecondary,colorFillContentHover:o.colorFill,colorFillAlter:o.colorFillQuaternary,colorBgContainerDisabled:o.colorFillTertiary,colorBorderBg:o.colorBgContainer,colorSplit:c(o.colorBorderSecondary,o.colorBgContainer),colorTextPlaceholder:o.colorTextQuaternary,colorTextDisabled:o.colorTextQuaternary,colorTextHeading:o.colorText,colorTextLabel:o.colorTextSecondary,colorTextDescription:o.colorTextTertiary,colorTextLightSolid:o.colorWhite,colorHighlight:o.colorError,colorBgTextHover:o.colorFillSecondary,colorBgTextActive:o.colorFill,colorIcon:o.colorTextTertiary,colorIconHover:o.colorText,colorErrorOutline:c(o.colorErrorBg,o.colorBgContainer),colorWarningOutline:c(o.colorWarningBg,o.colorBgContainer),fontSizeIcon:o.fontSizeSM,lineWidthFocus:4*o.lineWidth,lineWidth:o.lineWidth,controlOutlineWidth:2*o.lineWidth,controlInteractiveSize:o.controlHeight/2,controlItemBgHover:o.colorFillTertiary,controlItemBgActive:o.colorPrimaryBg,controlItemBgActiveHover:o.colorPrimaryBgHover,controlItemBgActiveDisabled:o.colorFill,controlTmpOutline:o.colorFillQuaternary,controlOutline:c(o.colorPrimaryBg,o.colorBgContainer),lineType:o.lin
eType,borderRadius:o.borderRadius,borderRadiusXS:o.borderRadiusXS,borderRadiusSM:o.borderRadiusSM,borderRadiusLG:o.borderRadiusLG,fontWeightStrong:600,opacityLoading:.65,linkDecoration:"none",linkHoverDecoration:"none",linkFocusDecoration:"none",controlPaddingHorizontal:12,controlPaddingHorizontalSM:8,paddingXXS:o.sizeXXS,paddingXS:o.sizeXS,paddingSM:o.sizeSM,padding:o.size,paddingMD:o.sizeMD,paddingLG:o.sizeLG,paddingXL:o.sizeXL,paddingContentHorizontalLG:o.sizeLG,paddingContentVerticalLG:o.sizeMS,paddingContentHorizontal:o.sizeMS,paddingContentVertical:o.sizeSM,paddingContentHorizontalSM:o.size,paddingContentVerticalSM:o.sizeXS,marginXXS:o.sizeXXS,marginXS:o.sizeXS,marginSM:o.sizeSM,margin:o.size,marginMD:o.sizeMD,marginLG:o.sizeLG,marginXL:o.sizeXL,marginXXL:o.sizeXXL,boxShadow:"\n 0 6px 16px 0 rgba(0, 0, 0, 0.08),\n 0 3px 6px -4px rgba(0, 0, 0, 0.12),\n 0 9px 28px 8px rgba(0, 0, 0, 0.05)\n ",boxShadowSecondary:"\n 0 6px 16px 0 rgba(0, 0, 0, 0.08),\n 0 3px 6px -4px rgba(0, 0, 0, 0.12),\n 0 9px 28px 8px rgba(0, 0, 0, 0.05)\n ",boxShadowTertiary:"\n 0 1px 2px 0 rgba(0, 0, 0, 0.03),\n 0 1px 6px -1px rgba(0, 0, 0, 0.02),\n 0 2px 4px 0 rgba(0, 0, 0, 0.02)\n ",screenXS:480,screenXSMin:480,screenXSMax:575,screenSM:576,screenSMMin:576,screenSMMax:767,screenMD:768,screenMDMin:768,screenMDMax:991,screenLG:992,screenLGMin:992,screenLGMax:1199,screenXL:1200,screenXLMin:1200,screenXLMax:1599,screenXXL:1600,screenXXLMin:1600,boxShadowPopoverArrow:"2px 2px 5px rgba(0, 0, 0, 0.05)",boxShadowCard:"\n 0 1px 2px -2px ".concat(new l.C("rgba(0, 0, 0, 0.16)").toRgbString(),",\n 0 3px 6px 0 ").concat(new l.C("rgba(0, 0, 0, 0.12)").toRgbString(),",\n 0 5px 12px 4px ").concat(new l.C("rgba(0, 0, 0, 0.09)").toRgbString(),"\n "),boxShadowDrawerRight:"\n -6px 0 16px 0 rgba(0, 0, 0, 0.08),\n -3px 0 6px -4px rgba(0, 0, 0, 0.12),\n -9px 0 28px 8px rgba(0, 0, 0, 0.05)\n ",boxShadowDrawerLeft:"\n 6px 0 16px 0 rgba(0, 0, 0, 0.08),\n 3px 0 6px -4px rgba(0, 0, 0, 0.12),\n 9px 0 28px 8px rgba(0, 0, 
0, 0.05)\n ",boxShadowDrawerUp:"\n 0 6px 16px 0 rgba(0, 0, 0, 0.08),\n 0 3px 6px -4px rgba(0, 0, 0, 0.12),\n 0 9px 28px 8px rgba(0, 0, 0, 0.05)\n ",boxShadowDrawerDown:"\n 0 -6px 16px 0 rgba(0, 0, 0, 0.08),\n 0 -3px 6px -4px rgba(0, 0, 0, 0.12),\n 0 -9px 28px 8px rgba(0, 0, 0, 0.05)\n ",boxShadowTabsOverflowLeft:"inset 10px 0 8px -8px rgba(0, 0, 0, 0.08)",boxShadowTabsOverflowRight:"inset -10px 0 8px -8px rgba(0, 0, 0, 0.08)",boxShadowTabsOverflowTop:"inset 0 10px 8px -8px rgba(0, 0, 0, 0.08)",boxShadowTabsOverflowBottom:"inset 0 -10px 8px -8px rgba(0, 0, 0, 0.08)"}),r)}var p=function(e,t){var n={};for(var r in e)Object.prototype.hasOwnProperty.call(e,r)&&0>t.indexOf(r)&&(n[r]=e[r]);if(null!=e&&"function"==typeof Object.getOwnPropertySymbols)for(var o=0,r=Object.getOwnPropertySymbols(e);ot.indexOf(r[o])&&Object.prototype.propertyIsEnumerable.call(e,r[o])&&(n[r[o]]=e[r[o]]);return n};let f={lineHeight:!0,lineHeightSM:!0,lineHeightLG:!0,lineHeightHeading1:!0,lineHeightHeading2:!0,lineHeightHeading3:!0,lineHeightHeading4:!0,lineHeightHeading5:!0,opacityLoading:!0,fontWeightStrong:!0,zIndexPopupBase:!0,zIndexBase:!0},m={size:!0,sizeSM:!0,sizeLG:!0,sizeMD:!0,sizeXS:!0,sizeXXS:!0,sizeMS:!0,sizeXL:!0,sizeXXL:!0,sizeUnit:!0,sizeStep:!0,motionBase:!0,motionUnit:!0},g={screenXS:!0,screenXSMin:!0,screenXSMax:!0,screenSM:!0,screenSMMin:!0,screenSMMax:!0,screenMD:!0,screenMDMin:!0,screenMDMax:!0,screenLG:!0,screenLGMin:!0,screenLGMax:!0,screenXL:!0,screenXLMin:!0,screenXLMax:!0,screenXXL:!0,screenXXLMin:!0},h=(e,t,n)=>{let r=n.getDerivativeToken(e),{override:o}=t,a=p(t,["override"]),i=Object.assign(Object.assign({},r),{override:o});return i=d(i),a&&Object.entries(a).forEach(e=>{let[t,n]=e,{theme:r}=n,o=p(n,["theme"]),a=o;r&&(a=h(Object.assign(Object.assign({},i),o),{override:o},r)),i[t]=a}),i};function 
b(){let{token:e,hashed:t,theme:n,override:l,cssVar:s}=r.useContext(a.Mj),c="".concat("5.13.2","-").concat(t||""),u=n||a.uH,[p,b,v]=(0,o.fp)(u,[i.Z,e],{salt:c,override:l,getComputedToken:h,formatToken:d,cssVar:s&&{prefix:s.prefix,key:s.key,unitless:f,ignore:m,preserve:g}});return[u,v,t?b:"",p,s]}},76585:function(e,t,n){n.d(t,{ZP:function(){return C},I$:function(){return A},bk:function(){return T}});var r=n(64090),o=n(8985);n(48563);var a=n(57499),i=n(11303),l=n(24750),s=n(47365),c=n(65127),u=n(72784),d=n(29676),p=n(68605),f=n(27478);let m=(0,c.Z)(function e(){(0,s.Z)(this,e)}),g=function(e){function t(e){var n,r,o;return(0,s.Z)(this,t),r=t,r=(0,p.Z)(r),(n=(0,u.Z)(this,(0,d.Z)()?Reflect.construct(r,o||[],(0,p.Z)(this).constructor):r.apply(this,o))).result=0,e instanceof t?n.result=e.result:"number"==typeof e&&(n.result=e),n}return(0,f.Z)(t,e),(0,c.Z)(t,[{key:"add",value:function(e){return e instanceof t?this.result+=e.result:"number"==typeof e&&(this.result+=e),this}},{key:"sub",value:function(e){return e instanceof t?this.result-=e.result:"number"==typeof e&&(this.result-=e),this}},{key:"mul",value:function(e){return e instanceof t?this.result*=e.result:"number"==typeof e&&(this.result*=e),this}},{key:"div",value:function(e){return e instanceof t?this.result/=e.result:"number"==typeof e&&(this.result/=e),this}},{key:"equal",value:function(){return this.result}}]),t}(m),h="CALC_UNIT";function b(e){return"number"==typeof e?"".concat(e).concat(h):e}let v=function(e){function t(e){var n,r,o;return(0,s.Z)(this,t),r=t,r=(0,p.Z)(r),(n=(0,u.Z)(this,(0,d.Z)()?Reflect.construct(r,o||[],(0,p.Z)(this).constructor):r.apply(this,o))).result="",e instanceof t?n.result="(".concat(e.result,")"):"number"==typeof e?n.result=b(e):"string"==typeof e&&(n.result=e),n}return(0,f.Z)(t,e),(0,c.Z)(t,[{key:"add",value:function(e){return e instanceof t?this.result="".concat(this.result," + ").concat(e.getResult()):("number"==typeof e||"string"==typeof e)&&(this.result="".concat(this.result," + 
").concat(b(e))),this.lowPriority=!0,this}},{key:"sub",value:function(e){return e instanceof t?this.result="".concat(this.result," - ").concat(e.getResult()):("number"==typeof e||"string"==typeof e)&&(this.result="".concat(this.result," - ").concat(b(e))),this.lowPriority=!0,this}},{key:"mul",value:function(e){return this.lowPriority&&(this.result="(".concat(this.result,")")),e instanceof t?this.result="".concat(this.result," * ").concat(e.getResult(!0)):("number"==typeof e||"string"==typeof e)&&(this.result="".concat(this.result," * ").concat(e)),this.lowPriority=!1,this}},{key:"div",value:function(e){return this.lowPriority&&(this.result="(".concat(this.result,")")),e instanceof t?this.result="".concat(this.result," / ").concat(e.getResult(!0)):("number"==typeof e||"string"==typeof e)&&(this.result="".concat(this.result," / ").concat(e)),this.lowPriority=!1,this}},{key:"getResult",value:function(e){return this.lowPriority||e?"(".concat(this.result,")"):this.result}},{key:"equal",value:function(e){let{unit:t=!0}=e||{},n=RegExp("".concat(h),"g");return(this.result=this.result.replace(n,t?"px":""),void 0!==this.lowPriority)?"calc(".concat(this.result,")"):this.result}}]),t}(m);var y=e=>{let t="css"===e?v:g;return e=>new t(e)},E=n(80316),S=n(28030);let w=(e,t,n)=>{var r;return"function"==typeof n?n((0,E.TS)(t,null!==(r=t[e])&&void 0!==r?r:{})):null!=n?n:{}},x=(e,t,n,r)=>{let o=Object.assign({},t[e]);if(null==r?void 0:r.deprecatedTokens){let{deprecatedTokens:e}=r;e.forEach(e=>{var t;let[n,r]=e;((null==o?void 0:o[n])||(null==o?void 0:o[r]))&&(null!==(t=o[r])&&void 0!==t||(o[r]=null==o?void 0:o[n]))})}let a=Object.assign(Object.assign({},n),o);return Object.keys(a).forEach(e=>{a[e]===t[e]&&delete a[e]}),a},O=(e,t)=>"".concat([t,e.replace(/([A-Z]+)([A-Z][a-z]+)/g,"$1-$2").replace(/([a-z])([A-Z])/g,"$1-$2")].filter(Boolean).join("-"));function C(e,t,n){let s=arguments.length>3&&void 0!==arguments[3]?arguments[3]:{},c=Array.isArray(e)?e:[e,e],[u]=c,d=c.join("-");return 
e=>{let[c,p,f,m,g]=(0,l.ZP)(),{getPrefixCls:h,iconPrefixCls:b,csp:v}=(0,r.useContext)(a.E_),C=h(),T=g?"css":"js",k=y(T),{max:A,min:I}="js"===T?{max:Math.max,min:Math.min}:{max:function(){for(var e=arguments.length,t=Array(e),n=0;n(0,o.bf)(e)).join(","),")")},min:function(){for(var e=arguments.length,t=Array(e),n=0;n(0,o.bf)(e)).join(","),")")}},R={theme:c,token:m,hashId:f,nonce:()=>null==v?void 0:v.nonce,clientOnly:s.clientOnly,order:s.order||-999};return(0,o.xy)(Object.assign(Object.assign({},R),{clientOnly:!1,path:["Shared",C]}),()=>[{"&":(0,i.Lx)(m)}]),(0,S.Z)(b,v),[(0,o.xy)(Object.assign(Object.assign({},R),{path:[d,e,b]}),()=>{if(!1===s.injectStyle)return[];let{token:r,flush:a}=(0,E.ZP)(m),l=w(u,p,n),c=".".concat(e),d=x(u,p,l,{deprecatedTokens:s.deprecatedTokens});g&&Object.keys(l).forEach(e=>{l[e]="var(".concat((0,o.ks)(e,O(u,g.prefix)),")")});let h=(0,E.TS)(r,{componentCls:c,prefixCls:e,iconCls:".".concat(b),antCls:".".concat(C),calc:k,max:A,min:I},g?l:d),v=t(h,{hashId:f,prefixCls:e,rootPrefixCls:C,iconPrefixCls:b});return a(u,d),[!1===s.resetStyle?null:(0,i.du)(h,e),v]}),f]}}let T=(e,t,n,r)=>{let o=C(e,t,n,Object.assign({resetStyle:!1,order:-998},r));return e=>{let{prefixCls:t}=e;return o(t),null}},k=(e,t,n)=>{function a(t){return"".concat(e).concat(t.slice(0,1).toUpperCase()).concat(t.slice(1))}let{unitless:i={},injectStyle:s=!0}=null!=n?n:{},c={[a("zIndexPopup")]:!0};Object.keys(i).forEach(e=>{c[a(e)]=i[e]});let u=r=>{let{rootCls:i,cssVar:s}=r,[,u]=(0,l.ZP)();return(0,o.CI)({path:[e],prefix:s.prefix,key:null==s?void 0:s.key,unitless:Object.assign(Object.assign({},l.NJ),c),ignore:l.ID,token:u,scope:i},()=>{let r=w(e,u,t),o=x(e,u,r,{deprecatedTokens:null==n?void 0:n.deprecatedTokens});return Object.keys(r).forEach(e=>{o[a(e)]=o[e],delete o[e]}),o}),null};return t=>{let[,,,,n]=(0,l.ZP)();return[o=>s&&n?r.createElement(r.Fragment,null,r.createElement(u,{rootCls:t,cssVar:n,component:e}),o):o,null==n?void 0:n.key]}},A=(e,t,n,r)=>{let 
o=C(e,t,n,r),a=k(Array.isArray(e)?e[0]:e,n,r);return function(e){let t=arguments.length>1&&void 0!==arguments[1]?arguments[1]:e,[,n]=o(e),[r,i]=a(t);return[r,n,i]}}},80316:function(e,t,n){n.d(t,{TS:function(){return a}});let r="undefined"!=typeof CSSINJS_STATISTIC,o=!0;function a(){for(var e=arguments.length,t=Array(e),n=0;n{Object.keys(e).forEach(t=>{Object.defineProperty(a,t,{configurable:!0,enumerable:!0,get:()=>e[t]})})}),o=!0,a}let i={};function l(){}t.ZP=e=>{let t;let n=e,a=l;return r&&"undefined"!=typeof Proxy&&(t=new Set,n=new Proxy(e,{get:(e,n)=>(o&&t.add(n),e[n])}),a=(e,n)=>{var r;i[e]={global:Array.from(t),component:Object.assign(Object.assign({},null===(r=i[e])||void 0===r?void 0:r.component),n)}}),{token:n,keys:t,flush:a}}},28030:function(e,t,n){var r=n(8985),o=n(11303),a=n(24750);t.Z=(e,t)=>{let[n,i]=(0,a.ZP)();return(0,r.xy)({theme:n,token:i,hashId:"",path:["ant-design-icons",e],nonce:()=>null==t?void 0:t.nonce},()=>[{[".".concat(e)]:Object.assign(Object.assign({},(0,o.Ro)()),{[".".concat(e," .").concat(e,"-icon")]:{display:"block"}})}])}},1460:function(e,t,n){n.d(t,{Z:function(){return H}});var r=n(64090),o=n(16480),a=n.n(o);function i(e){var t=e.children,n=e.prefixCls,o=e.id,i=e.overlayInnerStyle,l=e.className,s=e.style;return r.createElement("div",{className:a()("".concat(n,"-content"),l),style:s},r.createElement("div",{className:"".concat(n,"-inner"),id:o,role:"tooltip",style:i},"function"==typeof t?t():t))}var 
l=n(14749),s=n(5239),c=n(6787),u=n(44101),d={shiftX:64,adjustY:1},p={adjustX:1,shiftY:!0},f=[0,0],m={left:{points:["cr","cl"],overflow:p,offset:[-4,0],targetOffset:f},right:{points:["cl","cr"],overflow:p,offset:[4,0],targetOffset:f},top:{points:["bc","tc"],overflow:d,offset:[0,-4],targetOffset:f},bottom:{points:["tc","bc"],overflow:d,offset:[0,4],targetOffset:f},topLeft:{points:["bl","tl"],overflow:d,offset:[0,-4],targetOffset:f},leftTop:{points:["tr","tl"],overflow:p,offset:[-4,0],targetOffset:f},topRight:{points:["br","tr"],overflow:d,offset:[0,-4],targetOffset:f},rightTop:{points:["tl","tr"],overflow:p,offset:[4,0],targetOffset:f},bottomRight:{points:["tr","br"],overflow:d,offset:[0,4],targetOffset:f},rightBottom:{points:["bl","br"],overflow:p,offset:[4,0],targetOffset:f},bottomLeft:{points:["tl","bl"],overflow:d,offset:[0,4],targetOffset:f},leftBottom:{points:["br","bl"],overflow:p,offset:[-4,0],targetOffset:f}},g=["overlayClassName","trigger","mouseEnterDelay","mouseLeaveDelay","overlayStyle","prefixCls","children","onVisibleChange","afterVisibleChange","transitionName","animation","motion","placement","align","destroyTooltipOnHide","defaultVisible","getTooltipContainer","overlayInnerStyle","arrowContent","overlay","id","showArrow"],h=(0,r.forwardRef)(function(e,t){var n=e.overlayClassName,o=e.trigger,a=e.mouseEnterDelay,d=e.mouseLeaveDelay,p=e.overlayStyle,f=e.prefixCls,h=void 0===f?"rc-tooltip":f,b=e.children,v=e.onVisibleChange,y=e.afterVisibleChange,E=e.transitionName,S=e.animation,w=e.motion,x=e.placement,O=e.align,C=e.destroyTooltipOnHide,T=e.defaultVisible,k=e.getTooltipContainer,A=e.overlayInnerStyle,I=(e.arrowContent,e.overlay),R=e.id,N=e.showArrow,_=(0,c.Z)(e,g),P=(0,r.useRef)(null);(0,r.useImperativeHandle)(t,function(){return P.current});var M=(0,s.Z)({},_);return"visible"in e&&(M.popupVisible=e.visible),r.createElement(u.Z,(0,l.Z)({popupClassName:n,prefixCls:h,popup:function(){return 
r.createElement(i,{key:"content",prefixCls:h,id:R,overlayInnerStyle:A},I)},action:void 0===o?["hover"]:o,builtinPlacements:m,popupPlacement:void 0===x?"right":x,ref:P,popupAlign:void 0===O?{}:O,getPopupContainer:k,onPopupVisibleChange:v,afterPopupVisibleChange:y,popupTransitionName:E,popupAnimation:S,popupMotion:w,defaultPopupVisible:T,autoDestroy:void 0!==C&&C,mouseLeaveDelay:void 0===d?.1:d,popupStyle:p,mouseEnterDelay:void 0===a?0:a,arrow:void 0===N||N},M),b)}),b=n(44329),v=n(51761),y=n(47387),E=n(67966),S=n(65823),w=n(76564),x=n(86718),O=n(57499),C=n(92801),T=n(24750),k=n(11303),A=n(58854),I=n(89869);let R=["blue","purple","cyan","green","magenta","pink","red","orange","yellow","volcano","geekblue","lime","gold"];var N=n(80316),_=n(76585),P=n(8985),M=n(2638);let L=e=>{var t;let{componentCls:n,tooltipMaxWidth:r,tooltipColor:o,tooltipBg:a,tooltipBorderRadius:i,zIndexPopup:l,controlHeight:s,boxShadowSecondary:c,paddingSM:u,paddingXS:d}=e;return[{[n]:Object.assign(Object.assign(Object.assign(Object.assign({},(0,k.Wf)(e)),{position:"absolute",zIndex:l,display:"block",width:"max-content",maxWidth:r,visibility:"visible",transformOrigin:"var(--arrow-x, 50%) var(--arrow-y, 50%)","&-hidden":{display:"none"},"--antd-arrow-background-color":a,["".concat(n,"-inner")]:{minWidth:s,minHeight:s,padding:"".concat((0,P.bf)(e.calc(u).div(2).equal())," ").concat((0,P.bf)(d)),color:o,textAlign:"start",textDecoration:"none",wordWrap:"break-word",backgroundColor:a,borderRadius:i,boxShadow:c,boxSizing:"border-box"},"&-placement-left,&-placement-leftTop,&-placement-leftBottom,&-placement-right,&-placement-rightTop,&-placement-rightBottom":{["".concat(n,"-inner")]:{borderRadius:e.min(i,I.qN)}},["".concat(n,"-content")]:{position:"relative"}}),(t=(e,t)=>{let{darkColor:r}=t;return{["&".concat(n,"-").concat(e)]:{["".concat(n,"-inner")]:{backgroundColor:r},["".concat(n,"-arrow")]:{"--antd-arrow-background-color":r}}}},R.reduce((n,r)=>{let 
o=e["".concat(r,"1")],a=e["".concat(r,"3")],i=e["".concat(r,"6")],l=e["".concat(r,"7")];return Object.assign(Object.assign({},n),t(r,{lightColor:o,lightBorderColor:a,darkColor:i,textColor:l}))},{}))),{"&-rtl":{direction:"rtl"}})},(0,I.ZP)(e,"var(--antd-arrow-background-color)"),{["".concat(n,"-pure")]:{position:"relative",maxWidth:"none",margin:e.sizePopupArrow}}]},D=e=>Object.assign(Object.assign({zIndexPopup:e.zIndexPopupBase+70},(0,I.wZ)({contentRadius:e.borderRadius,limitVerticalRadius:!0})),(0,M.w)((0,N.TS)(e,{borderRadiusOuter:Math.min(e.borderRadiusOuter,4)})));function j(e){let t=!(arguments.length>1)||void 0===arguments[1]||arguments[1];return(0,_.I$)("Tooltip",e=>{let{borderRadius:t,colorTextLightSolid:n,colorBgSpotlight:r}=e;return[L((0,N.TS)(e,{tooltipMaxWidth:250,tooltipColor:n,tooltipBorderRadius:t,tooltipBg:r})),(0,A._y)(e,"zoom-big-fast")]},D,{resetStyle:!1,injectStyle:t})(e)}var F=n(63787);let B=R.map(e=>"".concat(e,"-inverse"));function U(e,t){let n=function(e){let t=!(arguments.length>1)||void 0===arguments[1]||arguments[1];return t?[].concat((0,F.Z)(B),(0,F.Z)(R)).includes(e):R.includes(e)}(t),r=a()({["".concat(e,"-").concat(t)]:t&&n}),o={},i={};return t&&!n&&(o.background=t,i["--antd-arrow-background-color"]=t),{className:r,overlayStyle:o,arrowStyle:i}}var Z=function(e,t){var n={};for(var r in e)Object.prototype.hasOwnProperty.call(e,r)&&0>t.indexOf(r)&&(n[r]=e[r]);if(null!=e&&"function"==typeof Object.getOwnPropertySymbols)for(var o=0,r=Object.getOwnPropertySymbols(e);ot.indexOf(r[o])&&Object.prototype.propertyIsEnumerable.call(e,r[o])&&(n[r[o]]=e[r[o]]);return n};let z=r.forwardRef((e,t)=>{var 
n,o;let{prefixCls:i,openClassName:l,getTooltipContainer:s,overlayClassName:c,color:u,overlayInnerStyle:d,children:p,afterOpenChange:f,afterVisibleChange:m,destroyTooltipOnHide:g,arrow:k=!0,title:A,overlay:I,builtinPlacements:R,arrowPointAtCenter:N=!1,autoAdjustOverflow:_=!0}=e,P=!!k,[,M]=(0,T.ZP)(),{getPopupContainer:L,getPrefixCls:D,direction:F}=r.useContext(O.E_),B=(0,w.ln)("Tooltip"),z=r.useRef(null),H=()=>{var e;null===(e=z.current)||void 0===e||e.forceAlign()};r.useImperativeHandle(t,()=>({forceAlign:H,forcePopupAlign:()=>{B.deprecated(!1,"forcePopupAlign","forceAlign"),H()}}));let[G,$]=(0,b.Z)(!1,{value:null!==(n=e.open)&&void 0!==n?n:e.visible,defaultValue:null!==(o=e.defaultOpen)&&void 0!==o?o:e.defaultVisible}),W=!A&&!I&&0!==A,V=r.useMemo(()=>{var e,t;let n=N;return"object"==typeof k&&(n=null!==(t=null!==(e=k.pointAtCenter)&&void 0!==e?e:k.arrowPointAtCenter)&&void 0!==t?t:N),R||(0,E.Z)({arrowPointAtCenter:n,autoAdjustOverflow:_,arrowWidth:P?M.sizePopupArrow:0,borderRadius:M.borderRadius,offset:M.marginXXS,visibleFirst:!0})},[N,k,R,M]),q=r.useMemo(()=>0===A?A:I||A||"",[I,A]),Y=r.createElement(C.BR,null,"function"==typeof q?q():q),{getPopupContainer:K,placement:X="top",mouseEnterDelay:Q=.1,mouseLeaveDelay:J=.1,overlayStyle:ee,rootClassName:et}=e,en=Z(e,["getPopupContainer","placement","mouseEnterDelay","mouseLeaveDelay","overlayStyle","rootClassName"]),er=D("tooltip",i),eo=D(),ea=e["data-popover-inject"],ei=G;"open"in e||"visible"in e||!W||(ei=!1);let el=(0,S.l$)(p)&&!(0,S.M2)(p)?p:r.createElement("span",null,p),es=el.props,ec=es.className&&"string"!=typeof 
es.className?es.className:a()(es.className,l||"".concat(er,"-open")),[eu,ed,ep]=j(er,!ea),ef=U(er,u),em=ef.arrowStyle,eg=Object.assign(Object.assign({},d),ef.overlayStyle),eh=a()(c,{["".concat(er,"-rtl")]:"rtl"===F},ef.className,et,ed,ep),[eb,ev]=(0,v.Cn)("Tooltip",en.zIndex),ey=r.createElement(h,Object.assign({},en,{zIndex:eb,showArrow:P,placement:X,mouseEnterDelay:Q,mouseLeaveDelay:J,prefixCls:er,overlayClassName:eh,overlayStyle:Object.assign(Object.assign({},em),ee),getTooltipContainer:K||s||L,ref:z,builtinPlacements:V,overlay:Y,visible:ei,onVisibleChange:t=>{var n,r;$(!W&&t),W||(null===(n=e.onOpenChange)||void 0===n||n.call(e,t),null===(r=e.onVisibleChange)||void 0===r||r.call(e,t))},afterVisibleChange:null!=f?f:m,overlayInnerStyle:eg,arrowContent:r.createElement("span",{className:"".concat(er,"-arrow-content")}),motion:{motionName:(0,y.m)(eo,"zoom-big-fast",e.transitionName),motionDeadline:1e3},destroyTooltipOnHide:!!g}),ei?(0,S.Tm)(el,{className:ec}):el);return eu(r.createElement(x.Z.Provider,{value:ev},ey))});z._InternalPanelDoNotUseOrYouWillBeFired=e=>{let{prefixCls:t,className:n,placement:o="top",title:l,color:s,overlayInnerStyle:c}=e,{getPrefixCls:u}=r.useContext(O.E_),d=u("tooltip",t),[p,f,m]=j(d),g=U(d,s),h=g.arrowStyle,b=Object.assign(Object.assign({},c),g.overlayStyle),v=a()(f,m,d,"".concat(d,"-pure"),"".concat(d,"-placement-").concat(o),n,g.className);return p(r.createElement("div",{className:v,style:h},r.createElement("div",{className:"".concat(d,"-arrow")}),r.createElement(i,Object.assign({},e,{className:f,prefixCls:d,overlayInnerStyle:b}),l)))};var H=z},36083:function(e,t,n){n.d(t,{default:function(){return eh}});var r=n(64090),o=n(90507),a=n(14749),i={icon:{tag:"svg",attrs:{viewBox:"64 64 896 896",focusable:"false"},children:[{tag:"path",attrs:{d:"M832 64H296c-4.4 0-8 3.6-8 8v56c0 4.4 3.6 8 8 8h496v688c0 4.4 3.6 8 8 8h56c4.4 0 8-3.6 8-8V96c0-17.7-14.3-32-32-32zM704 192H192c-17.7 0-32 14.3-32 32v530.7c0 8.5 3.4 16.6 9.4 22.6l173.3 173.3c2.2 2.2 
4.7 4 7.4 5.5v1.9h4.2c3.5 1.3 7.2 2 11 2H704c17.7 0 32-14.3 32-32V224c0-17.7-14.3-32-32-32zM350 856.2L263.9 770H350v86.2zM664 888H414V746c0-22.1-17.9-40-40-40H232V264h432v624z"}}]},name:"copy",theme:"outlined"},l=n(60688),s=r.forwardRef(function(e,t){return r.createElement(l.Z,(0,a.Z)({},e,{ref:t,icon:i}))}),c={icon:{tag:"svg",attrs:{viewBox:"64 64 896 896",focusable:"false"},children:[{tag:"path",attrs:{d:"M257.7 752c2 0 4-.2 6-.5L431.9 722c2-.4 3.9-1.3 5.3-2.8l423.9-423.9a9.96 9.96 0 000-14.1L694.9 114.9c-1.9-1.9-4.4-2.9-7.1-2.9s-5.2 1-7.1 2.9L256.8 538.8c-1.5 1.5-2.4 3.3-2.8 5.3l-29.5 168.2a33.5 33.5 0 009.4 29.8c6.6 6.4 14.9 9.9 23.8 9.9zm67.4-174.4L687.8 215l73.3 73.3-362.7 362.6-88.9 15.7 15.6-89zM880 836H144c-17.7 0-32 14.3-32 32v36c0 4.4 3.6 8 8 8h784c4.4 0 8-3.6 8-8v-36c0-17.7-14.3-32-32-32z"}}]},name:"edit",theme:"outlined"},u=r.forwardRef(function(e,t){return r.createElement(l.Z,(0,a.Z)({},e,{ref:t,icon:c}))}),d=n(16480),p=n.n(d),f=n(81441),m=n.n(f),g=n(46505),h=n(33054),b=n(24800),v=n(44329),y=n(35704),E=n(74084),S=n(22127),w=function(e){if((0,S.Z)()&&window.document.documentElement){var t=Array.isArray(e)?e:[e],n=window.document.documentElement;return t.some(function(e){return e in n.style})}return!1},x=function(e,t){if(!w(e))return!1;var n=document.createElement("div"),r=n.style[e];return n.style[e]=t,n.style[e]!==r};function O(e,t){return Array.isArray(e)||void 0===t?w(e):x(e,t)}var C=n(4295),T=function(e,t){var n={};for(var r in e)Object.prototype.hasOwnProperty.call(e,r)&&0>t.indexOf(r)&&(n[r]=e[r]);if(null!=e&&"function"==typeof Object.getOwnPropertySymbols)for(var o=0,r=Object.getOwnPropertySymbols(e);ot.indexOf(r[o])&&Object.prototype.propertyIsEnumerable.call(e,r[o])&&(n[r[o]]=e[r[o]]);return n};let k={border:0,background:"transparent",padding:0,lineHeight:"inherit",display:"inline-block"},A=r.forwardRef((e,t)=>{let{style:n,noStyle:o,disabled:a}=e,i=T(e,["style","noStyle","disabled"]),l={};return 
o||(l=Object.assign({},k)),a&&(l.pointerEvents="none"),l=Object.assign(Object.assign({},l),n),r.createElement("div",Object.assign({role:"button",tabIndex:0,ref:t},i,{onKeyDown:e=>{let{keyCode:t}=e;t===C.Z.ENTER&&e.preventDefault()},onKeyUp:t=>{let{keyCode:n}=t,{onClick:r}=e;n===C.Z.ENTER&&r&&r()},style:l}))});var I=n(57499),R=n(70595),N=n(1460),_={icon:{tag:"svg",attrs:{viewBox:"64 64 896 896",focusable:"false"},children:[{tag:"path",attrs:{d:"M864 170h-60c-4.4 0-8 3.6-8 8v518H310v-73c0-6.7-7.8-10.5-13-6.3l-141.9 112a8 8 0 000 12.6l141.9 112c5.3 4.2 13 .4 13-6.3v-75h498c35.3 0 64-28.7 64-64V178c0-4.4-3.6-8-8-8z"}}]},name:"enter",theme:"outlined"},P=r.forwardRef(function(e,t){return r.createElement(l.Z,(0,a.Z)({},e,{ref:t,icon:_}))}),M=n(65823),L=n(78578);let D=e=>({color:e.colorLink,textDecoration:"none",outline:"none",cursor:"pointer",transition:"color ".concat(e.motionDurationSlow),"&:focus, &:hover":{color:e.colorLinkHover},"&:active":{color:e.colorLinkActive}});var j=n(76585),F=n(12215),B=n(8985);let U=(e,t,n,r)=>{let{titleMarginBottom:o,fontWeightStrong:a}=r;return{marginBottom:o,color:n,fontWeight:a,fontSize:e,lineHeight:t}},Z=e=>{let t={};return[1,2,3,4,5].forEach(n=>{t["\n h".concat(n,"&,\n div&-h").concat(n,",\n div&-h").concat(n," > textarea,\n h").concat(n,"\n ")]=U(e["fontSizeHeading".concat(n)],e["lineHeightHeading".concat(n)],e.colorTextHeading,e)}),t},z=e=>{let{componentCls:t}=e;return{"a&, a":Object.assign(Object.assign({},D(e)),{textDecoration:e.linkDecoration,"&:active, &:hover":{textDecoration:e.linkHoverDecoration},["&[disabled], &".concat(t,"-disabled")]:{color:e.colorTextDisabled,cursor:"not-allowed","&:active, &:hover":{color:e.colorTextDisabled},"&:active":{pointerEvents:"none"}}})}},H=e=>({code:{margin:"0 0.2em",paddingInline:"0.4em",paddingBlock:"0.2em 0.1em",fontSize:"85%",fontFamily:e.fontFamilyCode,background:"rgba(150, 150, 150, 0.1)",border:"1px solid rgba(100, 100, 100, 0.2)",borderRadius:3},kbd:{margin:"0 
0.2em",paddingInline:"0.4em",paddingBlock:"0.15em 0.1em",fontSize:"90%",fontFamily:e.fontFamilyCode,background:"rgba(150, 150, 150, 0.06)",border:"1px solid rgba(100, 100, 100, 0.2)",borderBottomWidth:2,borderRadius:3},mark:{padding:0,backgroundColor:F.EV[2]},"u, ins":{textDecoration:"underline",textDecorationSkipInk:"auto"},"s, del":{textDecoration:"line-through"},strong:{fontWeight:600},"ul, ol":{marginInline:0,marginBlock:"0 1em",padding:0,li:{marginInline:"20px 0",marginBlock:0,paddingInline:"4px 0",paddingBlock:0}},ul:{listStyleType:"circle",ul:{listStyleType:"disc"}},ol:{listStyleType:"decimal"},"pre, blockquote":{margin:"1em 0"},pre:{padding:"0.4em 0.6em",whiteSpace:"pre-wrap",wordWrap:"break-word",background:"rgba(150, 150, 150, 0.1)",border:"1px solid rgba(100, 100, 100, 0.2)",borderRadius:3,fontFamily:e.fontFamilyCode,code:{display:"inline",margin:0,padding:0,fontSize:"inherit",fontFamily:"inherit",background:"transparent",border:0}},blockquote:{paddingInline:"0.6em 0",paddingBlock:0,borderInlineStart:"4px solid rgba(100, 100, 100, 0.2)",opacity:.85}}),G=e=>{let{componentCls:t,paddingSM:n}=e;return{"&-edit-content":{position:"relative","div&":{insetInlineStart:e.calc(e.paddingSM).mul(-1).equal(),marginTop:e.calc(n).mul(-1).equal(),marginBottom:"calc(1em - ".concat((0,B.bf)(n),")")},["".concat(t,"-edit-content-confirm")]:{position:"absolute",insetInlineEnd:e.calc(e.marginXS).add(2).equal(),insetBlockEnd:e.marginXS,color:e.colorTextDescription,fontWeight:"normal",fontSize:e.fontSize,fontStyle:"normal",pointerEvents:"none"},textarea:{margin:"0!important",MozTransition:"none",height:"1em"}}}},$=e=>({["".concat(e.componentCls,"-copy-success")]:{"\n &,\n &:hover,\n &:focus":{color:e.colorSuccess}},["".concat(e.componentCls,"-copy-icon-only")]:{marginInlineStart:0}}),W=()=>({"\n a&-ellipsis,\n span&-ellipsis\n ":{display:"inline-block",maxWidth:"100%"},"&-single-line":{whiteSpace:"nowrap"},"&-ellipsis-single-line":{overflow:"hidden",textOverflow:"ellipsis","a&, 
span&":{verticalAlign:"bottom"},"> code":{paddingBlock:0,maxWidth:"calc(100% - 1.2em)",display:"inline-block",overflow:"hidden",textOverflow:"ellipsis",verticalAlign:"bottom",boxSizing:"content-box"}},"&-ellipsis-multiple-line":{display:"-webkit-box",overflow:"hidden",WebkitLineClamp:3,WebkitBoxOrient:"vertical"}}),V=e=>{let{componentCls:t,titleMarginTop:n}=e;return{[t]:Object.assign(Object.assign(Object.assign(Object.assign(Object.assign(Object.assign(Object.assign(Object.assign(Object.assign({color:e.colorText,wordBreak:"break-word",lineHeight:e.lineHeight,["&".concat(t,"-secondary")]:{color:e.colorTextDescription},["&".concat(t,"-success")]:{color:e.colorSuccess},["&".concat(t,"-warning")]:{color:e.colorWarning},["&".concat(t,"-danger")]:{color:e.colorError,"a&:active, a&:focus":{color:e.colorErrorActive},"a&:hover":{color:e.colorErrorHover}},["&".concat(t,"-disabled")]:{color:e.colorTextDisabled,cursor:"not-allowed",userSelect:"none"},"\n div&,\n p\n ":{marginBottom:"1em"}},Z(e)),{["\n & + h1".concat(t,",\n & + h2").concat(t,",\n & + h3").concat(t,",\n & + h4").concat(t,",\n & + h5").concat(t,"\n ")]:{marginTop:n},"\n div,\n ul,\n li,\n p,\n h1,\n h2,\n h3,\n h4,\n h5":{"\n + h1,\n + h2,\n + h3,\n + h4,\n + h5\n ":{marginTop:n}}}),H(e)),z(e)),{["\n ".concat(t,"-expand,\n ").concat(t,"-edit,\n ").concat(t,"-copy\n ")]:Object.assign(Object.assign({},D(e)),{marginInlineStart:e.marginXXS})}),G(e)),$(e)),W()),{"&-rtl":{direction:"rtl"}})}};var 
q=(0,j.I$)("Typography",e=>[V(e)],()=>({titleMarginTop:"1.2em",titleMarginBottom:"0.5em"})),Y=e=>{let{prefixCls:t,"aria-label":n,className:o,style:a,direction:i,maxLength:l,autoSize:s=!0,value:c,onSave:u,onCancel:d,onEnd:f,component:m,enterIcon:g=r.createElement(P,null)}=e,h=r.useRef(null),b=r.useRef(!1),v=r.useRef(),[y,E]=r.useState(c);r.useEffect(()=>{E(c)},[c]),r.useEffect(()=>{if(h.current&&h.current.resizableTextArea){let{textArea:e}=h.current.resizableTextArea;e.focus();let{length:t}=e.value;e.setSelectionRange(t,t)}},[]);let S=()=>{u(y.trim())},w=m?"".concat(t,"-").concat(m):"",[x,O,T]=q(t),k=p()(t,"".concat(t,"-edit-content"),{["".concat(t,"-rtl")]:"rtl"===i},o,w,O,T);return x(r.createElement("div",{className:k,style:a},r.createElement(L.Z,{ref:h,maxLength:l,value:y,onChange:e=>{let{target:t}=e;E(t.value.replace(/[\n\r]/g,""))},onKeyDown:e=>{let{keyCode:t}=e;b.current||(v.current=t)},onKeyUp:e=>{let{keyCode:t,ctrlKey:n,altKey:r,metaKey:o,shiftKey:a}=e;v.current!==t||b.current||n||r||o||a||(t===C.Z.ENTER?(S(),null==f||f()):t===C.Z.ESC&&d())},onCompositionStart:()=>{b.current=!0},onCompositionEnd:()=>{b.current=!1},onBlur:()=>{S()},"aria-label":n,rows:1,autoSize:s}),null!==g?(0,M.Tm)(g,{className:"".concat(t,"-edit-content-confirm")}):null))};function K(e,t){return r.useMemo(()=>{let n=!!e;return[n,Object.assign(Object.assign({},t),n&&"object"==typeof e?e:null)]},[e])}var X=(e,t)=>{let n=r.useRef(!1);r.useEffect(()=>{n.current?e():n.current=!0},t)},Q=function(e,t){var n={};for(var r in e)Object.prototype.hasOwnProperty.call(e,r)&&0>t.indexOf(r)&&(n[r]=e[r]);if(null!=e&&"function"==typeof Object.getOwnPropertySymbols)for(var o=0,r=Object.getOwnPropertySymbols(e);ot.indexOf(r[o])&&Object.prototype.propertyIsEnumerable.call(e,r[o])&&(n[r[o]]=e[r[o]]);return n};let 
J=r.forwardRef((e,t)=>{let{prefixCls:n,component:o="article",className:a,rootClassName:i,setContentRef:l,children:s,direction:c,style:u}=e,d=Q(e,["prefixCls","component","className","rootClassName","setContentRef","children","direction","style"]),{getPrefixCls:f,direction:m,typography:g}=r.useContext(I.E_),h=t;l&&(h=(0,E.sQ)(t,l));let b=f("typography",n),[v,y,S]=q(b),w=p()(b,null==g?void 0:g.className,{["".concat(b,"-rtl")]:"rtl"===(null!=c?c:m)},a,i,y,S),x=Object.assign(Object.assign({},null==g?void 0:g.style),u);return v(r.createElement(o,Object.assign({className:w,style:x,ref:h},d),s))});function ee(e){let t=typeof e;return"string"===t||"number"===t}function et(e,t){let n=0,r=[];for(let o=0;ot){let e=t-n;return r.push(String(a).slice(0,e)),r}r.push(a),n=i}return e}var en=e=>{let{enabledMeasure:t,children:n,text:o,width:a,fontSize:i,rows:l,onEllipsis:s}=e,[[c,u,d],p]=r.useState([0,0,0]),[f,m]=r.useState(0),[g,v]=r.useState(0),[y,E]=r.useState(0),S=r.useRef(null),w=r.useRef(null),x=r.useMemo(()=>(0,h.Z)(o),[o]),O=r.useMemo(()=>{let e;return e=0,x.forEach(t=>{ee(t)?e+=String(t).length:e+=1}),e},[x]),C=r.useMemo(()=>t&&3===g?n(et(x,u),u{t&&a&&i&&O&&(v(1),p([0,Math.ceil(O/2),O]))},[t,a,i,o,O,l]),(0,b.Z)(()=>{var e;1===g&&E((null===(e=S.current)||void 0===e?void 0:e.offsetHeight)||0)},[g]),(0,b.Z)(()=>{var e,t;if(y){if(1===g)((null===(e=w.current)||void 0===e?void 0:e.offsetHeight)||0)<=l*y?(v(4),s(!1)):v(2);else if(2===g){if(c!==d){let e=(null===(t=w.current)||void 0===t?void 0:t.offsetHeight)||0,n=l*y,r=c,o=d;c===d-1?o=c:e<=n?r=u:o=u;let a=Math.ceil((r+o)/2);p([r,a,o])}else v(3),m(u),s(!0)}}},[g,c,d,l,y]);let T=(e,t,n)=>r.createElement("span",{"aria-hidden":!0,ref:t,style:Object.assign({position:"fixed",display:"block",left:0,top:0,zIndex:-9999,visibility:"hidden",pointerEvents:"none",fontSize:2*Math.ceil(i/2)},n)},e);return 
r.createElement(r.Fragment,null,C,t&&3!==g&&4!==g&&r.createElement(r.Fragment,null,T("lg",S,{wordBreak:"keep-all",whiteSpace:"nowrap"}),T(1===g?n(x,!1):n(et(x,u),!0),w,{width:a,whiteSpace:"normal",margin:0,padding:0})))},er=e=>{let{enabledEllipsis:t,isEllipsis:n,children:o,tooltipProps:a}=e;return(null==a?void 0:a.title)&&t?r.createElement(N.Z,Object.assign({open:!!n&&void 0},a),o):o},eo=function(e,t){var n={};for(var r in e)Object.prototype.hasOwnProperty.call(e,r)&&0>t.indexOf(r)&&(n[r]=e[r]);if(null!=e&&"function"==typeof Object.getOwnPropertySymbols)for(var o=0,r=Object.getOwnPropertySymbols(e);ot.indexOf(r[o])&&Object.prototype.propertyIsEnumerable.call(e,r[o])&&(n[r[o]]=e[r[o]]);return n};function ea(e,t,n){return!0===e||void 0===e?t:e||n&&t}function ei(e){return!1===e?[!1,!1]:Array.isArray(e)?e:[e]}let el=r.forwardRef((e,t)=>{var n,a,i;let{prefixCls:l,className:c,style:d,type:f,disabled:S,children:w,ellipsis:x,editable:C,copyable:T,component:k,title:_}=e,P=eo(e,["prefixCls","className","style","type","disabled","children","ellipsis","editable","copyable","component","title"]),{getPrefixCls:M,direction:L}=r.useContext(I.E_),[D]=(0,R.Z)("Text"),j=r.useRef(null),F=r.useRef(null),B=M("typography",l),U=(0,y.Z)(P,["mark","code","delete","underline","strong","keyboard","italic"]),[Z,z]=K(C),[H,G]=(0,v.Z)(!1,{value:z.editing}),{triggerType:$=["icon"]}=z,W=e=>{var t;e&&(null===(t=z.onStart)||void 0===t||t.call(z)),G(e)};X(()=>{var e;H||null===(e=F.current)||void 0===e||e.focus()},[H]);let V=e=>{null==e||e.preventDefault(),W(!0)},[q,Q]=K(T),[ee,et]=r.useState(!1),el=r.useRef(null),es={};Q.format&&(es.format=Q.format);let ec=()=>{el.current&&clearTimeout(el.current)},eu=e=>{var t;null==e||e.preventDefault(),null==e||e.stopPropagation(),m()(Q.text||String(w)||"",es),et(!0),ec(),el.current=setTimeout(()=>{et(!1)},3e3),null===(t=Q.onCopy)||void 
0===t||t.call(Q,e)};r.useEffect(()=>ec,[]);let[ed,ep]=r.useState(!1),[ef,em]=r.useState(!1),[eg,eh]=r.useState(!1),[eb,ev]=r.useState(!1),[ey,eE]=r.useState(!1),[eS,ew]=r.useState(!0),[ex,eO]=K(x,{expandable:!1}),eC=ex&&!eg,{rows:eT=1}=eO,ek=r.useMemo(()=>!eC||void 0!==eO.suffix||eO.onEllipsis||eO.expandable||Z||q,[eC,eO,Z,q]);(0,b.Z)(()=>{ex&&!ek&&(ep(O("webkitLineClamp")),em(O("textOverflow")))},[ek,ex]);let eA=r.useMemo(()=>!ek&&(1===eT?ef:ed),[ek,ef,ed]),eI=eC&&(eA?ey:eb),eR=eC&&1===eT&&eA,eN=eC&&eT>1&&eA,e_=e=>{var t;eh(!0),null===(t=eO.onExpand)||void 0===t||t.call(eO,e)},[eP,eM]=r.useState(0),[eL,eD]=r.useState(0),ej=e=>{var t;ev(e),eb!==e&&(null===(t=eO.onEllipsis)||void 0===t||t.call(eO,e))};r.useEffect(()=>{let e=j.current;if(ex&&eA&&e){let t=eN?e.offsetHeight{let e=j.current;if("undefined"==typeof IntersectionObserver||!e||!eA||!eC)return;let t=new IntersectionObserver(()=>{ew(!!e.offsetParent)});return t.observe(e),()=>{t.disconnect()}},[eA,eC]);let eF={};eF=!0===eO.tooltip?{title:null!==(n=z.text)&&void 0!==n?n:w}:r.isValidElement(eO.tooltip)?{title:eO.tooltip}:"object"==typeof eO.tooltip?Object.assign({title:null!==(a=z.text)&&void 0!==a?a:w},eO.tooltip):{title:eO.tooltip};let eB=r.useMemo(()=>{let e=e=>["string","number"].includes(typeof e);return!ex||eA?void 0:e(z.text)?z.text:e(w)?w:e(_)?_:e(eF.title)?eF.title:void 0},[ex,eA,_,eF.title,eI]);if(H)return r.createElement(Y,{value:null!==(i=z.text)&&void 0!==i?i:"string"==typeof w?w:"",onSave:e=>{var t;null===(t=z.onChange)||void 0===t||t.call(z,e),W(!1)},onCancel:()=>{var e;null===(e=z.onCancel)||void 0===e||e.call(z),W(!1)},onEnd:z.onEnd,prefixCls:B,className:c,style:d,direction:L,component:k,maxLength:z.maxLength,autoSize:z.autoSize,enterIcon:z.enterIcon});let eU=()=>{let e;let{expandable:t,symbol:n}=eO;return t?(e=n||(null==D?void 0:D.expand),r.createElement("a",{key:"expand",className:"".concat(B,"-expand"),onClick:e_,"aria-label":null==D?void 
0:D.expand},e)):null},eZ=()=>{if(!Z)return;let{icon:e,tooltip:t}=z,n=(0,h.Z)(t)[0]||(null==D?void 0:D.edit),o="string"==typeof n?n:"";return $.includes("icon")?r.createElement(N.Z,{key:"edit",title:!1===t?"":n},r.createElement(A,{ref:F,className:"".concat(B,"-edit"),onClick:V,"aria-label":o},e||r.createElement(u,{role:"button"}))):null},ez=()=>{if(!q)return null;let{tooltips:e,icon:t}=Q,n=ei(e),a=ei(t),i=ee?ea(n[1],null==D?void 0:D.copied):ea(n[0],null==D?void 0:D.copy),l=ee?null==D?void 0:D.copied:null==D?void 0:D.copy,c="string"==typeof i?i:l;return r.createElement(N.Z,{key:"copy",title:i},r.createElement(A,{className:p()("".concat(B,"-copy"),{["".concat(B,"-copy-success")]:ee,["".concat(B,"-copy-icon-only")]:null==w}),onClick:eu,"aria-label":c},ee?ea(a[1],r.createElement(o.Z,null),!0):ea(a[0],r.createElement(s,null),!0)))},eH=e=>[e&&eU(),eZ(),ez()],eG=e=>[e&&r.createElement("span",{"aria-hidden":!0,key:"ellipsis"},"..."),eO.suffix,eH(e)];return r.createElement(g.Z,{onResize:(e,t)=>{var n;let{offsetWidth:r}=e;eM(r),eD(parseInt(null===(n=window.getComputedStyle)||void 0===n?void 0:n.call(window,t).fontSize,10)||0)},disabled:!eC},n=>r.createElement(er,{tooltipProps:eF,enabledEllipsis:eC,isEllipsis:eI},r.createElement(J,Object.assign({className:p()({["".concat(B,"-").concat(f)]:f,["".concat(B,"-disabled")]:S,["".concat(B,"-ellipsis")]:ex,["".concat(B,"-single-line")]:eC&&1===eT,["".concat(B,"-ellipsis-single-line")]:eR,["".concat(B,"-ellipsis-multiple-line")]:eN},c),prefixCls:l,style:Object.assign(Object.assign({},d),{WebkitLineClamp:eN?eT:void 0}),component:k,ref:(0,E.sQ)(n,j,t),direction:L,onClick:$.includes("text")?V:void 0,"aria-label":null==eB?void 0:eB.toString(),title:_},U),r.createElement(en,{enabledMeasure:eC&&!eA,text:w,rows:eT,width:eP,fontSize:eL,onEllipsis:ej},(t,n)=>{let o=t;return 
t.length&&n&&eB&&(o=r.createElement("span",{key:"show-content","aria-hidden":!0},o)),function(e,t){let{mark:n,code:o,underline:a,delete:i,strong:l,keyboard:s,italic:c}=e,u=t;function d(e,t){t&&(u=r.createElement(e,{},u))}return d("strong",l),d("u",a),d("del",i),d("code",o),d("mark",n),d("kbd",s),d("i",c),u}(e,r.createElement(r.Fragment,null,o,eG(n)))}))))});var es=function(e,t){var n={};for(var r in e)Object.prototype.hasOwnProperty.call(e,r)&&0>t.indexOf(r)&&(n[r]=e[r]);if(null!=e&&"function"==typeof Object.getOwnPropertySymbols)for(var o=0,r=Object.getOwnPropertySymbols(e);ot.indexOf(r[o])&&Object.prototype.propertyIsEnumerable.call(e,r[o])&&(n[r[o]]=e[r[o]]);return n};let ec=r.forwardRef((e,t)=>{var{ellipsis:n,rel:o}=e,a=es(e,["ellipsis","rel"]);let i=Object.assign(Object.assign({},a),{rel:void 0===o&&"_blank"===a.target?"noopener noreferrer":o});return delete i.navigate,r.createElement(el,Object.assign({},i,{ref:t,ellipsis:!!n,component:"a"}))}),eu=r.forwardRef((e,t)=>r.createElement(el,Object.assign({ref:t},e,{component:"div"})));var ed=function(e,t){var n={};for(var r in e)Object.prototype.hasOwnProperty.call(e,r)&&0>t.indexOf(r)&&(n[r]=e[r]);if(null!=e&&"function"==typeof Object.getOwnPropertySymbols)for(var o=0,r=Object.getOwnPropertySymbols(e);ot.indexOf(r[o])&&Object.prototype.propertyIsEnumerable.call(e,r[o])&&(n[r[o]]=e[r[o]]);return n},ep=r.forwardRef((e,t)=>{var{ellipsis:n}=e,o=ed(e,["ellipsis"]);let a=r.useMemo(()=>n&&"object"==typeof n?(0,y.Z)(n,["expandable","rows"]):n,[n]);return r.createElement(el,Object.assign({ref:t},o,{ellipsis:a,component:"span"}))}),ef=function(e,t){var n={};for(var r in e)Object.prototype.hasOwnProperty.call(e,r)&&0>t.indexOf(r)&&(n[r]=e[r]);if(null!=e&&"function"==typeof Object.getOwnPropertySymbols)for(var o=0,r=Object.getOwnPropertySymbols(e);ot.indexOf(r[o])&&Object.prototype.propertyIsEnumerable.call(e,r[o])&&(n[r[o]]=e[r[o]]);return n};let em=[1,2,3,4,5],eg=r.forwardRef((e,t)=>{let 
n;let{level:o=1}=e,a=ef(e,["level"]);return n=em.includes(o)?"h".concat(o):"h1",r.createElement(el,Object.assign({ref:t},a,{component:n}))});J.Text=ep,J.Link=ec,J.Title=eg,J.Paragraph=eu;var eh=J},81441:function(e,t,n){var r=n(89090),o={"text/plain":"Text","text/html":"Url",default:"Text"};e.exports=function(e,t){var n,a,i,l,s,c,u,d,p=!1;t||(t={}),i=t.debug||!1;try{if(s=r(),c=document.createRange(),u=document.getSelection(),(d=document.createElement("span")).textContent=e,d.ariaHidden="true",d.style.all="unset",d.style.position="fixed",d.style.top=0,d.style.clip="rect(0, 0, 0, 0)",d.style.whiteSpace="pre",d.style.webkitUserSelect="text",d.style.MozUserSelect="text",d.style.msUserSelect="text",d.style.userSelect="text",d.addEventListener("copy",function(n){if(n.stopPropagation(),t.format){if(n.preventDefault(),void 0===n.clipboardData){i&&console.warn("unable to use e.clipboardData"),i&&console.warn("trying IE specific stuff"),window.clipboardData.clearData();var r=o[t.format]||o.default;window.clipboardData.setData(r,e)}else n.clipboardData.clearData(),n.clipboardData.setData(t.format,e)}t.onCopy&&(n.preventDefault(),t.onCopy(n.clipboardData))}),document.body.appendChild(d),c.selectNodeContents(d),u.addRange(c),!document.execCommand("copy"))throw Error("copy command was unsuccessful");p=!0}catch(r){i&&console.error("unable to copy using execCommand: ",r),i&&console.warn("trying IE specific stuff");try{window.clipboardData.setData(t.format||"text",e),t.onCopy&&t.onCopy(window.clipboardData),p=!0}catch(r){i&&console.error("unable to copy using clipboardData: ",r),i&&console.error("falling back to prompt"),n="message"in t?t.message:"Copy to clipboard: #{key}, Enter",a=(/mac os x/i.test(navigator.userAgent)?"⌘":"Ctrl")+"+C",l=n.replace(/#{\s*key\s*}/g,a),window.prompt(l,e)}}finally{u&&("function"==typeof u.removeRange?u.removeRange(c):u.removeAllRanges()),d&&document.body.removeChild(d),s()}return p}},6122:function(e,t,n){var r;!function(o){var 
a,i={precision:20,rounding:4,toExpNeg:-7,toExpPos:21,LN10:"2.302585092994045684017991454684364207601101488628772976033327900967572609677352480235997205089598298341967784042286"},l=!0,s="[DecimalError] ",c=s+"Invalid argument: ",u=s+"Exponent out of range: ",d=Math.floor,p=Math.pow,f=/^(\d+(\.\d*)?|\.\d+)(e[+-]?\d+)?$/i,m=d(1286742750677284.5),g={};function h(e,t){var n,r,o,a,i,s,c,u,d=e.constructor,p=d.precision;if(!e.s||!t.s)return t.s||(t=new d(e)),l?T(t,p):t;if(c=e.d,u=t.d,i=e.e,o=t.e,c=c.slice(),a=i-o){for(a<0?(r=c,a=-a,s=u.length):(r=u,o=i,s=c.length),a>(s=(i=Math.ceil(p/7))>s?i+1:s+1)&&(a=s,r.length=1),r.reverse();a--;)r.push(0);r.reverse()}for((s=c.length)-(a=u.length)<0&&(a=s,r=u,u=c,c=r),n=0;a;)n=(c[--a]=c[a]+u[a]+n)/1e7|0,c[a]%=1e7;for(n&&(c.unshift(n),++o),s=c.length;0==c[--s];)c.pop();return t.d=c,t.e=o,l?T(t,p):t}function b(e,t,n){if(e!==~~e||en)throw Error(c+e)}function v(e){var t,n,r,o=e.length-1,a="",i=e[0];if(o>0){for(a+=i,t=1;te.e^this.s<0?1:-1;for(t=0,n=(r=this.d.length)<(o=e.d.length)?r:o;te.d[t]^this.s<0?1:-1;return r===o?0:r>o^this.s<0?1:-1},g.decimalPlaces=g.dp=function(){var e=this.d.length-1,t=(e-this.e)*7;if(e=this.d[e])for(;e%10==0;e/=10)t--;return t<0?0:t},g.dividedBy=g.div=function(e){return y(this,new this.constructor(e))},g.dividedToIntegerBy=g.idiv=function(e){var t=this.constructor;return T(y(this,new t(e),0,1),t.precision)},g.equals=g.eq=function(e){return!this.cmp(e)},g.exponent=function(){return S(this)},g.greaterThan=g.gt=function(e){return this.cmp(e)>0},g.greaterThanOrEqualTo=g.gte=function(e){return this.cmp(e)>=0},g.isInteger=g.isint=function(){return this.e>this.d.length-2},g.isNegative=g.isneg=function(){return this.s<0},g.isPositive=g.ispos=function(){return this.s>0},g.isZero=function(){return 0===this.s},g.lessThan=g.lt=function(e){return 0>this.cmp(e)},g.lessThanOrEqualTo=g.lte=function(e){return 1>this.cmp(e)},g.logarithm=g.log=function(e){var t,n=this.constructor,r=n.precision,o=r+5;if(void 0===e)e=new n(10);else 
if((e=new n(e)).s<1||e.eq(a))throw Error(s+"NaN");if(this.s<1)throw Error(s+(this.s?"NaN":"-Infinity"));return this.eq(a)?new n(0):(l=!1,t=y(O(this,o),O(e,o),o),l=!0,T(t,r))},g.minus=g.sub=function(e){return e=new this.constructor(e),this.s==e.s?k(this,e):h(this,(e.s=-e.s,e))},g.modulo=g.mod=function(e){var t,n=this.constructor,r=n.precision;if(!(e=new n(e)).s)throw Error(s+"NaN");return this.s?(l=!1,t=y(this,e,0,1).times(e),l=!0,this.minus(t)):T(new n(this),r)},g.naturalExponential=g.exp=function(){return E(this)},g.naturalLogarithm=g.ln=function(){return O(this)},g.negated=g.neg=function(){var e=new this.constructor(this);return e.s=-e.s||0,e},g.plus=g.add=function(e){return e=new this.constructor(e),this.s==e.s?h(this,e):k(this,(e.s=-e.s,e))},g.precision=g.sd=function(e){var t,n,r;if(void 0!==e&&!!e!==e&&1!==e&&0!==e)throw Error(c+e);if(t=S(this)+1,n=7*(r=this.d.length-1)+1,r=this.d[r]){for(;r%10==0;r/=10)n--;for(r=this.d[0];r>=10;r/=10)n++}return e&&t>n?t:n},g.squareRoot=g.sqrt=function(){var e,t,n,r,o,a,i,c=this.constructor;if(this.s<1){if(!this.s)return new c(0);throw Error(s+"NaN")}for(e=S(this),l=!1,0==(o=Math.sqrt(+this))||o==1/0?(((t=v(this.d)).length+e)%2==0&&(t+="0"),o=Math.sqrt(t),e=d((e+1)/2)-(e<0||e%2),r=new c(t=o==1/0?"5e"+e:(t=o.toExponential()).slice(0,t.indexOf("e")+1)+e)):r=new c(o.toString()),o=i=(n=c.precision)+3;;)if(r=(a=r).plus(y(this,a,i+2)).times(.5),v(a.d).slice(0,i)===(t=v(r.d)).slice(0,i)){if(t=t.slice(i-3,i+1),o==i&&"4999"==t){if(T(a,n+1,0),a.times(a).eq(this)){r=a;break}}else if("9999"!=t)break;i+=4}return l=!0,T(r,n)},g.times=g.mul=function(e){var t,n,r,o,a,i,s,c,u,d=this.constructor,p=this.d,f=(e=new d(e)).d;if(!this.s||!e.s)return new d(0);for(e.s*=this.s,n=this.e+e.e,(c=p.length)<(u=f.length)&&(a=p,p=f,f=a,i=c,c=u,u=i),a=[],r=i=c+u;r--;)a.push(0);for(r=u;--r>=0;){for(t=0,o=c+r;o>r;)s=a[o]+f[r]*p[o-r-1]+t,a[o--]=s%1e7|0,t=s/1e7|0;a[o]=(a[o]+t)%1e7|0}for(;!a[--i];)a.pop();return 
t?++n:a.shift(),e.d=a,e.e=n,l?T(e,d.precision):e},g.toDecimalPlaces=g.todp=function(e,t){var n=this,r=n.constructor;return(n=new r(n),void 0===e)?n:(b(e,0,1e9),void 0===t?t=r.rounding:b(t,0,8),T(n,e+S(n)+1,t))},g.toExponential=function(e,t){var n,r=this,o=r.constructor;return void 0===e?n=A(r,!0):(b(e,0,1e9),void 0===t?t=o.rounding:b(t,0,8),n=A(r=T(new o(r),e+1,t),!0,e+1)),n},g.toFixed=function(e,t){var n,r,o=this.constructor;return void 0===e?A(this):(b(e,0,1e9),void 0===t?t=o.rounding:b(t,0,8),n=A((r=T(new o(this),e+S(this)+1,t)).abs(),!1,e+S(r)+1),this.isneg()&&!this.isZero()?"-"+n:n)},g.toInteger=g.toint=function(){var e=this.constructor;return T(new e(this),S(this)+1,e.rounding)},g.toNumber=function(){return+this},g.toPower=g.pow=function(e){var t,n,r,o,i,c,u=this,p=u.constructor,f=+(e=new p(e));if(!e.s)return new p(a);if(!(u=new p(u)).s){if(e.s<1)throw Error(s+"Infinity");return u}if(u.eq(a))return u;if(r=p.precision,e.eq(a))return T(u,r);if(c=(t=e.e)>=(n=e.d.length-1),i=u.s,c){if((n=f<0?-f:f)<=9007199254740991){for(o=new p(a),t=Math.ceil(r/7+4),l=!1;n%2&&I((o=o.times(u)).d,t),0!==(n=d(n/2));)I((u=u.times(u)).d,t);return l=!0,e.s<0?new p(a).div(o):T(o,r)}}else if(i<0)throw Error(s+"NaN");return i=i<0&&1&e.d[Math.max(t,n)]?-1:1,u.s=1,l=!1,o=e.times(O(u,r+12)),l=!0,(o=E(o)).s=i,o},g.toPrecision=function(e,t){var n,r,o=this,a=o.constructor;return void 0===e?(n=S(o),r=A(o,n<=a.toExpNeg||n>=a.toExpPos)):(b(e,1,1e9),void 0===t?t=a.rounding:b(t,0,8),n=S(o=T(new a(o),e,t)),r=A(o,e<=n||n<=a.toExpNeg,e)),r},g.toSignificantDigits=g.tosd=function(e,t){var n=this.constructor;return void 0===e?(e=n.precision,t=n.rounding):(b(e,1,1e9),void 0===t?t=n.rounding:b(t,0,8)),T(new n(this),e,t)},g.toString=g.valueOf=g.val=g.toJSON=function(){var e=S(this),t=this.constructor;return A(this,e<=t.toExpNeg||e>=t.toExpPos)};var y=function(){function e(e,t){var n,r=0,o=e.length;for(e=e.slice();o--;)n=e[o]*t+r,e[o]=n%1e7|0,r=n/1e7|0;return r&&e.unshift(r),e}function t(e,t,n,r){var 
o,a;if(n!=r)a=n>r?1:-1;else for(o=a=0;ot[o]?1:-1;break}return a}function n(e,t,n){for(var r=0;n--;)e[n]-=r,r=e[n]1;)e.shift()}return function(r,o,a,i){var l,c,u,d,p,f,m,g,h,b,v,y,E,w,x,O,C,k,A=r.constructor,I=r.s==o.s?1:-1,R=r.d,N=o.d;if(!r.s)return new A(r);if(!o.s)throw Error(s+"Division by zero");for(u=0,c=r.e-o.e,C=N.length,x=R.length,g=(m=new A(I)).d=[];N[u]==(R[u]||0);)++u;if(N[u]>(R[u]||0)&&--c,(y=null==a?a=A.precision:i?a+(S(r)-S(o))+1:a)<0)return new A(0);if(y=y/7+2|0,u=0,1==C)for(d=0,N=N[0],y++;(u1&&(N=e(N,d),R=e(R,d),C=N.length,x=R.length),w=C,b=(h=R.slice(0,C)).length;b=1e7/2&&++O;do d=0,(l=t(N,h,C,b))<0?(v=h[0],C!=b&&(v=1e7*v+(h[1]||0)),(d=v/O|0)>1?(d>=1e7&&(d=1e7-1),f=(p=e(N,d)).length,b=h.length,1==(l=t(p,h,f,b))&&(d--,n(p,C16)throw Error(u+S(e));if(!e.s)return new f(a);for(null==t?(l=!1,s=m):s=t,i=new f(.03125);e.abs().gte(.1);)e=e.times(i),d+=5;for(s+=Math.log(p(2,d))/Math.LN10*2+5|0,n=r=o=new f(a),f.precision=s;;){if(r=T(r.times(e),s),n=n.times(++c),v((i=o.plus(y(r,n,s))).d).slice(0,s)===v(o.d).slice(0,s)){for(;d--;)o=T(o.times(o),s);return f.precision=m,null==t?(l=!0,T(o,m)):o}o=i}}function S(e){for(var t=7*e.e,n=e.d[0];n>=10;n/=10)t++;return t}function w(e,t,n){if(t>e.LN10.sd())throw l=!0,n&&(e.precision=n),Error(s+"LN10 precision limit exceeded");return T(new e(e.LN10),t)}function x(e){for(var t="";e--;)t+="0";return t}function O(e,t){var n,r,o,i,c,u,d,p,f,m=1,g=e,h=g.d,b=g.constructor,E=b.precision;if(g.s<1)throw Error(s+(g.s?"NaN":"-Infinity"));if(g.eq(a))return new b(0);if(null==t?(l=!1,p=E):p=t,g.eq(10))return null==t&&(l=!0),w(b,p);if(p+=10,b.precision=p,r=(n=v(h)).charAt(0),!(15e14>Math.abs(i=S(g))))return d=w(b,p+2,E).times(i+""),g=O(new b(r+"."+n.slice(1)),p-10).plus(d),b.precision=E,null==t?(l=!0,T(g,E)):g;for(;r<7&&1!=r||1==r&&n.charAt(1)>3;)r=(n=v((g=g.times(e)).d)).charAt(0),m++;for(i=S(g),r>1?(g=new b("0."+n),i++):g=new 
b(r+"."+n.slice(1)),u=c=g=y(g.minus(a),g.plus(a),p),f=T(g.times(g),p),o=3;;){if(c=T(c.times(f),p),v((d=u.plus(y(c,new b(o),p))).d).slice(0,p)===v(u.d).slice(0,p))return u=u.times(2),0!==i&&(u=u.plus(w(b,p+2,E).times(i+""))),u=y(u,new b(m),p),b.precision=E,null==t?(l=!0,T(u,E)):u;u=d,o+=2}}function C(e,t){var n,r,o;for((n=t.indexOf("."))>-1&&(t=t.replace(".","")),(r=t.search(/e/i))>0?(n<0&&(n=r),n+=+t.slice(r+1),t=t.substring(0,r)):n<0&&(n=t.length),r=0;48===t.charCodeAt(r);)++r;for(o=t.length;48===t.charCodeAt(o-1);)--o;if(t=t.slice(r,o)){if(o-=r,n=n-r-1,e.e=d(n/7),e.d=[],r=(n+1)%7,n<0&&(r+=7),rm||e.e<-m))throw Error(u+n)}else e.s=0,e.e=0,e.d=[0];return e}function T(e,t,n){var r,o,a,i,s,c,f,g,h=e.d;for(i=1,a=h[0];a>=10;a/=10)i++;if((r=t-i)<0)r+=7,o=t,f=h[g=0];else{if((g=Math.ceil((r+1)/7))>=(a=h.length))return e;for(i=1,f=a=h[g];a>=10;a/=10)i++;r%=7,o=r-7+i}if(void 0!==n&&(s=f/(a=p(10,i-o-1))%10|0,c=t<0||void 0!==h[g+1]||f%a,c=n<4?(s||c)&&(0==n||n==(e.s<0?3:2)):s>5||5==s&&(4==n||c||6==n&&(r>0?o>0?f/p(10,i-o):0:h[g-1])%10&1||n==(e.s<0?8:7))),t<1||!h[0])return c?(a=S(e),h.length=1,t=t-a-1,h[0]=p(10,(7-t%7)%7),e.e=d(-t/7)||0):(h.length=1,h[0]=e.e=e.s=0),e;if(0==r?(h.length=g,a=1,g--):(h.length=g+1,a=p(10,7-r),h[g]=o>0?(f/p(10,i-o)%p(10,o)|0)*a:0),c)for(;;){if(0==g){1e7==(h[0]+=a)&&(h[0]=1,++e.e);break}if(h[g]+=a,1e7!=h[g])break;h[g--]=0,a=1}for(r=h.length;0===h[--r];)h.pop();if(l&&(e.e>m||e.e<-m))throw Error(u+S(e));return e}function k(e,t){var n,r,o,a,i,s,c,u,d,p,f=e.constructor,m=f.precision;if(!e.s||!t.s)return t.s?t.s=-t.s:t=new 
f(e),l?T(t,m):t;if(c=e.d,p=t.d,r=t.e,u=e.e,c=c.slice(),i=u-r){for((d=i<0)?(n=c,i=-i,s=p.length):(n=p,r=u,s=c.length),i>(o=Math.max(Math.ceil(m/7),s)+2)&&(i=o,n.length=1),n.reverse(),o=i;o--;)n.push(0);n.reverse()}else{for((d=(o=c.length)<(s=p.length))&&(s=o),o=0;o0;--o)c[s++]=0;for(o=p.length;o>i;){if(c[--o]0?a=a.charAt(0)+"."+a.slice(1)+x(r):i>1&&(a=a.charAt(0)+"."+a.slice(1)),a=a+(o<0?"e":"e+")+o):o<0?(a="0."+x(-o-1)+a,n&&(r=n-i)>0&&(a+=x(r))):o>=i?(a+=x(o+1-i),n&&(r=n-o-1)>0&&(a=a+"."+x(r))):((r=o+1)0&&(o+1===i&&(a+="."),a+=x(r))),e.s<0?"-"+a:a}function I(e,t){if(e.length>t)return e.length=t,!0}function R(e){if(!e||"object"!=typeof e)throw Error(s+"Object expected");var t,n,r,o=["precision",1,1e9,"rounding",0,8,"toExpNeg",-1/0,0,"toExpPos",0,1/0];for(t=0;t=o[t+1]&&r<=o[t+2])this[n]=r;else throw Error(c+n+": "+r)}if(void 0!==(r=e[n="LN10"])){if(r==Math.LN10)this[n]=new this(r);else throw Error(c+n+": "+r)}return this}(i=function e(t){var n,r,o;function a(e){if(!(this instanceof a))return new a(e);if(this.constructor=a,e instanceof a){this.s=e.s,this.e=e.e,this.d=(e=e.d)?e.slice():e;return}if("number"==typeof e){if(0*e!=0)throw Error(c+e);if(e>0)this.s=1;else if(e<0)e=-e,this.s=-1;else{this.s=0,this.e=0,this.d=[0];return}if(e===~~e&&e<1e7){this.e=0,this.d=[e];return}return C(this,e.toString())}if("string"!=typeof e)throw Error(c+e);if(45===e.charCodeAt(0)?(e=e.slice(1),this.s=-1):this.s=1,f.test(e))C(this,e);else throw Error(c+e)}if(a.prototype=g,a.ROUND_UP=0,a.ROUND_DOWN=1,a.ROUND_CEIL=2,a.ROUND_FLOOR=3,a.ROUND_HALF_UP=4,a.ROUND_HALF_DOWN=5,a.ROUND_HALF_EVEN=6,a.ROUND_HALF_CEIL=7,a.ROUND_HALF_FLOOR=8,a.clone=e,a.config=a.set=R,void 0===t&&(t={}),t)for(n=0,o=["precision","rounding","toExpNeg","toExpPos","LN10"];n4&&m.slice(0,4)===i&&l.test(t)&&("-"===t.charAt(4)?g=i+(n=t.slice(5).replace(s,d)).charAt(0).toUpperCase()+n.slice(1):(f=(p=t).slice(4),t=s.test(f)?p:("-"!==(f=f.replace(c,u)).charAt(0)&&(f="-"+f),i+f)),h=o),new h(g,t))};var 
l=/^data[-\w.:]+$/i,s=/-[a-z]/g,c=/[A-Z]/g;function u(e){return"-"+e.toLowerCase()}function d(e){return e.charAt(1).toUpperCase()}},31872:function(e,t,n){var r=n(96130),o=n(64730),a=n(61861),i=n(46982),l=n(83671),s=n(53618);e.exports=r([a,o,i,l,s])},83671:function(e,t,n){var r=n(7667),o=n(13585),a=r.booleanish,i=r.number,l=r.spaceSeparated;e.exports=o({transform:function(e,t){return"role"===t?t:"aria-"+t.slice(4).toLowerCase()},properties:{ariaActiveDescendant:null,ariaAtomic:a,ariaAutoComplete:null,ariaBusy:a,ariaChecked:a,ariaColCount:i,ariaColIndex:i,ariaColSpan:i,ariaControls:l,ariaCurrent:null,ariaDescribedBy:l,ariaDetails:null,ariaDisabled:a,ariaDropEffect:l,ariaErrorMessage:null,ariaExpanded:a,ariaFlowTo:l,ariaGrabbed:a,ariaHasPopup:null,ariaHidden:a,ariaInvalid:null,ariaKeyShortcuts:null,ariaLabel:null,ariaLabelledBy:l,ariaLevel:i,ariaLive:null,ariaModal:a,ariaMultiLine:a,ariaMultiSelectable:a,ariaOrientation:null,ariaOwns:l,ariaPlaceholder:null,ariaPosInSet:i,ariaPressed:a,ariaReadOnly:a,ariaRelevant:null,ariaRequired:a,ariaRoleDescription:l,ariaRowCount:i,ariaRowIndex:i,ariaRowSpan:i,ariaSelected:a,ariaSetSize:i,ariaSort:null,ariaValueMax:i,ariaValueMin:i,ariaValueNow:i,ariaValueText:null,role:null}})},53618:function(e,t,n){var 
r=n(7667),o=n(13585),a=n(46640),i=r.boolean,l=r.overloadedBoolean,s=r.booleanish,c=r.number,u=r.spaceSeparated,d=r.commaSeparated;e.exports=o({space:"html",attributes:{acceptcharset:"accept-charset",classname:"class",htmlfor:"for",httpequiv:"http-equiv"},transform:a,mustUseProperty:["checked","multiple","muted","selected"],properties:{abbr:null,accept:d,acceptCharset:u,accessKey:u,action:null,allow:null,allowFullScreen:i,allowPaymentRequest:i,allowUserMedia:i,alt:null,as:null,async:i,autoCapitalize:null,autoComplete:u,autoFocus:i,autoPlay:i,capture:i,charSet:null,checked:i,cite:null,className:u,cols:c,colSpan:null,content:null,contentEditable:s,controls:i,controlsList:u,coords:c|d,crossOrigin:null,data:null,dateTime:null,decoding:null,default:i,defer:i,dir:null,dirName:null,disabled:i,download:l,draggable:s,encType:null,enterKeyHint:null,form:null,formAction:null,formEncType:null,formMethod:null,formNoValidate:i,formTarget:null,headers:u,height:c,hidden:i,high:c,href:null,hrefLang:null,htmlFor:u,httpEquiv:u,id:null,imageSizes:null,imageSrcSet:d,inputMode:null,integrity:null,is:null,isMap:i,itemId:null,itemProp:u,itemRef:u,itemScope:i,itemType:u,kind:null,label:null,lang:null,language:null,list:null,loading:null,loop:i,low:c,manifest:null,max:null,maxLength:c,media:null,method:null,min:null,minLength:c,multiple:i,muted:i,name:null,nonce:null,noModule:i,noValidate:i,onAbort:null,onAfterPrint:null,onAuxClick:null,onBeforePrint:null,onBeforeUnload:null,onBlur:null,onCancel:null,onCanPlay:null,onCanPlayThrough:null,onChange:null,onClick:null,onClose:null,onContextMenu:null,onCopy:null,onCueChange:null,onCut:null,onDblClick:null,onDrag:null,onDragEnd:null,onDragEnter:null,onDragExit:null,onDragLeave:null,onDragOver:null,onDragStart:null,onDrop:null,onDurationChange:null,onEmptied:null,onEnded:null,onError:null,onFocus:null,onFormData:null,onHashChange:null,onInput:null,onInvalid:null,onKeyDown:null,onKeyPress:null,onKeyUp:null,onLanguageChange:null,onLoad:null,onLoadedDat
a:null,onLoadedMetadata:null,onLoadEnd:null,onLoadStart:null,onMessage:null,onMessageError:null,onMouseDown:null,onMouseEnter:null,onMouseLeave:null,onMouseMove:null,onMouseOut:null,onMouseOver:null,onMouseUp:null,onOffline:null,onOnline:null,onPageHide:null,onPageShow:null,onPaste:null,onPause:null,onPlay:null,onPlaying:null,onPopState:null,onProgress:null,onRateChange:null,onRejectionHandled:null,onReset:null,onResize:null,onScroll:null,onSecurityPolicyViolation:null,onSeeked:null,onSeeking:null,onSelect:null,onSlotChange:null,onStalled:null,onStorage:null,onSubmit:null,onSuspend:null,onTimeUpdate:null,onToggle:null,onUnhandledRejection:null,onUnload:null,onVolumeChange:null,onWaiting:null,onWheel:null,open:i,optimum:c,pattern:null,ping:u,placeholder:null,playsInline:i,poster:null,preload:null,readOnly:i,referrerPolicy:null,rel:u,required:i,reversed:i,rows:c,rowSpan:c,sandbox:u,scope:null,scoped:i,seamless:i,selected:i,shape:null,size:c,sizes:null,slot:null,span:c,spellCheck:s,src:null,srcDoc:null,srcLang:null,srcSet:d,start:c,step:null,style:null,tabIndex:c,target:null,title:null,translate:null,type:null,typeMustMatch:i,useMap:null,value:s,width:c,wrap:null,align:null,aLink:null,archive:u,axis:null,background:null,bgColor:null,border:c,borderColor:null,bottomMargin:c,cellPadding:null,cellSpacing:null,char:null,charOff:null,classId:null,clear:null,code:null,codeBase:null,codeType:null,color:null,compact:i,declare:i,event:null,face:null,frame:null,frameBorder:null,hSpace:c,leftMargin:c,link:null,longDesc:null,lowSrc:null,marginHeight:c,marginWidth:c,noResize:i,noHref:i,noShade:i,noWrap:i,object:null,profile:null,prompt:null,rev:null,rightMargin:c,rules:null,scheme:null,scrolling:s,standby:null,summary:null,text:null,topMargin:c,valueType:null,version:null,vAlign:null,vLink:null,vSpace:c,allowTransparency:null,autoCorrect:null,autoSave:null,disablePictureInPicture:i,disableRemotePlayback:i,prefix:null,property:null,results:c,security:null,unselectable:null}})},46640
:function(e,t,n){var r=n(25852);e.exports=function(e,t){return r(e,t.toLowerCase())}},25852:function(e){e.exports=function(e,t){return t in e?e[t]:t}},13585:function(e,t,n){var r=n(39900),o=n(94949),a=n(7478);e.exports=function(e){var t,n,i=e.space,l=e.mustUseProperty||[],s=e.attributes||{},c=e.properties,u=e.transform,d={},p={};for(t in c)n=new a(t,u(s,t),c[t],i),-1!==l.indexOf(t)&&(n.mustUseProperty=!0),d[t]=n,p[r(t)]=t,p[r(n.attribute)]=t;return new o(d,p,i)}},7478:function(e,t,n){var r=n(74108),o=n(7667);e.exports=l,l.prototype=new r,l.prototype.defined=!0;var a=["boolean","booleanish","overloadedBoolean","number","commaSeparated","spaceSeparated","commaOrSpaceSeparated"],i=a.length;function l(e,t,n,l){var s,c,u,d=-1;for(l&&(this.space=l),r.call(this,e,t);++d-1}},64797:function(e){e.exports=function(e,t,n){for(var r=-1,o=null==e?0:e.length;++r0&&a(u)?n>1?e(u,n-1,a,i,l):r(l,u):i||(l[l.length]=u)}return l}},94410:function(e,t,n){var r=n(320)();e.exports=r},77458:function(e,t,n){var r=n(94410),o=n(39406);e.exports=function(e,t){return e&&r(e,t,o)}},38824:function(e,t,n){var r=n(53066),o=n(217);e.exports=function(e,t){t=r(t,e);for(var n=0,a=t.length;null!=e&&nt}},69959:function(e){e.exports=function(e,t){return null!=e&&t in Object(e)}},77095:function(e,t,n){var r=n(47495),o=n(77562),a=n(48150);e.exports=function(e,t,n){return t==t?a(e,t,n):r(e,o,n)}},63686:function(e,t,n){var r=n(7976),o=n(19340);e.exports=function(e){return o(e)&&"[object Arguments]"==r(e)}},29759:function(e,t,n){var r=n(28685),o=n(19340);e.exports=function e(t,n,a,i,l){return t===n||(null!=t&&null!=n&&(o(t)||o(n))?r(t,n,a,i,e,l):t!=t&&n!=n)}},28685:function(e,t,n){var r=n(4380),o=n(63859),a=n(41020),i=n(10701),l=n(96770),s=n(95059),c=n(64843),u=n(30484),d="[object Arguments]",p="[object Array]",f="[object Object]",m=Object.prototype.hasOwnProperty;e.exports=function(e,t,n,g,h,b){var v=s(e),y=s(t),E=v?p:l(e),S=y?p:l(t);E=E==d?f:E,S=S==d?f:S;var 
w=E==f,x=S==f,O=E==S;if(O&&c(e)){if(!c(t))return!1;v=!0,w=!1}if(O&&!w)return b||(b=new r),v||u(e)?o(e,t,n,g,h,b):a(e,t,E,n,g,h,b);if(!(1&n)){var C=w&&m.call(e,"__wrapped__"),T=x&&m.call(t,"__wrapped__");if(C||T){var k=C?e.value():e,A=T?t.value():t;return b||(b=new r),h(k,A,n,g,b)}}return!!O&&(b||(b=new r),i(e,t,n,g,h,b))}},59165:function(e,t,n){var r=n(4380),o=n(29759);e.exports=function(e,t,n,a){var i=n.length,l=i,s=!a;if(null==e)return!l;for(e=Object(e);i--;){var c=n[i];if(s&&c[2]?c[1]!==e[c[0]]:!(c[0]in e))return!1}for(;++io?0:o+t),(n=n>o?o:n)<0&&(n+=o),o=t>n?0:n-t>>>0,t>>>=0;for(var a=Array(o);++r=200){var g=t?null:l(e);if(g)return s(g);p=!1,u=i,m=new r}else m=t?[]:f;t:for(;++c=o?e:r(e,t,n)}},9058:function(e,t,n){var r=n(62704);e.exports=function(e,t){if(e!==t){var n=void 0!==e,o=null===e,a=e==e,i=r(e),l=void 0!==t,s=null===t,c=t==t,u=r(t);if(!s&&!u&&!i&&e>t||i&&l&&c&&!s&&!u||o&&l&&c||!n&&c||!a)return 1;if(!o&&!i&&!u&&e=s)return c;return c*("desc"==n[o]?-1:1)}}return e.index-t.index}},35852:function(e,t,n){var r=n(67741)["__core-js_shared__"];e.exports=r},91502:function(e,t,n){var r=n(10187);e.exports=function(e,t){return function(n,o){if(null==n)return n;if(!r(n))return e(n,o);for(var a=n.length,i=t?a:-1,l=Object(n);(t?i--:++i-1?l[s?t[c]:c]:void 0}}},16519:function(e,t,n){var r=n(67535),o=n(45021),a=n(55038);e.exports=function(e){return function(t,n,i){return i&&"number"!=typeof i&&o(t,n,i)&&(n=i=void 0),t=a(t),void 0===n?(n=t,t=0):n=a(n),i=void 0===i?tu))return!1;var p=s.get(e),f=s.get(t);if(p&&f)return p==t&&f==e;var m=-1,g=!0,h=2&n?new r:void 0;for(s.set(e,t),s.set(t,e);++m-1&&e%1==0&&e-1}},42572:function(e,t,n){var r=n(89329);e.exports=function(e,t){var n=this.__data__,o=r(n,e);return o<0?(++this.size,n.push([e,t])):n[o][1]=t,this}},26528:function(e,t,n){var r=n(68193),o=n(5835),a=n(58246);e.exports=function(){this.size=0,this.__data__={hash:new r,map:new(a||o),string:new r}}},90972:function(e,t,n){var r=n(72080);e.exports=function(e){var 
t=r(this,e).delete(e);return this.size-=t?1:0,t}},55981:function(e,t,n){var r=n(72080);e.exports=function(e){return r(this,e).get(e)}},76656:function(e,t,n){var r=n(72080);e.exports=function(e){return r(this,e).has(e)}},45541:function(e,t,n){var r=n(72080);e.exports=function(e,t){var n=r(this,e),o=n.size;return n.set(e,t),this.size+=n.size==o?0:1,this}},38737:function(e){e.exports=function(e){var t=-1,n=Array(e.size);return e.forEach(function(e,r){n[++t]=[r,e]}),n}},69794:function(e){e.exports=function(e,t){return function(n){return null!=n&&n[e]===t&&(void 0!==t||e in Object(n))}}},53092:function(e,t,n){var r=n(49512);e.exports=function(e){var t=r(e,function(e){return 500===n.size&&n.clear(),e}),n=t.cache;return t}},83463:function(e,t,n){var r=n(93245)(Object,"create");e.exports=r},51678:function(e,t,n){var r=n(93332)(Object.keys,Object);e.exports=r},16474:function(e,t,n){e=n.nmd(e);var r=n(58584),o=t&&!t.nodeType&&t,a=o&&e&&!e.nodeType&&e,i=a&&a.exports===o&&r.process,l=function(){try{var e=a&&a.require&&a.require("util").types;if(e)return e;return i&&i.binding&&i.binding("util")}catch(e){}}();e.exports=l},8611:function(e){var t=Object.prototype.toString;e.exports=function(e){return t.call(e)}},93332:function(e){e.exports=function(e,t){return function(n){return e(t(n))}}},7157:function(e,t,n){var r=n(24821),o=Math.max;e.exports=function(e,t,n){return t=o(void 0===t?e.length-1:t,0),function(){for(var a=arguments,i=-1,l=o(a.length-t,0),s=Array(l);++i0){if(++n>=800)return arguments[0]}else n=0;return e.apply(void 0,arguments)}}},4800:function(e,t,n){var r=n(5835);e.exports=function(){this.__data__=new r,this.size=0}},73987:function(e){e.exports=function(e){var t=this.__data__,n=t.delete(e);return this.size=t.size,n}},59728:function(e){e.exports=function(e){return this.__data__.get(e)}},4146:function(e){e.exports=function(e){return this.__data__.has(e)}},81333:function(e,t,n){var r=n(5835),o=n(58246),a=n(93785);e.exports=function(e,t){var n=this.__data__;if(n 
instanceof r){var i=n.__data__;if(!o||i.length<199)return i.push([e,t]),this.size=++n.size,this;n=this.__data__=new a(i)}return n.set(e,t),this.size=n.size,this}},48150:function(e){e.exports=function(e,t,n){for(var r=n-1,o=e.length;++r=t||n<0||h&&r>=u}function E(){var e,n,r,a=o();if(y(a))return S(a);p=setTimeout(E,(e=a-f,n=a-m,r=t-e,h?l(r,u-n):r))}function S(e){return(p=void 0,b&&s)?v(e):(s=c=void 0,d)}function w(){var e,n=o(),r=y(n);if(s=arguments,c=this,f=n,r){if(void 0===p)return m=e=f,p=setTimeout(E,t),g?v(e):d;if(h)return clearTimeout(p),p=setTimeout(E,t),v(f)}return void 0===p&&(p=setTimeout(E,t)),d}return t=a(t)||0,r(n)&&(g=!!n.leading,u=(h="maxWait"in n)?i(a(n.maxWait)||0,t):u,b="trailing"in n?!!n.trailing:b),w.cancel=function(){void 0!==p&&clearTimeout(p),m=0,s=f=c=p=void 0},w.flush=function(){return void 0===p?d:S(o())},w}},61595:function(e){e.exports=function(e,t){return e===t||e!=e&&t!=t}},72986:function(e,t,n){var r=n(31917),o=n(31062),a=n(49452),i=n(95059),l=n(45021);e.exports=function(e,t,n){var s=i(e)?r:o;return n&&l(e,t,n)&&(t=void 0),s(e,a(t,3))}},209:function(e,t,n){var r=n(70493)(n(87539));e.exports=r},87539:function(e,t,n){var r=n(47495),o=n(49452),a=n(26018),i=Math.max;e.exports=function(e,t,n){var l=null==e?0:e.length;if(!l)return -1;var s=null==n?0:a(n);return s<0&&(s=i(l+s,0)),r(e,o(t,3),s)}},20734:function(e,t,n){var r=n(9677),o=n(30677);e.exports=function(e,t){return r(o(e,t),1)}},44750:function(e,t,n){var r=n(38824);e.exports=function(e,t,n){var o=null==e?void 0:r(e,t);return void 0===o?n:o}},19955:function(e,t,n){var r=n(69959),o=n(24986);e.exports=function(e,t){return null!=e&&o(e,t,r)}},39100:function(e){e.exports=function(e){return e}},99782:function(e,t,n){var r=n(63686),o=n(19340),a=Object.prototype,i=a.hasOwnProperty,l=a.propertyIsEnumerable,s=r(function(){return arguments}())?r:function(e){return o(e)&&i.call(e,"callee")&&!l.call(e,"callee")};e.exports=s},95059:function(e){var 
t=Array.isArray;e.exports=t},10187:function(e,t,n){var r=n(80509),o=n(54512);e.exports=function(e){return null!=e&&o(e.length)&&!r(e)}},90849:function(e,t,n){var r=n(7976),o=n(19340);e.exports=function(e){return!0===e||!1===e||o(e)&&"[object Boolean]"==r(e)}},64843:function(e,t,n){e=n.nmd(e);var r=n(67741),o=n(33879),a=t&&!t.nodeType&&t,i=a&&e&&!e.nodeType&&e,l=i&&i.exports===a?r.Buffer:void 0,s=l?l.isBuffer:void 0;e.exports=s||o},93574:function(e,t,n){var r=n(29759);e.exports=function(e,t){return r(e,t)}},80509:function(e,t,n){var r=n(7976),o=n(70816);e.exports=function(e){if(!o(e))return!1;var t=r(e);return"[object Function]"==t||"[object GeneratorFunction]"==t||"[object AsyncFunction]"==t||"[object Proxy]"==t}},54512:function(e){e.exports=function(e){return"number"==typeof e&&e>-1&&e%1==0&&e<=9007199254740991}},96240:function(e,t,n){var r=n(39018);e.exports=function(e){return r(e)&&e!=+e}},71292:function(e){e.exports=function(e){return null==e}},39018:function(e,t,n){var r=n(7976),o=n(19340);e.exports=function(e){return"number"==typeof e||o(e)&&"[object Number]"==r(e)}},70816:function(e){e.exports=function(e){var t=typeof e;return null!=e&&("object"==t||"function"==t)}},19340:function(e){e.exports=function(e){return null!=e&&"object"==typeof e}},23393:function(e,t,n){var r=n(7976),o=n(28766),a=n(19340),i=Object.prototype,l=Function.prototype.toString,s=i.hasOwnProperty,c=l.call(Object);e.exports=function(e){if(!a(e)||"[object Object]"!=r(e))return!1;var t=o(e);if(null===t)return!0;var n=s.call(t,"constructor")&&t.constructor;return"function"==typeof n&&n instanceof n&&l.call(n)==c}},96907:function(e,t,n){var r=n(7976),o=n(95059),a=n(19340);e.exports=function(e){return"string"==typeof e||!o(e)&&a(e)&&"[object String]"==r(e)}},62704:function(e,t,n){var r=n(7976),o=n(19340);e.exports=function(e){return"symbol"==typeof e||o(e)&&"[object Symbol]"==r(e)}},30484:function(e,t,n){var 
r=n(80043),o=n(43863),a=n(16474),i=a&&a.isTypedArray,l=i?o(i):r;e.exports=l},39406:function(e,t,n){var r=n(26546),o=n(92916),a=n(10187);e.exports=function(e){return a(e)?r(e):o(e)}},36887:function(e){e.exports=function(e){var t=null==e?0:e.length;return t?e[t-1]:void 0}},30677:function(e,t,n){var r=n(83690),o=n(49452),a=n(28245),i=n(95059);e.exports=function(e,t){return(i(e)?r:a)(e,o(t,3))}},50924:function(e,t,n){var r=n(30804),o=n(77458),a=n(49452);e.exports=function(e,t){var n={};return t=a(t,3),o(e,function(e,o,a){r(n,o,t(e,o,a))}),n}},5037:function(e,t,n){var r=n(41764),o=n(92262),a=n(39100);e.exports=function(e){return e&&e.length?r(e,a,o):void 0}},49512:function(e,t,n){var r=n(93785);function o(e,t){if("function"!=typeof e||null!=t&&"function"!=typeof t)throw TypeError("Expected a function");var n=function(){var r=arguments,o=t?t.apply(this,r):r[0],a=n.cache;if(a.has(o))return a.get(o);var i=e.apply(this,r);return n.cache=a.set(o,i)||a,i};return n.cache=new(o.Cache||r),n}o.Cache=r,e.exports=o},30264:function(e,t,n){var r=n(41764),o=n(87004),a=n(39100);e.exports=function(e){return e&&e.length?r(e,a,o):void 0}},67222:function(e){e.exports=function(){}},80128:function(e,t,n){var r=n(67741);e.exports=function(){return r.Date.now()}},62973:function(e,t,n){var r=n(60411),o=n(34831),a=n(55632),i=n(217);e.exports=function(e){return a(e)?r(i(e)):o(e)}},1646:function(e,t,n){var r=n(16519)();e.exports=r},13435:function(e,t,n){var r=n(30927),o=n(49452),a=n(61700),i=n(95059),l=n(45021);e.exports=function(e,t,n){var s=i(e)?r:a;return n&&l(e,t,n)&&(t=void 0),s(e,o(t,3))}},97572:function(e,t,n){var r=n(9677),o=n(56871),a=n(70712),i=n(45021),l=a(function(e,t){if(null==e)return[];var n=t.length;return n>1&&i(e,t[0],t[1])?t=[]:n>2&&i(t[0],t[1],t[2])&&(t=[t[0]]),o(e,r(t,1),[])});e.exports=l},30786:function(e){e.exports=function(){return[]}},33879:function(e){e.exports=function(){return!1}},68417:function(e,t,n){var r=n(54525),o=n(70816);e.exports=function(e,t,n){var 
a=!0,i=!0;if("function"!=typeof e)throw TypeError("Expected a function");return o(n)&&(a="leading"in n?!!n.leading:a,i="trailing"in n?!!n.trailing:i),r(e,t,{leading:a,maxWait:t,trailing:i})}},55038:function(e,t,n){var r=n(89753),o=1/0;e.exports=function(e){return e?(e=r(e))===o||e===-o?(e<0?-1:1)*17976931348623157e292:e==e?e:0:0===e?e:0}},26018:function(e,t,n){var r=n(55038);e.exports=function(e){var t=r(e),n=t%1;return t==t?n?t-n:t:0}},89753:function(e,t,n){var r=n(33223),o=n(70816),a=n(62704),i=0/0,l=/^[-+]0x[0-9a-f]+$/i,s=/^0b[01]+$/i,c=/^0o[0-7]+$/i,u=parseInt;e.exports=function(e){if("number"==typeof e)return e;if(a(e))return i;if(o(e)){var t="function"==typeof e.valueOf?e.valueOf():e;e=o(t)?t+"":t}if("string"!=typeof e)return 0===e?e:+e;e=r(e);var n=s.test(e);return n||c.test(e)?u(e.slice(2),n?2:8):l.test(e)?i:+e}},25635:function(e,t,n){var r=n(2218);e.exports=function(e){return null==e?"":r(e)}},98116:function(e,t,n){var r=n(49452),o=n(15375);e.exports=function(e,t){return e&&e.length?o(e,r(t,2)):[]}},9332:function(e,t,n){var r=n(6551)("toUpperCase");e.exports=r},8792:function(e,t,n){n.d(t,{default:function(){return o.a}});var r=n(25250),o=n.n(r)},47907:function(e,t,n){var r=n(15313);n.o(r,"useRouter")&&n.d(t,{useRouter:function(){return r.useRouter}}),n.o(r,"useSearchParams")&&n.d(t,{useSearchParams:function(){return r.useSearchParams}})},49079:function(e,t,n){var r,o;e.exports=(null==(r=n.g.process)?void 0:r.env)&&"object"==typeof(null==(o=n.g.process)?void 0:o.env)?n.g.process:n(13127)},12956:function(e,t,n){Object.defineProperty(t,"__esModule",{value:!0}),Object.defineProperty(t,"addLocale",{enumerable:!0,get:function(){return r}}),n(82139);let r=function(e){for(var t=arguments.length,n=Array(t>1?t-1:0),r=1;r{})}}function y(e){return"string"==typeof e?e:(0,s.formatUrl)(e)}let E=a.default.forwardRef(function(e,t){let 
n,r;let{href:s,as:b,children:E,prefetch:S=null,passHref:w,replace:x,shallow:O,scroll:C,locale:T,onClick:k,onMouseEnter:A,onTouchStart:I,legacyBehavior:R=!1,...N}=e;n=E,R&&("string"==typeof n||"number"==typeof n)&&(n=(0,o.jsx)("a",{children:n}));let _=a.default.useContext(d.RouterContext),P=a.default.useContext(p.AppRouterContext),M=null!=_?_:P,L=!_,D=!1!==S,j=null===S?h.PrefetchKind.AUTO:h.PrefetchKind.FULL,{href:F,as:B}=a.default.useMemo(()=>{if(!_){let e=y(s);return{href:e,as:b?y(b):e}}let[e,t]=(0,i.resolveHref)(_,s,!0);return{href:e,as:b?(0,i.resolveHref)(_,b):t||e}},[_,s,b]),U=a.default.useRef(F),Z=a.default.useRef(B);R&&(r=a.default.Children.only(n));let z=R?r&&"object"==typeof r&&r.ref:t,[H,G,$]=(0,f.useIntersection)({rootMargin:"200px"}),W=a.default.useCallback(e=>{(Z.current!==B||U.current!==F)&&($(),Z.current=B,U.current=F),H(e),z&&("function"==typeof z?z(e):"object"==typeof z&&(z.current=e))},[B,z,F,$,H]);a.default.useEffect(()=>{M&&G&&D&&v(M,F,B,{locale:T},{kind:j},L)},[B,F,G,T,D,null==_?void 0:_.locale,M,L,j]);let V={ref:W,onClick(e){R||"function"!=typeof k||k(e),R&&r.props&&"function"==typeof r.props.onClick&&r.props.onClick(e),M&&!e.defaultPrevented&&function(e,t,n,r,o,i,s,c,u){let{nodeName:d}=e.currentTarget;if("A"===d.toUpperCase()&&(function(e){let t=e.currentTarget.getAttribute("target");return t&&"_self"!==t||e.metaKey||e.ctrlKey||e.shiftKey||e.altKey||e.nativeEvent&&2===e.nativeEvent.which}(e)||!u&&!(0,l.isLocalURL)(n)))return;e.preventDefault();let p=()=>{let e=null==s||s;"beforePopState"in t?t[o?"replace":"push"](n,r,{shallow:i,locale:c,scroll:e}):t[o?"replace":"push"](r||n,{scroll:e})};u?a.default.startTransition(p):p()}(e,M,F,B,x,O,C,T,L)},onMouseEnter(e){R||"function"!=typeof A||A(e),R&&r.props&&"function"==typeof r.props.onMouseEnter&&r.props.onMouseEnter(e),M&&(D||!L)&&v(M,F,B,{locale:T,priority:!0,bypassPrefetchedCheck:!0},{kind:j},L)},onTouchStart(e){R||"function"!=typeof I||I(e),R&&r.props&&"function"==typeof 
r.props.onTouchStart&&r.props.onTouchStart(e),M&&(D||!L)&&v(M,F,B,{locale:T,priority:!0,bypassPrefetchedCheck:!0},{kind:j},L)}};if((0,c.isAbsoluteUrl)(B))V.href=B;else if(!R||w||"a"===r.type&&!("href"in r.props)){let e=void 0!==T?T:null==_?void 0:_.locale,t=(null==_?void 0:_.isLocaleDomain)&&(0,m.getDomainLocale)(B,e,null==_?void 0:_.locales,null==_?void 0:_.domainLocales);V.href=t||(0,g.addBasePath)((0,u.addLocale)(B,e,null==_?void 0:_.defaultLocale))}return R?a.default.cloneElement(r,V):(0,o.jsx)("a",{...N,...V,children:n})});("function"==typeof t.default||"object"==typeof t.default&&null!==t.default)&&void 0===t.default.__esModule&&(Object.defineProperty(t.default,"__esModule",{value:!0}),Object.assign(t.default,t),e.exports=t.default)},52185:function(e,t){Object.defineProperty(t,"__esModule",{value:!0}),function(e,t){for(var n in t)Object.defineProperty(e,n,{enumerable:!0,get:t[n]})}(t,{requestIdleCallback:function(){return n},cancelIdleCallback:function(){return r}});let n="undefined"!=typeof self&&self.requestIdleCallback&&self.requestIdleCallback.bind(window)||function(e){let t=Date.now();return self.setTimeout(function(){e({didTimeout:!1,timeRemaining:function(){return Math.max(0,50-(Date.now()-t))}})},1)},r="undefined"!=typeof self&&self.cancelIdleCallback&&self.cancelIdleCallback.bind(window)||function(e){return clearTimeout(e)};("function"==typeof t.default||"object"==typeof t.default&&null!==t.default)&&void 0===t.default.__esModule&&(Object.defineProperty(t.default,"__esModule",{value:!0}),Object.assign(t.default,t),e.exports=t.default)},14542:function(e,t,n){Object.defineProperty(t,"__esModule",{value:!0}),Object.defineProperty(t,"resolveHref",{enumerable:!0,get:function(){return d}});let r=n(95770),o=n(11030),a=n(24544),i=n(36874),l=n(82139),s=n(17434),c=n(22360),u=n(96735);function d(e,t,n){let d;let p="string"==typeof 
t?t:(0,o.formatWithValidation)(t),f=p.match(/^[a-zA-Z]{1,}:\/\//),m=f?p.slice(f[0].length):p;if((m.split("?",1)[0]||"").match(/(\/\/|\\)/)){console.error("Invalid href '"+p+"' passed to next/router in page: '"+e.pathname+"'. Repeated forward-slashes (//) or backslashes \\ are not valid in the href.");let t=(0,i.normalizeRepeatedSlashes)(m);p=(f?f[0]:"")+t}if(!(0,s.isLocalURL)(p))return n?[p]:p;try{d=new URL(p.startsWith("#")?e.asPath:e.pathname,"http://n")}catch(e){d=new URL("/","http://n")}try{let e=new URL(p,d);e.pathname=(0,l.normalizePathTrailingSlash)(e.pathname);let t="";if((0,c.isDynamicRoute)(e.pathname)&&e.searchParams&&n){let n=(0,r.searchParamsToUrlQuery)(e.searchParams),{result:i,params:l}=(0,u.interpolateAs)(e.pathname,e.pathname,n);i&&(t=(0,o.formatWithValidation)({pathname:i,hash:e.hash,query:(0,a.omit)(n,l)}))}let i=e.origin===d.origin?e.href.slice(e.origin.length):e.href;return n?[i,t||i]:i}catch(e){return n?[p]:p}}("function"==typeof t.default||"object"==typeof t.default&&null!==t.default)&&void 0===t.default.__esModule&&(Object.defineProperty(t.default,"__esModule",{value:!0}),Object.assign(t.default,t),e.exports=t.default)},45291:function(e,t,n){Object.defineProperty(t,"__esModule",{value:!0}),Object.defineProperty(t,"useIntersection",{enumerable:!0,get:function(){return s}});let r=n(64090),o=n(52185),a="function"==typeof IntersectionObserver,i=new Map,l=[];function s(e){let{rootRef:t,rootMargin:n,disabled:s}=e,c=s||!a,[u,d]=(0,r.useState)(!1),p=(0,r.useRef)(null),f=(0,r.useCallback)(e=>{p.current=e},[]);return(0,r.useEffect)(()=>{if(a){if(c||u)return;let e=p.current;if(e&&e.tagName)return function(e,t,n){let{id:r,observer:o,elements:a}=function(e){let t;let n={root:e.root||null,margin:e.rootMargin||""},r=l.find(e=>e.root===n.root&&e.margin===n.margin);if(r&&(t=i.get(r)))return t;let o=new Map;return t={id:n,observer:new IntersectionObserver(e=>{e.forEach(e=>{let 
t=o.get(e.target),n=e.isIntersecting||e.intersectionRatio>0;t&&n&&t(n)})},e),elements:o},l.push(n),i.set(n,t),t}(n);return a.set(e,t),o.observe(e),function(){if(a.delete(e),o.unobserve(e),0===a.size){o.disconnect(),i.delete(r);let e=l.findIndex(e=>e.root===r.root&&e.margin===r.margin);e>-1&&l.splice(e,1)}}}(e,e=>e&&d(e),{root:null==t?void 0:t.current,rootMargin:n})}else if(!u){let e=(0,o.requestIdleCallback)(()=>d(!0));return()=>(0,o.cancelIdleCallback)(e)}},[c,n,t,u,p.current]),[f,u,(0,r.useCallback)(()=>{d(!1)},[])]}("function"==typeof t.default||"object"==typeof t.default&&null!==t.default)&&void 0===t.default.__esModule&&(Object.defineProperty(t.default,"__esModule",{value:!0}),Object.assign(t.default,t),e.exports=t.default)},8620:function(e){!function(){var t={675:function(e,t){t.byteLength=function(e){var t=s(e),n=t[0],r=t[1];return(n+r)*3/4-r},t.toByteArray=function(e){var t,n,a=s(e),i=a[0],l=a[1],c=new o((i+l)*3/4-l),u=0,d=l>0?i-4:i;for(n=0;n>16&255,c[u++]=t>>8&255,c[u++]=255&t;return 2===l&&(t=r[e.charCodeAt(n)]<<2|r[e.charCodeAt(n+1)]>>4,c[u++]=255&t),1===l&&(t=r[e.charCodeAt(n)]<<10|r[e.charCodeAt(n+1)]<<4|r[e.charCodeAt(n+2)]>>2,c[u++]=t>>8&255,c[u++]=255&t),c},t.fromByteArray=function(e){for(var t,r=e.length,o=r%3,a=[],i=0,l=r-o;i>18&63]+n[o>>12&63]+n[o>>6&63]+n[63&o]);return a.join("")}(e,i,i+16383>l?l:i+16383));return 1===o?a.push(n[(t=e[r-1])>>2]+n[t<<4&63]+"=="):2===o&&a.push(n[(t=(e[r-2]<<8)+e[r-1])>>10]+n[t>>4&63]+n[t<<2&63]+"="),a.join("")};for(var n=[],r=[],o="undefined"!=typeof Uint8Array?Uint8Array:Array,a="ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789+/",i=0,l=a.length;i0)throw Error("Invalid string. Length must be a multiple of 4");var n=e.indexOf("=");-1===n&&(n=t);var r=n===t?0:4-n%4;return[n,r]}r["-".charCodeAt(0)]=62,r["_".charCodeAt(0)]=63},72:function(e,t,n){/*! - * The buffer module from node.js, for the browser. 
- * - * @author Feross Aboukhadijeh - * @license MIT - */var r=n(675),o=n(783),a="function"==typeof Symbol&&"function"==typeof Symbol.for?Symbol.for("nodejs.util.inspect.custom"):null;function i(e){if(e>2147483647)throw RangeError('The value "'+e+'" is invalid for option "size"');var t=new Uint8Array(e);return Object.setPrototypeOf(t,l.prototype),t}function l(e,t,n){if("number"==typeof e){if("string"==typeof t)throw TypeError('The "string" argument must be of type string. Received type number');return u(e)}return s(e,t,n)}function s(e,t,n){if("string"==typeof e)return function(e,t){if(("string"!=typeof t||""===t)&&(t="utf8"),!l.isEncoding(t))throw TypeError("Unknown encoding: "+t);var n=0|f(e,t),r=i(n),o=r.write(e,t);return o!==n&&(r=r.slice(0,o)),r}(e,t);if(ArrayBuffer.isView(e))return d(e);if(null==e)throw TypeError("The first argument must be one of type string, Buffer, ArrayBuffer, Array, or Array-like Object. Received type "+typeof e);if(I(e,ArrayBuffer)||e&&I(e.buffer,ArrayBuffer)||"undefined"!=typeof SharedArrayBuffer&&(I(e,SharedArrayBuffer)||e&&I(e.buffer,SharedArrayBuffer)))return function(e,t,n){var r;if(t<0||e.byteLength=2147483647)throw RangeError("Attempt to allocate Buffer larger than maximum size: 0x7fffffff bytes");return 0|e}function f(e,t){if(l.isBuffer(e))return e.length;if(ArrayBuffer.isView(e)||I(e,ArrayBuffer))return e.byteLength;if("string"!=typeof e)throw TypeError('The "string" argument must be one of type string, Buffer, or ArrayBuffer. 
Received type '+typeof e);var n=e.length,r=arguments.length>2&&!0===arguments[2];if(!r&&0===n)return 0;for(var o=!1;;)switch(t){case"ascii":case"latin1":case"binary":return n;case"utf8":case"utf-8":return C(e).length;case"ucs2":case"ucs-2":case"utf16le":case"utf-16le":return 2*n;case"hex":return n>>>1;case"base64":return k(e).length;default:if(o)return r?-1:C(e).length;t=(""+t).toLowerCase(),o=!0}}function m(e,t,n){var o,a,i=!1;if((void 0===t||t<0)&&(t=0),t>this.length||((void 0===n||n>this.length)&&(n=this.length),n<=0||(n>>>=0)<=(t>>>=0)))return"";for(e||(e="utf8");;)switch(e){case"hex":return function(e,t,n){var r=e.length;(!t||t<0)&&(t=0),(!n||n<0||n>r)&&(n=r);for(var o="",a=t;a2147483647?n=2147483647:n<-2147483648&&(n=-2147483648),(a=n=+n)!=a&&(n=o?0:e.length-1),n<0&&(n=e.length+n),n>=e.length){if(o)return -1;n=e.length-1}else if(n<0){if(!o)return -1;n=0}if("string"==typeof t&&(t=l.from(t,r)),l.isBuffer(t))return 0===t.length?-1:b(e,t,n,r,o);if("number"==typeof t)return(t&=255,"function"==typeof Uint8Array.prototype.indexOf)?o?Uint8Array.prototype.indexOf.call(e,t,n):Uint8Array.prototype.lastIndexOf.call(e,t,n):b(e,[t],n,r,o);throw TypeError("val must be string, number or Buffer")}function b(e,t,n,r,o){var a,i=1,l=e.length,s=t.length;if(void 0!==r&&("ucs2"===(r=String(r).toLowerCase())||"ucs-2"===r||"utf16le"===r||"utf-16le"===r)){if(e.length<2||t.length<2)return -1;i=2,l/=2,s/=2,n/=2}function c(e,t){return 1===i?e[t]:e.readUInt16BE(t*i)}if(o){var u=-1;for(a=n;al&&(n=l-s),a=n;a>=0;a--){for(var d=!0,p=0;p239?4:c>223?3:c>191?2:1;if(o+d<=n)switch(d){case 1:c<128&&(u=c);break;case 2:(192&(a=e[o+1]))==128&&(s=(31&c)<<6|63&a)>127&&(u=s);break;case 3:a=e[o+1],i=e[o+2],(192&a)==128&&(192&i)==128&&(s=(15&c)<<12|(63&a)<<6|63&i)>2047&&(s<55296||s>57343)&&(u=s);break;case 
4:a=e[o+1],i=e[o+2],l=e[o+3],(192&a)==128&&(192&i)==128&&(192&l)==128&&(s=(15&c)<<18|(63&a)<<12|(63&i)<<6|63&l)>65535&&s<1114112&&(u=s)}null===u?(u=65533,d=1):u>65535&&(u-=65536,r.push(u>>>10&1023|55296),u=56320|1023&u),r.push(u),o+=d}return function(e){var t=e.length;if(t<=4096)return String.fromCharCode.apply(String,e);for(var n="",r=0;rn)throw RangeError("Trying to access beyond buffer length")}function E(e,t,n,r,o,a){if(!l.isBuffer(e))throw TypeError('"buffer" argument must be a Buffer instance');if(t>o||te.length)throw RangeError("Index out of range")}function S(e,t,n,r,o,a){if(n+r>e.length||n<0)throw RangeError("Index out of range")}function w(e,t,n,r,a){return t=+t,n>>>=0,a||S(e,t,n,4,34028234663852886e22,-34028234663852886e22),o.write(e,t,n,r,23,4),n+4}function x(e,t,n,r,a){return t=+t,n>>>=0,a||S(e,t,n,8,17976931348623157e292,-17976931348623157e292),o.write(e,t,n,r,52,8),n+8}t.Buffer=l,t.SlowBuffer=function(e){return+e!=e&&(e=0),l.alloc(+e)},t.INSPECT_MAX_BYTES=50,t.kMaxLength=2147483647,l.TYPED_ARRAY_SUPPORT=function(){try{var e=new Uint8Array(1),t={foo:function(){return 42}};return Object.setPrototypeOf(t,Uint8Array.prototype),Object.setPrototypeOf(e,t),42===e.foo()}catch(e){return!1}}(),l.TYPED_ARRAY_SUPPORT||"undefined"==typeof console||"function"!=typeof console.error||console.error("This browser lacks typed array (Uint8Array) support which is required by `buffer` v5.x. 
Use `buffer` v4.x if you require old browser support."),Object.defineProperty(l.prototype,"parent",{enumerable:!0,get:function(){if(l.isBuffer(this))return this.buffer}}),Object.defineProperty(l.prototype,"offset",{enumerable:!0,get:function(){if(l.isBuffer(this))return this.byteOffset}}),l.poolSize=8192,l.from=function(e,t,n){return s(e,t,n)},Object.setPrototypeOf(l.prototype,Uint8Array.prototype),Object.setPrototypeOf(l,Uint8Array),l.alloc=function(e,t,n){return(c(e),e<=0)?i(e):void 0!==t?"string"==typeof n?i(e).fill(t,n):i(e).fill(t):i(e)},l.allocUnsafe=function(e){return u(e)},l.allocUnsafeSlow=function(e){return u(e)},l.isBuffer=function(e){return null!=e&&!0===e._isBuffer&&e!==l.prototype},l.compare=function(e,t){if(I(e,Uint8Array)&&(e=l.from(e,e.offset,e.byteLength)),I(t,Uint8Array)&&(t=l.from(t,t.offset,t.byteLength)),!l.isBuffer(e)||!l.isBuffer(t))throw TypeError('The "buf1", "buf2" arguments must be one of type Buffer or Uint8Array');if(e===t)return 0;for(var n=e.length,r=t.length,o=0,a=Math.min(n,r);on&&(e+=" ... "),""},a&&(l.prototype[a]=l.prototype.inspect),l.prototype.compare=function(e,t,n,r,o){if(I(e,Uint8Array)&&(e=l.from(e,e.offset,e.byteLength)),!l.isBuffer(e))throw TypeError('The "target" argument must be one of type Buffer or Uint8Array. 
Received type '+typeof e);if(void 0===t&&(t=0),void 0===n&&(n=e?e.length:0),void 0===r&&(r=0),void 0===o&&(o=this.length),t<0||n>e.length||r<0||o>this.length)throw RangeError("out of range index");if(r>=o&&t>=n)return 0;if(r>=o)return -1;if(t>=n)return 1;if(t>>>=0,n>>>=0,r>>>=0,o>>>=0,this===e)return 0;for(var a=o-r,i=n-t,s=Math.min(a,i),c=this.slice(r,o),u=e.slice(t,n),d=0;d>>=0,isFinite(n)?(n>>>=0,void 0===r&&(r="utf8")):(r=n,n=void 0);else throw Error("Buffer.write(string, encoding, offset[, length]) is no longer supported");var o,a,i,l,s,c,u,d,p,f,m,g,h=this.length-t;if((void 0===n||n>h)&&(n=h),e.length>0&&(n<0||t<0)||t>this.length)throw RangeError("Attempt to write outside buffer bounds");r||(r="utf8");for(var b=!1;;)switch(r){case"hex":return function(e,t,n,r){n=Number(n)||0;var o=e.length-n;r?(r=Number(r))>o&&(r=o):r=o;var a=t.length;r>a/2&&(r=a/2);for(var i=0;i>8,o.push(n%256),o.push(r);return o}(e,this.length-m),this,m,g);default:if(b)throw TypeError("Unknown encoding: "+r);r=(""+r).toLowerCase(),b=!0}},l.prototype.toJSON=function(){return{type:"Buffer",data:Array.prototype.slice.call(this._arr||this,0)}},l.prototype.slice=function(e,t){var n=this.length;e=~~e,t=void 0===t?n:~~t,e<0?(e+=n)<0&&(e=0):e>n&&(e=n),t<0?(t+=n)<0&&(t=0):t>n&&(t=n),t>>=0,t>>>=0,n||y(e,t,this.length);for(var r=this[e],o=1,a=0;++a>>=0,t>>>=0,n||y(e,t,this.length);for(var r=this[e+--t],o=1;t>0&&(o*=256);)r+=this[e+--t]*o;return r},l.prototype.readUInt8=function(e,t){return e>>>=0,t||y(e,1,this.length),this[e]},l.prototype.readUInt16LE=function(e,t){return e>>>=0,t||y(e,2,this.length),this[e]|this[e+1]<<8},l.prototype.readUInt16BE=function(e,t){return e>>>=0,t||y(e,2,this.length),this[e]<<8|this[e+1]},l.prototype.readUInt32LE=function(e,t){return e>>>=0,t||y(e,4,this.length),(this[e]|this[e+1]<<8|this[e+2]<<16)+16777216*this[e+3]},l.prototype.readUInt32BE=function(e,t){return 
e>>>=0,t||y(e,4,this.length),16777216*this[e]+(this[e+1]<<16|this[e+2]<<8|this[e+3])},l.prototype.readIntLE=function(e,t,n){e>>>=0,t>>>=0,n||y(e,t,this.length);for(var r=this[e],o=1,a=0;++a=(o*=128)&&(r-=Math.pow(2,8*t)),r},l.prototype.readIntBE=function(e,t,n){e>>>=0,t>>>=0,n||y(e,t,this.length);for(var r=t,o=1,a=this[e+--r];r>0&&(o*=256);)a+=this[e+--r]*o;return a>=(o*=128)&&(a-=Math.pow(2,8*t)),a},l.prototype.readInt8=function(e,t){return(e>>>=0,t||y(e,1,this.length),128&this[e])?-((255-this[e]+1)*1):this[e]},l.prototype.readInt16LE=function(e,t){e>>>=0,t||y(e,2,this.length);var n=this[e]|this[e+1]<<8;return 32768&n?4294901760|n:n},l.prototype.readInt16BE=function(e,t){e>>>=0,t||y(e,2,this.length);var n=this[e+1]|this[e]<<8;return 32768&n?4294901760|n:n},l.prototype.readInt32LE=function(e,t){return e>>>=0,t||y(e,4,this.length),this[e]|this[e+1]<<8|this[e+2]<<16|this[e+3]<<24},l.prototype.readInt32BE=function(e,t){return e>>>=0,t||y(e,4,this.length),this[e]<<24|this[e+1]<<16|this[e+2]<<8|this[e+3]},l.prototype.readFloatLE=function(e,t){return e>>>=0,t||y(e,4,this.length),o.read(this,e,!0,23,4)},l.prototype.readFloatBE=function(e,t){return e>>>=0,t||y(e,4,this.length),o.read(this,e,!1,23,4)},l.prototype.readDoubleLE=function(e,t){return e>>>=0,t||y(e,8,this.length),o.read(this,e,!0,52,8)},l.prototype.readDoubleBE=function(e,t){return e>>>=0,t||y(e,8,this.length),o.read(this,e,!1,52,8)},l.prototype.writeUIntLE=function(e,t,n,r){if(e=+e,t>>>=0,n>>>=0,!r){var o=Math.pow(2,8*n)-1;E(this,e,t,n,o,0)}var a=1,i=0;for(this[t]=255&e;++i>>=0,n>>>=0,!r){var o=Math.pow(2,8*n)-1;E(this,e,t,n,o,0)}var a=n-1,i=1;for(this[t+a]=255&e;--a>=0&&(i*=256);)this[t+a]=e/i&255;return t+n},l.prototype.writeUInt8=function(e,t,n){return e=+e,t>>>=0,n||E(this,e,t,1,255,0),this[t]=255&e,t+1},l.prototype.writeUInt16LE=function(e,t,n){return e=+e,t>>>=0,n||E(this,e,t,2,65535,0),this[t]=255&e,this[t+1]=e>>>8,t+2},l.prototype.writeUInt16BE=function(e,t,n){return 
e=+e,t>>>=0,n||E(this,e,t,2,65535,0),this[t]=e>>>8,this[t+1]=255&e,t+2},l.prototype.writeUInt32LE=function(e,t,n){return e=+e,t>>>=0,n||E(this,e,t,4,4294967295,0),this[t+3]=e>>>24,this[t+2]=e>>>16,this[t+1]=e>>>8,this[t]=255&e,t+4},l.prototype.writeUInt32BE=function(e,t,n){return e=+e,t>>>=0,n||E(this,e,t,4,4294967295,0),this[t]=e>>>24,this[t+1]=e>>>16,this[t+2]=e>>>8,this[t+3]=255&e,t+4},l.prototype.writeIntLE=function(e,t,n,r){if(e=+e,t>>>=0,!r){var o=Math.pow(2,8*n-1);E(this,e,t,n,o-1,-o)}var a=0,i=1,l=0;for(this[t]=255&e;++a>0)-l&255;return t+n},l.prototype.writeIntBE=function(e,t,n,r){if(e=+e,t>>>=0,!r){var o=Math.pow(2,8*n-1);E(this,e,t,n,o-1,-o)}var a=n-1,i=1,l=0;for(this[t+a]=255&e;--a>=0&&(i*=256);)e<0&&0===l&&0!==this[t+a+1]&&(l=1),this[t+a]=(e/i>>0)-l&255;return t+n},l.prototype.writeInt8=function(e,t,n){return e=+e,t>>>=0,n||E(this,e,t,1,127,-128),e<0&&(e=255+e+1),this[t]=255&e,t+1},l.prototype.writeInt16LE=function(e,t,n){return e=+e,t>>>=0,n||E(this,e,t,2,32767,-32768),this[t]=255&e,this[t+1]=e>>>8,t+2},l.prototype.writeInt16BE=function(e,t,n){return e=+e,t>>>=0,n||E(this,e,t,2,32767,-32768),this[t]=e>>>8,this[t+1]=255&e,t+2},l.prototype.writeInt32LE=function(e,t,n){return e=+e,t>>>=0,n||E(this,e,t,4,2147483647,-2147483648),this[t]=255&e,this[t+1]=e>>>8,this[t+2]=e>>>16,this[t+3]=e>>>24,t+4},l.prototype.writeInt32BE=function(e,t,n){return e=+e,t>>>=0,n||E(this,e,t,4,2147483647,-2147483648),e<0&&(e=4294967295+e+1),this[t]=e>>>24,this[t+1]=e>>>16,this[t+2]=e>>>8,this[t+3]=255&e,t+4},l.prototype.writeFloatLE=function(e,t,n){return w(this,e,t,!0,n)},l.prototype.writeFloatBE=function(e,t,n){return w(this,e,t,!1,n)},l.prototype.writeDoubleLE=function(e,t,n){return x(this,e,t,!0,n)},l.prototype.writeDoubleBE=function(e,t,n){return x(this,e,t,!1,n)},l.prototype.copy=function(e,t,n,r){if(!l.isBuffer(e))throw TypeError("argument should be a Buffer");if(n||(n=0),r||0===r||(r=this.length),t>=e.length&&(t=e.length),t||(t=0),r>0&&r=this.length)throw 
RangeError("Index out of range");if(r<0)throw RangeError("sourceEnd out of bounds");r>this.length&&(r=this.length),e.length-t=0;--a)e[a+t]=this[a+n];else Uint8Array.prototype.set.call(e,this.subarray(n,r),t);return o},l.prototype.fill=function(e,t,n,r){if("string"==typeof e){if("string"==typeof t?(r=t,t=0,n=this.length):"string"==typeof n&&(r=n,n=this.length),void 0!==r&&"string"!=typeof r)throw TypeError("encoding must be a string");if("string"==typeof r&&!l.isEncoding(r))throw TypeError("Unknown encoding: "+r);if(1===e.length){var o,a=e.charCodeAt(0);("utf8"===r&&a<128||"latin1"===r)&&(e=a)}}else"number"==typeof e?e&=255:"boolean"==typeof e&&(e=Number(e));if(t<0||this.length>>=0,n=void 0===n?this.length:n>>>0,e||(e=0),"number"==typeof e)for(o=t;o55295&&n<57344){if(!o){if(n>56319||i+1===r){(t-=3)>-1&&a.push(239,191,189);continue}o=n;continue}if(n<56320){(t-=3)>-1&&a.push(239,191,189),o=n;continue}n=(o-55296<<10|n-56320)+65536}else o&&(t-=3)>-1&&a.push(239,191,189);if(o=null,n<128){if((t-=1)<0)break;a.push(n)}else if(n<2048){if((t-=2)<0)break;a.push(n>>6|192,63&n|128)}else if(n<65536){if((t-=3)<0)break;a.push(n>>12|224,n>>6&63|128,63&n|128)}else if(n<1114112){if((t-=4)<0)break;a.push(n>>18|240,n>>12&63|128,n>>6&63|128,63&n|128)}else throw Error("Invalid code point")}return a}function T(e){for(var t=[],n=0;n=t.length)&&!(o>=e.length);++o)t[o+n]=e[o];return o}function I(e,t){return e instanceof t||null!=e&&null!=e.constructor&&null!=e.constructor.name&&e.constructor.name===t.name}var R=function(){for(var e="0123456789abcdef",t=Array(256),n=0;n<16;++n)for(var r=16*n,o=0;o<16;++o)t[r+o]=e[n]+e[o];return t}()},783:function(e,t){/*! ieee754. BSD-3-Clause License. 
Feross Aboukhadijeh */t.read=function(e,t,n,r,o){var a,i,l=8*o-r-1,s=(1<>1,u=-7,d=n?o-1:0,p=n?-1:1,f=e[t+d];for(d+=p,a=f&(1<<-u)-1,f>>=-u,u+=l;u>0;a=256*a+e[t+d],d+=p,u-=8);for(i=a&(1<<-u)-1,a>>=-u,u+=r;u>0;i=256*i+e[t+d],d+=p,u-=8);if(0===a)a=1-c;else{if(a===s)return i?NaN:1/0*(f?-1:1);i+=Math.pow(2,r),a-=c}return(f?-1:1)*i*Math.pow(2,a-r)},t.write=function(e,t,n,r,o,a){var i,l,s,c=8*a-o-1,u=(1<>1,p=23===o?5960464477539062e-23:0,f=r?0:a-1,m=r?1:-1,g=t<0||0===t&&1/t<0?1:0;for(isNaN(t=Math.abs(t))||t===1/0?(l=isNaN(t)?1:0,i=u):(i=Math.floor(Math.log(t)/Math.LN2),t*(s=Math.pow(2,-i))<1&&(i--,s*=2),i+d>=1?t+=p/s:t+=p*Math.pow(2,1-d),t*s>=2&&(i++,s/=2),i+d>=u?(l=0,i=u):i+d>=1?(l=(t*s-1)*Math.pow(2,o),i+=d):(l=t*Math.pow(2,d-1)*Math.pow(2,o),i=0));o>=8;e[n+f]=255&l,f+=m,l/=256,o-=8);for(i=i<0;e[n+f]=255&i,f+=m,i/=256,c-=8);e[n+f-m]|=128*g}}},n={};function r(e){var o=n[e];if(void 0!==o)return o.exports;var a=n[e]={exports:{}},i=!0;try{t[e](a,a.exports,r),i=!1}finally{i&&delete n[e]}return a.exports}r.ab="//";var o=r(72);e.exports=o}()},13127:function(e){!function(){var t={229:function(e){var t,n,r,o=e.exports={};function a(){throw Error("setTimeout has not been defined")}function i(){throw Error("clearTimeout has not been defined")}function l(e){if(t===setTimeout)return setTimeout(e,0);if((t===a||!t)&&setTimeout)return t=setTimeout,setTimeout(e,0);try{return t(e,0)}catch(n){try{return t.call(null,e,0)}catch(n){return t.call(this,e,0)}}}!function(){try{t="function"==typeof setTimeout?setTimeout:a}catch(e){t=a}try{n="function"==typeof clearTimeout?clearTimeout:i}catch(e){n=i}}();var s=[],c=!1,u=-1;function d(){c&&r&&(c=!1,r.length?s=r.concat(s):u=-1,s.length&&p())}function p(){if(!c){var e=l(d);c=!0;for(var t=s.length;t;){for(r=s,s=[];++u1)for(var n=1;n{let t=s[e]||"",{repeat:n,optional:r}=l[e],o="["+(n?"...":"")+e+"]";return r&&(o=(t?"":"/")+"["+o+"]"),n&&!Array.isArray(t)&&(t=[t]),(r||e in 
s)&&(a=a.replace(o,n?t.map(e=>encodeURIComponent(e)).join("/"):encodeURIComponent(t))||"/")})||(a=""),{params:c,result:a}}},11305:function(e,t,n){Object.defineProperty(t,"__esModule",{value:!0}),Object.defineProperty(t,"isDynamicRoute",{enumerable:!0,get:function(){return a}});let r=n(94749),o=/\/\[[^/]+?\](?=\/|$)/;function a(e){return(0,r.isInterceptionRouteAppPath)(e)&&(e=(0,r.extractInterceptionRouteInformation)(e).interceptedRoute),o.test(e)}},17434:function(e,t,n){Object.defineProperty(t,"__esModule",{value:!0}),Object.defineProperty(t,"isLocalURL",{enumerable:!0,get:function(){return a}});let r=n(36874),o=n(87379);function a(e){if(!(0,r.isAbsoluteUrl)(e))return!0;try{let t=(0,r.getLocationOrigin)(),n=new URL(e,t);return n.origin===t&&(0,o.hasBasePath)(n.pathname)}catch(e){return!1}}},24544:function(e,t){function n(e,t){let n={};return Object.keys(e).forEach(r=>{t.includes(r)||(n[r]=e[r])}),n}Object.defineProperty(t,"__esModule",{value:!0}),Object.defineProperty(t,"omit",{enumerable:!0,get:function(){return n}})},95770:function(e,t){function n(e){let t={};return e.forEach((e,n)=>{void 0===t[n]?t[n]=e:Array.isArray(t[n])?t[n].push(e):t[n]=[t[n],e]}),t}function r(e){return"string"!=typeof e&&("number"!=typeof e||isNaN(e))&&"boolean"!=typeof e?"":String(e)}function o(e){let t=new URLSearchParams;return Object.entries(e).forEach(e=>{let[n,o]=e;Array.isArray(o)?o.forEach(e=>t.append(n,r(e))):t.set(n,r(o))}),t}function a(e){for(var t=arguments.length,n=Array(t>1?t-1:0),r=1;r{Array.from(t.keys()).forEach(t=>e.delete(t)),t.forEach((t,n)=>e.append(n,t))}),e}Object.defineProperty(t,"__esModule",{value:!0}),function(e,t){for(var n in t)Object.defineProperty(e,n,{enumerable:!0,get:t[n]})}(t,{searchParamsToUrlQuery:function(){return n},urlQueryToSearchParams:function(){return o},assign:function(){return a}})},2395:function(e,t,n){Object.defineProperty(t,"__esModule",{value:!0}),Object.defineProperty(t,"getRouteMatcher",{enumerable:!0,get:function(){return o}});let 
r=n(36874);function o(e){let{re:t,groups:n}=e;return e=>{let o=t.exec(e);if(!o)return!1;let a=e=>{try{return decodeURIComponent(e)}catch(e){throw new r.DecodeError("failed to decode param")}},i={};return Object.keys(n).forEach(e=>{let t=n[e],r=o[t.pos];void 0!==r&&(i[e]=~r.indexOf("/")?r.split("/").map(e=>a(e)):t.repeat?[a(r)]:a(r))}),i}}},19935:function(e,t,n){Object.defineProperty(t,"__esModule",{value:!0}),function(e,t){for(var n in t)Object.defineProperty(e,n,{enumerable:!0,get:t[n]})}(t,{getRouteRegex:function(){return s},getNamedRouteRegex:function(){return d},getNamedMiddlewareRegex:function(){return p}});let r=n(94749),o=n(22202),a=n(95868);function i(e){let t=e.startsWith("[")&&e.endsWith("]");t&&(e=e.slice(1,-1));let n=e.startsWith("...");return n&&(e=e.slice(3)),{key:e,repeat:n,optional:t}}function l(e){let t=(0,a.removeTrailingSlash)(e).slice(1).split("/"),n={},l=1;return{parameterizedRoute:t.map(e=>{let t=r.INTERCEPTION_ROUTE_MARKERS.find(t=>e.startsWith(t)),a=e.match(/\[((?:\[.*\])|.+)\]/);if(t&&a){let{key:e,optional:r,repeat:s}=i(a[1]);return n[e]={pos:l++,repeat:s,optional:r},"/"+(0,o.escapeStringRegexp)(t)+"([^/]+?)"}if(!a)return"/"+(0,o.escapeStringRegexp)(e);{let{key:e,repeat:t,optional:r}=i(a[1]);return n[e]={pos:l++,repeat:t,optional:r},t?r?"(?:/(.+?))?":"/(.+?)":"/([^/]+?)"}}).join(""),groups:n}}function s(e){let{parameterizedRoute:t,groups:n}=l(e);return{re:RegExp("^"+t+"(?:/)?$"),groups:n}}function c(e){let{interceptionMarker:t,getSafeRouteKey:n,segment:r,routeKeys:a,keyPrefix:l}=e,{key:s,optional:c,repeat:u}=i(r),d=s.replace(/\W/g,"");l&&(d=""+l+d);let p=!1;(0===d.length||d.length>30)&&(p=!0),isNaN(parseInt(d.slice(0,1)))||(p=!0),p&&(d=n()),l?a[d]=""+l+s:a[d]=s;let f=t?(0,o.escapeStringRegexp)(t):"";return u?c?"(?:/"+f+"(?<"+d+">.+?))?":"/"+f+"(?<"+d+">.+?)":"/"+f+"(?<"+d+">[^/]+?)"}function u(e,t){let n;let i=(0,a.removeTrailingSlash)(e).slice(1).split("/"),l=(n=0,()=>{let 
e="",t=++n;for(;t>0;)e+=String.fromCharCode(97+(t-1)%26),t=Math.floor((t-1)/26);return e}),s={};return{namedParameterizedRoute:i.map(e=>{let n=r.INTERCEPTION_ROUTE_MARKERS.some(t=>e.startsWith(t)),a=e.match(/\[((?:\[.*\])|.+)\]/);if(n&&a){let[n]=e.split(a[0]);return c({getSafeRouteKey:l,interceptionMarker:n,segment:a[1],routeKeys:s,keyPrefix:t?"nxtI":void 0})}return a?c({getSafeRouteKey:l,segment:a[1],routeKeys:s,keyPrefix:t?"nxtP":void 0}):"/"+(0,o.escapeStringRegexp)(e)}).join(""),routeKeys:s}}function d(e,t){let n=u(e,t);return{...s(e),namedRegex:"^"+n.namedParameterizedRoute+"(?:/)?$",routeKeys:n.routeKeys}}function p(e,t){let{parameterizedRoute:n}=l(e),{catchAll:r=!0}=t;if("/"===n)return{namedRegex:"^/"+(r?".*":"")+"$"};let{namedParameterizedRoute:o}=u(e,!1);return{namedRegex:"^"+o+(r?"(?:(/.*)?)":"")+"$"}}},97409:function(e,t){Object.defineProperty(t,"__esModule",{value:!0}),Object.defineProperty(t,"getSortedRoutes",{enumerable:!0,get:function(){return r}});class n{insert(e){this._insert(e.split("/").filter(Boolean),[],!1)}smoosh(){return this._smoosh()}_smoosh(e){void 0===e&&(e="/");let t=[...this.children.keys()].sort();null!==this.slugName&&t.splice(t.indexOf("[]"),1),null!==this.restSlugName&&t.splice(t.indexOf("[...]"),1),null!==this.optionalRestSlugName&&t.splice(t.indexOf("[[...]]"),1);let n=t.map(t=>this.children.get(t)._smoosh(""+e+t+"/")).reduce((e,t)=>[...e,...t],[]);if(null!==this.slugName&&n.push(...this.children.get("[]")._smoosh(e+"["+this.slugName+"]/")),!this.placeholder){let t="/"===e?"/":e.slice(0,-1);if(null!=this.optionalRestSlugName)throw Error('You cannot define a route with the same specificity as a optional catch-all route ("'+t+'" and "'+t+"[[..."+this.optionalRestSlugName+']]").');n.unshift(t)}return 
null!==this.restSlugName&&n.push(...this.children.get("[...]")._smoosh(e+"[..."+this.restSlugName+"]/")),null!==this.optionalRestSlugName&&n.push(...this.children.get("[[...]]")._smoosh(e+"[[..."+this.optionalRestSlugName+"]]/")),n}_insert(e,t,r){if(0===e.length){this.placeholder=!1;return}if(r)throw Error("Catch-all must be the last part of the URL.");let o=e[0];if(o.startsWith("[")&&o.endsWith("]")){let n=o.slice(1,-1),i=!1;if(n.startsWith("[")&&n.endsWith("]")&&(n=n.slice(1,-1),i=!0),n.startsWith("...")&&(n=n.substring(3),r=!0),n.startsWith("[")||n.endsWith("]"))throw Error("Segment names may not start or end with extra brackets ('"+n+"').");if(n.startsWith("."))throw Error("Segment names may not start with erroneous periods ('"+n+"').");function a(e,n){if(null!==e&&e!==n)throw Error("You cannot use different slug names for the same dynamic path ('"+e+"' !== '"+n+"').");t.forEach(e=>{if(e===n)throw Error('You cannot have the same slug name "'+n+'" repeat within a single dynamic path');if(e.replace(/\W/g,"")===o.replace(/\W/g,""))throw Error('You cannot have the slug names "'+e+'" and "'+n+'" differ only by non-word symbols within a single dynamic path')}),t.push(n)}if(r){if(i){if(null!=this.restSlugName)throw Error('You cannot use both an required and optional catch-all route at the same level ("[...'+this.restSlugName+']" and "'+e[0]+'" ).');a(this.optionalRestSlugName,n),this.optionalRestSlugName=n,o="[[...]]"}else{if(null!=this.optionalRestSlugName)throw Error('You cannot use both an optional and required catch-all route at the same level ("[[...'+this.optionalRestSlugName+']]" and "'+e[0]+'").');a(this.restSlugName,n),this.restSlugName=n,o="[...]"}}else{if(i)throw Error('Optional route parameters are not yet supported ("'+e[0]+'").');a(this.slugName,n),this.slugName=n,o="[]"}}this.children.has(o)||this.children.set(o,new n),this.children.get(o)._insert(e.slice(1),t,r)}constructor(){this.placeholder=!0,this.children=new 
Map,this.slugName=null,this.restSlugName=null,this.optionalRestSlugName=null}}function r(e){let t=new n;return e.forEach(e=>t.insert(e)),t.smoosh()}},36874:function(e,t){Object.defineProperty(t,"__esModule",{value:!0}),function(e,t){for(var n in t)Object.defineProperty(e,n,{enumerable:!0,get:t[n]})}(t,{WEB_VITALS:function(){return n},execOnce:function(){return r},isAbsoluteUrl:function(){return a},getLocationOrigin:function(){return i},getURL:function(){return l},getDisplayName:function(){return s},isResSent:function(){return c},normalizeRepeatedSlashes:function(){return u},loadGetInitialProps:function(){return d},SP:function(){return p},ST:function(){return f},DecodeError:function(){return m},NormalizeError:function(){return g},PageNotFoundError:function(){return h},MissingStaticPage:function(){return b},MiddlewareNotFoundError:function(){return v},stringifyError:function(){return y}});let n=["CLS","FCP","FID","INP","LCP","TTFB"];function r(e){let t,n=!1;return function(){for(var r=arguments.length,o=Array(r),a=0;ao.test(e);function i(){let{protocol:e,hostname:t,port:n}=window.location;return e+"//"+t+(n?":"+n:"")}function l(){let{href:e}=window.location,t=i();return e.substring(t.length)}function s(e){return"string"==typeof e?e:e.displayName||e.name||"Unknown"}function c(e){return e.finished||e.headersSent}function u(e){let t=e.split("?");return t[0].replace(/\\/g,"/").replace(/\/\/+/g,"/")+(t[1]?"?"+t.slice(1).join("?"):"")}async function d(e,t){let n=t.res||t.ctx&&t.ctx.res;if(!e.getInitialProps)return t.ctx&&t.Component?{pageProps:await d(t.Component,t.ctx)}:{};let r=await e.getInitialProps(t);if(n&&c(n))return r;if(!r)throw Error('"'+s(e)+'.getInitialProps()" should resolve to an object. 
But found "'+r+'" instead.');return r}let p="undefined"!=typeof performance,f=p&&["mark","measure","getEntriesByName"].every(e=>"function"==typeof performance[e]);class m extends Error{}class g extends Error{}class h extends Error{constructor(e){super(),this.code="ENOENT",this.name="PageNotFoundError",this.message="Cannot find module for page: "+e}}class b extends Error{constructor(e,t){super(),this.message="Failed to load static file for page: "+e+" "+t}}class v extends Error{constructor(){super(),this.code="ENOENT",this.message="Cannot find the middleware module"}}function y(e){return JSON.stringify({message:e.message,stack:e.stack})}},18314:function(e,t,n){var r=n(41811);function o(){}function a(){}a.resetWarningCache=o,e.exports=function(){function e(e,t,n,o,a,i){if(i!==r){var l=Error("Calling PropTypes validators directly is not supported by the `prop-types` package. Use PropTypes.checkPropTypes() to call them. Read more at http://fb.me/use-check-prop-types");throw l.name="Invariant Violation",l}}function t(){return e}e.isRequired=e;var n={array:e,bigint:e,bool:e,func:e,number:e,object:e,string:e,symbol:e,any:e,arrayOf:t,element:e,elementType:e,instanceOf:t,node:e,objectOf:t,oneOf:t,oneOfType:t,shape:t,exact:t,checkPropTypes:a,resetWarningCache:o};return n.PropTypes=n,n}},74404:function(e,t,n){e.exports=n(18314)()},41811:function(e){e.exports="SECRET_DO_NOT_PASS_THIS_OR_YOU_WILL_BE_FIRED"},76570:function(e,t,n){n.d(t,{gN:function(){return eh},zb:function(){return w},RV:function(){return eT},aV:function(){return eb},ZM:function(){return x},ZP:function(){return e_},cI:function(){return eO},qo:function(){return eR}});var r,o=n(64090),a=n(14749),i=n(6787),l=n(86926),s=n(74902),c=n(5239),u=n(63787),d=n(47365),p=n(65127),f=n(34951),m=n(27478),g=n(85430),h=n(50833),b=n(33054),v=n(92536),y=n(53850),E="RC_FORM_INTERNAL_HOOKS",S=function(){(0,y.ZP)(!1,"Can not find FormContext. 
Please make sure you wrap Field under Form.")},w=o.createContext({getFieldValue:S,getFieldsValue:S,getFieldError:S,getFieldWarning:S,getFieldsError:S,isFieldsTouched:S,isFieldTouched:S,isFieldValidating:S,isFieldsValidating:S,resetFields:S,setFields:S,setFieldValue:S,setFieldsValue:S,validateFields:S,submit:S,getInternalHooks:function(){return S(),{dispatch:S,initEntityValue:S,registerField:S,useSubscribe:S,setInitialValues:S,destroyForm:S,setCallbacks:S,registerWatch:S,getFields:S,setValidateMessages:S,setPreserve:S,getInitialValue:S}}}),x=o.createContext(null);function O(e){return null==e?[]:Array.isArray(e)?e:[e]}var C=n(49079);function T(){return(T=Object.assign?Object.assign.bind():function(e){for(var t=1;t1?t-1:0),r=1;r=a)return e;switch(e){case"%s":return String(n[o++]);case"%d":return Number(n[o++]);case"%j":try{return JSON.stringify(n[o++])}catch(e){return"[Circular]"}break;default:return e}}):e}function M(e,t){return!!(null==e||"array"===t&&Array.isArray(e)&&!e.length)||("string"===t||"url"===t||"hex"===t||"email"===t||"date"===t||"pattern"===t)&&"string"==typeof e&&!e}function L(e,t,n){var r=0,o=e.length;!function a(i){if(i&&i.length){n(i);return}var l=r;r+=1,l()\[\]\\.,;:\s@"]+(\.[^<>()\[\]\\.,;:\s@"]+)*)|(".+"))@((\[[0-9]{1,3}\.[0-9]{1,3}\.[0-9]{1,3}\.[0-9]{1,3}])|(([a-zA-Z\-0-9\u00A0-\uD7FF\uF900-\uFDCF\uFDF0-\uFFEF]+\.)+[a-zA-Z\u00A0-\uD7FF\uF900-\uFDCF\uFDF0-\uFFEF]{2,}))$/,hex:/^#?([a-f0-9]{6}|[a-f0-9]{3})$/i},z={integer:function(e){return z.number(e)&&parseInt(e,10)===e},float:function(e){return z.number(e)&&!z.integer(e)},array:function(e){return Array.isArray(e)},regexp:function(e){if(e instanceof RegExp)return!0;try{return new RegExp(e),!0}catch(e){return!1}},date:function(e){return"function"==typeof e.getTime&&"function"==typeof e.getMonth&&"function"==typeof e.getYear&&!isNaN(e.getTime())},number:function(e){return!isNaN(e)&&"number"==typeof e},object:function(e){return"object"==typeof 
e&&!z.array(e)},method:function(e){return"function"==typeof e},email:function(e){return"string"==typeof e&&e.length<=320&&!!e.match(Z.email)},url:function(e){return"string"==typeof e&&e.length<=2048&&!!e.match(U())},hex:function(e){return"string"==typeof e&&!!e.match(Z.hex)}},H="enum",G={required:B,whitespace:function(e,t,n,r,o){(/^\s+$/.test(t)||""===t)&&r.push(P(o.messages.whitespace,e.fullField))},type:function(e,t,n,r,o){if(e.required&&void 0===t){B(e,t,n,r,o);return}var a=e.type;["integer","float","array","regexp","object","method","email","number","date","url","hex"].indexOf(a)>-1?z[a](t)||r.push(P(o.messages.types[a],e.fullField,e.type)):a&&typeof t!==e.type&&r.push(P(o.messages.types[a],e.fullField,e.type))},range:function(e,t,n,r,o){var a="number"==typeof e.len,i="number"==typeof e.min,l="number"==typeof e.max,s=t,c=null,u="number"==typeof t,d="string"==typeof t,p=Array.isArray(t);if(u?c="number":d?c="string":p&&(c="array"),!c)return!1;p&&(s=t.length),d&&(s=t.replace(/[\uD800-\uDBFF][\uDC00-\uDFFF]/g,"_").length),a?s!==e.len&&r.push(P(o.messages[c].len,e.fullField,e.len)):i&&!l&&se.max?r.push(P(o.messages[c].max,e.fullField,e.max)):i&&l&&(se.max)&&r.push(P(o.messages[c].range,e.fullField,e.min,e.max))},enum:function(e,t,n,r,o){e[H]=Array.isArray(e[H])?e[H]:[],-1===e[H].indexOf(t)&&r.push(P(o.messages[H],e.fullField,e[H].join(", ")))},pattern:function(e,t,n,r,o){!e.pattern||(e.pattern instanceof RegExp?(e.pattern.lastIndex=0,e.pattern.test(t)||r.push(P(o.messages.pattern.mismatch,e.fullField,t,e.pattern))):"string"!=typeof e.pattern||new RegExp(e.pattern).test(t)||r.push(P(o.messages.pattern.mismatch,e.fullField,t,e.pattern)))}},$=function(e,t,n,r,o){var a=e.type,i=[];if(e.required||!e.required&&r.hasOwnProperty(e.field)){if(M(t,a)&&!e.required)return n();G.required(e,t,r,i,o,a),M(t,a)||G.type(e,t,r,i,o)}n(i)},W={string:function(e,t,n,r,o){var a=[];if(e.required||!e.required&&r.hasOwnProperty(e.field)){if(M(t,"string")&&!e.required)return 
n();G.required(e,t,r,a,o,"string"),M(t,"string")||(G.type(e,t,r,a,o),G.range(e,t,r,a,o),G.pattern(e,t,r,a,o),!0===e.whitespace&&G.whitespace(e,t,r,a,o))}n(a)},method:function(e,t,n,r,o){var a=[];if(e.required||!e.required&&r.hasOwnProperty(e.field)){if(M(t)&&!e.required)return n();G.required(e,t,r,a,o),void 0!==t&&G.type(e,t,r,a,o)}n(a)},number:function(e,t,n,r,o){var a=[];if(e.required||!e.required&&r.hasOwnProperty(e.field)){if(""===t&&(t=void 0),M(t)&&!e.required)return n();G.required(e,t,r,a,o),void 0!==t&&(G.type(e,t,r,a,o),G.range(e,t,r,a,o))}n(a)},boolean:function(e,t,n,r,o){var a=[];if(e.required||!e.required&&r.hasOwnProperty(e.field)){if(M(t)&&!e.required)return n();G.required(e,t,r,a,o),void 0!==t&&G.type(e,t,r,a,o)}n(a)},regexp:function(e,t,n,r,o){var a=[];if(e.required||!e.required&&r.hasOwnProperty(e.field)){if(M(t)&&!e.required)return n();G.required(e,t,r,a,o),M(t)||G.type(e,t,r,a,o)}n(a)},integer:function(e,t,n,r,o){var a=[];if(e.required||!e.required&&r.hasOwnProperty(e.field)){if(M(t)&&!e.required)return n();G.required(e,t,r,a,o),void 0!==t&&(G.type(e,t,r,a,o),G.range(e,t,r,a,o))}n(a)},float:function(e,t,n,r,o){var a=[];if(e.required||!e.required&&r.hasOwnProperty(e.field)){if(M(t)&&!e.required)return n();G.required(e,t,r,a,o),void 0!==t&&(G.type(e,t,r,a,o),G.range(e,t,r,a,o))}n(a)},array:function(e,t,n,r,o){var a=[];if(e.required||!e.required&&r.hasOwnProperty(e.field)){if(null==t&&!e.required)return n();G.required(e,t,r,a,o,"array"),null!=t&&(G.type(e,t,r,a,o),G.range(e,t,r,a,o))}n(a)},object:function(e,t,n,r,o){var a=[];if(e.required||!e.required&&r.hasOwnProperty(e.field)){if(M(t)&&!e.required)return n();G.required(e,t,r,a,o),void 0!==t&&G.type(e,t,r,a,o)}n(a)},enum:function(e,t,n,r,o){var a=[];if(e.required||!e.required&&r.hasOwnProperty(e.field)){if(M(t)&&!e.required)return n();G.required(e,t,r,a,o),void 0!==t&&G.enum(e,t,r,a,o)}n(a)},pattern:function(e,t,n,r,o){var 
a=[];if(e.required||!e.required&&r.hasOwnProperty(e.field)){if(M(t,"string")&&!e.required)return n();G.required(e,t,r,a,o),M(t,"string")||G.pattern(e,t,r,a,o)}n(a)},date:function(e,t,n,r,o){var a,i=[];if(e.required||!e.required&&r.hasOwnProperty(e.field)){if(M(t,"date")&&!e.required)return n();G.required(e,t,r,i,o),!M(t,"date")&&(a=t instanceof Date?t:new Date(t),G.type(e,a,r,i,o),a&&G.range(e,a.getTime(),r,i,o))}n(i)},url:$,hex:$,email:$,required:function(e,t,n,r,o){var a=[],i=Array.isArray(t)?"array":typeof t;G.required(e,t,r,a,o,i),n(a)},any:function(e,t,n,r,o){var a=[];if(e.required||!e.required&&r.hasOwnProperty(e.field)){if(M(t)&&!e.required)return n();G.required(e,t,r,a,o)}n(a)}};function V(){return{default:"Validation error on field %s",required:"%s is required",enum:"%s must be one of %s",whitespace:"%s cannot be empty",date:{format:"%s date %s is invalid for format %s",parse:"%s date could not be parsed, %s is invalid ",invalid:"%s date %s is invalid"},types:{string:"%s is not a %s",method:"%s is not a %s (function)",array:"%s is not an %s",object:"%s is not an %s",number:"%s is not a %s",date:"%s is not a %s",boolean:"%s is not a %s",integer:"%s is not an %s",float:"%s is not a %s",regexp:"%s is not a valid %s",email:"%s is not a valid %s",url:"%s is not a valid %s",hex:"%s is not a valid %s"},string:{len:"%s must be exactly %s characters",min:"%s must be at least %s characters",max:"%s cannot be longer than %s characters",range:"%s must be between %s and %s characters"},number:{len:"%s must equal %s",min:"%s cannot be less than %s",max:"%s cannot be greater than %s",range:"%s must be between %s and %s"},array:{len:"%s must be exactly %s in length",min:"%s cannot be less than %s in length",max:"%s cannot be greater than %s in length",range:"%s must be between %s and %s in length"},pattern:{mismatch:"%s value %s does not match pattern %s"},clone:function(){var e=JSON.parse(JSON.stringify(this));return e.clone=this.clone,e}}}var q=V(),Y=function(){function 
e(e){this.rules=null,this._messages=q,this.define(e)}var t=e.prototype;return t.define=function(e){var t=this;if(!e)throw Error("Cannot configure a schema with no rules");if("object"!=typeof e||Array.isArray(e))throw Error("Rules must be an object");this.rules={},Object.keys(e).forEach(function(n){var r=e[n];t.rules[n]=Array.isArray(r)?r:[r]})},t.messages=function(e){return e&&(this._messages=F(V(),e)),this._messages},t.validate=function(t,n,r){var o=this;void 0===n&&(n={}),void 0===r&&(r=function(){});var a=t,i=n,l=r;if("function"==typeof i&&(l=i,i={}),!this.rules||0===Object.keys(this.rules).length)return l&&l(null,a),Promise.resolve(a);if(i.messages){var s=this.messages();s===q&&(s=V()),F(s,i.messages),i.messages=s}else i.messages=this.messages();var c={};(i.keys||Object.keys(this.rules)).forEach(function(e){var n=o.rules[e],r=a[e];n.forEach(function(n){var i=n;"function"==typeof i.transform&&(a===t&&(a=T({},a)),r=a[e]=i.transform(r)),(i="function"==typeof i?{validator:i}:T({},i)).validator=o.getValidationMethod(i),i.validator&&(i.field=e,i.fullField=i.fullField||e,i.type=o.getType(i),c[e]=c[e]||[],c[e].push({rule:i,value:r,source:a,field:e}))})});var u={};return function(e,t,n,r,o){if(t.first){var a=new Promise(function(t,a){var i;L((i=[],Object.keys(e).forEach(function(t){i.push.apply(i,e[t]||[])}),i),n,function(e){return r(e),e.length?a(new D(e,_(e))):t(o)})});return a.catch(function(e){return e}),a}var i=!0===t.firstFields?Object.keys(e):t.firstFields||[],l=Object.keys(e),s=l.length,c=0,u=[],d=new Promise(function(t,a){var d=function(e){if(u.push.apply(u,e),++c===s)return r(u),u.length?a(new D(u,_(u))):t(o)};l.length||(r(u),t(o)),l.forEach(function(t){var r=e[t];-1!==i.indexOf(t)?L(r,n,d):function(e,t,n){var r=[],o=0,a=e.length;function i(e){r.push.apply(r,e||[]),++o===a&&n(r)}e.forEach(function(e){t(e,i)})}(r,n,d)})});return d.catch(function(e){return e}),d}(c,i,function(t,n){var r,o=t.rule,l=("object"===o.type||"array"===o.type)&&("object"==typeof 
o.fields||"object"==typeof o.defaultField);function s(e,t){return T({},t,{fullField:o.fullField+"."+e,fullFields:o.fullFields?[].concat(o.fullFields,[e]):[e]})}function c(r){void 0===r&&(r=[]);var c=Array.isArray(r)?r:[r];!i.suppressWarning&&c.length&&e.warning("async-validator:",c),c.length&&void 0!==o.message&&(c=[].concat(o.message));var d=c.map(j(o,a));if(i.first&&d.length)return u[o.field]=1,n(d);if(l){if(o.required&&!t.value)return void 0!==o.message?d=[].concat(o.message).map(j(o,a)):i.error&&(d=[i.error(o,P(i.messages.required,o.field))]),n(d);var p={};o.defaultField&&Object.keys(t.value).map(function(e){p[e]=o.defaultField});var f={};Object.keys(p=T({},p,t.rule.fields)).forEach(function(e){var t=p[e],n=Array.isArray(t)?t:[t];f[e]=n.map(s.bind(null,e))});var m=new e(f);m.messages(i.messages),t.rule.options&&(t.rule.options.messages=i.messages,t.rule.options.error=i.error),m.validate(t.value,t.rule.options||i,function(e){var t=[];d&&d.length&&t.push.apply(t,d),e&&e.length&&t.push.apply(t,e),n(t.length?t:null)})}else n(d)}if(l=l&&(o.required||!o.required&&t.value),o.field=t.field,o.asyncValidator)r=o.asyncValidator(o,t.value,c,t.source,i);else if(o.validator){try{r=o.validator(o,t.value,c,t.source,i)}catch(e){null==console.error||console.error(e),i.suppressValidatorError||setTimeout(function(){throw e},0),c(e.message)}!0===r?c():!1===r?c("function"==typeof o.message?o.message(o.fullField||o.field):o.message||(o.fullField||o.field)+" fails"):r instanceof Array?c(r):r instanceof Error&&c(r.message)}r&&r.then&&r.then(function(){return c()},function(e){return c(e)})},function(e){!function(e){for(var t=[],n={},r=0;r2&&void 0!==arguments[2]&&arguments[2];return e&&e.some(function(e){return ec(t,e,n)})}function ec(e,t){var n=arguments.length>2&&void 0!==arguments[2]&&arguments[2];return!!e&&!!t&&(!!n||e.length===t.length)&&t.every(function(t,n){return e[n]===t})}function eu(e){var t=arguments.length<=1?void 0:arguments[1];return 
t&&t.target&&"object"===(0,eo.Z)(t.target)&&e in t.target?t.target[e]:t}function ed(e,t,n){var r=e.length;if(t<0||t>=r||n<0||n>=r)return e;var o=e[t],a=t-n;return a>0?[].concat((0,u.Z)(e.slice(0,n)),[o],(0,u.Z)(e.slice(n,t)),(0,u.Z)(e.slice(t+1,r))):a<0?[].concat((0,u.Z)(e.slice(0,t)),(0,u.Z)(e.slice(t+1,n+1)),[o],(0,u.Z)(e.slice(n+1,r))):e}var ep=["name"],ef=[];function em(e,t,n,r,o,a){return"function"==typeof e?e(t,n,"source"in a?{source:a.source}:{}):r!==o}var eg=function(e){(0,m.Z)(n,e);var t=(0,g.Z)(n);function n(e){var r;return(0,d.Z)(this,n),r=t.call(this,e),(0,h.Z)((0,f.Z)(r),"state",{resetCount:0}),(0,h.Z)((0,f.Z)(r),"cancelRegisterFunc",null),(0,h.Z)((0,f.Z)(r),"mounted",!1),(0,h.Z)((0,f.Z)(r),"touched",!1),(0,h.Z)((0,f.Z)(r),"dirty",!1),(0,h.Z)((0,f.Z)(r),"validatePromise",void 0),(0,h.Z)((0,f.Z)(r),"prevValidating",void 0),(0,h.Z)((0,f.Z)(r),"errors",ef),(0,h.Z)((0,f.Z)(r),"warnings",ef),(0,h.Z)((0,f.Z)(r),"cancelRegister",function(){var e=r.props,t=e.preserve,n=e.isListField,o=e.name;r.cancelRegisterFunc&&r.cancelRegisterFunc(n,t,ei(o)),r.cancelRegisterFunc=null}),(0,h.Z)((0,f.Z)(r),"getNamePath",function(){var e=r.props,t=e.name,n=e.fieldContext.prefixName;return void 0!==t?[].concat((0,u.Z)(void 0===n?[]:n),(0,u.Z)(t)):[]}),(0,h.Z)((0,f.Z)(r),"getRules",function(){var e=r.props,t=e.rules,n=e.fieldContext;return(void 0===t?[]:t).map(function(e){return"function"==typeof e?e(n):e})}),(0,h.Z)((0,f.Z)(r),"refresh",function(){r.mounted&&r.setState(function(e){return{resetCount:e.resetCount+1}})}),(0,h.Z)((0,f.Z)(r),"metaCache",null),(0,h.Z)((0,f.Z)(r),"triggerMetaEvent",function(e){var t=r.props.onMetaChange;if(t){var n=(0,c.Z)((0,c.Z)({},r.getMeta()),{},{destroy:e});(0,v.Z)(r.metaCache,n)||t(n),r.metaCache=n}else r.metaCache=null}),(0,h.Z)((0,f.Z)(r),"onStoreChange",function(e,t,n){var o=r.props,a=o.shouldUpdate,i=o.dependencies,l=void 
0===i?[]:i,s=o.onReset,c=n.store,u=r.getNamePath(),d=r.getValue(e),p=r.getValue(c),f=t&&es(t,u);switch("valueUpdate"===n.type&&"external"===n.source&&d!==p&&(r.touched=!0,r.dirty=!0,r.validatePromise=null,r.errors=ef,r.warnings=ef,r.triggerMetaEvent()),n.type){case"reset":if(!t||f){r.touched=!1,r.dirty=!1,r.validatePromise=void 0,r.errors=ef,r.warnings=ef,r.triggerMetaEvent(),null==s||s(),r.refresh();return}break;case"remove":if(a){r.reRender();return}break;case"setField":var m=n.data;if(f){"touched"in m&&(r.touched=m.touched),"validating"in m&&!("originRCField"in m)&&(r.validatePromise=m.validating?Promise.resolve([]):null),"errors"in m&&(r.errors=m.errors||ef),"warnings"in m&&(r.warnings=m.warnings||ef),r.dirty=!0,r.triggerMetaEvent(),r.reRender();return}if("value"in m&&es(t,u,!0)||a&&!u.length&&em(a,e,c,d,p,n)){r.reRender();return}break;case"dependenciesUpdate":if(l.map(ei).some(function(e){return es(n.relatedFields,e)})){r.reRender();return}break;default:if(f||(!l.length||u.length||a)&&em(a,e,c,d,p,n)){r.reRender();return}}!0===a&&r.reRender()}),(0,h.Z)((0,f.Z)(r),"validateRules",function(e){var t=r.getNamePath(),n=r.getValue(),o=e||{},a=o.triggerName,i=o.validateOnly,d=Promise.resolve().then((0,s.Z)((0,l.Z)().mark(function o(){var i,p,f,m,g,h,b;return(0,l.Z)().wrap(function(o){for(;;)switch(o.prev=o.next){case 0:if(r.mounted){o.next=2;break}return o.abrupt("return",[]);case 2:if(f=void 0!==(p=(i=r.props).validateFirst)&&p,m=i.messageVariables,g=i.validateDebounce,h=r.getRules(),a&&(h=h.filter(function(e){return e}).filter(function(e){var t=e.validateTrigger;return!t||O(t).includes(a)})),!(g&&a)){o.next=10;break}return o.next=8,new Promise(function(e){setTimeout(e,g)});case 8:if(!(r.validatePromise!==d)){o.next=10;break}return o.abrupt("return",[]);case 10:return(b=function(e,t,n,r,o,a){var i,u,d=e.join("."),p=n.map(function(e,t){var n=e.validator,r=(0,c.Z)((0,c.Z)({},e),{},{ruleIndex:t});return n&&(r.validator=function(e,t,r){var 
o=!1,a=n(e,t,function(){for(var e=arguments.length,t=Array(e),n=0;n0&&void 0!==arguments[0]?arguments[0]:ef;if(r.validatePromise===d){r.validatePromise=null;var t,n=[],o=[];null===(t=e.forEach)||void 0===t||t.call(e,function(e){var t=e.rule.warningOnly,r=e.errors,a=void 0===r?ef:r;t?o.push.apply(o,(0,u.Z)(a)):n.push.apply(n,(0,u.Z)(a))}),r.errors=n,r.warnings=o,r.triggerMetaEvent(),r.reRender()}}),o.abrupt("return",b);case 13:case"end":return o.stop()}},o)})));return void 0!==i&&i||(r.validatePromise=d,r.dirty=!0,r.errors=ef,r.warnings=ef,r.triggerMetaEvent(),r.reRender()),d}),(0,h.Z)((0,f.Z)(r),"isFieldValidating",function(){return!!r.validatePromise}),(0,h.Z)((0,f.Z)(r),"isFieldTouched",function(){return r.touched}),(0,h.Z)((0,f.Z)(r),"isFieldDirty",function(){return!!r.dirty||void 0!==r.props.initialValue||void 0!==(0,r.props.fieldContext.getInternalHooks(E).getInitialValue)(r.getNamePath())}),(0,h.Z)((0,f.Z)(r),"getErrors",function(){return r.errors}),(0,h.Z)((0,f.Z)(r),"getWarnings",function(){return r.warnings}),(0,h.Z)((0,f.Z)(r),"isListField",function(){return r.props.isListField}),(0,h.Z)((0,f.Z)(r),"isList",function(){return r.props.isList}),(0,h.Z)((0,f.Z)(r),"isPreserve",function(){return r.props.preserve}),(0,h.Z)((0,f.Z)(r),"getMeta",function(){return r.prevValidating=r.isFieldValidating(),{touched:r.isFieldTouched(),validating:r.prevValidating,errors:r.errors,warnings:r.warnings,name:r.getNamePath(),validated:null===r.validatePromise}}),(0,h.Z)((0,f.Z)(r),"getOnlyChild",function(e){if("function"==typeof e){var t=r.getMeta();return(0,c.Z)((0,c.Z)({},r.getOnlyChild(e(r.getControlled(),t,r.props.fieldContext))),{},{isFunction:!0})}var n=(0,b.Z)(e);return 1===n.length&&o.isValidElement(n[0])?{child:n[0],isFunction:!1}:{child:n,isFunction:!1}}),(0,h.Z)((0,f.Z)(r),"getValue",function(e){var t=r.props.fieldContext.getFieldsValue,n=r.getNamePath();return(0,ea.Z)(e||t(!0),n)}),(0,h.Z)((0,f.Z)(r),"getControlled",function(){var e=arguments.length>0&&void 
0!==arguments[0]?arguments[0]:{},t=r.props,n=t.trigger,o=t.validateTrigger,a=t.getValueFromEvent,i=t.normalize,l=t.valuePropName,s=t.getValueProps,u=t.fieldContext,d=void 0!==o?o:u.validateTrigger,p=r.getNamePath(),f=u.getInternalHooks,m=u.getFieldsValue,g=f(E).dispatch,b=r.getValue(),v=s||function(e){return(0,h.Z)({},l,e)},y=e[n],S=(0,c.Z)((0,c.Z)({},e),v(b));return S[n]=function(){r.touched=!0,r.dirty=!0,r.triggerMetaEvent();for(var e,t=arguments.length,n=Array(t),o=0;o=0&&t<=n.length?(p.keys=[].concat((0,u.Z)(p.keys.slice(0,t)),[p.id],(0,u.Z)(p.keys.slice(t))),o([].concat((0,u.Z)(n.slice(0,t)),[e],(0,u.Z)(n.slice(t))))):(p.keys=[].concat((0,u.Z)(p.keys),[p.id]),o([].concat((0,u.Z)(n),[e]))),p.id+=1},remove:function(e){var t=i(),n=new Set(Array.isArray(e)?e:[e]);n.size<=0||(p.keys=p.keys.filter(function(e,t){return!n.has(t)}),o(t.filter(function(e,t){return!n.has(t)})))},move:function(e,t){if(e!==t){var n=i();e<0||e>=n.length||t<0||t>=n.length||(p.keys=ed(p.keys,e,t),o(ed(n,e,t)))}}},t)})))},ev=n(80406),ey="__@field_split__";function eE(e){return e.map(function(e){return"".concat((0,eo.Z)(e),":").concat(e)}).join(ey)}var eS=function(){function e(){(0,d.Z)(this,e),(0,h.Z)(this,"kvs",new Map)}return(0,p.Z)(e,[{key:"set",value:function(e,t){this.kvs.set(eE(e),t)}},{key:"get",value:function(e){return this.kvs.get(eE(e))}},{key:"update",value:function(e,t){var n=t(this.get(e));n?this.set(e,n):this.delete(e)}},{key:"delete",value:function(e){this.kvs.delete(eE(e))}},{key:"map",value:function(e){return(0,u.Z)(this.kvs.entries()).map(function(t){var n=(0,ev.Z)(t,2),r=n[0],o=n[1];return e({key:r.split(ey).map(function(e){var t=e.match(/^([^:]*):(.*)$/),n=(0,ev.Z)(t,3),r=n[1],o=n[2];return"number"===r?Number(o):o}),value:o})})}},{key:"toJSON",value:function(){var e={};return this.map(function(t){var n=t.key,r=t.value;return e[n.join(".")]=r,null}),e}}]),e}(),ew=["name"],ex=(0,p.Z)(function e(t){var 
n=this;(0,d.Z)(this,e),(0,h.Z)(this,"formHooked",!1),(0,h.Z)(this,"forceRootUpdate",void 0),(0,h.Z)(this,"subscribable",!0),(0,h.Z)(this,"store",{}),(0,h.Z)(this,"fieldEntities",[]),(0,h.Z)(this,"initialValues",{}),(0,h.Z)(this,"callbacks",{}),(0,h.Z)(this,"validateMessages",null),(0,h.Z)(this,"preserve",null),(0,h.Z)(this,"lastValidatePromise",null),(0,h.Z)(this,"getForm",function(){return{getFieldValue:n.getFieldValue,getFieldsValue:n.getFieldsValue,getFieldError:n.getFieldError,getFieldWarning:n.getFieldWarning,getFieldsError:n.getFieldsError,isFieldsTouched:n.isFieldsTouched,isFieldTouched:n.isFieldTouched,isFieldValidating:n.isFieldValidating,isFieldsValidating:n.isFieldsValidating,resetFields:n.resetFields,setFields:n.setFields,setFieldValue:n.setFieldValue,setFieldsValue:n.setFieldsValue,validateFields:n.validateFields,submit:n.submit,_init:!0,getInternalHooks:n.getInternalHooks}}),(0,h.Z)(this,"getInternalHooks",function(e){return e===E?(n.formHooked=!0,{dispatch:n.dispatch,initEntityValue:n.initEntityValue,registerField:n.registerField,useSubscribe:n.useSubscribe,setInitialValues:n.setInitialValues,destroyForm:n.destroyForm,setCallbacks:n.setCallbacks,setValidateMessages:n.setValidateMessages,getFields:n.getFields,setPreserve:n.setPreserve,getInitialValue:n.getInitialValue,registerWatch:n.registerWatch}):((0,y.ZP)(!1,"`getInternalHooks` is internal usage. 
Should not call directly."),null)}),(0,h.Z)(this,"useSubscribe",function(e){n.subscribable=e}),(0,h.Z)(this,"prevWithoutPreserves",null),(0,h.Z)(this,"setInitialValues",function(e,t){if(n.initialValues=e||{},t){var r,o=(0,Q.T)(e,n.store);null===(r=n.prevWithoutPreserves)||void 0===r||r.map(function(t){var n=t.key;o=(0,Q.Z)(o,n,(0,ea.Z)(e,n))}),n.prevWithoutPreserves=null,n.updateStore(o)}}),(0,h.Z)(this,"destroyForm",function(){var e=new eS;n.getFieldEntities(!0).forEach(function(t){n.isMergedPreserve(t.isPreserve())||e.set(t.getNamePath(),!0)}),n.prevWithoutPreserves=e}),(0,h.Z)(this,"getInitialValue",function(e){var t=(0,ea.Z)(n.initialValues,e);return e.length?(0,Q.T)(t):t}),(0,h.Z)(this,"setCallbacks",function(e){n.callbacks=e}),(0,h.Z)(this,"setValidateMessages",function(e){n.validateMessages=e}),(0,h.Z)(this,"setPreserve",function(e){n.preserve=e}),(0,h.Z)(this,"watchList",[]),(0,h.Z)(this,"registerWatch",function(e){return n.watchList.push(e),function(){n.watchList=n.watchList.filter(function(t){return t!==e})}}),(0,h.Z)(this,"notifyWatch",function(){var e=arguments.length>0&&void 0!==arguments[0]?arguments[0]:[];if(n.watchList.length){var t=n.getFieldsValue(),r=n.getFieldsValue(!0);n.watchList.forEach(function(n){n(t,r,e)})}}),(0,h.Z)(this,"timeoutId",null),(0,h.Z)(this,"warningUnhooked",function(){}),(0,h.Z)(this,"updateStore",function(e){n.store=e}),(0,h.Z)(this,"getFieldEntities",function(){var e=arguments.length>0&&void 0!==arguments[0]&&arguments[0];return e?n.fieldEntities.filter(function(e){return e.getNamePath().length}):n.fieldEntities}),(0,h.Z)(this,"getFieldsMap",function(){var e=arguments.length>0&&void 0!==arguments[0]&&arguments[0],t=new eS;return n.getFieldEntities(e).forEach(function(e){var n=e.getNamePath();t.set(n,e)}),t}),(0,h.Z)(this,"getFieldEntitiesForNamePathList",function(e){if(!e)return n.getFieldEntities(!0);var t=n.getFieldsMap(!0);return e.map(function(e){var n=ei(e);return 
t.get(n)||{INVALIDATE_NAME_PATH:ei(e)}})}),(0,h.Z)(this,"getFieldsValue",function(e,t){if(n.warningUnhooked(),!0===e||Array.isArray(e)?(r=e,o=t):e&&"object"===(0,eo.Z)(e)&&(a=e.strict,o=e.filter),!0===r&&!o)return n.store;var r,o,a,i=n.getFieldEntitiesForNamePathList(Array.isArray(r)?r:null),l=[];return i.forEach(function(e){var t,n,i,s="INVALIDATE_NAME_PATH"in e?e.INVALIDATE_NAME_PATH:e.getNamePath();if(a){if(null!==(i=e.isList)&&void 0!==i&&i.call(e))return}else if(!r&&null!==(t=(n=e).isListField)&&void 0!==t&&t.call(n))return;if(o){var c="getMeta"in e?e.getMeta():null;o(c)&&l.push(s)}else l.push(s)}),el(n.store,l.map(ei))}),(0,h.Z)(this,"getFieldValue",function(e){n.warningUnhooked();var t=ei(e);return(0,ea.Z)(n.store,t)}),(0,h.Z)(this,"getFieldsError",function(e){return n.warningUnhooked(),n.getFieldEntitiesForNamePathList(e).map(function(t,n){return!t||"INVALIDATE_NAME_PATH"in t?{name:ei(e[n]),errors:[],warnings:[]}:{name:t.getNamePath(),errors:t.getErrors(),warnings:t.getWarnings()}})}),(0,h.Z)(this,"getFieldError",function(e){n.warningUnhooked();var t=ei(e);return n.getFieldsError([t])[0].errors}),(0,h.Z)(this,"getFieldWarning",function(e){n.warningUnhooked();var t=ei(e);return n.getFieldsError([t])[0].warnings}),(0,h.Z)(this,"isFieldsTouched",function(){n.warningUnhooked();for(var e,t=arguments.length,r=Array(t),o=0;o0&&void 0!==arguments[0]?arguments[0]:{},r=new eS,o=n.getFieldEntities(!0);o.forEach(function(e){var t=e.props.initialValue,n=e.getNamePath();if(void 0!==t){var o=r.get(n)||new Set;o.add({entity:e,value:t}),r.set(n,o)}}),t.entities?e=t.entities:t.namePathList?(e=[],t.namePathList.forEach(function(t){var n,o=r.get(t);o&&(n=e).push.apply(n,(0,u.Z)((0,u.Z)(o).map(function(e){return e.entity})))})):e=o,function(e){e.forEach(function(e){if(void 0!==e.props.initialValue){var o=e.getNamePath();if(void 0!==n.getInitialValue(o))(0,y.ZP)(!1,"Form already set 'initialValues' with path '".concat(o.join("."),"'. 
Field can not overwrite it."));else{var a=r.get(o);if(a&&a.size>1)(0,y.ZP)(!1,"Multiple Field with path '".concat(o.join("."),"' set 'initialValue'. Can not decide which one to pick."));else if(a){var i=n.getFieldValue(o);e.isListField()||t.skipExist&&void 0!==i||n.updateStore((0,Q.Z)(n.store,o,(0,u.Z)(a)[0].value))}}}})}(e)}),(0,h.Z)(this,"resetFields",function(e){n.warningUnhooked();var t=n.store;if(!e){n.updateStore((0,Q.T)(n.initialValues)),n.resetWithFieldInitialValue(),n.notifyObservers(t,null,{type:"reset"}),n.notifyWatch();return}var r=e.map(ei);r.forEach(function(e){var t=n.getInitialValue(e);n.updateStore((0,Q.Z)(n.store,e,t))}),n.resetWithFieldInitialValue({namePathList:r}),n.notifyObservers(t,r,{type:"reset"}),n.notifyWatch(r)}),(0,h.Z)(this,"setFields",function(e){n.warningUnhooked();var t=n.store,r=[];e.forEach(function(e){var o=e.name,a=(0,i.Z)(e,ew),l=ei(o);r.push(l),"value"in a&&n.updateStore((0,Q.Z)(n.store,l,a.value)),n.notifyObservers(t,[l],{type:"setField",data:e})}),n.notifyWatch(r)}),(0,h.Z)(this,"getFields",function(){return n.getFieldEntities(!0).map(function(e){var t=e.getNamePath(),r=e.getMeta(),o=(0,c.Z)((0,c.Z)({},r),{},{name:t,value:n.getFieldValue(t)});return Object.defineProperty(o,"originRCField",{value:!0}),o})}),(0,h.Z)(this,"initEntityValue",function(e){var t=e.props.initialValue;if(void 0!==t){var r=e.getNamePath();void 0===(0,ea.Z)(n.store,r)&&n.updateStore((0,Q.Z)(n.store,r,t))}}),(0,h.Z)(this,"isMergedPreserve",function(e){var t=void 0!==e?e:n.preserve;return null==t||t}),(0,h.Z)(this,"registerField",function(e){n.fieldEntities.push(e);var t=e.getNamePath();if(n.notifyWatch([t]),void 0!==e.props.initialValue){var r=n.store;n.resetWithFieldInitialValue({entities:[e],skipExist:!0}),n.notifyObservers(r,[e.getNamePath()],{type:"valueUpdate",source:"internal"})}return function(r,o){var a=arguments.length>2&&void 0!==arguments[2]?arguments[2]:[];if(n.fieldEntities=n.fieldEntities.filter(function(t){return 
t!==e}),!n.isMergedPreserve(o)&&(!r||a.length>1)){var i=r?void 0:n.getInitialValue(t);if(t.length&&n.getFieldValue(t)!==i&&n.fieldEntities.every(function(e){return!ec(e.getNamePath(),t)})){var l=n.store;n.updateStore((0,Q.Z)(l,t,i,!0)),n.notifyObservers(l,[t],{type:"remove"}),n.triggerDependenciesUpdate(l,t)}}n.notifyWatch([t])}}),(0,h.Z)(this,"dispatch",function(e){switch(e.type){case"updateValue":var t=e.namePath,r=e.value;n.updateValue(t,r);break;case"validateField":var o=e.namePath,a=e.triggerName;n.validateFields([o],{triggerName:a})}}),(0,h.Z)(this,"notifyObservers",function(e,t,r){if(n.subscribable){var o=(0,c.Z)((0,c.Z)({},r),{},{store:n.getFieldsValue(!0)});n.getFieldEntities().forEach(function(n){(0,n.onStoreChange)(e,t,o)})}else n.forceRootUpdate()}),(0,h.Z)(this,"triggerDependenciesUpdate",function(e,t){var r=n.getDependencyChildrenFields(t);return r.length&&n.validateFields(r),n.notifyObservers(e,r,{type:"dependenciesUpdate",relatedFields:[t].concat((0,u.Z)(r))}),r}),(0,h.Z)(this,"updateValue",function(e,t){var r=ei(e),o=n.store;n.updateStore((0,Q.Z)(n.store,r,t)),n.notifyObservers(o,[r],{type:"valueUpdate",source:"internal"}),n.notifyWatch([r]);var a=n.triggerDependenciesUpdate(o,r),i=n.callbacks.onValuesChange;i&&i(el(n.store,[r]),n.getFieldsValue()),n.triggerOnFieldsChange([r].concat((0,u.Z)(a)))}),(0,h.Z)(this,"setFieldsValue",function(e){n.warningUnhooked();var t=n.store;if(e){var r=(0,Q.T)(n.store,e);n.updateStore(r)}n.notifyObservers(t,null,{type:"valueUpdate",source:"external"}),n.notifyWatch()}),(0,h.Z)(this,"setFieldValue",function(e,t){n.setFields([{name:e,value:t}])}),(0,h.Z)(this,"getDependencyChildrenFields",function(e){var t=new Set,r=[],o=new eS;return n.getFieldEntities().forEach(function(e){(e.props.dependencies||[]).forEach(function(t){var n=ei(t);o.update(n,function(){var t=arguments.length>0&&void 0!==arguments[0]?arguments[0]:new Set;return t.add(e),t})})}),function e(n){(o.get(n)||new 
Set).forEach(function(n){if(!t.has(n)){t.add(n);var o=n.getNamePath();n.isFieldDirty()&&o.length&&(r.push(o),e(o))}})}(e),r}),(0,h.Z)(this,"triggerOnFieldsChange",function(e,t){var r=n.callbacks.onFieldsChange;if(r){var o=n.getFields();if(t){var a=new eS;t.forEach(function(e){var t=e.name,n=e.errors;a.set(t,n)}),o.forEach(function(e){e.errors=a.get(e.name)||e.errors})}var i=o.filter(function(t){return es(e,t.name)});i.length&&r(i,o)}}),(0,h.Z)(this,"validateFields",function(e,t){n.warningUnhooked(),Array.isArray(e)||"string"==typeof e||"string"==typeof t?(i=e,l=t):l=e;var r,o,a,i,l,s=!!i,d=s?i.map(ei):[],p=[],f=String(Date.now()),m=new Set,g=l||{},h=g.recursive,b=g.dirty;n.getFieldEntities(!0).forEach(function(e){if(s||d.push(e.getNamePath()),e.props.rules&&e.props.rules.length&&(!b||e.isFieldDirty())){var t=e.getNamePath();if(m.add(t.join(f)),!s||es(d,t,h)){var r=e.validateRules((0,c.Z)({validateMessages:(0,c.Z)((0,c.Z)({},X),n.validateMessages)},l));p.push(r.then(function(){return{name:t,errors:[],warnings:[]}}).catch(function(e){var n,r=[],o=[];return(null===(n=e.forEach)||void 0===n||n.call(e,function(e){var t=e.rule.warningOnly,n=e.errors;t?o.push.apply(o,(0,u.Z)(n)):r.push.apply(r,(0,u.Z)(n))}),r.length)?Promise.reject({name:t,errors:r,warnings:o}):{name:t,errors:r,warnings:o}}))}}});var v=(r=!1,o=p.length,a=[],p.length?new Promise(function(e,t){p.forEach(function(n,i){n.catch(function(e){return r=!0,e}).then(function(n){o-=1,a[i]=n,o>0||(r&&t(a),e(a))})})}):Promise.resolve([]));n.lastValidatePromise=v,v.catch(function(e){return e}).then(function(e){var t=e.map(function(e){return e.name});n.notifyObservers(n.store,t,{type:"validateFinish"}),n.triggerOnFieldsChange(t,e)});var y=v.then(function(){return n.lastValidatePromise===v?Promise.resolve(n.getFieldsValue(d)):Promise.reject([])}).catch(function(e){var t=e.filter(function(e){return e&&e.errors.length});return 
Promise.reject({values:n.getFieldsValue(d),errorFields:t,outOfDate:n.lastValidatePromise!==v})});y.catch(function(e){return e});var E=d.filter(function(e){return m.has(e.join(f))});return n.triggerOnFieldsChange(E),y}),(0,h.Z)(this,"submit",function(){n.warningUnhooked(),n.validateFields().then(function(e){var t=n.callbacks.onFinish;if(t)try{t(e)}catch(e){console.error(e)}}).catch(function(e){var t=n.callbacks.onFinishFailed;t&&t(e)})}),this.forceRootUpdate=t}),eO=function(e){var t=o.useRef(),n=o.useState({}),r=(0,ev.Z)(n,2)[1];if(!t.current){if(e)t.current=e;else{var a=new ex(function(){r({})});t.current=a.getForm()}}return[t.current]},eC=o.createContext({triggerFormChange:function(){},triggerFormFinish:function(){},registerForm:function(){},unregisterForm:function(){}}),eT=function(e){var t=e.validateMessages,n=e.onFormChange,r=e.onFormFinish,a=e.children,i=o.useContext(eC),l=o.useRef({});return o.createElement(eC.Provider,{value:(0,c.Z)((0,c.Z)({},i),{},{validateMessages:(0,c.Z)((0,c.Z)({},i.validateMessages),t),triggerFormChange:function(e,t){n&&n(e,{changedFields:t,forms:l.current}),i.triggerFormChange(e,t)},triggerFormFinish:function(e,t){r&&r(e,{values:t,forms:l.current}),i.triggerFormFinish(e,t)},registerForm:function(e,t){e&&(l.current=(0,c.Z)((0,c.Z)({},l.current),{},(0,h.Z)({},e,t))),i.registerForm(e,t)},unregisterForm:function(e){var t=(0,c.Z)({},l.current);delete t[e],l.current=t,i.unregisterForm(e)}})},a)},ek=["name","initialValues","fields","form","preserve","children","component","validateMessages","validateTrigger","onValuesChange","onFieldsChange","onFinish","onFinishFailed"];function eA(e){try{return JSON.stringify(e)}catch(e){return Math.random()}}var eI=function(){},eR=function(){for(var e=arguments.length,t=Array(e),n=0;n1?t-1:0),o=1;oen;(0,c.useImperativeHandle)(t,function(){return{focus:$,blur:function(){var e;null===(e=G.current)||void 0===e||e.blur()},setSelectionRange:function(e,t,n){var r;null===(r=G.current)||void 
0===r||r.setSelectionRange(e,t,n)},select:function(){var e;null===(e=G.current)||void 0===e||e.select()},input:G.current}}),(0,c.useEffect)(function(){z(function(e){return(!e||!C)&&e})},[C]);var ea=function(e,t,n){var r,o,a=t;if(!H.current&&et.exceedFormatter&&et.max&&et.strategy(t)>et.max)a=et.exceedFormatter(t,{max:et.max}),t!==a&&ee([(null===(r=G.current)||void 0===r?void 0:r.selectionStart)||0,(null===(o=G.current)||void 0===o?void 0:o.selectionEnd)||0]);else if("compositionEnd"===n.source)return;Y(a),G.current&&(0,u.rJ)(G.current,e,l,a)};(0,c.useEffect)(function(){if(J){var e;null===(e=G.current)||void 0===e||e.setSelectionRange.apply(e,(0,p.Z)(J))}},[J]);var ei=eo&&"".concat(O,"-out-of-range");return c.createElement(d,(0,o.Z)({},F,{prefixCls:O,className:s()(k,ei),handleReset:function(e){Y(""),$(),G.current&&(0,u.rJ)(G.current,e,l)},value:K,focused:Z,triggerFocus:$,suffix:function(){var e=Number(en)>0;if(I||et.show){var t=et.showFormatter?et.showFormatter({value:K,count:er,maxLength:en}):"".concat(er).concat(e?" 
/ ".concat(en):"");return c.createElement(c.Fragment,null,et.show&&c.createElement("span",{className:s()("".concat(O,"-show-count-suffix"),(0,a.Z)({},"".concat(O,"-show-count-has-suffix"),!!I),null==M?void 0:M.count),style:(0,r.Z)({},null==L?void 0:L.count)},t),I)}return null}(),disabled:C,classes:P,classNames:M,styles:L}),(n=(0,h.Z)(e,["prefixCls","onPressEnter","addonBefore","addonAfter","prefix","suffix","allowClear","defaultValue","showCount","count","classes","htmlSize","styles","classNames"]),c.createElement("input",(0,o.Z)({autoComplete:i},n,{onChange:function(e){ea(e,e.target.value,{source:"change"})},onFocus:function(e){z(!0),null==y||y(e)},onBlur:function(e){z(!1),null==E||E(e)},onKeyDown:function(e){S&&"Enter"===e.key&&S(e),null==w||w(e)},className:s()(O,(0,a.Z)({},"".concat(O,"-disabled"),C),null==M?void 0:M.input),style:null==L?void 0:L.input,ref:G,size:T,type:void 0===_?"text":_,onCompositionStart:function(e){H.current=!0,null==D||D(e)},onCompositionEnd:function(e){H.current=!1,ea(e,e.currentTarget.value,{source:"compositionEnd"}),null==j||j(e)}}))))})},8002:function(e,t,n){function r(e){return!!(e.addonBefore||e.addonAfter)}function o(e){return!!(e.prefix||e.suffix||e.allowClear)}function a(e,t,n,r){if(n){var o=t;if("click"===t.type){var a=e.cloneNode(!0);o=Object.create(t,{target:{value:a},currentTarget:{value:a}}),a.value="",n(o);return}if("file"!==e.type&&void 0!==r){var i=e.cloneNode(!0);o=Object.create(t,{target:{value:i},currentTarget:{value:i}}),i.value=r,n(o);return}n(o)}}function i(e,t){if(e){e.focus(t);var n=(t||{}).cursor;if(n){var r=e.value.length;switch(n){case"start":e.setSelectionRange(0,0);break;case"end":e.setSelectionRange(r,r);break;default:e.setSelectionRange(0,r)}}}}n.d(t,{He:function(){return r},X3:function(){return o},nH:function(){return i},rJ:function(){return a}})},49367:function(e,t,n){n.d(t,{V4:function(){return eu},zt:function(){return E},ZP:function(){return ed}});var 
r,o,a,i,l,s=n(50833),c=n(5239),u=n(80406),d=n(6976),p=n(16480),f=n.n(p),m=n(97472),g=n(74084),h=n(64090),b=n(6787),v=["children"],y=h.createContext({});function E(e){var t=e.children,n=(0,b.Z)(e,v);return h.createElement(y.Provider,{value:n},t)}var S=n(47365),w=n(65127),x=n(27478),O=n(85430),C=function(e){(0,x.Z)(n,e);var t=(0,O.Z)(n);function n(){return(0,S.Z)(this,n),t.apply(this,arguments)}return(0,w.Z)(n,[{key:"render",value:function(){return this.props.children}}]),n}(h.Component),T=n(89211),k="none",A="appear",I="enter",R="leave",N="none",_="prepare",P="start",M="active",L="prepared",D=n(22127);function j(e,t){var n={};return n[e.toLowerCase()]=t.toLowerCase(),n["Webkit".concat(e)]="webkit".concat(t),n["Moz".concat(e)]="moz".concat(t),n["ms".concat(e)]="MS".concat(t),n["O".concat(e)]="o".concat(t.toLowerCase()),n}var F=(r=(0,D.Z)(),o=window,a={animationend:j("Animation","AnimationEnd"),transitionend:j("Transition","TransitionEnd")},!r||("AnimationEvent"in o||delete a.animationend.animation,"TransitionEvent"in o||delete a.transitionend.transition),a),B={};(0,D.Z)()&&(B=document.createElement("div").style);var U={};function Z(e){if(U[e])return U[e];var t=F[e];if(t)for(var n=Object.keys(t),r=n.length,o=0;o0&&(clearTimeout(eg.current),eg.current=setTimeout(function(){ev({deadline:!0})},O))),eT===L&&eb(),!0},a=(0,T.Z)(N),l=(i=(0,u.Z)(a,2))[0],d=i[1],p=function(){var e=h.useRef(null);function t(){Y.Z.cancel(e.current)}return h.useEffect(function(){return function(){t()}},[]),[function n(r){var o=arguments.length>1&&void 0!==arguments[1]?arguments[1]:2;t();var a=(0,Y.Z)(function(){o<=1?r({isCanceled:function(){return a!==e.current}}):n(r,o-1)});e.current=a},t]}(),m=(f=(0,u.Z)(p,2))[0],g=f[1],b=e?K:X,q(function(){if(l!==N&&"end"!==l){var e=b.indexOf(l),t=b[e+1],n=o(l);!1===n?d(t,!0):t&&m(function(e){function r(){e.isCanceled()||d(t,!0)}!0===n?r():Promise.resolve(n).then(r)})}},[es,l]),h.useEffect(function(){return 
function(){g()}},[]),[function(){d(_,!0)},l]),eO=(0,u.Z)(ex,2),eC=eO[0],eT=eO[1],ek=Q(eT);eh.current=ek,q(function(){ea(t);var n,r=em.current;em.current=!0,!r&&t&&S&&(n=A),r&&t&&y&&(n=I),(r&&!t&&x||!r&&C&&!t&&x)&&(n=R);var o=eS(n);n&&(e||o[_])?(ec(n),eC()):ec(k)},[t]),(0,h.useEffect)(function(){(es!==A||S)&&(es!==I||y)&&(es!==R||x)||ec(k)},[S,y,x]),(0,h.useEffect)(function(){return function(){em.current=!1,clearTimeout(eg.current)}},[]);var eA=h.useRef(!1);(0,h.useEffect)(function(){eo&&(eA.current=!0),void 0!==eo&&es===k&&((eA.current||eo)&&(null==et||et(eo)),eA.current=!0)},[eo,es]);var eI=ep;return ew[_]&&eT===P&&(eI=(0,c.Z)({transition:"none"},eI)),[es,eT,eI,null!=eo?eo:t]}(S,r,function(){try{return w.current instanceof HTMLElement?w.current:(0,m.Z)(x.current)}catch(e){return null}},e),D=(0,u.Z)(O,4),j=D[0],F=D[1],B=D[2],U=D[3],Z=h.useRef(U);U&&(Z.current=!0);var z=h.useCallback(function(e){w.current=e,(0,g.mH)(t,e)},[t]),H=(0,c.Z)((0,c.Z)({},v),{},{visible:r});if(d){if(j===k)G=U?d((0,c.Z)({},H),z):!a&&Z.current&&b?d((0,c.Z)((0,c.Z)({},H),{},{className:b}),z):!l&&(a||b)?null:d((0,c.Z)((0,c.Z)({},H),{},{style:{display:"none"}}),z);else{F===_?ee="prepare":Q(F)?ee="active":F===P&&(ee="start");var G,J,ee,et=V(p,"".concat(j,"-").concat(ee));G=d((0,c.Z)((0,c.Z)({},H),{},{className:f()(V(p,j),(J={},(0,s.Z)(J,et,et&&ee),(0,s.Z)(J,p,"string"==typeof p),J)),style:B}),z)}}else G=null;return h.isValidElement(G)&&(0,g.Yr)(G)&&!G.ref&&(G=h.cloneElement(G,{ref:z})),h.createElement(C,{ref:x},G)})).displayName="CSSMotion",l),ee=n(14749),et=n(34951),en="keep",er="remove",eo="removed";function ea(e){var t;return t=e&&"object"===(0,d.Z)(e)&&"key"in e?e:{key:e},(0,c.Z)((0,c.Z)({},t),{},{key:String(t.key)})}function ei(){var e=arguments.length>0&&void 0!==arguments[0]?arguments[0]:[];return e.map(ea)}var 
el=["component","children","onVisibleChanged","onAllRemoved"],es=["status"],ec=["eventProps","visible","children","motionName","motionAppear","motionEnter","motionLeave","motionLeaveImmediately","motionDeadline","removeOnLeave","leavedClassName","onAppearPrepare","onAppearStart","onAppearActive","onAppearEnd","onEnterStart","onEnterActive","onEnterEnd","onLeaveStart","onLeaveActive","onLeaveEnd"],eu=function(e){var t=arguments.length>1&&void 0!==arguments[1]?arguments[1]:J,n=function(e){(0,x.Z)(r,e);var n=(0,O.Z)(r);function r(){var e;(0,S.Z)(this,r);for(var t=arguments.length,o=Array(t),a=0;a0&&void 0!==arguments[0]?arguments[0]:[],t=arguments.length>1&&void 0!==arguments[1]?arguments[1]:[],n=[],r=0,o=t.length,a=ei(e),i=ei(t);a.forEach(function(e){for(var t=!1,a=r;a1}).forEach(function(e){(n=n.filter(function(t){var n=t.key,r=t.status;return n!==e||r!==er})).forEach(function(t){t.key===e&&(t.status=en)})}),n})(r,ei(n)).filter(function(e){var t=r.find(function(t){var n=t.key;return e.key===n});return!t||t.status!==eo||e.status!==er})}}}]),r}(h.Component);return(0,s.Z)(n,"defaultProps",{component:"div"}),n}(G),ed=J},54739:function(e,t,n){n.d(t,{Z:function(){return I}});var r=n(14749),o=n(5239),a=n(80406),i=n(6787),l=n(64090),s=n(16480),c=n.n(s),u=n(46505),d=n(24800),p=["prefixCls","invalidate","item","renderItem","responsive","responsiveDisabled","registerSize","itemKey","className","style","children","display","order","component"],f=void 0,m=l.forwardRef(function(e,t){var n,a=e.prefixCls,s=e.invalidate,d=e.item,m=e.renderItem,g=e.responsive,h=e.responsiveDisabled,b=e.registerSize,v=e.itemKey,y=e.className,E=e.style,S=e.children,w=e.display,x=e.order,O=e.component,C=(0,i.Z)(e,p),T=g&&!w;l.useEffect(function(){return function(){b(v,null)}},[]);var k=m&&d!==f?m(d):S;s||(n={opacity:T?0:1,height:T?0:f,overflowY:T?"hidden":f,order:g?x:f,pointerEvents:T?"none":f,position:T?"absolute":f});var A={};T&&(A["aria-hidden"]=!0);var I=l.createElement(void 
0===O?"div":O,(0,r.Z)({className:c()(!s&&a,y),style:(0,o.Z)((0,o.Z)({},n),E)},A,C,{ref:t}),k);return g&&(I=l.createElement(u.Z,{onResize:function(e){b(v,e.offsetWidth)},disabled:h},I)),I});m.displayName="Item";var g=n(54811),h=n(89542),b=n(19223);function v(e,t){var n=l.useState(t),r=(0,a.Z)(n,2),o=r[0],i=r[1];return[o,(0,g.Z)(function(t){e(function(){i(t)})})]}var y=l.createContext(null),E=["component"],S=["className"],w=["className"],x=l.forwardRef(function(e,t){var n=l.useContext(y);if(!n){var o=e.component,a=(0,i.Z)(e,E);return l.createElement(void 0===o?"div":o,(0,r.Z)({},a,{ref:t}))}var s=n.className,u=(0,i.Z)(n,S),d=e.className,p=(0,i.Z)(e,w);return l.createElement(y.Provider,{value:null},l.createElement(m,(0,r.Z)({ref:t,className:c()(s,d)},u,p)))});x.displayName="RawItem";var O=["prefixCls","data","renderItem","renderRawItem","itemKey","itemWidth","ssr","style","className","maxCount","renderRest","renderRawRest","suffix","component","itemComponent","onVisibleChange"],C="responsive",T="invalidate";function k(e){return"+ ".concat(e.length," ...")}var A=l.forwardRef(function(e,t){var n,s,p=e.prefixCls,f=void 0===p?"rc-overflow":p,g=e.data,E=void 0===g?[]:g,S=e.renderItem,w=e.renderRawItem,x=e.itemKey,A=e.itemWidth,I=void 0===A?10:A,R=e.ssr,N=e.style,_=e.className,P=e.maxCount,M=e.renderRest,L=e.renderRawRest,D=e.suffix,j=e.component,F=e.itemComponent,B=e.onVisibleChange,U=(0,i.Z)(e,O),Z="full"===R,z=(n=l.useRef(null),function(e){n.current||(n.current=[],function(e){if("undefined"==typeof MessageChannel)(0,b.Z)(e);else{var t=new MessageChannel;t.port1.onmessage=function(){return e()},t.port2.postMessage(void 0)}}(function(){(0,h.unstable_batchedUpdates)(function(){n.current.forEach(function(e){e()}),n.current=null})})),n.current.push(e)}),H=v(z,null),G=(0,a.Z)(H,2),$=G[0],W=G[1],V=$||0,q=v(z,new 
Map),Y=(0,a.Z)(q,2),K=Y[0],X=Y[1],Q=v(z,0),J=(0,a.Z)(Q,2),ee=J[0],et=J[1],en=v(z,0),er=(0,a.Z)(en,2),eo=er[0],ea=er[1],ei=v(z,0),el=(0,a.Z)(ei,2),es=el[0],ec=el[1],eu=(0,l.useState)(null),ed=(0,a.Z)(eu,2),ep=ed[0],ef=ed[1],em=(0,l.useState)(null),eg=(0,a.Z)(em,2),eh=eg[0],eb=eg[1],ev=l.useMemo(function(){return null===eh&&Z?Number.MAX_SAFE_INTEGER:eh||0},[eh,$]),ey=(0,l.useState)(!1),eE=(0,a.Z)(ey,2),eS=eE[0],ew=eE[1],ex="".concat(f,"-item"),eO=Math.max(ee,eo),eC=P===C,eT=E.length&&eC,ek=P===T,eA=eT||"number"==typeof P&&E.length>P,eI=(0,l.useMemo)(function(){var e=E;return eT?e=null===$&&Z?E:E.slice(0,Math.min(E.length,V/I)):"number"==typeof P&&(e=E.slice(0,P)),e},[E,I,$,P,eT]),eR=(0,l.useMemo)(function(){return eT?E.slice(ev+1):E.slice(eI.length)},[E,eI,eT,ev]),eN=(0,l.useCallback)(function(e,t){var n;return"function"==typeof x?x(e):null!==(n=x&&(null==e?void 0:e[x]))&&void 0!==n?n:t},[x]),e_=(0,l.useCallback)(S||function(e){return e},[S]);function eP(e,t,n){(eh!==e||void 0!==t&&t!==ep)&&(eb(e),n||(ew(eV){eP(r-1,e-o-es+eo);break}}D&&eL(0)+es>V&&ef(null)}},[V,K,eo,es,eN,eI]);var eD=eS&&!!eR.length,ej={};null!==ep&&eT&&(ej={position:"absolute",left:ep,top:0});var eF={prefixCls:ex,responsive:eT,component:F,invalidate:ek},eB=w?function(e,t){var n=eN(e,t);return l.createElement(y.Provider,{key:n,value:(0,o.Z)((0,o.Z)({},eF),{},{order:t,item:e,itemKey:n,registerSize:eM,display:t<=ev})},w(e,t))}:function(e,t){var n=eN(e,t);return l.createElement(m,(0,r.Z)({},eF,{order:t,key:n,item:e,renderItem:e_,itemKey:n,registerSize:eM,display:t<=ev}))},eU={order:eD?ev:Number.MAX_SAFE_INTEGER,className:"".concat(ex,"-rest"),registerSize:function(e,t){ea(t),et(eo)},display:eD};if(L)L&&(s=l.createElement(y.Provider,{value:(0,o.Z)((0,o.Z)({},eF),eU)},L(eR)));else{var eZ=M||k;s=l.createElement(m,(0,r.Z)({},eF,eU),"function"==typeof eZ?eZ(eR):eZ)}var ez=l.createElement(void 
0===j?"div":j,(0,r.Z)({className:c()(!ek&&f,_),style:N,ref:t},U),eI.map(eB),eA?s:null,D&&l.createElement(m,(0,r.Z)({},eF,{responsive:eC,responsiveDisabled:!eT,order:ev,className:"".concat(ex,"-suffix"),registerSize:function(e,t){ec(t)},display:!0,style:ej}),D));return eC&&(ez=l.createElement(u.Z,{onResize:function(e,t){W(t.clientWidth)},disabled:!eT},ez)),ez});A.displayName="Overflow",A.Item=x,A.RESPONSIVE=C,A.INVALIDATE=T;var I=A},46505:function(e,t,n){n.d(t,{Z:function(){return U}});var r=n(14749),o=n(64090),a=n(33054);n(53850);var i=n(5239),l=n(6976),s=n(97472),c=n(74084),u=o.createContext(null),d=function(){if("undefined"!=typeof Map)return Map;function e(e,t){var n=-1;return e.some(function(e,r){return e[0]===t&&(n=r,!0)}),n}return function(){function t(){this.__entries__=[]}return Object.defineProperty(t.prototype,"size",{get:function(){return this.__entries__.length},enumerable:!0,configurable:!0}),t.prototype.get=function(t){var n=e(this.__entries__,t),r=this.__entries__[n];return r&&r[1]},t.prototype.set=function(t,n){var r=e(this.__entries__,t);~r?this.__entries__[r][1]=n:this.__entries__.push([t,n])},t.prototype.delete=function(t){var n=this.__entries__,r=e(n,t);~r&&n.splice(r,1)},t.prototype.has=function(t){return!!~e(this.__entries__,t)},t.prototype.clear=function(){this.__entries__.splice(0)},t.prototype.forEach=function(e,t){void 0===t&&(t=null);for(var n=0,r=this.__entries__;n0},e.prototype.connect_=function(){p&&!this.connected_&&(document.addEventListener("transitionend",this.onTransitionEnd_),window.addEventListener("resize",this.refresh),h?(this.mutationsObserver_=new 
MutationObserver(this.refresh),this.mutationsObserver_.observe(document,{attributes:!0,childList:!0,characterData:!0,subtree:!0})):(document.addEventListener("DOMSubtreeModified",this.refresh),this.mutationEventsAdded_=!0),this.connected_=!0)},e.prototype.disconnect_=function(){p&&this.connected_&&(document.removeEventListener("transitionend",this.onTransitionEnd_),window.removeEventListener("resize",this.refresh),this.mutationsObserver_&&this.mutationsObserver_.disconnect(),this.mutationEventsAdded_&&document.removeEventListener("DOMSubtreeModified",this.refresh),this.mutationsObserver_=null,this.mutationEventsAdded_=!1,this.connected_=!1)},e.prototype.onTransitionEnd_=function(e){var t=e.propertyName,n=void 0===t?"":t;g.some(function(e){return!!~n.indexOf(e)})&&this.refresh()},e.getInstance=function(){return this.instance_||(this.instance_=new e),this.instance_},e.instance_=null,e}(),v=function(e,t){for(var n=0,r=Object.keys(t);n0},e}(),A="undefined"!=typeof WeakMap?new WeakMap:new d,I=function e(t){if(!(this instanceof e))throw TypeError("Cannot call a class as a function.");if(!arguments.length)throw TypeError("1 argument required, but only 0 present.");var n=new k(t,b.getInstance(),this);A.set(this,n)};["observe","unobserve","disconnect"].forEach(function(e){I.prototype[e]=function(){var t;return(t=A.get(this))[e].apply(t,arguments)}});var R=void 0!==f.ResizeObserver?f.ResizeObserver:I,N=new Map,_=new R(function(e){e.forEach(function(e){var t,n=e.target;null===(t=N.get(n))||void 0===t||t.forEach(function(e){return e(n)})})}),P=n(47365),M=n(65127),L=n(27478),D=n(85430),j=function(e){(0,L.Z)(n,e);var t=(0,D.Z)(n);function n(){return(0,P.Z)(this,n),t.apply(this,arguments)}return(0,M.Z)(n,[{key:"render",value:function(){return this.props.children}}]),n}(o.Component),F=o.forwardRef(function(e,t){var n=e.children,r=e.disabled,a=o.useRef(null),d=o.useRef(null),p=o.useContext(u),f="function"==typeof 
n,m=f?n(a):n,g=o.useRef({width:-1,height:-1,offsetWidth:-1,offsetHeight:-1}),h=!f&&o.isValidElement(m)&&(0,c.Yr)(m),b=h?m.ref:null,v=(0,c.x1)(b,a),y=function(){var e;return(0,s.Z)(a.current)||(a.current&&"object"===(0,l.Z)(a.current)?(0,s.Z)(null===(e=a.current)||void 0===e?void 0:e.nativeElement):null)||(0,s.Z)(d.current)};o.useImperativeHandle(t,function(){return y()});var E=o.useRef(e);E.current=e;var S=o.useCallback(function(e){var t=E.current,n=t.onResize,r=t.data,o=e.getBoundingClientRect(),a=o.width,l=o.height,s=e.offsetWidth,c=e.offsetHeight,u=Math.floor(a),d=Math.floor(l);if(g.current.width!==u||g.current.height!==d||g.current.offsetWidth!==s||g.current.offsetHeight!==c){var f={width:u,height:d,offsetWidth:s,offsetHeight:c};g.current=f;var m=(0,i.Z)((0,i.Z)({},f),{},{offsetWidth:s===Math.round(a)?a:s,offsetHeight:c===Math.round(l)?l:c});null==p||p(m,e,r),n&&Promise.resolve().then(function(){n(m,e)})}},[]);return o.useEffect(function(){var e=y();return e&&!r&&(N.has(e)||(N.set(e,new Set),_.observe(e)),N.get(e).add(S)),function(){N.has(e)&&(N.get(e).delete(S),N.get(e).size||(_.unobserve(e),N.delete(e)))}},[a.current,r]),o.createElement(j,{ref:d},h?o.cloneElement(m,{ref:v}):m)}),B=o.forwardRef(function(e,t){var n=e.children;return("function"==typeof n?[n]:(0,a.Z)(n)).map(function(n,a){var i=(null==n?void 0:n.key)||"".concat("rc-observer-key","-").concat(a);return o.createElement(F,(0,r.Z)({},e,{key:i,ref:0===a?t:void 0}),n)})});B.Collection=function(e){var t=e.children,n=e.onBatchResize,r=o.useRef(0),a=o.useRef([]),i=o.useContext(u),l=o.useCallback(function(e,t,o){r.current+=1;var l=r.current;a.current.push({size:e,element:t,data:o}),Promise.resolve().then(function(){l===r.current&&(null==n||n(a.current),a.current=[])}),null==i||i(e,t,o)},[n,i]);return o.createElement(u.Provider,{value:l},t)};var U=B},33054:function(e,t,n){n.d(t,{Z:function(){return function e(t){var n=arguments.length>1&&void 0!==arguments[1]?arguments[1]:{},a=[];return 
r.Children.forEach(t,function(t){(null!=t||n.keepEmpty)&&(Array.isArray(t)?a=a.concat(e(t)):(0,o.isFragment)(t)&&t.props?a=a.concat(e(t.props.children,n)):a.push(t))}),a}}});var r=n(64090),o=n(24185)},22127:function(e,t,n){n.d(t,{Z:function(){return r}});function r(){return!!window.document&&!!window.document.createElement}},31506:function(e,t,n){n.d(t,{Z:function(){return r}});function r(e,t){if(!e)return!1;if(e.contains)return e.contains(t);for(var n=t;n;){if(n===e)return!0;n=n.parentNode}return!1}},24050:function(e,t,n){n.d(t,{hq:function(){return m},jL:function(){return f}});var r=n(22127),o=n(31506),a="data-rc-order",i="data-rc-priority",l=new Map;function s(){var e=arguments.length>0&&void 0!==arguments[0]?arguments[0]:{},t=e.mark;return t?t.startsWith("data-")?t:"data-".concat(t):"rc-util-key"}function c(e){return e.attachTo?e.attachTo:document.querySelector("head")||document.body}function u(e){return Array.from((l.get(e)||e).children).filter(function(e){return"STYLE"===e.tagName})}function d(e){var t=arguments.length>1&&void 0!==arguments[1]?arguments[1]:{};if(!(0,r.Z)())return null;var n=t.csp,o=t.prepend,l=t.priority,s=void 0===l?0:l,d="queue"===o?"prependQueue":o?"prepend":"append",p="prependQueue"===d,f=document.createElement("style");f.setAttribute(a,d),p&&s&&f.setAttribute(i,"".concat(s)),null!=n&&n.nonce&&(f.nonce=null==n?void 0:n.nonce),f.innerHTML=e;var m=c(t),g=m.firstChild;if(o){if(p){var h=u(m).filter(function(e){return!!["prepend","prependQueue"].includes(e.getAttribute(a))&&s>=Number(e.getAttribute(i)||0)});if(h.length)return m.insertBefore(f,h[h.length-1].nextSibling),f}m.insertBefore(f,g)}else m.appendChild(f);return f}function p(e){var t=arguments.length>1&&void 0!==arguments[1]?arguments[1]:{};return u(c(t)).find(function(n){return n.getAttribute(s(t))===e})}function f(e){var t=arguments.length>1&&void 0!==arguments[1]?arguments[1]:{},n=p(e,t);n&&c(t).removeChild(n)}function m(e,t){var n,r,a,i=arguments.length>2&&void 
0!==arguments[2]?arguments[2]:{};!function(e,t){var n=l.get(e);if(!n||!(0,o.Z)(document,n)){var r=d("",t),a=r.parentNode;l.set(e,a),e.removeChild(r)}}(c(i),i);var u=p(t,i);if(u)return null!==(n=i.csp)&&void 0!==n&&n.nonce&&u.nonce!==(null===(r=i.csp)||void 0===r?void 0:r.nonce)&&(u.nonce=null===(a=i.csp)||void 0===a?void 0:a.nonce),u.innerHTML!==e&&(u.innerHTML=e),u;var f=d(e,i);return f.setAttribute(s(i),t),f}},97472:function(e,t,n){n.d(t,{S:function(){return a},Z:function(){return i}});var r=n(64090),o=n(89542);function a(e){return e instanceof HTMLElement||e instanceof SVGElement}function i(e){return a(e)?e:e instanceof r.Component?o.findDOMNode(e):null}},73193:function(e,t,n){n.d(t,{Z:function(){return r}});function r(e){if(!e)return!1;if(e instanceof Element){if(e.offsetParent)return!0;if(e.getBBox){var t=e.getBBox(),n=t.width,r=t.height;if(n||r)return!0}if(e.getBoundingClientRect){var o=e.getBoundingClientRect(),a=o.width,i=o.height;if(a||i)return!0}}return!1}},74687:function(e,t,n){function r(e){var t;return null==e||null===(t=e.getRootNode)||void 0===t?void 0:t.call(e)}function o(e){return r(e) instanceof ShadowRoot?r(e):null}n.d(t,{A:function(){return o}})},4295:function(e,t){var 
n={MAC_ENTER:3,BACKSPACE:8,TAB:9,NUM_CENTER:12,ENTER:13,SHIFT:16,CTRL:17,ALT:18,PAUSE:19,CAPS_LOCK:20,ESC:27,SPACE:32,PAGE_UP:33,PAGE_DOWN:34,END:35,HOME:36,LEFT:37,UP:38,RIGHT:39,DOWN:40,PRINT_SCREEN:44,INSERT:45,DELETE:46,ZERO:48,ONE:49,TWO:50,THREE:51,FOUR:52,FIVE:53,SIX:54,SEVEN:55,EIGHT:56,NINE:57,QUESTION_MARK:63,A:65,B:66,C:67,D:68,E:69,F:70,G:71,H:72,I:73,J:74,K:75,L:76,M:77,N:78,O:79,P:80,Q:81,R:82,S:83,T:84,U:85,V:86,W:87,X:88,Y:89,Z:90,META:91,WIN_KEY_RIGHT:92,CONTEXT_MENU:93,NUM_ZERO:96,NUM_ONE:97,NUM_TWO:98,NUM_THREE:99,NUM_FOUR:100,NUM_FIVE:101,NUM_SIX:102,NUM_SEVEN:103,NUM_EIGHT:104,NUM_NINE:105,NUM_MULTIPLY:106,NUM_PLUS:107,NUM_MINUS:109,NUM_PERIOD:110,NUM_DIVISION:111,F1:112,F2:113,F3:114,F4:115,F5:116,F6:117,F7:118,F8:119,F9:120,F10:121,F11:122,F12:123,NUMLOCK:144,SEMICOLON:186,DASH:189,EQUALS:187,COMMA:188,PERIOD:190,SLASH:191,APOSTROPHE:192,SINGLE_QUOTE:222,OPEN_SQUARE_BRACKET:219,BACKSLASH:220,CLOSE_SQUARE_BRACKET:221,WIN_KEY:224,MAC_FF_META:224,WIN_IME:229,isTextModifyingKeyEvent:function(e){var t=e.keyCode;if(e.altKey&&!e.ctrlKey||e.metaKey||t>=n.F1&&t<=n.F12)return!1;switch(t){case n.ALT:case n.CAPS_LOCK:case n.CONTEXT_MENU:case n.CTRL:case n.DOWN:case n.END:case n.ESC:case n.HOME:case n.INSERT:case n.LEFT:case n.MAC_FF_META:case n.META:case n.NUMLOCK:case n.NUM_CENTER:case n.PAGE_DOWN:case n.PAGE_UP:case n.PAUSE:case n.PRINT_SCREEN:case n.RIGHT:case n.SHIFT:case n.UP:case n.WIN_KEY:case n.WIN_KEY_RIGHT:return!1;default:return!0}},isCharacterKey:function(e){if(e>=n.ZERO&&e<=n.NINE||e>=n.NUM_ZERO&&e<=n.NUM_MULTIPLY||e>=n.A&&e<=n.Z||-1!==window.navigator.userAgent.indexOf("WebKit")&&0===e)return!0;switch(e){case n.SPACE:case n.QUESTION_MARK:case n.NUM_PLUS:case n.NUM_MINUS:case n.NUM_PERIOD:case n.NUM_DIVISION:case n.SEMICOLON:case n.DASH:case n.EQUALS:case n.COMMA:case n.PERIOD:case n.SLASH:case n.APOSTROPHE:case n.SINGLE_QUOTE:case n.OPEN_SQUARE_BRACKET:case n.BACKSLASH:case 
n.CLOSE_SQUARE_BRACKET:return!0;default:return!1}}};t.Z=n},37274:function(e,t,n){n.d(t,{s:function(){return h},v:function(){return v}});var r,o,a=n(86926),i=n(74902),l=n(6976),s=n(5239),c=n(89542),u=(0,s.Z)({},r||(r=n.t(c,2))),d=u.version,p=u.render,f=u.unmountComponentAtNode;try{Number((d||"").split(".")[0])>=18&&(o=u.createRoot)}catch(e){}function m(e){var t=u.__SECRET_INTERNALS_DO_NOT_USE_OR_YOU_WILL_BE_FIRED;t&&"object"===(0,l.Z)(t)&&(t.usingClientEntryPoint=e)}var g="__rc_react_root__";function h(e,t){if(o){var n;m(!0),n=t[g]||o(t),m(!1),n.render(e),t[g]=n;return}p(e,t)}function b(){return(b=(0,i.Z)((0,a.Z)().mark(function e(t){return(0,a.Z)().wrap(function(e){for(;;)switch(e.prev=e.next){case 0:return e.abrupt("return",Promise.resolve().then(function(){var e;null===(e=t[g])||void 0===e||e.unmount(),delete t[g]}));case 1:case"end":return e.stop()}},e)}))).apply(this,arguments)}function v(e){return y.apply(this,arguments)}function y(){return(y=(0,i.Z)((0,a.Z)().mark(function e(t){return(0,a.Z)().wrap(function(e){for(;;)switch(e.prev=e.next){case 0:if(!(void 0!==o)){e.next=2;break}return e.abrupt("return",function(e){return b.apply(this,arguments)}(t));case 2:f(t);case 3:case"end":return e.stop()}},e)}))).apply(this,arguments)}},54811:function(e,t,n){n.d(t,{Z:function(){return o}});var r=n(64090);function o(e){var t=r.useRef();return t.current=e,r.useCallback(function(){for(var e,n=arguments.length,r=Array(n),o=0;o2&&void 0!==arguments[2]&&arguments[2],a=new Set;return function e(t,i){var l=arguments.length>2&&void 0!==arguments[2]?arguments[2]:1,s=a.has(t);if((0,o.ZP)(!s,"Warning: There may be circular references"),s)return!1;if(t===i)return!0;if(n&&l>1)return!1;a.add(t);var c=l+1;if(Array.isArray(t)){if(!Array.isArray(i)||t.length!==i.length)return!1;for(var u=0;u