diff --git a/.circleci/config.yml b/.circleci/config.yml index e7e36f1c93..a4f7fa2d6c 100644 --- a/.circleci/config.yml +++ b/.circleci/config.yml @@ -72,6 +72,7 @@ jobs: pip install "jsonschema==4.22.0" pip install "pytest-xdist==3.6.1" pip install "websockets==10.4" + pip uninstall posthog -y - save_cache: paths: - ./venv @@ -1517,6 +1518,117 @@ jobs: - store_test_results: path: test-results + proxy_multi_instance_tests: + machine: + image: ubuntu-2204:2023.10.1 + resource_class: xlarge + working_directory: ~/project + steps: + - checkout + - run: + name: Install Docker CLI (In case it's not already installed) + command: | + sudo apt-get update + sudo apt-get install -y docker-ce docker-ce-cli containerd.io + - run: + name: Install Python 3.9 + command: | + curl https://repo.anaconda.com/miniconda/Miniconda3-latest-Linux-x86_64.sh --output miniconda.sh + bash miniconda.sh -b -p $HOME/miniconda + export PATH="$HOME/miniconda/bin:$PATH" + conda init bash + source ~/.bashrc + conda create -n myenv python=3.9 -y + conda activate myenv + python --version + - run: + name: Install Dependencies + command: | + pip install "pytest==7.3.1" + pip install "pytest-asyncio==0.21.1" + pip install aiohttp + python -m pip install --upgrade pip + python -m pip install -r requirements.txt + pip install "pytest==7.3.1" + pip install "pytest-retry==1.6.3" + pip install "pytest-mock==3.12.0" + pip install "pytest-asyncio==0.21.1" + - run: + name: Build Docker image + command: docker build -t my-app:latest -f ./docker/Dockerfile.database . + - run: + name: Run Docker container 1 + # intentionally give bad redis credentials here + # the OTEL test - should get this as a trace + command: | + docker run -d \ + -p 4000:4000 \ + -e DATABASE_URL=$PROXY_DATABASE_URL \ + -e REDIS_HOST=$REDIS_HOST \ + -e REDIS_PASSWORD=$REDIS_PASSWORD \ + -e REDIS_PORT=$REDIS_PORT \ + -e LITELLM_MASTER_KEY="sk-1234" \ + -e LITELLM_LICENSE=$LITELLM_LICENSE \ + -e USE_DDTRACE=True \ + -e DD_API_KEY=$DD_API_KEY \ + -e DD_SITE=$DD_SITE \ + --name my-app \ + -v $(pwd)/litellm/proxy/example_config_yaml/multi_instance_simple_config.yaml:/app/config.yaml \ + my-app:latest \ + --config /app/config.yaml \ + --port 4000 \ + --detailed_debug \ + - run: + name: Run Docker container 2 + command: | + docker run -d \ + -p 4001:4001 \ + -e DATABASE_URL=$PROXY_DATABASE_URL \ + -e REDIS_HOST=$REDIS_HOST \ + -e REDIS_PASSWORD=$REDIS_PASSWORD \ + -e REDIS_PORT=$REDIS_PORT \ + -e LITELLM_MASTER_KEY="sk-1234" \ + -e LITELLM_LICENSE=$LITELLM_LICENSE \ + -e USE_DDTRACE=True \ + -e DD_API_KEY=$DD_API_KEY \ + -e DD_SITE=$DD_SITE \ + --name my-app-2 \ + -v $(pwd)/litellm/proxy/example_config_yaml/multi_instance_simple_config.yaml:/app/config.yaml \ + my-app:latest \ + --config /app/config.yaml \ + --port 4001 \ + --detailed_debug + - run: + name: Install curl and dockerize + command: | + sudo apt-get update + sudo apt-get install -y curl + sudo wget https://github.com/jwilder/dockerize/releases/download/v0.6.1/dockerize-linux-amd64-v0.6.1.tar.gz + sudo tar -C /usr/local/bin -xzvf dockerize-linux-amd64-v0.6.1.tar.gz + sudo rm dockerize-linux-amd64-v0.6.1.tar.gz + - run: + name: Start outputting logs + command: docker logs -f my-app + background: true + - run: + name: Wait for instance 1 to be ready + command: dockerize -wait http://localhost:4000 -timeout 5m + - run: + name: Wait for instance 2 to be ready + command: dockerize -wait http://localhost:4001 -timeout 5m + - run: + name: Run tests + command: | + pwd + ls + python -m pytest -vv 
tests/multi_instance_e2e_tests -x --junitxml=test-results/junit.xml --durations=5 + no_output_timeout: + 120m + # Clean up first container + # Store test results + - store_test_results: + path: test-results + proxy_store_model_in_db_tests: machine: image: ubuntu-2204:2023.10.1 @@ -1552,6 +1664,7 @@ jobs: pip install "pytest-retry==1.6.3" pip install "pytest-mock==3.12.0" pip install "pytest-asyncio==0.21.1" + pip install "assemblyai==0.37.0" - run: name: Build Docker image command: docker build -t my-app:latest -f ./docker/Dockerfile.database . @@ -2171,6 +2284,12 @@ workflows: only: - main - /litellm_.*/ + - proxy_multi_instance_tests: + filters: + branches: + only: + - main + - /litellm_.*/ - proxy_store_model_in_db_tests: filters: branches: @@ -2302,6 +2421,7 @@ workflows: - installing_litellm_on_python - installing_litellm_on_python_3_13 - proxy_logging_guardrails_model_info_tests + - proxy_multi_instance_tests - proxy_store_model_in_db_tests - proxy_build_from_pip_tests - proxy_pass_through_endpoint_tests diff --git a/.env.example b/.env.example index c87c2ef8fd..82b09ca25e 100644 --- a/.env.example +++ b/.env.example @@ -20,3 +20,8 @@ REPLICATE_API_TOKEN = "" ANTHROPIC_API_KEY = "" # Infisical INFISICAL_TOKEN = "" + +# Development Configs +LITELLM_MASTER_KEY = "sk-1234" +DATABASE_URL = "postgresql://llmproxy:dbpassword9090@db:5432/litellm" +STORE_MODEL_IN_DB = "True" \ No newline at end of file diff --git a/.github/workflows/interpret_load_test.py b/.github/workflows/interpret_load_test.py index b1a28e069b..194d16b282 100644 --- a/.github/workflows/interpret_load_test.py +++ b/.github/workflows/interpret_load_test.py @@ -52,6 +52,39 @@ def interpret_results(csv_file): return markdown_table +def _get_docker_run_command_stable_release(release_version): + return f""" + \n\n + ## Docker Run LiteLLM Proxy + + ``` + docker run \\ + -e STORE_MODEL_IN_DB=True \\ + -p 4000:4000 \\ + ghcr.io/berriai/litellm_stable_release_branch-{release_version} + """ + + +def _get_docker_run_command(release_version): + return f""" + \n\n + ## Docker Run LiteLLM Proxy + + ``` + docker run \\ + -e STORE_MODEL_IN_DB=True \\ + -p 4000:4000 \\ + ghcr.io/berriai/litellm:main-{release_version} + """ + + +def get_docker_run_command(release_version): + if "stable" in release_version: + return _get_docker_run_command_stable_release(release_version) + else: + return _get_docker_run_command(release_version) + + if __name__ == "__main__": csv_file = "load_test_stats.csv" # Change this to the path of your CSV file markdown_table = interpret_results(csv_file) @@ -79,17 +112,7 @@ if __name__ == "__main__": start_index = latest_release.body.find("Load Test LiteLLM Proxy Results") existing_release_body = latest_release.body[:start_index] - docker_run_command = f""" -\n\n -## Docker Run LiteLLM Proxy - -``` -docker run \\ --e STORE_MODEL_IN_DB=True \\ --p 4000:4000 \\ -ghcr.io/berriai/litellm:main-{release_version} -``` - """ + docker_run_command = get_docker_run_command(release_version) print("docker run command: ", docker_run_command) new_release_body = ( diff --git a/README.md b/README.md index 12b8bfa9dd..7260e50bc8 100644 --- a/README.md +++ b/README.md @@ -451,3 +451,20 @@ If you have suggestions on how to improve the code quality feel free to open an + + +## Run in Developer mode +### Services +1. Setup .env file in root +2. Run dependant services `docker-compose up db prometheus` + +### Backend +1. (In root) create virtual environment `python -m venv .venv` +2. 
Activate virtual environment `source .venv/bin/activate` +3. Install dependencies `pip install -e ".[all]"` +4. Start proxy backend `uvicorn litellm.proxy.proxy_server:app --host localhost --port 4000 --reload` + +### Frontend +1. Navigate to `ui/litellm-dashboard` +2. Install dependencies `npm install` +3. Run `npm run dev` to start the dashboard \ No newline at end of file diff --git a/cookbook/logging_observability/LiteLLM_Arize.ipynb b/cookbook/logging_observability/LiteLLM_Arize.ipynb new file mode 100644 index 0000000000..72a082f874 --- /dev/null +++ b/cookbook/logging_observability/LiteLLM_Arize.ipynb @@ -0,0 +1,172 @@ +{ + "cells": [ + { + "cell_type": "markdown", + "metadata": { + "id": "4FbDOmcj2VkM" + }, + "source": [ + "## Use LiteLLM with Arize\n", + "https://docs.litellm.ai/docs/observability/arize_integration\n", + "\n", + "This method uses the litellm proxy to send the data to Arize. The callback is set in the litellm config below, instead of using OpenInference tracing." + ] + }, + { + "cell_type": "markdown", + "metadata": { + "id": "21W8Woog26Ns" + }, + "source": [ + "## Install Dependencies" + ] + }, + { + "cell_type": "code", + "execution_count": 1, + "metadata": { + "id": "xrjKLBxhxu2L" + }, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "Requirement already satisfied: litellm in /Users/ericxiao/Documents/arize/.venv/lib/python3.11/site-packages (1.54.1)\n", + "Requirement already satisfied: aiohttp in /Users/ericxiao/Documents/arize/.venv/lib/python3.11/site-packages (from litellm) (3.11.10)\n", + "Requirement already satisfied: click in /Users/ericxiao/Documents/arize/.venv/lib/python3.11/site-packages (from litellm) (8.1.7)\n", + "Requirement already satisfied: httpx<0.28.0,>=0.23.0 in /Users/ericxiao/Documents/arize/.venv/lib/python3.11/site-packages (from litellm) (0.27.2)\n", + "Requirement already satisfied: importlib-metadata>=6.8.0 in /Users/ericxiao/Documents/arize/.venv/lib/python3.11/site-packages (from litellm) (8.5.0)\n", + "Requirement already satisfied: jinja2<4.0.0,>=3.1.2 in /Users/ericxiao/Documents/arize/.venv/lib/python3.11/site-packages (from litellm) (3.1.4)\n", + "Requirement already satisfied: jsonschema<5.0.0,>=4.22.0 in /Users/ericxiao/Documents/arize/.venv/lib/python3.11/site-packages (from litellm) (4.23.0)\n", + "Requirement already satisfied: openai>=1.55.3 in /Users/ericxiao/Documents/arize/.venv/lib/python3.11/site-packages (from litellm) (1.57.1)\n", + "Requirement already satisfied: pydantic<3.0.0,>=2.0.0 in /Users/ericxiao/Documents/arize/.venv/lib/python3.11/site-packages (from litellm) (2.10.3)\n", + "Requirement already satisfied: python-dotenv>=0.2.0 in /Users/ericxiao/Documents/arize/.venv/lib/python3.11/site-packages (from litellm) (1.0.1)\n", + "Requirement already satisfied: requests<3.0.0,>=2.31.0 in /Users/ericxiao/Documents/arize/.venv/lib/python3.11/site-packages (from litellm) (2.32.3)\n", + "Requirement already satisfied: tiktoken>=0.7.0 in /Users/ericxiao/Documents/arize/.venv/lib/python3.11/site-packages (from litellm) (0.7.0)\n", + "Requirement already satisfied: tokenizers in /Users/ericxiao/Documents/arize/.venv/lib/python3.11/site-packages (from litellm) (0.21.0)\n", + "Requirement already satisfied: anyio in /Users/ericxiao/Documents/arize/.venv/lib/python3.11/site-packages (from httpx<0.28.0,>=0.23.0->litellm) (4.7.0)\n", + "Requirement already satisfied: certifi in /Users/ericxiao/Documents/arize/.venv/lib/python3.11/site-packages (from httpx<0.28.0,>=0.23.0->litellm) 
(2024.8.30)\n", + "Requirement already satisfied: httpcore==1.* in /Users/ericxiao/Documents/arize/.venv/lib/python3.11/site-packages (from httpx<0.28.0,>=0.23.0->litellm) (1.0.7)\n", + "Requirement already satisfied: idna in /Users/ericxiao/Documents/arize/.venv/lib/python3.11/site-packages (from httpx<0.28.0,>=0.23.0->litellm) (3.10)\n", + "Requirement already satisfied: sniffio in /Users/ericxiao/Documents/arize/.venv/lib/python3.11/site-packages (from httpx<0.28.0,>=0.23.0->litellm) (1.3.1)\n", + "Requirement already satisfied: h11<0.15,>=0.13 in /Users/ericxiao/Documents/arize/.venv/lib/python3.11/site-packages (from httpcore==1.*->httpx<0.28.0,>=0.23.0->litellm) (0.14.0)\n", + "Requirement already satisfied: zipp>=3.20 in /Users/ericxiao/Documents/arize/.venv/lib/python3.11/site-packages (from importlib-metadata>=6.8.0->litellm) (3.21.0)\n", + "Requirement already satisfied: MarkupSafe>=2.0 in /Users/ericxiao/Documents/arize/.venv/lib/python3.11/site-packages (from jinja2<4.0.0,>=3.1.2->litellm) (3.0.2)\n", + "Requirement already satisfied: attrs>=22.2.0 in /Users/ericxiao/Documents/arize/.venv/lib/python3.11/site-packages (from jsonschema<5.0.0,>=4.22.0->litellm) (24.2.0)\n", + "Requirement already satisfied: jsonschema-specifications>=2023.03.6 in /Users/ericxiao/Documents/arize/.venv/lib/python3.11/site-packages (from jsonschema<5.0.0,>=4.22.0->litellm) (2024.10.1)\n", + "Requirement already satisfied: referencing>=0.28.4 in /Users/ericxiao/Documents/arize/.venv/lib/python3.11/site-packages (from jsonschema<5.0.0,>=4.22.0->litellm) (0.35.1)\n", + "Requirement already satisfied: rpds-py>=0.7.1 in /Users/ericxiao/Documents/arize/.venv/lib/python3.11/site-packages (from jsonschema<5.0.0,>=4.22.0->litellm) (0.22.3)\n", + "Requirement already satisfied: distro<2,>=1.7.0 in /Users/ericxiao/Documents/arize/.venv/lib/python3.11/site-packages (from openai>=1.55.3->litellm) (1.9.0)\n", + "Requirement already satisfied: jiter<1,>=0.4.0 in /Users/ericxiao/Documents/arize/.venv/lib/python3.11/site-packages (from openai>=1.55.3->litellm) (0.6.1)\n", + "Requirement already satisfied: tqdm>4 in /Users/ericxiao/Documents/arize/.venv/lib/python3.11/site-packages (from openai>=1.55.3->litellm) (4.67.1)\n", + "Requirement already satisfied: typing-extensions<5,>=4.11 in /Users/ericxiao/Documents/arize/.venv/lib/python3.11/site-packages (from openai>=1.55.3->litellm) (4.12.2)\n", + "Requirement already satisfied: annotated-types>=0.6.0 in /Users/ericxiao/Documents/arize/.venv/lib/python3.11/site-packages (from pydantic<3.0.0,>=2.0.0->litellm) (0.7.0)\n", + "Requirement already satisfied: pydantic-core==2.27.1 in /Users/ericxiao/Documents/arize/.venv/lib/python3.11/site-packages (from pydantic<3.0.0,>=2.0.0->litellm) (2.27.1)\n", + "Requirement already satisfied: charset-normalizer<4,>=2 in /Users/ericxiao/Documents/arize/.venv/lib/python3.11/site-packages (from requests<3.0.0,>=2.31.0->litellm) (3.4.0)\n", + "Requirement already satisfied: urllib3<3,>=1.21.1 in /Users/ericxiao/Documents/arize/.venv/lib/python3.11/site-packages (from requests<3.0.0,>=2.31.0->litellm) (2.0.7)\n", + "Requirement already satisfied: regex>=2022.1.18 in /Users/ericxiao/Documents/arize/.venv/lib/python3.11/site-packages (from tiktoken>=0.7.0->litellm) (2024.11.6)\n", + "Requirement already satisfied: aiohappyeyeballs>=2.3.0 in /Users/ericxiao/Documents/arize/.venv/lib/python3.11/site-packages (from aiohttp->litellm) (2.4.4)\n", + "Requirement already satisfied: aiosignal>=1.1.2 in 
/Users/ericxiao/Documents/arize/.venv/lib/python3.11/site-packages (from aiohttp->litellm) (1.3.1)\n", + "Requirement already satisfied: frozenlist>=1.1.1 in /Users/ericxiao/Documents/arize/.venv/lib/python3.11/site-packages (from aiohttp->litellm) (1.5.0)\n", + "Requirement already satisfied: multidict<7.0,>=4.5 in /Users/ericxiao/Documents/arize/.venv/lib/python3.11/site-packages (from aiohttp->litellm) (6.1.0)\n", + "Requirement already satisfied: propcache>=0.2.0 in /Users/ericxiao/Documents/arize/.venv/lib/python3.11/site-packages (from aiohttp->litellm) (0.2.1)\n", + "Requirement already satisfied: yarl<2.0,>=1.17.0 in /Users/ericxiao/Documents/arize/.venv/lib/python3.11/site-packages (from aiohttp->litellm) (1.18.3)\n", + "Requirement already satisfied: huggingface-hub<1.0,>=0.16.4 in /Users/ericxiao/Documents/arize/.venv/lib/python3.11/site-packages (from tokenizers->litellm) (0.26.5)\n", + "Requirement already satisfied: filelock in /Users/ericxiao/Documents/arize/.venv/lib/python3.11/site-packages (from huggingface-hub<1.0,>=0.16.4->tokenizers->litellm) (3.16.1)\n", + "Requirement already satisfied: fsspec>=2023.5.0 in /Users/ericxiao/Documents/arize/.venv/lib/python3.11/site-packages (from huggingface-hub<1.0,>=0.16.4->tokenizers->litellm) (2024.10.0)\n", + "Requirement already satisfied: packaging>=20.9 in /Users/ericxiao/Documents/arize/.venv/lib/python3.11/site-packages (from huggingface-hub<1.0,>=0.16.4->tokenizers->litellm) (24.2)\n", + "Requirement already satisfied: pyyaml>=5.1 in /Users/ericxiao/Documents/arize/.venv/lib/python3.11/site-packages (from huggingface-hub<1.0,>=0.16.4->tokenizers->litellm) (6.0.2)\n" + ] + } + ], + "source": [ + "!pip install litellm" + ] + }, + { + "cell_type": "markdown", + "metadata": { + "id": "jHEu-TjZ29PJ" + }, + "source": [ + "## Set Env Variables" + ] + }, + { + "cell_type": "code", + "execution_count": 5, + "metadata": { + "id": "QWd9rTysxsWO" + }, + "outputs": [], + "source": [ + "import litellm\n", + "import os\n", + "from getpass import getpass\n", + "\n", + "os.environ[\"ARIZE_SPACE_KEY\"] = getpass(\"Enter your Arize space key: \")\n", + "os.environ[\"ARIZE_API_KEY\"] = getpass(\"Enter your Arize API key: \")\n", + "os.environ['OPENAI_API_KEY']= getpass(\"Enter your OpenAI API key: \")" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "Let's run a completion call and see the traces in Arize" + ] + }, + { + "cell_type": "code", + "execution_count": 8, + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "Hello! Nice to meet you, OpenAI. 
How can I assist you today?\n" + ] + } + ], + "source": [ + "# set arize as a callback, litellm will send the data to arize\n", + "litellm.callbacks = [\"arize\"]\n", + " \n", + "# openai call\n", + "response = litellm.completion(\n", + " model=\"gpt-3.5-turbo\",\n", + " messages=[\n", + " {\"role\": \"user\", \"content\": \"Hi 👋 - i'm openai\"}\n", + " ]\n", + ")\n", + "print(response.choices[0].message.content)" + ] + } + ], + "metadata": { + "colab": { + "provenance": [] + }, + "kernelspec": { + "display_name": ".venv", + "language": "python", + "name": "python3" + }, + "language_info": { + "codemirror_mode": { + "name": "ipython", + "version": 3 + }, + "file_extension": ".py", + "mimetype": "text/x-python", + "name": "python", + "nbconvert_exporter": "python", + "pygments_lexer": "ipython3", + "version": "3.11.6" + } + }, + "nbformat": 4, + "nbformat_minor": 0 +} diff --git a/docker-compose.yml b/docker-compose.yml index 1508bd375c..78044c03b8 100644 --- a/docker-compose.yml +++ b/docker-compose.yml @@ -29,6 +29,8 @@ services: POSTGRES_DB: litellm POSTGRES_USER: llmproxy POSTGRES_PASSWORD: dbpassword9090 + ports: + - "5432:5432" healthcheck: test: ["CMD-SHELL", "pg_isready -d litellm -U llmproxy"] interval: 1s diff --git a/docs/my-website/docs/debugging/local_debugging.md b/docs/my-website/docs/debugging/local_debugging.md index a9409bfab0..8a56d6c34a 100644 --- a/docs/my-website/docs/debugging/local_debugging.md +++ b/docs/my-website/docs/debugging/local_debugging.md @@ -1,5 +1,5 @@ # Local Debugging -There's 2 ways to do local debugging - `litellm.set_verbose=True` and by passing in a custom function `completion(...logger_fn=)`. Warning: Make sure to not use `set_verbose` in production. It logs API keys, which might end up in log files. +There's 2 ways to do local debugging - `litellm._turn_on_debug()` and by passing in a custom function `completion(...logger_fn=)`. Warning: Make sure to not use `_turn_on_debug()` in production. It logs API keys, which might end up in log files. ## Set Verbose @@ -8,7 +8,7 @@ This is good for getting print statements for everything litellm is doing. 
import litellm from litellm import completion -litellm.set_verbose=True # 👈 this is the 1-line change you need to make +litellm._turn_on_debug() # 👈 this is the 1-line change you need to make ## set ENV variables os.environ["OPENAI_API_KEY"] = "openai key" diff --git a/docs/my-website/docs/observability/arize_integration.md b/docs/my-website/docs/observability/arize_integration.md index a69d32e5b3..73122196b0 100644 --- a/docs/my-website/docs/observability/arize_integration.md +++ b/docs/my-website/docs/observability/arize_integration.md @@ -28,7 +28,7 @@ import litellm import os os.environ["ARIZE_SPACE_KEY"] = "" -os.environ["ARIZE_API_KEY"] = "" # defaults to litellm-completion +os.environ["ARIZE_API_KEY"] = "" # LLM API Keys os.environ['OPENAI_API_KEY']="" diff --git a/docs/my-website/docs/pass_through/assembly_ai.md b/docs/my-website/docs/pass_through/assembly_ai.md index 5f7f1c07e6..4606640c5c 100644 --- a/docs/my-website/docs/pass_through/assembly_ai.md +++ b/docs/my-website/docs/pass_through/assembly_ai.md @@ -12,6 +12,9 @@ Supports **ALL** Assembly AI Endpoints [**See All Assembly AI Endpoints**](https://www.assemblyai.com/docs/api-reference) + + + ## Quick Start Let's call the Assembly AI [`/v2/transcripts` endpoint](https://www.assemblyai.com/docs/api-reference/transcripts) @@ -35,6 +38,8 @@ litellm Let's call the Assembly AI `/v2/transcripts` endpoint ```python +import assemblyai as aai + LITELLM_VIRTUAL_KEY = "sk-1234" # LITELLM_PROXY_BASE_URL = "http://0.0.0.0:4000/assemblyai" # /assemblyai @@ -53,3 +58,28 @@ print(transcript) print(transcript.id) ``` +## Calling Assembly AI EU endpoints + +If you want to send your request to the Assembly AI EU endpoint, you can do so by setting the `LITELLM_PROXY_BASE_URL` to `/eu.assemblyai` + + +```python +import assemblyai as aai + +LITELLM_VIRTUAL_KEY = "sk-1234" # +LITELLM_PROXY_BASE_URL = "http://0.0.0.0:4000/eu.assemblyai" # /eu.assemblyai + +aai.settings.api_key = f"Bearer {LITELLM_VIRTUAL_KEY}" +aai.settings.base_url = LITELLM_PROXY_BASE_URL + +# URL of the file to transcribe +FILE_URL = "https://assembly.ai/wildfires.mp3" + +# You can also transcribe a local file by passing in a file path +# FILE_URL = './path/to/file.mp3' + +transcriber = aai.Transcriber() +transcript = transcriber.transcribe(FILE_URL) +print(transcript) +print(transcript.id) +``` diff --git a/docs/my-website/docs/providers/anthropic.md b/docs/my-website/docs/providers/anthropic.md index b3bfe333cc..cda0bb97a6 100644 --- a/docs/my-website/docs/providers/anthropic.md +++ b/docs/my-website/docs/providers/anthropic.md @@ -987,6 +987,106 @@ curl http://0.0.0.0:4000/v1/chat/completions \ +## [BETA] Citations API + +Pass `citations: {"enabled": true}` to Anthropic, to get citations on your document responses. + +Note: This interface is in BETA. If you have feedback on how citations should be returned, please [tell us here](https://github.com/BerriAI/litellm/issues/7970#issuecomment-2644437943) + + + + +```python +from litellm import completion + +resp = completion( + model="claude-3-5-sonnet-20241022", + messages=[ + { + "role": "user", + "content": [ + { + "type": "document", + "source": { + "type": "text", + "media_type": "text/plain", + "data": "The grass is green. 
The sky is blue.", + }, + "title": "My Document", + "context": "This is a trustworthy document.", + "citations": {"enabled": True}, + }, + { + "type": "text", + "text": "What color is the grass and sky?", + }, + ], + } + ], +) + +citations = resp.choices[0].message.provider_specific_fields["citations"] + +assert citations is not None +``` + + + + +1. Setup config.yaml + +```yaml +model_list: + - model_name: anthropic-claude + litellm_params: + model: anthropic/claude-3-5-sonnet-20241022 + api_key: os.environ/ANTHROPIC_API_KEY +``` + +2. Start proxy + +```bash +litellm --config /path/to/config.yaml + +# RUNNING on http://0.0.0.0:4000 +``` + +3. Test it! + +```bash +curl -L -X POST 'http://0.0.0.0:4000/v1/chat/completions' \ +-H 'Content-Type: application/json' \ +-H 'Authorization: Bearer sk-1234' \ +-d '{ + "model": "anthropic-claude", + "messages": [ + { + "role": "user", + "content": [ + { + "type": "document", + "source": { + "type": "text", + "media_type": "text/plain", + "data": "The grass is green. The sky is blue.", + }, + "title": "My Document", + "context": "This is a trustworthy document.", + "citations": {"enabled": True}, + }, + { + "type": "text", + "text": "What color is the grass and sky?", + }, + ], + } + ] +}' +``` + + + + ## Usage - passing 'user_id' to Anthropic LiteLLM translates the OpenAI `user` param to Anthropic's `metadata[user_id]` param. diff --git a/docs/my-website/docs/providers/gemini.md b/docs/my-website/docs/providers/gemini.md index 0588200465..4a6cfdf1a3 100644 --- a/docs/my-website/docs/providers/gemini.md +++ b/docs/my-website/docs/providers/gemini.md @@ -688,7 +688,9 @@ response = litellm.completion( |-----------------------|--------------------------------------------------------|--------------------------------| | gemini-pro | `completion(model='gemini/gemini-pro', messages)` | `os.environ['GEMINI_API_KEY']` | | gemini-1.5-pro-latest | `completion(model='gemini/gemini-1.5-pro-latest', messages)` | `os.environ['GEMINI_API_KEY']` | -| gemini-pro-vision | `completion(model='gemini/gemini-pro-vision', messages)` | `os.environ['GEMINI_API_KEY']` | +| gemini-2.0-flash | `completion(model='gemini/gemini-2.0-flash', messages)` | `os.environ['GEMINI_API_KEY']` | +| gemini-2.0-flash-exp | `completion(model='gemini/gemini-2.0-flash-exp', messages)` | `os.environ['GEMINI_API_KEY']` | +| gemini-2.0-flash-lite-preview-02-05 | `completion(model='gemini/gemini-2.0-flash-lite-preview-02-05', messages)` | `os.environ['GEMINI_API_KEY']` | diff --git a/docs/my-website/docs/proxy/guardrails/aim_security.md b/docs/my-website/docs/proxy/guardrails/aim_security.md index d588afa424..3de933c0b7 100644 --- a/docs/my-website/docs/proxy/guardrails/aim_security.md +++ b/docs/my-website/docs/proxy/guardrails/aim_security.md @@ -37,7 +37,7 @@ guardrails: - guardrail_name: aim-protected-app litellm_params: guardrail: aim - mode: pre_call + mode: pre_call # 'during_call' is also available api_key: os.environ/AIM_API_KEY api_base: os.environ/AIM_API_BASE # Optional, use only when using a self-hosted Aim Outpost ``` diff --git a/docs/my-website/docs/proxy/timeout.md b/docs/my-website/docs/proxy/timeout.md index 2bf93298fe..85428ae53e 100644 --- a/docs/my-website/docs/proxy/timeout.md +++ b/docs/my-website/docs/proxy/timeout.md @@ -166,7 +166,7 @@ response = client.chat.completions.create( {"role": "user", "content": "what color is red"} ], logit_bias={12481: 100}, - timeout=1 + extra_body={"timeout": 1} # 👈 KEY CHANGE ) print(response) diff --git 
a/docs/my-website/docs/proxy/token_auth.md b/docs/my-website/docs/proxy/token_auth.md index 0e65900b28..753e92c169 100644 --- a/docs/my-website/docs/proxy/token_auth.md +++ b/docs/my-website/docs/proxy/token_auth.md @@ -163,10 +163,12 @@ scope: "litellm-proxy-admin ..." ```yaml general_settings: - master_key: sk-1234 + enable_jwt_auth: True litellm_jwtauth: user_id_jwt_field: "sub" team_ids_jwt_field: "groups" + user_id_upsert: true # add user_id to the db if they don't exist + enforce_team_based_model_access: true # don't allow users to access models unless the team has access ``` This is assuming your token looks like this: @@ -370,4 +372,68 @@ Supported internal roles: - `internal_user`: User object will be used for RBAC spend tracking. Use this for tracking spend for an 'individual user'. - `proxy_admin`: Proxy admin will be used for RBAC spend tracking. Use this for granting admin access to a token. -### [Architecture Diagram (Control Model Access)](./jwt_auth_arch) \ No newline at end of file +### [Architecture Diagram (Control Model Access)](./jwt_auth_arch) + +## [BETA] Control Model Access with Scopes + +Control which models a JWT can access. Set `enforce_scope_based_access: true` to enforce scope-based access control. + +### 1. Setup config.yaml with scope mappings. + + +```yaml +model_list: + - model_name: anthropic-claude + litellm_params: + model: anthropic/claude-3-5-sonnet + api_key: os.environ/ANTHROPIC_API_KEY + - model_name: gpt-3.5-turbo-testing + litellm_params: + model: gpt-3.5-turbo + api_key: os.environ/OPENAI_API_KEY + +general_settings: + enable_jwt_auth: True + litellm_jwtauth: + team_id_jwt_field: "client_id" # 👈 set the field in the JWT token that contains the team id + team_id_upsert: true # 👈 upsert the team to db, if team id is not found in db + scope_mappings: + - scope: litellm.api.consumer + models: ["anthropic-claude"] + - scope: litellm.api.gpt_3_5_turbo + models: ["gpt-3.5-turbo-testing"] + enforce_scope_based_access: true # 👈 enforce scope-based access control + enforce_rbac: true # 👈 enforces only a Team/User/ProxyAdmin can access the proxy. +``` + +#### Scope Mapping Spec + +- `scope`: The scope to be used for the JWT token. +- `models`: The models that the JWT token can access. Value is the `model_name` in `model_list`. Note: Wildcard routes are not currently supported. + +### 2. Create a JWT with the correct scopes. + +Expected Token: + +``` +{ + "scope": ["litellm.api.consumer", "litellm.api.gpt_3_5_turbo"] +} +``` + +### 3. Test the flow. + +```bash +curl -L -X POST 'http://0.0.0.0:4000/v1/chat/completions' \ +-H 'Content-Type: application/json' \ +-H 'Authorization: Bearer eyJhbGci...' \ +-d '{ + "model": "gpt-3.5-turbo-testing", + "messages": [ + { + "role": "user", + "content": "Hey, how'\''s it going 1234?" 
+ } + ] +}' +``` \ No newline at end of file diff --git a/litellm/__init__.py b/litellm/__init__.py index c49b3214b9..b8de8a4298 100644 --- a/litellm/__init__.py +++ b/litellm/__init__.py @@ -360,7 +360,7 @@ BEDROCK_CONVERSE_MODELS = [ "meta.llama3-2-90b-instruct-v1:0", ] BEDROCK_INVOKE_PROVIDERS_LITERAL = Literal[ - "cohere", "anthropic", "mistral", "amazon", "meta", "llama", "ai21" + "cohere", "anthropic", "mistral", "amazon", "meta", "llama", "ai21", "nova" ] ####### COMPLETION MODELS ################### open_ai_chat_completion_models: List = [] @@ -863,6 +863,9 @@ from .llms.bedrock.common_utils import ( from .llms.bedrock.chat.invoke_transformations.amazon_ai21_transformation import ( AmazonAI21Config, ) +from .llms.bedrock.chat.invoke_transformations.amazon_nova_transformation import ( + AmazonInvokeNovaConfig, +) from .llms.bedrock.chat.invoke_transformations.anthropic_claude2_transformation import ( AmazonAnthropicConfig, ) diff --git a/litellm/integrations/additional_logging_utils.py b/litellm/integrations/additional_logging_utils.py new file mode 100644 index 0000000000..795afd81d4 --- /dev/null +++ b/litellm/integrations/additional_logging_utils.py @@ -0,0 +1,36 @@ +""" +Base class for Additional Logging Utils for CustomLoggers + +- Health Check for the logging util +- Get Request / Response Payload for the logging util +""" + +from abc import ABC, abstractmethod +from datetime import datetime +from typing import Optional + +from litellm.types.integrations.base_health_check import IntegrationHealthCheckStatus + + +class AdditionalLoggingUtils(ABC): + def __init__(self): + super().__init__() + + @abstractmethod + async def async_health_check(self) -> IntegrationHealthCheckStatus: + """ + Check if the service is healthy + """ + pass + + @abstractmethod + async def get_request_response_payload( + self, + request_id: str, + start_time_utc: Optional[datetime], + end_time_utc: Optional[datetime], + ) -> Optional[dict]: + """ + Get the request and response payload for a given `request_id` + """ + return None diff --git a/litellm/integrations/base_health_check.py b/litellm/integrations/base_health_check.py deleted file mode 100644 index 35b390692b..0000000000 --- a/litellm/integrations/base_health_check.py +++ /dev/null @@ -1,19 +0,0 @@ -""" -Base class for health check integrations -""" - -from abc import ABC, abstractmethod - -from litellm.types.integrations.base_health_check import IntegrationHealthCheckStatus - - -class HealthCheckIntegration(ABC): - def __init__(self): - super().__init__() - - @abstractmethod - async def async_health_check(self) -> IntegrationHealthCheckStatus: - """ - Check if the service is healthy - """ - pass diff --git a/litellm/integrations/datadog/datadog.py b/litellm/integrations/datadog/datadog.py index 89928840e9..04364d3a7f 100644 --- a/litellm/integrations/datadog/datadog.py +++ b/litellm/integrations/datadog/datadog.py @@ -38,14 +38,14 @@ from litellm.types.integrations.datadog import * from litellm.types.services import ServiceLoggerPayload from litellm.types.utils import StandardLoggingPayload -from ..base_health_check import HealthCheckIntegration +from ..additional_logging_utils import AdditionalLoggingUtils DD_MAX_BATCH_SIZE = 1000 # max number of logs DD API can accept class DataDogLogger( CustomBatchLogger, - HealthCheckIntegration, + AdditionalLoggingUtils, ): # Class variables or attributes def __init__( @@ -543,3 +543,13 @@ class DataDogLogger( status="unhealthy", error_message=str(e), ) + + async def get_request_response_payload( + self, + 
request_id: str,
+        start_time_utc: Optional[datetimeObj],
+        end_time_utc: Optional[datetimeObj],
+    ) -> Optional[dict]:
+        raise NotImplementedError(
+            "Datadog Integration for getting request/response payloads not implemented as yet"
+        )
diff --git a/litellm/integrations/gcs_bucket/gcs_bucket.py b/litellm/integrations/gcs_bucket/gcs_bucket.py
index d6a9c316b3..187ab779c0 100644
--- a/litellm/integrations/gcs_bucket/gcs_bucket.py
+++ b/litellm/integrations/gcs_bucket/gcs_bucket.py
@@ -1,12 +1,16 @@
 import asyncio
+import json
 import os
 import uuid
-from datetime import datetime
+from datetime import datetime, timedelta, timezone
 from typing import TYPE_CHECKING, Any, Dict, List, Optional
+from urllib.parse import quote
 
 from litellm._logging import verbose_logger
+from litellm.integrations.additional_logging_utils import AdditionalLoggingUtils
 from litellm.integrations.gcs_bucket.gcs_bucket_base import GCSBucketBase
 from litellm.proxy._types import CommonProxyErrors
+from litellm.types.integrations.base_health_check import IntegrationHealthCheckStatus
 from litellm.types.integrations.gcs_bucket import *
 from litellm.types.utils import StandardLoggingPayload
 
@@ -20,7 +24,7 @@ GCS_DEFAULT_BATCH_SIZE = 2048
 GCS_DEFAULT_FLUSH_INTERVAL_SECONDS = 20
 
 
-class GCSBucketLogger(GCSBucketBase):
+class GCSBucketLogger(GCSBucketBase, AdditionalLoggingUtils):
     def __init__(self, bucket_name: Optional[str] = None) -> None:
         from litellm.proxy.proxy_server import premium_user
 
@@ -39,6 +43,7 @@ class GCSBucketLogger(GCSBucketBase):
             batch_size=self.batch_size,
             flush_interval=self.flush_interval,
         )
+        AdditionalLoggingUtils.__init__(self)
 
         if premium_user is not True:
             raise ValueError(
@@ -150,11 +155,16 @@ class GCSBucketLogger(GCSBucketBase):
         """
         Get the object name to use for the current payload
         """
-        current_date = datetime.now().strftime("%Y-%m-%d")
+        current_date = self._get_object_date_from_datetime(datetime.now(timezone.utc))
         if logging_payload.get("error_str", None) is not None:
-            object_name = f"{current_date}/failure-{uuid.uuid4().hex}"
+            object_name = self._generate_failure_object_name(
+                request_date_str=current_date,
+            )
         else:
-            object_name = f"{current_date}/{response_obj.get('id', '')}"
+            object_name = self._generate_success_object_name(
+                request_date_str=current_date,
+                response_id=response_obj.get("id", ""),
+            )
 
         # used for testing
         _litellm_params = kwargs.get("litellm_params", None) or {}
@@ -163,3 +173,65 @@ class GCSBucketLogger(GCSBucketBase):
             object_name = _metadata["gcs_log_id"]
 
         return object_name
+
+    async def get_request_response_payload(
+        self,
+        request_id: str,
+        start_time_utc: Optional[datetime],
+        end_time_utc: Optional[datetime],
+    ) -> Optional[dict]:
+        """
+        Get the request and response payload for a given `request_id`
+        Tries current day, next day, and previous day until it finds the payload
+        """
+        if start_time_utc is None:
+            raise ValueError(
+                "start_time_utc is required for getting a payload from GCS Bucket"
+            )
+
+        # Try current day, next day, and previous day
+        dates_to_try = [
+            start_time_utc,
+            start_time_utc + timedelta(days=1),
+            start_time_utc - timedelta(days=1),
+        ]
+        date_str = None
+        for date in dates_to_try:
+            try:
+                date_str = self._get_object_date_from_datetime(datetime_obj=date)
+                object_name = self._generate_success_object_name(
+                    request_date_str=date_str,
+                    response_id=request_id,
+                )
+                encoded_object_name = quote(object_name, safe="")
+                response = await self.download_gcs_object(encoded_object_name)
+
+                if response is not None:
+                    loaded_response = 
json.loads(response) + return loaded_response + except Exception as e: + verbose_logger.debug( + f"Failed to fetch payload for date {date_str}: {str(e)}" + ) + continue + + return None + + def _generate_success_object_name( + self, + request_date_str: str, + response_id: str, + ) -> str: + return f"{request_date_str}/{response_id}" + + def _generate_failure_object_name( + self, + request_date_str: str, + ) -> str: + return f"{request_date_str}/failure-{uuid.uuid4().hex}" + + def _get_object_date_from_datetime(self, datetime_obj: datetime) -> str: + return datetime_obj.strftime("%Y-%m-%d") + + async def async_health_check(self) -> IntegrationHealthCheckStatus: + raise NotImplementedError("GCS Bucket does not support health check") diff --git a/litellm/integrations/langfuse/langfuse.py b/litellm/integrations/langfuse/langfuse.py index 125bf4e686..f990a316c4 100644 --- a/litellm/integrations/langfuse/langfuse.py +++ b/litellm/integrations/langfuse/langfuse.py @@ -3,7 +3,8 @@ import copy import os import traceback -from typing import TYPE_CHECKING, Any, Dict, List, Optional, cast +from datetime import datetime +from typing import TYPE_CHECKING, Any, Dict, List, Optional, Tuple, Union, cast from packaging.version import Version @@ -13,9 +14,16 @@ from litellm.litellm_core_utils.redact_messages import redact_user_api_key_info from litellm.llms.custom_httpx.http_handler import _get_httpx_client from litellm.secret_managers.main import str_to_bool from litellm.types.integrations.langfuse import * +from litellm.types.llms.openai import HttpxBinaryResponseContent from litellm.types.utils import ( + EmbeddingResponse, + ImageResponse, + ModelResponse, + RerankResponse, StandardLoggingPayload, StandardLoggingPromptManagementMetadata, + TextCompletionResponse, + TranscriptionResponse, ) if TYPE_CHECKING: @@ -150,19 +158,29 @@ class LangFuseLogger: return metadata - def _old_log_event( # noqa: PLR0915 + def log_event_on_langfuse( self, - kwargs, - response_obj, - start_time, - end_time, - user_id, - print_verbose, - level="DEFAULT", - status_message=None, + kwargs: dict, + response_obj: Union[ + None, + dict, + EmbeddingResponse, + ModelResponse, + TextCompletionResponse, + ImageResponse, + TranscriptionResponse, + RerankResponse, + HttpxBinaryResponseContent, + ], + start_time: Optional[datetime] = None, + end_time: Optional[datetime] = None, + user_id: Optional[str] = None, + level: str = "DEFAULT", + status_message: Optional[str] = None, ) -> dict: - # Method definition - + """ + Logs a success or error event on Langfuse + """ try: verbose_logger.debug( f"Langfuse Logging - Enters logging function for model {kwargs}" @@ -198,66 +216,13 @@ class LangFuseLogger: # if casting value to str fails don't block logging pass - # end of processing langfuse ######################## - if ( - level == "ERROR" - and status_message is not None - and isinstance(status_message, str) - ): - input = prompt - output = status_message - elif response_obj is not None and ( - kwargs.get("call_type", None) == "embedding" - or isinstance(response_obj, litellm.EmbeddingResponse) - ): - input = prompt - output = None - elif response_obj is not None and isinstance( - response_obj, litellm.ModelResponse - ): - input = prompt - output = response_obj["choices"][0]["message"].json() - elif response_obj is not None and isinstance( - response_obj, litellm.HttpxBinaryResponseContent - ): - input = prompt - output = "speech-output" - elif response_obj is not None and isinstance( - response_obj, litellm.TextCompletionResponse - ): - 
input = prompt - output = response_obj.choices[0].text - elif response_obj is not None and isinstance( - response_obj, litellm.ImageResponse - ): - input = prompt - output = response_obj["data"] - elif response_obj is not None and isinstance( - response_obj, litellm.TranscriptionResponse - ): - input = prompt - output = response_obj["text"] - elif response_obj is not None and isinstance( - response_obj, litellm.RerankResponse - ): - input = prompt - output = response_obj.results - elif ( - kwargs.get("call_type") is not None - and kwargs.get("call_type") == "_arealtime" - and response_obj is not None - and isinstance(response_obj, list) - ): - input = kwargs.get("input") - output = response_obj - elif ( - kwargs.get("call_type") is not None - and kwargs.get("call_type") == "pass_through_endpoint" - and response_obj is not None - and isinstance(response_obj, dict) - ): - input = prompt - output = response_obj.get("response", "") + input, output = self._get_langfuse_input_output_content( + kwargs=kwargs, + response_obj=response_obj, + prompt=prompt, + level=level, + status_message=status_message, + ) verbose_logger.debug( f"OUTPUT IN LANGFUSE: {output}; original: {response_obj}" ) @@ -265,31 +230,30 @@ class LangFuseLogger: generation_id = None if self._is_langfuse_v2(): trace_id, generation_id = self._log_langfuse_v2( - user_id, - metadata, - litellm_params, - output, - start_time, - end_time, - kwargs, - optional_params, - input, - response_obj, - level, - print_verbose, - litellm_call_id, + user_id=user_id, + metadata=metadata, + litellm_params=litellm_params, + output=output, + start_time=start_time, + end_time=end_time, + kwargs=kwargs, + optional_params=optional_params, + input=input, + response_obj=response_obj, + level=level, + litellm_call_id=litellm_call_id, ) elif response_obj is not None: self._log_langfuse_v1( - user_id, - metadata, - output, - start_time, - end_time, - kwargs, - optional_params, - input, - response_obj, + user_id=user_id, + metadata=metadata, + output=output, + start_time=start_time, + end_time=end_time, + kwargs=kwargs, + optional_params=optional_params, + input=input, + response_obj=response_obj, ) verbose_logger.debug( f"Langfuse Layer Logging - final response object: {response_obj}" @@ -303,11 +267,108 @@ class LangFuseLogger: ) return {"trace_id": None, "generation_id": None} + def _get_langfuse_input_output_content( + self, + kwargs: dict, + response_obj: Union[ + None, + dict, + EmbeddingResponse, + ModelResponse, + TextCompletionResponse, + ImageResponse, + TranscriptionResponse, + RerankResponse, + HttpxBinaryResponseContent, + ], + prompt: dict, + level: str, + status_message: Optional[str], + ) -> Tuple[Optional[dict], Optional[Union[str, dict, list]]]: + """ + Get the input and output content for Langfuse logging + + Args: + kwargs: The keyword arguments passed to the function + response_obj: The response object returned by the function + prompt: The prompt used to generate the response + level: The level of the log message + status_message: The status message of the log message + + Returns: + input: The input content for Langfuse logging + output: The output content for Langfuse logging + """ + input = None + output: Optional[Union[str, dict, List[Any]]] = None + if ( + level == "ERROR" + and status_message is not None + and isinstance(status_message, str) + ): + input = prompt + output = status_message + elif response_obj is not None and ( + kwargs.get("call_type", None) == "embedding" + or isinstance(response_obj, litellm.EmbeddingResponse) + ): 
+ input = prompt + output = None + elif response_obj is not None and isinstance( + response_obj, litellm.ModelResponse + ): + input = prompt + output = self._get_chat_content_for_langfuse(response_obj) + elif response_obj is not None and isinstance( + response_obj, litellm.HttpxBinaryResponseContent + ): + input = prompt + output = "speech-output" + elif response_obj is not None and isinstance( + response_obj, litellm.TextCompletionResponse + ): + input = prompt + output = self._get_text_completion_content_for_langfuse(response_obj) + elif response_obj is not None and isinstance( + response_obj, litellm.ImageResponse + ): + input = prompt + output = response_obj.get("data", None) + elif response_obj is not None and isinstance( + response_obj, litellm.TranscriptionResponse + ): + input = prompt + output = response_obj.get("text", None) + elif response_obj is not None and isinstance( + response_obj, litellm.RerankResponse + ): + input = prompt + output = response_obj.results + elif ( + kwargs.get("call_type") is not None + and kwargs.get("call_type") == "_arealtime" + and response_obj is not None + and isinstance(response_obj, list) + ): + input = kwargs.get("input") + output = response_obj + elif ( + kwargs.get("call_type") is not None + and kwargs.get("call_type") == "pass_through_endpoint" + and response_obj is not None + and isinstance(response_obj, dict) + ): + input = prompt + output = response_obj.get("response", "") + return input, output + async def _async_log_event( - self, kwargs, response_obj, start_time, end_time, user_id, print_verbose + self, kwargs, response_obj, start_time, end_time, user_id ): """ - TODO: support async calls when langfuse is truly async + Langfuse SDK uses a background thread to log events + + This approach does not impact latency and runs in the background """ def _is_langfuse_v2(self): @@ -361,19 +422,18 @@ class LangFuseLogger: def _log_langfuse_v2( # noqa: PLR0915 self, - user_id, - metadata, - litellm_params, - output, - start_time, - end_time, - kwargs, - optional_params, - input, + user_id: Optional[str], + metadata: dict, + litellm_params: dict, + output: Optional[Union[str, dict, list]], + start_time: Optional[datetime], + end_time: Optional[datetime], + kwargs: dict, + optional_params: dict, + input: Optional[dict], response_obj, - level, - print_verbose, - litellm_call_id, + level: str, + litellm_call_id: Optional[str], ) -> tuple: verbose_logger.debug("Langfuse Layer Logging - logging to langfuse v2") @@ -657,6 +717,31 @@ class LangFuseLogger: verbose_logger.error(f"Langfuse Layer Error - {traceback.format_exc()}") return None, None + @staticmethod + def _get_chat_content_for_langfuse( + response_obj: ModelResponse, + ): + """ + Get the chat content for Langfuse logging + """ + if response_obj.choices and len(response_obj.choices) > 0: + output = response_obj["choices"][0]["message"].json() + return output + else: + return None + + @staticmethod + def _get_text_completion_content_for_langfuse( + response_obj: TextCompletionResponse, + ): + """ + Get the text completion content for Langfuse logging + """ + if response_obj.choices and len(response_obj.choices) > 0: + return response_obj.choices[0].text + else: + return None + @staticmethod def _get_langfuse_tags( standard_logging_object: Optional[StandardLoggingPayload], diff --git a/litellm/integrations/langfuse/langfuse_prompt_management.py b/litellm/integrations/langfuse/langfuse_prompt_management.py index faa4a63491..cc2a6cf80d 100644 --- 
a/litellm/integrations/langfuse/langfuse_prompt_management.py +++ b/litellm/integrations/langfuse/langfuse_prompt_management.py @@ -247,13 +247,12 @@ class LangfusePromptManagement(LangFuseLogger, PromptManagementBase, CustomLogge standard_callback_dynamic_params=standard_callback_dynamic_params, in_memory_dynamic_logger_cache=in_memory_dynamic_logger_cache, ) - langfuse_logger_to_use._old_log_event( + langfuse_logger_to_use.log_event_on_langfuse( kwargs=kwargs, response_obj=response_obj, start_time=start_time, end_time=end_time, user_id=kwargs.get("user", None), - print_verbose=None, ) async def async_log_failure_event(self, kwargs, response_obj, start_time, end_time): @@ -271,12 +270,11 @@ class LangfusePromptManagement(LangFuseLogger, PromptManagementBase, CustomLogge ) if standard_logging_object is None: return - langfuse_logger_to_use._old_log_event( + langfuse_logger_to_use.log_event_on_langfuse( start_time=start_time, end_time=end_time, response_obj=None, user_id=kwargs.get("user", None), - print_verbose=None, status_message=standard_logging_object["error_str"], level="ERROR", kwargs=kwargs, diff --git a/litellm/integrations/pagerduty/pagerduty.py b/litellm/integrations/pagerduty/pagerduty.py index 2eeb318c9d..6085bc237a 100644 --- a/litellm/integrations/pagerduty/pagerduty.py +++ b/litellm/integrations/pagerduty/pagerduty.py @@ -118,6 +118,7 @@ class PagerDutyAlerting(SlackAlerting): user_api_key_user_id=_meta.get("user_api_key_user_id"), user_api_key_team_alias=_meta.get("user_api_key_team_alias"), user_api_key_end_user_id=_meta.get("user_api_key_end_user_id"), + user_api_key_user_email=_meta.get("user_api_key_user_email"), ) ) @@ -195,6 +196,7 @@ class PagerDutyAlerting(SlackAlerting): user_api_key_user_id=user_api_key_dict.user_id, user_api_key_team_alias=user_api_key_dict.team_alias, user_api_key_end_user_id=user_api_key_dict.end_user_id, + user_api_key_user_email=user_api_key_dict.user_email, ) ) diff --git a/litellm/integrations/prometheus.py b/litellm/integrations/prometheus.py index 4759ef91e6..8c01c7495b 100644 --- a/litellm/integrations/prometheus.py +++ b/litellm/integrations/prometheus.py @@ -423,6 +423,7 @@ class PrometheusLogger(CustomLogger): team=user_api_team, team_alias=user_api_team_alias, user=user_id, + user_email=standard_logging_payload["metadata"]["user_api_key_user_email"], status_code="200", model=model, litellm_model_name=model, @@ -806,6 +807,7 @@ class PrometheusLogger(CustomLogger): enum_values = UserAPIKeyLabelValues( end_user=user_api_key_dict.end_user_id, user=user_api_key_dict.user_id, + user_email=user_api_key_dict.user_email, hashed_api_key=user_api_key_dict.api_key, api_key_alias=user_api_key_dict.key_alias, team=user_api_key_dict.team_id, @@ -853,6 +855,7 @@ class PrometheusLogger(CustomLogger): team=user_api_key_dict.team_id, team_alias=user_api_key_dict.team_alias, user=user_api_key_dict.user_id, + user_email=user_api_key_dict.user_email, status_code="200", ) _labels = prometheus_label_factory( diff --git a/litellm/litellm_core_utils/litellm_logging.py b/litellm/litellm_core_utils/litellm_logging.py index 45b63177b9..220306ee74 100644 --- a/litellm/litellm_core_utils/litellm_logging.py +++ b/litellm/litellm_core_utils/litellm_logging.py @@ -199,6 +199,7 @@ class Logging(LiteLLMLoggingBaseClass): dynamic_async_failure_callbacks: Optional[ List[Union[str, Callable, CustomLogger]] ] = None, + applied_guardrails: Optional[List[str]] = None, kwargs: Optional[Dict] = None, ): _input: Optional[str] = messages # save original value of messages @@ 
-271,6 +272,7 @@ class Logging(LiteLLMLoggingBaseClass): "litellm_call_id": litellm_call_id, "input": _input, "litellm_params": litellm_params, + "applied_guardrails": applied_guardrails, } def process_dynamic_callbacks(self): @@ -1247,13 +1249,12 @@ class Logging(LiteLLMLoggingBaseClass): in_memory_dynamic_logger_cache=in_memory_dynamic_logger_cache, ) if langfuse_logger_to_use is not None: - _response = langfuse_logger_to_use._old_log_event( + _response = langfuse_logger_to_use.log_event_on_langfuse( kwargs=kwargs, response_obj=result, start_time=start_time, end_time=end_time, user_id=kwargs.get("user", None), - print_verbose=print_verbose, ) if _response is not None and isinstance(_response, dict): _trace_id = _response.get("trace_id", None) @@ -1957,12 +1958,11 @@ class Logging(LiteLLMLoggingBaseClass): standard_callback_dynamic_params=self.standard_callback_dynamic_params, in_memory_dynamic_logger_cache=in_memory_dynamic_logger_cache, ) - _response = langfuse_logger_to_use._old_log_event( + _response = langfuse_logger_to_use.log_event_on_langfuse( start_time=start_time, end_time=end_time, response_obj=None, user_id=kwargs.get("user", None), - print_verbose=print_verbose, status_message=str(exception), level="ERROR", kwargs=self.model_call_details, @@ -2854,6 +2854,7 @@ class StandardLoggingPayloadSetup: metadata: Optional[Dict[str, Any]], litellm_params: Optional[dict] = None, prompt_integration: Optional[str] = None, + applied_guardrails: Optional[List[str]] = None, ) -> StandardLoggingMetadata: """ Clean and filter the metadata dictionary to include only the specified keys in StandardLoggingMetadata. @@ -2868,6 +2869,7 @@ class StandardLoggingPayloadSetup: - If the input metadata is None or not a dictionary, an empty StandardLoggingMetadata object is returned. - If 'user_api_key' is present in metadata and is a valid SHA256 hash, it's stored as 'user_api_key_hash'. 
""" + prompt_management_metadata: Optional[ StandardLoggingPromptManagementMetadata ] = None @@ -2892,11 +2894,13 @@ class StandardLoggingPayloadSetup: user_api_key_org_id=None, user_api_key_user_id=None, user_api_key_team_alias=None, + user_api_key_user_email=None, spend_logs_metadata=None, requester_ip_address=None, requester_metadata=None, user_api_key_end_user_id=None, prompt_management_metadata=prompt_management_metadata, + applied_guardrails=applied_guardrails, ) if isinstance(metadata, dict): # Filter the metadata dictionary to include only the specified keys @@ -3195,6 +3199,7 @@ def get_standard_logging_object_payload( metadata=metadata, litellm_params=litellm_params, prompt_integration=kwargs.get("prompt_integration", None), + applied_guardrails=kwargs.get("applied_guardrails", None), ) _request_body = proxy_server_request.get("body", {}) @@ -3324,12 +3329,14 @@ def get_standard_logging_metadata( user_api_key_team_id=None, user_api_key_org_id=None, user_api_key_user_id=None, + user_api_key_user_email=None, user_api_key_team_alias=None, spend_logs_metadata=None, requester_ip_address=None, requester_metadata=None, user_api_key_end_user_id=None, prompt_management_metadata=None, + applied_guardrails=None, ) if isinstance(metadata, dict): # Filter the metadata dictionary to include only the specified keys diff --git a/litellm/litellm_core_utils/llm_response_utils/convert_dict_to_response.py b/litellm/litellm_core_utils/llm_response_utils/convert_dict_to_response.py index dacd21f426..def4c597f2 100644 --- a/litellm/litellm_core_utils/llm_response_utils/convert_dict_to_response.py +++ b/litellm/litellm_core_utils/llm_response_utils/convert_dict_to_response.py @@ -3,7 +3,8 @@ import json import time import traceback import uuid -from typing import Dict, Iterable, List, Literal, Optional, Union +import re +from typing import Dict, Iterable, List, Literal, Optional, Union, Tuple import litellm from litellm._logging import verbose_logger @@ -220,6 +221,16 @@ def _handle_invalid_parallel_tool_calls( # if there is a JSONDecodeError, return the original tool_calls return tool_calls +def _parse_content_for_reasoning(message_text: Optional[str]) -> Tuple[Optional[str], Optional[str]]: + if not message_text: + return None, None + + reasoning_match = re.match(r"(.*?)(.*)", message_text, re.DOTALL) + + if reasoning_match: + return reasoning_match.group(1), reasoning_match.group(2) + + return None, message_text class LiteLLMResponseObjectHandler: @@ -432,8 +443,14 @@ def convert_to_model_response_object( # noqa: PLR0915 for field in choice["message"].keys(): if field not in message_keys: provider_specific_fields[field] = choice["message"][field] + + # Handle reasoning models that display `reasoning_content` within `content` + reasoning_content, content = _parse_content_for_reasoning(choice["message"].get("content", None)) + if reasoning_content: + provider_specific_fields["reasoning_content"] = reasoning_content + message = Message( - content=choice["message"].get("content", None), + content=content, role=choice["message"]["role"] or "assistant", function_call=choice["message"].get("function_call", None), tool_calls=tool_calls, diff --git a/litellm/litellm_core_utils/logging_callback_manager.py b/litellm/litellm_core_utils/logging_callback_manager.py index 860a57c5f6..a20e826c43 100644 --- a/litellm/litellm_core_utils/logging_callback_manager.py +++ b/litellm/litellm_core_utils/logging_callback_manager.py @@ -1,7 +1,8 @@ -from typing import Callable, List, Union +from typing import Callable, List, 
Set, Union import litellm from litellm._logging import verbose_logger +from litellm.integrations.additional_logging_utils import AdditionalLoggingUtils from litellm.integrations.custom_logger import CustomLogger @@ -85,6 +86,21 @@ class LoggingCallbackManager: callback=callback, parent_list=litellm._async_failure_callback ) + def remove_callback_from_list_by_object( + self, callback_list, obj + ): + """ + Remove callbacks that are methods of a particular object (e.g., router cleanup) + """ + if not isinstance(callback_list, list): # Not list -> do nothing + return + + remove_list=[c for c in callback_list if hasattr(c, '__self__') and c.__self__ == obj] + + for c in remove_list: + callback_list.remove(c) + + def _add_string_callback_to_list( self, callback: str, parent_list: List[Union[CustomLogger, Callable, str]] ): @@ -205,3 +221,36 @@ class LoggingCallbackManager: litellm._async_success_callback = [] litellm._async_failure_callback = [] litellm.callbacks = [] + + def _get_all_callbacks(self) -> List[Union[CustomLogger, Callable, str]]: + """ + Get all callbacks from litellm.callbacks, litellm.success_callback, litellm.failure_callback, litellm._async_success_callback, litellm._async_failure_callback + """ + return ( + litellm.callbacks + + litellm.success_callback + + litellm.failure_callback + + litellm._async_success_callback + + litellm._async_failure_callback + ) + + def get_active_additional_logging_utils_from_custom_logger( + self, + ) -> Set[AdditionalLoggingUtils]: + """ + Get all custom loggers that are instances of the given class type + + Args: + class_type: The class type to match against (e.g., AdditionalLoggingUtils) + + Returns: + Set[CustomLogger]: Set of custom loggers that are instances of the given class type + """ + all_callbacks = self._get_all_callbacks() + matched_callbacks: Set[AdditionalLoggingUtils] = set() + for callback in all_callbacks: + if isinstance(callback, CustomLogger) and isinstance( + callback, AdditionalLoggingUtils + ): + matched_callbacks.add(callback) + return matched_callbacks diff --git a/litellm/litellm_core_utils/prompt_templates/factory.py b/litellm/litellm_core_utils/prompt_templates/factory.py index abb508b982..1ed072e086 100644 --- a/litellm/litellm_core_utils/prompt_templates/factory.py +++ b/litellm/litellm_core_utils/prompt_templates/factory.py @@ -1421,6 +1421,8 @@ def anthropic_messages_pt( # noqa: PLR0915 ) user_content.append(_content_element) + elif m.get("type", "") == "document": + user_content.append(cast(AnthropicMessagesDocumentParam, m)) elif isinstance(user_message_types_block["content"], str): _anthropic_content_text_element: AnthropicMessagesTextParam = { "type": "text", diff --git a/litellm/litellm_core_utils/sensitive_data_masker.py b/litellm/litellm_core_utils/sensitive_data_masker.py new file mode 100644 index 0000000000..a1df115ff0 --- /dev/null +++ b/litellm/litellm_core_utils/sensitive_data_masker.py @@ -0,0 +1,81 @@ +from typing import Any, Dict, Optional, Set + + +class SensitiveDataMasker: + def __init__( + self, + sensitive_patterns: Optional[Set[str]] = None, + visible_prefix: int = 4, + visible_suffix: int = 4, + mask_char: str = "*", + ): + self.sensitive_patterns = sensitive_patterns or { + "password", + "secret", + "key", + "token", + "auth", + "credential", + "access", + "private", + "certificate", + } + + self.visible_prefix = visible_prefix + self.visible_suffix = visible_suffix + self.mask_char = mask_char + + def _mask_value(self, value: str) -> str: + if not value or len(str(value)) < 
(self.visible_prefix + self.visible_suffix): + return value + + value_str = str(value) + masked_length = len(value_str) - (self.visible_prefix + self.visible_suffix) + return f"{value_str[:self.visible_prefix]}{self.mask_char * masked_length}{value_str[-self.visible_suffix:]}" + + def is_sensitive_key(self, key: str) -> bool: + key_lower = str(key).lower() + result = any(pattern in key_lower for pattern in self.sensitive_patterns) + return result + + def mask_dict( + self, data: Dict[str, Any], depth: int = 0, max_depth: int = 10 + ) -> Dict[str, Any]: + if depth >= max_depth: + return data + + masked_data: Dict[str, Any] = {} + for k, v in data.items(): + try: + if isinstance(v, dict): + masked_data[k] = self.mask_dict(v, depth + 1) + elif hasattr(v, "__dict__") and not isinstance(v, type): + masked_data[k] = self.mask_dict(vars(v), depth + 1) + elif self.is_sensitive_key(k): + str_value = str(v) if v is not None else "" + masked_data[k] = self._mask_value(str_value) + else: + masked_data[k] = ( + v if isinstance(v, (int, float, bool, str)) else str(v) + ) + except Exception: + masked_data[k] = "" + + return masked_data + + +# Usage example: +""" +masker = SensitiveDataMasker() +data = { + "api_key": "sk-1234567890abcdef", + "redis_password": "very_secret_pass", + "port": 6379 +} +masked = masker.mask_dict(data) +# Result: { +# "api_key": "sk-1****cdef", +# "redis_password": "very****pass", +# "port": 6379 +# } +""" diff --git a/litellm/litellm_core_utils/streaming_handler.py b/litellm/litellm_core_utils/streaming_handler.py index 08356fea73..5e9fb7aa76 100644 --- a/litellm/litellm_core_utils/streaming_handler.py +++ b/litellm/litellm_core_utils/streaming_handler.py @@ -809,7 +809,10 @@ class CustomStreamWrapper: if self.sent_first_chunk is False: completion_obj["role"] = "assistant" self.sent_first_chunk = True - + if response_obj.get("provider_specific_fields") is not None: + completion_obj["provider_specific_fields"] = response_obj[ + "provider_specific_fields" + ] model_response.choices[0].delta = Delta(**completion_obj) _index: Optional[int] = completion_obj.get("index") if _index is not None: diff --git a/litellm/llms/anthropic/chat/handler.py b/litellm/llms/anthropic/chat/handler.py index fdd1d79c7a..c58aa00a10 100644 --- a/litellm/llms/anthropic/chat/handler.py +++ b/litellm/llms/anthropic/chat/handler.py @@ -4,7 +4,7 @@ Calling + translation logic for anthropic's `/v1/messages` endpoint import copy import json -from typing import Any, Callable, List, Optional, Tuple, Union +from typing import Any, Callable, Dict, List, Optional, Tuple, Union import httpx # type: ignore @@ -506,6 +506,29 @@ class ModelResponseIterator: return usage_block + def _content_block_delta_helper(self, chunk: dict): + text = "" + tool_use: Optional[ChatCompletionToolCallChunk] = None + provider_specific_fields = {} + content_block = ContentBlockDelta(**chunk) # type: ignore + self.content_blocks.append(content_block) + if "text" in content_block["delta"]: + text = content_block["delta"]["text"] + elif "partial_json" in content_block["delta"]: + tool_use = { + "id": None, + "type": "function", + "function": { + "name": None, + "arguments": content_block["delta"]["partial_json"], + }, + "index": self.tool_index, + } + elif "citation" in content_block["delta"]: + provider_specific_fields["citation"] = content_block["delta"]["citation"] + + return text, tool_use, provider_specific_fields + def chunk_parser(self, chunk: dict) -> GenericStreamingChunk: try: type_chunk = chunk.get("type", "") or "" @@ -515,6 
+538,7 @@ class ModelResponseIterator: is_finished = False finish_reason = "" usage: Optional[ChatCompletionUsageBlock] = None + provider_specific_fields: Dict[str, Any] = {} index = int(chunk.get("index", 0)) if type_chunk == "content_block_delta": @@ -522,20 +546,9 @@ class ModelResponseIterator: Anthropic content chunk chunk = {'type': 'content_block_delta', 'index': 0, 'delta': {'type': 'text_delta', 'text': 'Hello'}} """ - content_block = ContentBlockDelta(**chunk) # type: ignore - self.content_blocks.append(content_block) - if "text" in content_block["delta"]: - text = content_block["delta"]["text"] - elif "partial_json" in content_block["delta"]: - tool_use = { - "id": None, - "type": "function", - "function": { - "name": None, - "arguments": content_block["delta"]["partial_json"], - }, - "index": self.tool_index, - } + text, tool_use, provider_specific_fields = ( + self._content_block_delta_helper(chunk=chunk) + ) elif type_chunk == "content_block_start": """ event: content_block_start @@ -628,6 +641,9 @@ class ModelResponseIterator: finish_reason=finish_reason, usage=usage, index=index, + provider_specific_fields=( + provider_specific_fields if provider_specific_fields else None + ), ) return returned_chunk diff --git a/litellm/llms/anthropic/chat/transformation.py b/litellm/llms/anthropic/chat/transformation.py index 960b4f95bb..fb2f4dd2c6 100644 --- a/litellm/llms/anthropic/chat/transformation.py +++ b/litellm/llms/anthropic/chat/transformation.py @@ -70,7 +70,7 @@ class AnthropicConfig(BaseConfig): metadata: Optional[dict] = None, system: Optional[str] = None, ) -> None: - locals_ = locals() + locals_ = locals().copy() for key, value in locals_.items(): if key != "self" and value is not None: setattr(self.__class__, key, value) @@ -628,6 +628,7 @@ class AnthropicConfig(BaseConfig): ) else: text_content = "" + citations: List[Any] = [] tool_calls: List[ChatCompletionToolCallChunk] = [] for idx, content in enumerate(completion_response["content"]): if content["type"] == "text": @@ -645,10 +646,14 @@ class AnthropicConfig(BaseConfig): index=idx, ) ) + ## CITATIONS + if content.get("citations", None) is not None: + citations.append(content["citations"]) _message = litellm.Message( tool_calls=tool_calls, content=text_content or None, + provider_specific_fields={"citations": citations}, ) ## HANDLE JSON MODE - anthropic returns single function call diff --git a/litellm/llms/anthropic/completion/transformation.py b/litellm/llms/anthropic/completion/transformation.py index e2510d6a98..7a260b6f94 100644 --- a/litellm/llms/anthropic/completion/transformation.py +++ b/litellm/llms/anthropic/completion/transformation.py @@ -72,7 +72,7 @@ class AnthropicTextConfig(BaseConfig): top_k: Optional[int] = None, metadata: Optional[dict] = None, ) -> None: - locals_ = locals() + locals_ = locals().copy() for key, value in locals_.items(): if key != "self" and value is not None: setattr(self.__class__, key, value) diff --git a/litellm/llms/azure/azure.py b/litellm/llms/azure/azure.py index 6c578e4d8e..5294bd7141 100644 --- a/litellm/llms/azure/azure.py +++ b/litellm/llms/azure/azure.py @@ -5,10 +5,11 @@ import time from typing import Any, Callable, Dict, List, Literal, Optional, Union import httpx # type: ignore -from openai import AsyncAzureOpenAI, AzureOpenAI +from openai import APITimeoutError, AsyncAzureOpenAI, AzureOpenAI import litellm from litellm.caching.caching import DualCache +from litellm.constants import DEFAULT_MAX_RETRIES from litellm.litellm_core_utils.litellm_logging import Logging 
as LiteLLMLoggingObj from litellm.llms.custom_httpx.http_handler import ( AsyncHTTPHandler, @@ -98,14 +99,6 @@ class AzureOpenAIAssistantsAPIConfig: def select_azure_base_url_or_endpoint(azure_client_params: dict): - # azure_client_params = { - # "api_version": api_version, - # "azure_endpoint": api_base, - # "azure_deployment": model, - # "http_client": litellm.client_session, - # "max_retries": max_retries, - # "timeout": timeout, - # } azure_endpoint = azure_client_params.get("azure_endpoint", None) if azure_endpoint is not None: # see : https://github.com/openai/openai-python/blob/3d61ed42aba652b547029095a7eb269ad4e1e957/src/openai/lib/azure.py#L192 @@ -312,6 +305,7 @@ class AzureChatCompletion(BaseLLM): - call chat.completions.create.with_raw_response when litellm.return_response_headers is True - call chat.completions.create by default """ + start_time = time.time() try: raw_response = await azure_client.chat.completions.with_raw_response.create( **data, timeout=timeout @@ -320,6 +314,11 @@ class AzureChatCompletion(BaseLLM): headers = dict(raw_response.headers) response = raw_response.parse() return headers, response + except APITimeoutError as e: + end_time = time.time() + time_delta = round(end_time - start_time, 2) + e.message += f" - timeout value={timeout}, time taken={time_delta} seconds" + raise e except Exception as e: raise e @@ -353,7 +352,9 @@ class AzureChatCompletion(BaseLLM): status_code=422, message="Missing model or messages" ) - max_retries = optional_params.pop("max_retries", 2) + max_retries = optional_params.pop("max_retries", None) + if max_retries is None: + max_retries = DEFAULT_MAX_RETRIES json_mode: Optional[bool] = optional_params.pop("json_mode", False) ### CHECK IF CLOUDFLARE AI GATEWAY ### @@ -415,6 +416,7 @@ class AzureChatCompletion(BaseLLM): azure_ad_token_provider=azure_ad_token_provider, timeout=timeout, client=client, + max_retries=max_retries, ) else: return self.acompletion( @@ -430,6 +432,7 @@ class AzureChatCompletion(BaseLLM): timeout=timeout, client=client, logging_obj=logging_obj, + max_retries=max_retries, convert_tool_call_to_json_mode=json_mode, ) elif "stream" in optional_params and optional_params["stream"] is True: @@ -445,6 +448,7 @@ class AzureChatCompletion(BaseLLM): azure_ad_token_provider=azure_ad_token_provider, timeout=timeout, client=client, + max_retries=max_retries, ) else: ## LOGGING @@ -553,6 +557,7 @@ class AzureChatCompletion(BaseLLM): dynamic_params: bool, model_response: ModelResponse, logging_obj: LiteLLMLoggingObj, + max_retries: int, azure_ad_token: Optional[str] = None, azure_ad_token_provider: Optional[Callable] = None, convert_tool_call_to_json_mode: Optional[bool] = None, @@ -560,12 +565,6 @@ class AzureChatCompletion(BaseLLM): ): response = None try: - max_retries = data.pop("max_retries", 2) - if not isinstance(max_retries, int): - raise AzureOpenAIError( - status_code=422, message="max retries must be an int" - ) - # init AzureOpenAI Client azure_client_params = { "api_version": api_version, @@ -649,6 +648,7 @@ class AzureChatCompletion(BaseLLM): ) raise AzureOpenAIError(status_code=500, message=str(e)) except Exception as e: + message = getattr(e, "message", str(e)) ## LOGGING logging_obj.post_call( input=data["messages"], @@ -659,7 +659,7 @@ class AzureChatCompletion(BaseLLM): if hasattr(e, "status_code"): raise e else: - raise AzureOpenAIError(status_code=500, message=str(e)) + raise AzureOpenAIError(status_code=500, message=message) def streaming( self, @@ -671,15 +671,11 @@ class 
AzureChatCompletion(BaseLLM): data: dict, model: str, timeout: Any, + max_retries: int, azure_ad_token: Optional[str] = None, azure_ad_token_provider: Optional[Callable] = None, client=None, ): - max_retries = data.pop("max_retries", 2) - if not isinstance(max_retries, int): - raise AzureOpenAIError( - status_code=422, message="max retries must be an int" - ) # init AzureOpenAI Client azure_client_params = { "api_version": api_version, @@ -742,6 +738,7 @@ class AzureChatCompletion(BaseLLM): data: dict, model: str, timeout: Any, + max_retries: int, azure_ad_token: Optional[str] = None, azure_ad_token_provider: Optional[Callable] = None, client=None, @@ -753,7 +750,7 @@ class AzureChatCompletion(BaseLLM): "azure_endpoint": api_base, "azure_deployment": model, "http_client": litellm.aclient_session, - "max_retries": data.pop("max_retries", 2), + "max_retries": max_retries, "timeout": timeout, } azure_client_params = select_azure_base_url_or_endpoint( @@ -807,10 +804,11 @@ class AzureChatCompletion(BaseLLM): status_code = getattr(e, "status_code", 500) error_headers = getattr(e, "headers", None) error_response = getattr(e, "response", None) + message = getattr(e, "message", str(e)) if error_headers is None and error_response: error_headers = getattr(error_response, "headers", None) raise AzureOpenAIError( - status_code=status_code, message=str(e), headers=error_headers + status_code=status_code, message=message, headers=error_headers ) async def aembedding( diff --git a/litellm/llms/azure/chat/gpt_transformation.py b/litellm/llms/azure/chat/gpt_transformation.py index 53a7fdd687..b117583bd0 100644 --- a/litellm/llms/azure/chat/gpt_transformation.py +++ b/litellm/llms/azure/chat/gpt_transformation.py @@ -113,6 +113,17 @@ class AzureOpenAIConfig(BaseConfig): return False + def _is_response_format_supported_api_version( + self, api_version_year: str, api_version_month: str + ) -> bool: + """ + - check if api_version is supported for response_format + """ + + is_supported = int(api_version_year) <= 2024 and int(api_version_month) >= 8 + + return is_supported + def map_openai_params( self, non_default_params: dict, @@ -171,13 +182,20 @@ class AzureOpenAIConfig(BaseConfig): _is_response_format_supported_model = ( self._is_response_format_supported_model(model) ) - should_convert_response_format_to_tool = ( - api_version_year <= "2024" and api_version_month < "08" - ) or not _is_response_format_supported_model + + is_response_format_supported_api_version = ( + self._is_response_format_supported_api_version( + api_version_year, api_version_month + ) + ) + is_response_format_supported = ( + is_response_format_supported_api_version + and _is_response_format_supported_model + ) optional_params = self._add_response_format_to_tools( optional_params=optional_params, value=value, - should_convert_response_format_to_tool=should_convert_response_format_to_tool, + is_response_format_supported=is_response_format_supported, ) elif param == "tools" and isinstance(value, list): optional_params.setdefault("tools", []) diff --git a/litellm/llms/azure/completion/handler.py b/litellm/llms/azure/completion/handler.py index 31d634de65..fafa5665bb 100644 --- a/litellm/llms/azure/completion/handler.py +++ b/litellm/llms/azure/completion/handler.py @@ -131,6 +131,7 @@ class AzureTextCompletion(BaseLLM): timeout=timeout, client=client, logging_obj=logging_obj, + max_retries=max_retries, ) elif "stream" in optional_params and optional_params["stream"] is True: return self.streaming( @@ -236,17 +237,12 @@ class 
AzureTextCompletion(BaseLLM): timeout: Any, model_response: ModelResponse, logging_obj: Any, + max_retries: int, azure_ad_token: Optional[str] = None, client=None, # this is the AsyncAzureOpenAI ): response = None try: - max_retries = data.pop("max_retries", 2) - if not isinstance(max_retries, int): - raise AzureOpenAIError( - status_code=422, message="max retries must be an int" - ) - # init AzureOpenAI Client azure_client_params = { "api_version": api_version, diff --git a/litellm/llms/base_llm/base_utils.py b/litellm/llms/base_llm/base_utils.py index ac3d2c81f9..a7e65cdfbf 100644 --- a/litellm/llms/base_llm/base_utils.py +++ b/litellm/llms/base_llm/base_utils.py @@ -34,6 +34,17 @@ class BaseLLMModelInfo(ABC): def get_api_base(api_base: Optional[str] = None) -> Optional[str]: pass + @staticmethod + @abstractmethod + def get_base_model(model: str) -> Optional[str]: + """ + Returns the base model name from the given model name. + + Some providers like bedrock - can receive model=`invoke/anthropic.claude-3-opus-20240229-v1:0` or `converse/anthropic.claude-3-opus-20240229-v1:0` + This function will return `anthropic.claude-3-opus-20240229-v1:0` + """ + pass + def _dict_to_response_format_helper( response_format: dict, ref_template: Optional[str] = None diff --git a/litellm/llms/base_llm/chat/transformation.py b/litellm/llms/base_llm/chat/transformation.py index 1004cc9012..9d3778ed68 100644 --- a/litellm/llms/base_llm/chat/transformation.py +++ b/litellm/llms/base_llm/chat/transformation.py @@ -20,6 +20,7 @@ from pydantic import BaseModel from litellm._logging import verbose_logger from litellm.constants import RESPONSE_FORMAT_TOOL_NAME +from litellm.llms.custom_httpx.http_handler import AsyncHTTPHandler, HTTPHandler from litellm.types.llms.openai import ( AllMessageValues, ChatCompletionToolChoiceFunctionParam, @@ -27,9 +28,6 @@ from litellm.types.llms.openai import ( ChatCompletionToolParam, ChatCompletionToolParamFunctionChunk, ) - -from litellm.llms.custom_httpx.http_handler import AsyncHTTPHandler, HTTPHandler - from litellm.types.utils import ModelResponse from litellm.utils import CustomStreamWrapper @@ -163,7 +161,7 @@ class BaseConfig(ABC): self, optional_params: dict, value: dict, - should_convert_response_format_to_tool: bool, + is_response_format_supported: bool, ) -> dict: """ Follow similar approach to anthropic - translate to a single tool call. 
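# --- Illustrative aside (not part of the patch) -----------------------------------
# A minimal sketch of the translation the docstring above describes: when the
# target API version / model cannot honor `response_format` natively
# (`is_response_format_supported` is False), the JSON schema is carried as a single
# forced tool call instead of being sent as `response_format`. The plain-dict shapes
# and the "json_tool_call" name below are assumptions for illustration only; the
# actual helper builds ChatCompletionToolParam / ChatCompletionToolChoiceObjectParam
# using RESPONSE_FORMAT_TOOL_NAME from litellm.constants.
def sketch_response_format_to_tool(value: dict, is_response_format_supported: bool) -> dict:
    optional_params: dict = {}
    json_schema = None
    if "response_schema" in value:
        json_schema = value["response_schema"]
    elif "json_schema" in value:
        json_schema = value["json_schema"]["schema"]

    if json_schema and not is_response_format_supported:
        # carry the schema as a single tool and force the model to call it
        optional_params["tools"] = [
            {
                "type": "function",
                "function": {"name": "json_tool_call", "parameters": json_schema},
            }
        ]
        optional_params["tool_choice"] = {
            "type": "function",
            "function": {"name": "json_tool_call"},
        }
    else:
        # natively supported -> pass response_format through unchanged
        optional_params["response_format"] = value
    return optional_params
# -----------------------------------------------------------------------------------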
@@ -183,7 +181,8 @@ class BaseConfig(ABC): elif "json_schema" in value: json_schema = value["json_schema"]["schema"] - if json_schema and should_convert_response_format_to_tool: + if json_schema and not is_response_format_supported: + _tool_choice = ChatCompletionToolChoiceObjectParam( type="function", function=ChatCompletionToolChoiceFunctionParam( diff --git a/litellm/llms/bedrock/base_aws_llm.py b/litellm/llms/bedrock/base_aws_llm.py index 94ed1ed48f..7b04b2c02a 100644 --- a/litellm/llms/bedrock/base_aws_llm.py +++ b/litellm/llms/bedrock/base_aws_llm.py @@ -52,6 +52,7 @@ class BaseAWSLLM: "aws_role_name", "aws_web_identity_token", "aws_sts_endpoint", + "aws_bedrock_runtime_endpoint", ] def get_cache_key(self, credential_args: Dict[str, Optional[str]]) -> str: diff --git a/litellm/llms/bedrock/chat/converse_transformation.py b/litellm/llms/bedrock/chat/converse_transformation.py index 60527381f6..548e6f690a 100644 --- a/litellm/llms/bedrock/chat/converse_transformation.py +++ b/litellm/llms/bedrock/chat/converse_transformation.py @@ -33,14 +33,7 @@ from litellm.types.llms.openai import ( from litellm.types.utils import ModelResponse, Usage from litellm.utils import add_dummy_tool, has_tool_call_blocks -from ..common_utils import ( - AmazonBedrockGlobalConfig, - BedrockError, - get_bedrock_tool_name, -) - -global_config = AmazonBedrockGlobalConfig() -all_global_regions = global_config.get_all_regions() +from ..common_utils import BedrockError, BedrockModelInfo, get_bedrock_tool_name class AmazonConverseConfig(BaseConfig): @@ -63,7 +56,7 @@ class AmazonConverseConfig(BaseConfig): topP: Optional[int] = None, topK: Optional[int] = None, ) -> None: - locals_ = locals() + locals_ = locals().copy() for key, value in locals_.items(): if key != "self" and value is not None: setattr(self.__class__, key, value) @@ -104,7 +97,7 @@ class AmazonConverseConfig(BaseConfig): ] ## Filter out 'cross-region' from model name - base_model = self._get_base_model(model) + base_model = BedrockModelInfo.get_base_model(model) if ( base_model.startswith("anthropic") @@ -341,9 +334,9 @@ class AmazonConverseConfig(BaseConfig): if "top_k" in inference_params: inference_params["topK"] = inference_params.pop("top_k") return InferenceConfig(**inference_params) - + def _handle_top_k_value(self, model: str, inference_params: dict) -> dict: - base_model = self._get_base_model(model) + base_model = BedrockModelInfo.get_base_model(model) val_top_k = None if "topK" in inference_params: @@ -352,11 +345,11 @@ class AmazonConverseConfig(BaseConfig): val_top_k = inference_params.pop("top_k") if val_top_k: - if (base_model.startswith("anthropic")): + if base_model.startswith("anthropic"): return {"top_k": val_top_k} if base_model.startswith("amazon.nova"): - return {'inferenceConfig': {"topK": val_top_k}} - + return {"inferenceConfig": {"topK": val_top_k}} + return {} def _transform_request_helper( @@ -393,15 +386,25 @@ class AmazonConverseConfig(BaseConfig): ) + ["top_k"] supported_tool_call_params = ["tools", "tool_choice"] supported_guardrail_params = ["guardrailConfig"] - total_supported_params = supported_converse_params + supported_tool_call_params + supported_guardrail_params + total_supported_params = ( + supported_converse_params + + supported_tool_call_params + + supported_guardrail_params + ) inference_params.pop("json_mode", None) # used for handling json_schema # keep supported params in 'inference_params', and set all model-specific params in 'additional_request_params' - additional_request_params = {k: v for k, v in 
inference_params.items() if k not in total_supported_params} - inference_params = {k: v for k, v in inference_params.items() if k in total_supported_params} + additional_request_params = { + k: v for k, v in inference_params.items() if k not in total_supported_params + } + inference_params = { + k: v for k, v in inference_params.items() if k in total_supported_params + } # Only set the topK value in for models that support it - additional_request_params.update(self._handle_top_k_value(model, inference_params)) + additional_request_params.update( + self._handle_top_k_value(model, inference_params) + ) bedrock_tools: List[ToolBlock] = _bedrock_tools_pt( inference_params.pop("tools", []) @@ -679,41 +682,6 @@ class AmazonConverseConfig(BaseConfig): return model_response - def _supported_cross_region_inference_region(self) -> List[str]: - """ - Abbreviations of regions AWS Bedrock supports for cross region inference - """ - return ["us", "eu", "apac"] - - def _get_base_model(self, model: str) -> str: - """ - Get the base model from the given model name. - - Handle model names like - "us.meta.llama3-2-11b-instruct-v1:0" -> "meta.llama3-2-11b-instruct-v1" - AND "meta.llama3-2-11b-instruct-v1:0" -> "meta.llama3-2-11b-instruct-v1" - """ - - if model.startswith("bedrock/"): - model = model.split("/", 1)[1] - - if model.startswith("converse/"): - model = model.split("/", 1)[1] - - potential_region = model.split(".", 1)[0] - - alt_potential_region = model.split("/", 1)[ - 0 - ] # in model cost map we store regional information like `/us-west-2/bedrock-model` - - if potential_region in self._supported_cross_region_inference_region(): - return model.split(".", 1)[1] - elif ( - alt_potential_region in all_global_regions and len(model.split("/", 1)) > 1 - ): - return model.split("/", 1)[1] - - return model - def get_error_class( self, error_message: str, status_code: int, headers: Union[dict, httpx.Headers] ) -> BaseLLMException: diff --git a/litellm/llms/bedrock/chat/invoke_handler.py b/litellm/llms/bedrock/chat/invoke_handler.py index 42b29120b1..43fdc061e7 100644 --- a/litellm/llms/bedrock/chat/invoke_handler.py +++ b/litellm/llms/bedrock/chat/invoke_handler.py @@ -40,6 +40,9 @@ from litellm.litellm_core_utils.prompt_templates.factory import ( parse_xml_params, prompt_factory, ) +from litellm.llms.anthropic.chat.handler import ( + ModelResponseIterator as AnthropicModelResponseIterator, +) from litellm.llms.custom_httpx.http_handler import ( AsyncHTTPHandler, HTTPHandler, @@ -103,7 +106,7 @@ class AmazonCohereChatConfig: stop_sequences: Optional[str] = None, raw_prompting: Optional[bool] = None, ) -> None: - locals_ = locals() + locals_ = locals().copy() for key, value in locals_.items(): if key != "self" and value is not None: setattr(self.__class__, key, value) @@ -177,6 +180,7 @@ async def make_call( logging_obj: Logging, fake_stream: bool = False, json_mode: Optional[bool] = False, + bedrock_invoke_provider: Optional[litellm.BEDROCK_INVOKE_PROVIDERS_LITERAL] = None, ): try: if client is None: @@ -214,6 +218,14 @@ async def make_call( completion_stream: Any = MockResponseIterator( model_response=model_response, json_mode=json_mode ) + elif bedrock_invoke_provider == "anthropic": + decoder: AWSEventStreamDecoder = AmazonAnthropicClaudeStreamDecoder( + model=model, + sync_stream=False, + ) + completion_stream = decoder.aiter_bytes( + response.aiter_bytes(chunk_size=1024) + ) else: decoder = AWSEventStreamDecoder(model=model) completion_stream = decoder.aiter_bytes( @@ -248,6 +260,7 @@ def 
make_sync_call( logging_obj: Logging, fake_stream: bool = False, json_mode: Optional[bool] = False, + bedrock_invoke_provider: Optional[litellm.BEDROCK_INVOKE_PROVIDERS_LITERAL] = None, ): try: if client is None: @@ -283,6 +296,12 @@ def make_sync_call( completion_stream: Any = MockResponseIterator( model_response=model_response, json_mode=json_mode ) + elif bedrock_invoke_provider == "anthropic": + decoder: AWSEventStreamDecoder = AmazonAnthropicClaudeStreamDecoder( + model=model, + sync_stream=True, + ) + completion_stream = decoder.iter_bytes(response.iter_bytes(chunk_size=1024)) else: decoder = AWSEventStreamDecoder(model=model) completion_stream = decoder.iter_bytes(response.iter_bytes(chunk_size=1024)) @@ -1323,7 +1342,7 @@ class AWSEventStreamDecoder: text = chunk_data.get("completions")[0].get("data").get("text") # type: ignore is_finished = True finish_reason = "stop" - ######## bedrock.anthropic mappings ############### + ######## /bedrock/converse mappings ############### elif ( "contentBlockIndex" in chunk_data or "stopReason" in chunk_data @@ -1331,6 +1350,11 @@ class AWSEventStreamDecoder: or "trace" in chunk_data ): return self.converse_chunk_parser(chunk_data=chunk_data) + ######### /bedrock/invoke nova mappings ############### + elif "contentBlockDelta" in chunk_data: + # when using /bedrock/invoke/nova, the chunk_data is nested under "contentBlockDelta" + _chunk_data = chunk_data.get("contentBlockDelta", None) + return self.converse_chunk_parser(chunk_data=_chunk_data) ######## bedrock.mistral mappings ############### elif "outputs" in chunk_data: if ( @@ -1429,6 +1453,27 @@ class AWSEventStreamDecoder: return chunk.decode() # type: ignore[no-any-return] +class AmazonAnthropicClaudeStreamDecoder(AWSEventStreamDecoder): + def __init__( + self, + model: str, + sync_stream: bool, + ) -> None: + """ + Child class of AWSEventStreamDecoder that handles the streaming response from the Anthropic family of models + + The only difference between AWSEventStreamDecoder and AmazonAnthropicClaudeStreamDecoder is the `chunk_parser` method + """ + super().__init__(model=model) + self.anthropic_model_response_iterator = AnthropicModelResponseIterator( + streaming_response=None, + sync_stream=sync_stream, + ) + + def _chunk_parser(self, chunk_data: dict) -> GChunk: + return self.anthropic_model_response_iterator.chunk_parser(chunk=chunk_data) + + class MockResponseIterator: # for returning ai21 streaming responses def __init__(self, model_response, json_mode: Optional[bool] = False): self.model_response = model_response diff --git a/litellm/llms/bedrock/chat/invoke_transformations/amazon_ai21_transformation.py b/litellm/llms/bedrock/chat/invoke_transformations/amazon_ai21_transformation.py index 48e21ce602..50fa6f170b 100644 --- a/litellm/llms/bedrock/chat/invoke_transformations/amazon_ai21_transformation.py +++ b/litellm/llms/bedrock/chat/invoke_transformations/amazon_ai21_transformation.py @@ -46,7 +46,7 @@ class AmazonAI21Config(AmazonInvokeConfig, BaseConfig): presencePenalty: Optional[dict] = None, countPenalty: Optional[dict] = None, ) -> None: - locals_ = locals() + locals_ = locals().copy() for key, value in locals_.items(): if key != "self" and value is not None: setattr(self.__class__, key, value) diff --git a/litellm/llms/bedrock/chat/invoke_transformations/amazon_cohere_transformation.py b/litellm/llms/bedrock/chat/invoke_transformations/amazon_cohere_transformation.py index f276e390b2..e9479c8f32 100644 --- 
a/litellm/llms/bedrock/chat/invoke_transformations/amazon_cohere_transformation.py +++ b/litellm/llms/bedrock/chat/invoke_transformations/amazon_cohere_transformation.py @@ -28,7 +28,7 @@ class AmazonCohereConfig(AmazonInvokeConfig, BaseConfig): temperature: Optional[float] = None, return_likelihood: Optional[str] = None, ) -> None: - locals_ = locals() + locals_ = locals().copy() for key, value in locals_.items(): if key != "self" and value is not None: setattr(self.__class__, key, value) diff --git a/litellm/llms/bedrock/chat/invoke_transformations/amazon_llama_transformation.py b/litellm/llms/bedrock/chat/invoke_transformations/amazon_llama_transformation.py index f45e49672b..9f84844fcb 100644 --- a/litellm/llms/bedrock/chat/invoke_transformations/amazon_llama_transformation.py +++ b/litellm/llms/bedrock/chat/invoke_transformations/amazon_llama_transformation.py @@ -28,7 +28,7 @@ class AmazonLlamaConfig(AmazonInvokeConfig, BaseConfig): temperature: Optional[float] = None, topP: Optional[int] = None, ) -> None: - locals_ = locals() + locals_ = locals().copy() for key, value in locals_.items(): if key != "self" and value is not None: setattr(self.__class__, key, value) diff --git a/litellm/llms/bedrock/chat/invoke_transformations/amazon_mistral_transformation.py b/litellm/llms/bedrock/chat/invoke_transformations/amazon_mistral_transformation.py index 761fab7465..ef3c237f9d 100644 --- a/litellm/llms/bedrock/chat/invoke_transformations/amazon_mistral_transformation.py +++ b/litellm/llms/bedrock/chat/invoke_transformations/amazon_mistral_transformation.py @@ -33,7 +33,7 @@ class AmazonMistralConfig(AmazonInvokeConfig, BaseConfig): top_k: Optional[float] = None, stop: Optional[List[str]] = None, ) -> None: - locals_ = locals() + locals_ = locals().copy() for key, value in locals_.items(): if key != "self" and value is not None: setattr(self.__class__, key, value) diff --git a/litellm/llms/bedrock/chat/invoke_transformations/amazon_nova_transformation.py b/litellm/llms/bedrock/chat/invoke_transformations/amazon_nova_transformation.py new file mode 100644 index 0000000000..9d41beceff --- /dev/null +++ b/litellm/llms/bedrock/chat/invoke_transformations/amazon_nova_transformation.py @@ -0,0 +1,70 @@ +""" +Handles transforming requests for `bedrock/invoke/{nova} models` + +Inherits from `AmazonConverseConfig` + +Nova + Invoke API Tutorial: https://docs.aws.amazon.com/nova/latest/userguide/using-invoke-api.html +""" + +from typing import List + +import litellm +from litellm.types.llms.bedrock import BedrockInvokeNovaRequest +from litellm.types.llms.openai import AllMessageValues + + +class AmazonInvokeNovaConfig(litellm.AmazonConverseConfig): + """ + Config for sending `nova` requests to `/bedrock/invoke/` + """ + + def __init__(self, **kwargs): + super().__init__(**kwargs) + + def transform_request( + self, + model: str, + messages: List[AllMessageValues], + optional_params: dict, + litellm_params: dict, + headers: dict, + ) -> dict: + _transformed_nova_request = super().transform_request( + model=model, + messages=messages, + optional_params=optional_params, + litellm_params=litellm_params, + headers=headers, + ) + _bedrock_invoke_nova_request = BedrockInvokeNovaRequest( + **_transformed_nova_request + ) + self._remove_empty_system_messages(_bedrock_invoke_nova_request) + bedrock_invoke_nova_request = self._filter_allowed_fields( + _bedrock_invoke_nova_request + ) + return bedrock_invoke_nova_request + + def _filter_allowed_fields( + self, bedrock_invoke_nova_request: BedrockInvokeNovaRequest + ) 
-> dict: + """ + Filter out fields that are not allowed in the `BedrockInvokeNovaRequest` dataclass. + """ + allowed_fields = set(BedrockInvokeNovaRequest.__annotations__.keys()) + return { + k: v for k, v in bedrock_invoke_nova_request.items() if k in allowed_fields + } + + def _remove_empty_system_messages( + self, bedrock_invoke_nova_request: BedrockInvokeNovaRequest + ) -> None: + """ + In-place remove empty `system` messages from the request. + + /bedrock/invoke/ does not allow empty `system` messages. + """ + _system_message = bedrock_invoke_nova_request.get("system", None) + if isinstance(_system_message, list) and len(_system_message) == 0: + bedrock_invoke_nova_request.pop("system", None) + return diff --git a/litellm/llms/bedrock/chat/invoke_transformations/amazon_titan_transformation.py b/litellm/llms/bedrock/chat/invoke_transformations/amazon_titan_transformation.py index e16946f3ed..367fb84d1a 100644 --- a/litellm/llms/bedrock/chat/invoke_transformations/amazon_titan_transformation.py +++ b/litellm/llms/bedrock/chat/invoke_transformations/amazon_titan_transformation.py @@ -33,7 +33,7 @@ class AmazonTitanConfig(AmazonInvokeConfig, BaseConfig): temperature: Optional[float] = None, topP: Optional[int] = None, ) -> None: - locals_ = locals() + locals_ = locals().copy() for key, value in locals_.items(): if key != "self" and value is not None: setattr(self.__class__, key, value) diff --git a/litellm/llms/bedrock/chat/invoke_transformations/anthropic_claude2_transformation.py b/litellm/llms/bedrock/chat/invoke_transformations/anthropic_claude2_transformation.py index 5f86c22529..085cf0b9ca 100644 --- a/litellm/llms/bedrock/chat/invoke_transformations/anthropic_claude2_transformation.py +++ b/litellm/llms/bedrock/chat/invoke_transformations/anthropic_claude2_transformation.py @@ -34,7 +34,7 @@ class AmazonAnthropicConfig: top_p: Optional[int] = None, anthropic_version: Optional[str] = None, ) -> None: - locals_ = locals() + locals_ = locals().copy() for key, value in locals_.items(): if key != "self" and value is not None: setattr(self.__class__, key, value) diff --git a/litellm/llms/bedrock/chat/invoke_transformations/anthropic_claude3_transformation.py b/litellm/llms/bedrock/chat/invoke_transformations/anthropic_claude3_transformation.py index b227eb8223..09842aef01 100644 --- a/litellm/llms/bedrock/chat/invoke_transformations/anthropic_claude3_transformation.py +++ b/litellm/llms/bedrock/chat/invoke_transformations/anthropic_claude3_transformation.py @@ -1,61 +1,34 @@ -import types -from typing import List, Optional +from typing import TYPE_CHECKING, Any, List, Optional + +import httpx + +import litellm +from litellm.llms.bedrock.chat.invoke_transformations.base_invoke_transformation import ( + AmazonInvokeConfig, +) +from litellm.types.llms.openai import AllMessageValues +from litellm.types.utils import ModelResponse + +if TYPE_CHECKING: + from litellm.litellm_core_utils.litellm_logging import Logging as _LiteLLMLoggingObj + + LiteLLMLoggingObj = _LiteLLMLoggingObj +else: + LiteLLMLoggingObj = Any -class AmazonAnthropicClaude3Config: +class AmazonAnthropicClaude3Config(AmazonInvokeConfig): """ Reference: https://us-west-2.console.aws.amazon.com/bedrock/home?region=us-west-2#/providers?model=claude https://docs.anthropic.com/claude/docs/models-overview#model-comparison Supported Params for the Amazon / Anthropic Claude 3 models: - - - `max_tokens` Required (integer) max tokens. Default is 4096 - - `anthropic_version` Required (string) version of anthropic for bedrock - e.g. 
"bedrock-2023-05-31" - - `system` Optional (string) the system prompt, conversion from openai format to this is handled in factory.py - - `temperature` Optional (float) The amount of randomness injected into the response - - `top_p` Optional (float) Use nucleus sampling. - - `top_k` Optional (int) Only sample from the top K options for each subsequent token - - `stop_sequences` Optional (List[str]) Custom text sequences that cause the model to stop generating """ - max_tokens: Optional[int] = 4096 # Opus, Sonnet, and Haiku default - anthropic_version: Optional[str] = "bedrock-2023-05-31" - system: Optional[str] = None - temperature: Optional[float] = None - top_p: Optional[float] = None - top_k: Optional[int] = None - stop_sequences: Optional[List[str]] = None + anthropic_version: str = "bedrock-2023-05-31" - def __init__( - self, - max_tokens: Optional[int] = None, - anthropic_version: Optional[str] = None, - ) -> None: - locals_ = locals() - for key, value in locals_.items(): - if key != "self" and value is not None: - setattr(self.__class__, key, value) - - @classmethod - def get_config(cls): - return { - k: v - for k, v in cls.__dict__.items() - if not k.startswith("__") - and not isinstance( - v, - ( - types.FunctionType, - types.BuiltinFunctionType, - classmethod, - staticmethod, - ), - ) - and v is not None - } - - def get_supported_openai_params(self): + def get_supported_openai_params(self, model: str): return [ "max_tokens", "max_completion_tokens", @@ -68,7 +41,13 @@ class AmazonAnthropicClaude3Config: "extra_headers", ] - def map_openai_params(self, non_default_params: dict, optional_params: dict): + def map_openai_params( + self, + non_default_params: dict, + optional_params: dict, + model: str, + drop_params: bool, + ): for param, value in non_default_params.items(): if param == "max_tokens" or param == "max_completion_tokens": optional_params["max_tokens"] = value @@ -83,3 +62,53 @@ class AmazonAnthropicClaude3Config: if param == "top_p": optional_params["top_p"] = value return optional_params + + def transform_request( + self, + model: str, + messages: List[AllMessageValues], + optional_params: dict, + litellm_params: dict, + headers: dict, + ) -> dict: + _anthropic_request = litellm.AnthropicConfig().transform_request( + model=model, + messages=messages, + optional_params=optional_params, + litellm_params=litellm_params, + headers=headers, + ) + + _anthropic_request.pop("model", None) + if "anthropic_version" not in _anthropic_request: + _anthropic_request["anthropic_version"] = self.anthropic_version + + return _anthropic_request + + def transform_response( + self, + model: str, + raw_response: httpx.Response, + model_response: ModelResponse, + logging_obj: LiteLLMLoggingObj, + request_data: dict, + messages: List[AllMessageValues], + optional_params: dict, + litellm_params: dict, + encoding: Any, + api_key: Optional[str] = None, + json_mode: Optional[bool] = None, + ) -> ModelResponse: + return litellm.AnthropicConfig().transform_response( + model=model, + raw_response=raw_response, + model_response=model_response, + logging_obj=logging_obj, + request_data=request_data, + messages=messages, + optional_params=optional_params, + litellm_params=litellm_params, + encoding=encoding, + api_key=api_key, + json_mode=json_mode, + ) diff --git a/litellm/llms/bedrock/chat/invoke_transformations/base_invoke_transformation.py b/litellm/llms/bedrock/chat/invoke_transformations/base_invoke_transformation.py index fbcd7660b2..5eb006f6ca 100644 --- 
a/litellm/llms/bedrock/chat/invoke_transformations/base_invoke_transformation.py +++ b/litellm/llms/bedrock/chat/invoke_transformations/base_invoke_transformation.py @@ -2,22 +2,18 @@ import copy import json import time import urllib.parse -import uuid from functools import partial from typing import TYPE_CHECKING, Any, List, Optional, Tuple, Union, cast, get_args import httpx import litellm +from litellm._logging import verbose_logger from litellm.litellm_core_utils.core_helpers import map_finish_reason from litellm.litellm_core_utils.logging_utils import track_llm_api_timing from litellm.litellm_core_utils.prompt_templates.factory import ( cohere_message_pt, - construct_tool_use_system_prompt, - contains_tag, custom_prompt, - extract_between_tags, - parse_xml_params, prompt_factory, ) from litellm.llms.base_llm.chat.transformation import BaseConfig, BaseLLMException @@ -91,7 +87,7 @@ class AmazonInvokeConfig(BaseConfig, BaseAWSLLM): optional_params=optional_params, ) ### SET RUNTIME ENDPOINT ### - aws_bedrock_runtime_endpoint = optional_params.pop( + aws_bedrock_runtime_endpoint = optional_params.get( "aws_bedrock_runtime_endpoint", None ) # https://bedrock-runtime.{region_name}.amazonaws.com endpoint_url, proxy_endpoint_url = self.get_runtime_endpoint( @@ -129,15 +125,15 @@ class AmazonInvokeConfig(BaseConfig, BaseAWSLLM): ## CREDENTIALS ## # pop aws_secret_access_key, aws_access_key_id, aws_session_token, aws_region_name from kwargs, since completion calls fail with them - extra_headers = optional_params.pop("extra_headers", None) - aws_secret_access_key = optional_params.pop("aws_secret_access_key", None) - aws_access_key_id = optional_params.pop("aws_access_key_id", None) - aws_session_token = optional_params.pop("aws_session_token", None) - aws_role_name = optional_params.pop("aws_role_name", None) - aws_session_name = optional_params.pop("aws_session_name", None) - aws_profile_name = optional_params.pop("aws_profile_name", None) - aws_web_identity_token = optional_params.pop("aws_web_identity_token", None) - aws_sts_endpoint = optional_params.pop("aws_sts_endpoint", None) + extra_headers = optional_params.get("extra_headers", None) + aws_secret_access_key = optional_params.get("aws_secret_access_key", None) + aws_access_key_id = optional_params.get("aws_access_key_id", None) + aws_session_token = optional_params.get("aws_session_token", None) + aws_role_name = optional_params.get("aws_role_name", None) + aws_session_name = optional_params.get("aws_session_name", None) + aws_profile_name = optional_params.get("aws_profile_name", None) + aws_web_identity_token = optional_params.get("aws_web_identity_token", None) + aws_sts_endpoint = optional_params.get("aws_sts_endpoint", None) aws_region_name = self._get_aws_region_name(optional_params) credentials: Credentials = self.get_credentials( @@ -171,7 +167,7 @@ class AmazonInvokeConfig(BaseConfig, BaseAWSLLM): return dict(request.headers) - def transform_request( # noqa: PLR0915 + def transform_request( self, model: str, messages: List[AllMessageValues], @@ -194,7 +190,6 @@ class AmazonInvokeConfig(BaseConfig, BaseAWSLLM): for k, v in inference_params.items() if k not in self.aws_authentication_params } - json_schemas: dict = {} request_data: dict = {} if provider == "cohere": if model.startswith("cohere.command-r"): @@ -223,57 +218,21 @@ class AmazonInvokeConfig(BaseConfig, BaseAWSLLM): ) request_data = {"prompt": prompt, **inference_params} elif provider == "anthropic": - if model.startswith("anthropic.claude-3"): - # Separate system 
prompt from rest of message - system_prompt_idx: list[int] = [] - system_messages: list[str] = [] - for idx, message in enumerate(messages): - if message["role"] == "system" and isinstance( - message["content"], str - ): - system_messages.append(message["content"]) - system_prompt_idx.append(idx) - if len(system_prompt_idx) > 0: - inference_params["system"] = "\n".join(system_messages) - messages = [ - i for j, i in enumerate(messages) if j not in system_prompt_idx - ] - # Format rest of message according to anthropic guidelines - messages = prompt_factory( - model=model, messages=messages, custom_llm_provider="anthropic_xml" - ) # type: ignore - ## LOAD CONFIG - config = litellm.AmazonAnthropicClaude3Config.get_config() - for k, v in config.items(): - if ( - k not in inference_params - ): # completion(top_k=3) > anthropic_config(top_k=3) <- allows for dynamic variables to be passed in - inference_params[k] = v - ## Handle Tool Calling - if "tools" in inference_params: - _is_function_call = True - for tool in inference_params["tools"]: - json_schemas[tool["function"]["name"]] = tool["function"].get( - "parameters", None - ) - tool_calling_system_prompt = construct_tool_use_system_prompt( - tools=inference_params["tools"] - ) - inference_params["system"] = ( - inference_params.get("system", "\n") - + tool_calling_system_prompt - ) # add the anthropic tool calling prompt to the system prompt - inference_params.pop("tools") - request_data = {"messages": messages, **inference_params} - else: - ## LOAD CONFIG - config = litellm.AmazonAnthropicConfig.get_config() - for k, v in config.items(): - if ( - k not in inference_params - ): # completion(top_k=3) > anthropic_config(top_k=3) <- allows for dynamic variables to be passed in - inference_params[k] = v - request_data = {"prompt": prompt, **inference_params} + return litellm.AmazonAnthropicClaude3Config().transform_request( + model=model, + messages=messages, + optional_params=optional_params, + litellm_params=litellm_params, + headers=headers, + ) + elif provider == "nova": + return litellm.AmazonInvokeNovaConfig().transform_request( + model=model, + messages=messages, + optional_params=optional_params, + litellm_params=litellm_params, + headers=headers, + ) elif provider == "ai21": ## LOAD CONFIG config = litellm.AmazonAI21Config.get_config() @@ -347,6 +306,10 @@ class AmazonInvokeConfig(BaseConfig, BaseAWSLLM): raise BedrockError( message=raw_response.text, status_code=raw_response.status_code ) + verbose_logger.debug( + "bedrock invoke response % s", + json.dumps(completion_response, indent=4, default=str), + ) provider = self.get_bedrock_invoke_provider(model) outputText: Optional[str] = None try: @@ -359,66 +322,31 @@ class AmazonInvokeConfig(BaseConfig, BaseAWSLLM): completion_response["generations"][0]["finish_reason"] ) elif provider == "anthropic": - if model.startswith("anthropic.claude-3"): - json_schemas: dict = {} - _is_function_call = False - ## Handle Tool Calling - if "tools" in optional_params: - _is_function_call = True - for tool in optional_params["tools"]: - json_schemas[tool["function"]["name"]] = tool[ - "function" - ].get("parameters", None) - outputText = completion_response.get("content")[0].get("text", None) - if outputText is not None and contains_tag( - "invoke", outputText - ): # OUTPUT PARSE FUNCTION CALL - function_name = extract_between_tags("tool_name", outputText)[0] - function_arguments_str = extract_between_tags( - "invoke", outputText - )[0].strip() - function_arguments_str = ( - f"{function_arguments_str}" 
- ) - function_arguments = parse_xml_params( - function_arguments_str, - json_schema=json_schemas.get( - function_name, None - ), # check if we have a json schema for this function name) - ) - _message = litellm.Message( - tool_calls=[ - { - "id": f"call_{uuid.uuid4()}", - "type": "function", - "function": { - "name": function_name, - "arguments": json.dumps(function_arguments), - }, - } - ], - content=None, - ) - model_response.choices[0].message = _message # type: ignore - model_response._hidden_params["original_response"] = ( - outputText # allow user to access raw anthropic tool calling response - ) - model_response.choices[0].finish_reason = map_finish_reason( - completion_response.get("stop_reason", "") - ) - _usage = litellm.Usage( - prompt_tokens=completion_response["usage"]["input_tokens"], - completion_tokens=completion_response["usage"]["output_tokens"], - total_tokens=completion_response["usage"]["input_tokens"] - + completion_response["usage"]["output_tokens"], - ) - setattr(model_response, "usage", _usage) - else: - outputText = completion_response["completion"] - - model_response.choices[0].finish_reason = completion_response[ - "stop_reason" - ] + return litellm.AmazonAnthropicClaude3Config().transform_response( + model=model, + raw_response=raw_response, + model_response=model_response, + logging_obj=logging_obj, + request_data=request_data, + messages=messages, + optional_params=optional_params, + litellm_params=litellm_params, + encoding=encoding, + api_key=api_key, + json_mode=json_mode, + ) + elif provider == "nova": + return litellm.AmazonInvokeNovaConfig().transform_response( + model=model, + raw_response=raw_response, + model_response=model_response, + logging_obj=logging_obj, + request_data=request_data, + messages=messages, + optional_params=optional_params, + litellm_params=litellm_params, + encoding=encoding, + ) elif provider == "ai21": outputText = ( completion_response.get("completions")[0].get("data").get("text") @@ -536,6 +464,7 @@ class AmazonInvokeConfig(BaseConfig, BaseAWSLLM): messages=messages, logging_obj=logging_obj, fake_stream=True if "ai21" in api_base else False, + bedrock_invoke_provider=self.get_bedrock_invoke_provider(model), ), model=model, custom_llm_provider="bedrock", @@ -569,6 +498,7 @@ class AmazonInvokeConfig(BaseConfig, BaseAWSLLM): messages=messages, logging_obj=logging_obj, fake_stream=True if "ai21" in api_base else False, + bedrock_invoke_provider=self.get_bedrock_invoke_provider(model), ), model=model, custom_llm_provider="bedrock", @@ -594,10 +524,15 @@ class AmazonInvokeConfig(BaseConfig, BaseAWSLLM): """ Helper function to get the bedrock provider from the model - handles 2 scenarions: - 1. model=anthropic.claude-3-5-sonnet-20240620-v1:0 -> Returns `anthropic` - 2. model=llama/arn:aws:bedrock:us-east-1:086734376398:imported-model/r4c4kewx2s0n -> Returns `llama` + handles 3 scenarions: + 1. model=invoke/anthropic.claude-3-5-sonnet-20240620-v1:0 -> Returns `anthropic` + 2. model=anthropic.claude-3-5-sonnet-20240620-v1:0 -> Returns `anthropic` + 3. model=llama/arn:aws:bedrock:us-east-1:086734376398:imported-model/r4c4kewx2s0n -> Returns `llama` + 4. 
model=us.amazon.nova-pro-v1:0 -> Returns `nova` """ + if model.startswith("invoke/"): + model = model.replace("invoke/", "", 1) + _split_model = model.split(".")[0] if _split_model in get_args(litellm.BEDROCK_INVOKE_PROVIDERS_LITERAL): return cast(litellm.BEDROCK_INVOKE_PROVIDERS_LITERAL, _split_model) @@ -606,6 +541,10 @@ class AmazonInvokeConfig(BaseConfig, BaseAWSLLM): provider = AmazonInvokeConfig._get_provider_from_model_path(model) if provider is not None: return provider + + # check if provider == "nova" + if "nova" in model: + return "nova" return None @staticmethod @@ -640,16 +579,16 @@ class AmazonInvokeConfig(BaseConfig, BaseAWSLLM): else: modelId = model + modelId = modelId.replace("invoke/", "", 1) if provider == "llama" and "llama/" in modelId: modelId = self._get_model_id_for_llama_like_model(modelId) - return modelId def _get_aws_region_name(self, optional_params: dict) -> str: """ Get the AWS region name from the environment variables """ - aws_region_name = optional_params.pop("aws_region_name", None) + aws_region_name = optional_params.get("aws_region_name", None) ### SET REGION NAME ### if aws_region_name is None: # check env # diff --git a/litellm/llms/bedrock/common_utils.py b/litellm/llms/bedrock/common_utils.py index deed2124c4..8a534f6eac 100644 --- a/litellm/llms/bedrock/common_utils.py +++ b/litellm/llms/bedrock/common_utils.py @@ -3,11 +3,12 @@ Common utilities used across bedrock chat/embedding/image generation """ import os -from typing import List, Optional, Union +from typing import List, Literal, Optional, Union import httpx import litellm +from litellm.llms.base_llm.base_utils import BaseLLMModelInfo from litellm.llms.base_llm.chat.transformation import BaseLLMException from litellm.secret_managers.main import get_secret @@ -310,3 +311,68 @@ def get_bedrock_tool_name(response_tool_name: str) -> str: response_tool_name ] return response_tool_name + + +class BedrockModelInfo(BaseLLMModelInfo): + + global_config = AmazonBedrockGlobalConfig() + all_global_regions = global_config.get_all_regions() + + @staticmethod + def get_base_model(model: str) -> str: + """ + Get the base model from the given model name. + + Handle model names like - "us.meta.llama3-2-11b-instruct-v1:0" -> "meta.llama3-2-11b-instruct-v1" + AND "meta.llama3-2-11b-instruct-v1:0" -> "meta.llama3-2-11b-instruct-v1" + """ + if model.startswith("bedrock/"): + model = model.split("/", 1)[1] + + if model.startswith("converse/"): + model = model.split("/", 1)[1] + + if model.startswith("invoke/"): + model = model.split("/", 1)[1] + + potential_region = model.split(".", 1)[0] + + alt_potential_region = model.split("/", 1)[ + 0 + ] # in model cost map we store regional information like `/us-west-2/bedrock-model` + + if ( + potential_region + in BedrockModelInfo._supported_cross_region_inference_region() + ): + return model.split(".", 1)[1] + elif ( + alt_potential_region in BedrockModelInfo.all_global_regions + and len(model.split("/", 1)) > 1 + ): + return model.split("/", 1)[1] + + return model + + @staticmethod + def _supported_cross_region_inference_region() -> List[str]: + """ + Abbreviations of regions AWS Bedrock supports for cross region inference + """ + return ["us", "eu", "apac"] + + @staticmethod + def get_bedrock_route(model: str) -> Literal["converse", "invoke", "converse_like"]: + """ + Get the bedrock route for the given model. 
+ """ + base_model = BedrockModelInfo.get_base_model(model) + if "invoke/" in model: + return "invoke" + elif "converse_like" in model: + return "converse_like" + elif "converse/" in model: + return "converse" + elif base_model in litellm.bedrock_converse_models: + return "converse" + return "invoke" diff --git a/litellm/llms/bedrock/embed/amazon_titan_g1_transformation.py b/litellm/llms/bedrock/embed/amazon_titan_g1_transformation.py index 63219868f4..2747551af8 100644 --- a/litellm/llms/bedrock/embed/amazon_titan_g1_transformation.py +++ b/litellm/llms/bedrock/embed/amazon_titan_g1_transformation.py @@ -27,7 +27,7 @@ class AmazonTitanG1Config: def __init__( self, ) -> None: - locals_ = locals() + locals_ = locals().copy() for key, value in locals_.items(): if key != "self" and value is not None: setattr(self.__class__, key, value) diff --git a/litellm/llms/bedrock/embed/amazon_titan_v2_transformation.py b/litellm/llms/bedrock/embed/amazon_titan_v2_transformation.py index 8244a9a334..a68bc6962c 100644 --- a/litellm/llms/bedrock/embed/amazon_titan_v2_transformation.py +++ b/litellm/llms/bedrock/embed/amazon_titan_v2_transformation.py @@ -33,7 +33,7 @@ class AmazonTitanV2Config: def __init__( self, normalize: Optional[bool] = None, dimensions: Optional[int] = None ) -> None: - locals_ = locals() + locals_ = locals().copy() for key, value in locals_.items(): if key != "self" and value is not None: setattr(self.__class__, key, value) diff --git a/litellm/llms/bedrock/image/amazon_stability1_transformation.py b/litellm/llms/bedrock/image/amazon_stability1_transformation.py index 880881e971..698ecca94b 100644 --- a/litellm/llms/bedrock/image/amazon_stability1_transformation.py +++ b/litellm/llms/bedrock/image/amazon_stability1_transformation.py @@ -49,7 +49,7 @@ class AmazonStabilityConfig: width: Optional[int] = None, height: Optional[int] = None, ) -> None: - locals_ = locals() + locals_ = locals().copy() for key, value in locals_.items(): if key != "self" and value is not None: setattr(self.__class__, key, value) diff --git a/litellm/llms/clarifai/chat/transformation.py b/litellm/llms/clarifai/chat/transformation.py index 299dd8637c..916da73883 100644 --- a/litellm/llms/clarifai/chat/transformation.py +++ b/litellm/llms/clarifai/chat/transformation.py @@ -45,7 +45,7 @@ class ClarifaiConfig(BaseConfig): temperature: Optional[int] = None, top_k: Optional[int] = None, ) -> None: - locals_ = locals() + locals_ = locals().copy() for key, value in locals_.items(): if key != "self" and value is not None: setattr(self.__class__, key, value) diff --git a/litellm/llms/cloudflare/chat/transformation.py b/litellm/llms/cloudflare/chat/transformation.py index ba1e0697ed..1ef6da5a4b 100644 --- a/litellm/llms/cloudflare/chat/transformation.py +++ b/litellm/llms/cloudflare/chat/transformation.py @@ -44,7 +44,7 @@ class CloudflareChatConfig(BaseConfig): max_tokens: Optional[int] = None, stream: Optional[bool] = None, ) -> None: - locals_ = locals() + locals_ = locals().copy() for key, value in locals_.items(): if key != "self" and value is not None: setattr(self.__class__, key, value) diff --git a/litellm/llms/cohere/chat/transformation.py b/litellm/llms/cohere/chat/transformation.py index 1d68735224..3ceec2dbba 100644 --- a/litellm/llms/cohere/chat/transformation.py +++ b/litellm/llms/cohere/chat/transformation.py @@ -104,7 +104,7 @@ class CohereChatConfig(BaseConfig): tool_results: Optional[list] = None, seed: Optional[int] = None, ) -> None: - locals_ = locals() + locals_ = locals().copy() for key, value 
in locals_.items(): if key != "self" and value is not None: setattr(self.__class__, key, value) diff --git a/litellm/llms/cohere/completion/transformation.py b/litellm/llms/cohere/completion/transformation.py index 7c01523571..bdfcda020e 100644 --- a/litellm/llms/cohere/completion/transformation.py +++ b/litellm/llms/cohere/completion/transformation.py @@ -86,7 +86,7 @@ class CohereTextConfig(BaseConfig): return_likelihoods: Optional[str] = None, logit_bias: Optional[dict] = None, ) -> None: - locals_ = locals() + locals_ = locals().copy() for key, value in locals_.items(): if key != "self" and value is not None: setattr(self.__class__, key, value) diff --git a/litellm/llms/custom_httpx/http_handler.py b/litellm/llms/custom_httpx/http_handler.py index 517cad25b0..736b85dc53 100644 --- a/litellm/llms/custom_httpx/http_handler.py +++ b/litellm/llms/custom_httpx/http_handler.py @@ -1,5 +1,6 @@ import asyncio import os +import time from typing import TYPE_CHECKING, Any, Callable, List, Mapping, Optional, Union import httpx @@ -179,6 +180,7 @@ class AsyncHTTPHandler: stream: bool = False, logging_obj: Optional[LiteLLMLoggingObject] = None, ): + start_time = time.time() try: if timeout is None: timeout = self.timeout @@ -207,6 +209,8 @@ class AsyncHTTPHandler: finally: await new_client.aclose() except httpx.TimeoutException as e: + end_time = time.time() + time_delta = round(end_time - start_time, 3) headers = {} error_response = getattr(e, "response", None) if error_response is not None: @@ -214,7 +218,7 @@ class AsyncHTTPHandler: headers["response_headers-{}".format(key)] = value raise litellm.Timeout( - message=f"Connection timed out after {timeout} seconds.", + message=f"Connection timed out. Timeout passed={timeout}, time taken={time_delta} seconds", model="default-model-name", llm_provider="litellm-httpx-handler", headers=headers, diff --git a/litellm/llms/databricks/chat/transformation.py b/litellm/llms/databricks/chat/transformation.py index 7e5c1f6c23..94e0203459 100644 --- a/litellm/llms/databricks/chat/transformation.py +++ b/litellm/llms/databricks/chat/transformation.py @@ -37,7 +37,7 @@ class DatabricksConfig(OpenAILikeChatConfig): stop: Optional[Union[List[str], str]] = None, n: Optional[int] = None, ) -> None: - locals_ = locals() + locals_ = locals().copy() for key, value in locals_.items(): if key != "self" and value is not None: setattr(self.__class__, key, value) diff --git a/litellm/llms/databricks/embed/transformation.py b/litellm/llms/databricks/embed/transformation.py index 8c7e119714..53e3b30dd2 100644 --- a/litellm/llms/databricks/embed/transformation.py +++ b/litellm/llms/databricks/embed/transformation.py @@ -16,7 +16,7 @@ class DatabricksEmbeddingConfig: ) def __init__(self, instruction: Optional[str] = None) -> None: - locals_ = locals() + locals_ = locals().copy() for key, value in locals_.items(): if key != "self" and value is not None: setattr(self.__class__, key, value) diff --git a/litellm/llms/deprecated_providers/aleph_alpha.py b/litellm/llms/deprecated_providers/aleph_alpha.py index a4c5d155f4..81ad134641 100644 --- a/litellm/llms/deprecated_providers/aleph_alpha.py +++ b/litellm/llms/deprecated_providers/aleph_alpha.py @@ -145,7 +145,7 @@ class AlephAlphaConfig: contextual_control_threshold: Optional[int] = None, control_log_additive: Optional[bool] = None, ) -> None: - locals_ = locals() + locals_ = locals().copy() for key, value in locals_.items(): if key != "self" and value is not None: setattr(self.__class__, key, value) diff --git 
a/litellm/llms/deprecated_providers/palm.py b/litellm/llms/deprecated_providers/palm.py index 4afc952a51..3039222c0e 100644 --- a/litellm/llms/deprecated_providers/palm.py +++ b/litellm/llms/deprecated_providers/palm.py @@ -63,7 +63,7 @@ class PalmConfig: top_p: Optional[float] = None, max_output_tokens: Optional[int] = None, ) -> None: - locals_ = locals() + locals_ = locals().copy() for key, value in locals_.items(): if key != "self" and value is not None: setattr(self.__class__, key, value) diff --git a/litellm/llms/gemini/chat/transformation.py b/litellm/llms/gemini/chat/transformation.py index 313bb99af7..6aa4cf5b52 100644 --- a/litellm/llms/gemini/chat/transformation.py +++ b/litellm/llms/gemini/chat/transformation.py @@ -57,7 +57,7 @@ class GoogleAIStudioGeminiConfig(VertexGeminiConfig): candidate_count: Optional[int] = None, stop_sequences: Optional[list] = None, ) -> None: - locals_ = locals() + locals_ = locals().copy() for key, value in locals_.items(): if key != "self" and value is not None: setattr(self.__class__, key, value) diff --git a/litellm/llms/huggingface/chat/transformation.py b/litellm/llms/huggingface/chat/transformation.py index 2f9824b677..858fda473e 100644 --- a/litellm/llms/huggingface/chat/transformation.py +++ b/litellm/llms/huggingface/chat/transformation.py @@ -77,7 +77,7 @@ class HuggingfaceChatConfig(BaseConfig): typical_p: Optional[float] = None, watermark: Optional[bool] = None, ) -> None: - locals_ = locals() + locals_ = locals().copy() for key, value in locals_.items(): if key != "self" and value is not None: setattr(self.__class__, key, value) diff --git a/litellm/llms/infinity/rerank/transformation.py b/litellm/llms/infinity/rerank/transformation.py index 2d34e5299a..f8bc02fe01 100644 --- a/litellm/llms/infinity/rerank/transformation.py +++ b/litellm/llms/infinity/rerank/transformation.py @@ -20,6 +20,15 @@ from .common_utils import InfinityError class InfinityRerankConfig(CohereRerankConfig): + def get_complete_url(self, api_base: Optional[str], model: str) -> str: + if api_base is None: + raise ValueError("api_base is required for Infinity rerank") + # Remove trailing slashes and ensure clean base URL + api_base = api_base.rstrip("/") + if not api_base.endswith("/rerank"): + api_base = f"{api_base}/rerank" + return api_base + def validate_environment( self, headers: dict, diff --git a/litellm/llms/jina_ai/embedding/transformation.py b/litellm/llms/jina_ai/embedding/transformation.py index a8fca20100..5263be900f 100644 --- a/litellm/llms/jina_ai/embedding/transformation.py +++ b/litellm/llms/jina_ai/embedding/transformation.py @@ -21,7 +21,7 @@ class JinaAIEmbeddingConfig: def __init__( self, ) -> None: - locals_ = locals() + locals_ = locals().copy() for key, value in locals_.items(): if key != "self" and value is not None: setattr(self.__class__, key, value) diff --git a/litellm/llms/lm_studio/embed/transformation.py b/litellm/llms/lm_studio/embed/transformation.py index 5ef121ea7a..1285550c30 100644 --- a/litellm/llms/lm_studio/embed/transformation.py +++ b/litellm/llms/lm_studio/embed/transformation.py @@ -18,7 +18,7 @@ class LmStudioEmbeddingConfig: def __init__( self, ) -> None: - locals_ = locals() + locals_ = locals().copy() for key, value in locals_.items(): if key != "self" and value is not None: setattr(self.__class__, key, value) diff --git a/litellm/llms/maritalk.py b/litellm/llms/maritalk.py index 62fa0113eb..5f2b8d71bc 100644 --- a/litellm/llms/maritalk.py +++ b/litellm/llms/maritalk.py @@ -33,7 +33,7 @@ class 
MaritalkConfig(OpenAIGPTConfig): tools: Optional[List[dict]] = None, tool_choice: Optional[Union[str, dict]] = None, ) -> None: - locals_ = locals() + locals_ = locals().copy() for key, value in locals_.items(): if key != "self" and value is not None: setattr(self.__class__, key, value) diff --git a/litellm/llms/nlp_cloud/chat/transformation.py b/litellm/llms/nlp_cloud/chat/transformation.py index 35ced50242..b7967249ab 100644 --- a/litellm/llms/nlp_cloud/chat/transformation.py +++ b/litellm/llms/nlp_cloud/chat/transformation.py @@ -78,7 +78,7 @@ class NLPCloudConfig(BaseConfig): num_beams: Optional[int] = None, num_return_sequences: Optional[int] = None, ) -> None: - locals_ = locals() + locals_ = locals().copy() for key, value in locals_.items(): if key != "self" and value is not None: setattr(self.__class__, key, value) diff --git a/litellm/llms/nvidia_nim/embed.py b/litellm/llms/nvidia_nim/embed.py index bf5d4d4ae6..24c6cc34e4 100644 --- a/litellm/llms/nvidia_nim/embed.py +++ b/litellm/llms/nvidia_nim/embed.py @@ -32,7 +32,7 @@ class NvidiaNimEmbeddingConfig: input_type: Optional[str] = None, truncate: Optional[str] = None, ) -> None: - locals_ = locals() + locals_ = locals().copy() for key, value in locals_.items(): if key != "self" and value is not None: setattr(self.__class__, key, value) @@ -58,7 +58,7 @@ class NvidiaNimEmbeddingConfig: def get_supported_openai_params( self, ): - return ["encoding_format", "user"] + return ["encoding_format", "user", "dimensions"] def map_openai_params( self, @@ -73,6 +73,8 @@ class NvidiaNimEmbeddingConfig: optional_params["extra_body"].update({"input_type": v}) elif k == "truncate": optional_params["extra_body"].update({"truncate": v}) + else: + optional_params[k] = v if kwargs is not None: # pass kwargs in extra_body diff --git a/litellm/llms/ollama/completion/transformation.py b/litellm/llms/ollama/completion/transformation.py index fcd198b01a..da981b6afb 100644 --- a/litellm/llms/ollama/completion/transformation.py +++ b/litellm/llms/ollama/completion/transformation.py @@ -117,7 +117,7 @@ class OllamaConfig(BaseConfig): system: Optional[str] = None, template: Optional[str] = None, ) -> None: - locals_ = locals() + locals_ = locals().copy() for key, value in locals_.items(): if key != "self" and value is not None: setattr(self.__class__, key, value) diff --git a/litellm/llms/ollama_chat.py b/litellm/llms/ollama_chat.py index 38fe549ca6..1047012c2e 100644 --- a/litellm/llms/ollama_chat.py +++ b/litellm/llms/ollama_chat.py @@ -105,7 +105,7 @@ class OllamaChatConfig(OpenAIGPTConfig): system: Optional[str] = None, template: Optional[str] = None, ) -> None: - locals_ = locals() + locals_ = locals().copy() for key, value in locals_.items(): if key != "self" and value is not None: setattr(self.__class__, key, value) diff --git a/litellm/llms/openai/chat/gpt_transformation.py b/litellm/llms/openai/chat/gpt_transformation.py index 98c3254da4..84a57bbaa6 100644 --- a/litellm/llms/openai/chat/gpt_transformation.py +++ b/litellm/llms/openai/chat/gpt_transformation.py @@ -344,6 +344,10 @@ class OpenAIGPTConfig(BaseLLMModelInfo, BaseConfig): or "https://api.openai.com/v1" ) + @staticmethod + def get_base_model(model: str) -> str: + return model + def get_model_response_iterator( self, streaming_response: Union[Iterator[str], AsyncIterator[str], ModelResponse], diff --git a/litellm/llms/openai/chat/o_series_transformation.py b/litellm/llms/openai/chat/o_series_transformation.py index d643b07680..3cc05b3c95 100644 --- 
a/litellm/llms/openai/chat/o_series_transformation.py +++ b/litellm/llms/openai/chat/o_series_transformation.py @@ -54,7 +54,7 @@ class OpenAIOSeriesConfig(OpenAIGPTConfig): if model is None: return True - supported_stream_models = ["o1-mini", "o1-preview"] + supported_stream_models = ["o1-mini", "o1-preview", "o3-mini"] for supported_model in supported_stream_models: if supported_model in model: return False diff --git a/litellm/llms/openai/openai.py b/litellm/llms/openai/openai.py index 82b9c9ba38..5465a24945 100644 --- a/litellm/llms/openai/openai.py +++ b/litellm/llms/openai/openai.py @@ -27,6 +27,7 @@ from typing_extensions import overload import litellm from litellm import LlmProviders from litellm._logging import verbose_logger +from litellm.constants import DEFAULT_MAX_RETRIES from litellm.litellm_core_utils.litellm_logging import Logging as LiteLLMLoggingObj from litellm.litellm_core_utils.logging_utils import track_llm_api_timing from litellm.llms.base_llm.base_model_iterator import BaseModelResponseIterator @@ -320,6 +321,17 @@ class OpenAIChatCompletion(BaseLLM): def __init__(self) -> None: super().__init__() + def _set_dynamic_params_on_client( + self, + client: Union[OpenAI, AsyncOpenAI], + organization: Optional[str] = None, + max_retries: Optional[int] = None, + ): + if organization is not None: + client.organization = organization + if max_retries is not None: + client.max_retries = max_retries + def _get_openai_client( self, is_async: bool, @@ -327,11 +339,10 @@ class OpenAIChatCompletion(BaseLLM): api_base: Optional[str] = None, api_version: Optional[str] = None, timeout: Union[float, httpx.Timeout] = httpx.Timeout(None), - max_retries: Optional[int] = 2, + max_retries: Optional[int] = DEFAULT_MAX_RETRIES, organization: Optional[str] = None, client: Optional[Union[OpenAI, AsyncOpenAI]] = None, ): - args = locals() if client is None: if not isinstance(max_retries, int): raise OpenAIError( @@ -364,7 +375,6 @@ class OpenAIChatCompletion(BaseLLM): organization=organization, ) else: - _new_client = OpenAI( api_key=api_key, base_url=api_base, @@ -383,6 +393,11 @@ class OpenAIChatCompletion(BaseLLM): return _new_client else: + self._set_dynamic_params_on_client( + client=client, + organization=organization, + max_retries=max_retries, + ) return client @track_llm_api_timing() diff --git a/litellm/llms/petals/completion/transformation.py b/litellm/llms/petals/completion/transformation.py index dec3f69416..08ec15de33 100644 --- a/litellm/llms/petals/completion/transformation.py +++ b/litellm/llms/petals/completion/transformation.py @@ -58,7 +58,7 @@ class PetalsConfig(BaseConfig): top_p: Optional[float] = None, repetition_penalty: Optional[float] = None, ) -> None: - locals_ = locals() + locals_ = locals().copy() for key, value in locals_.items(): if key != "self" and value is not None: setattr(self.__class__, key, value) diff --git a/litellm/llms/predibase/chat/transformation.py b/litellm/llms/predibase/chat/transformation.py index b9ca0ff693..f574238696 100644 --- a/litellm/llms/predibase/chat/transformation.py +++ b/litellm/llms/predibase/chat/transformation.py @@ -59,7 +59,7 @@ class PredibaseConfig(BaseConfig): typical_p: Optional[float] = None, watermark: Optional[bool] = None, ) -> None: - locals_ = locals() + locals_ = locals().copy() for key, value in locals_.items(): if key != "self" and value is not None: setattr(self.__class__, key, value) diff --git a/litellm/llms/replicate/chat/transformation.py b/litellm/llms/replicate/chat/transformation.py index 
310193ea66..e9934dada8 100644 --- a/litellm/llms/replicate/chat/transformation.py +++ b/litellm/llms/replicate/chat/transformation.py @@ -73,7 +73,7 @@ class ReplicateConfig(BaseConfig): seed: Optional[int] = None, debug: Optional[bool] = None, ) -> None: - locals_ = locals() + locals_ = locals().copy() for key, value in locals_.items(): if key != "self" and value is not None: setattr(self.__class__, key, value) diff --git a/litellm/llms/sagemaker/completion/transformation.py b/litellm/llms/sagemaker/completion/transformation.py index 4ee4d2ce6a..d0ab5d0697 100644 --- a/litellm/llms/sagemaker/completion/transformation.py +++ b/litellm/llms/sagemaker/completion/transformation.py @@ -47,7 +47,7 @@ class SagemakerConfig(BaseConfig): temperature: Optional[float] = None, return_full_text: Optional[bool] = None, ) -> None: - locals_ = locals() + locals_ = locals().copy() for key, value in locals_.items(): if key != "self" and value is not None: setattr(self.__class__, key, value) diff --git a/litellm/llms/topaz/common_utils.py b/litellm/llms/topaz/common_utils.py index fc3c69a750..4ef2315db4 100644 --- a/litellm/llms/topaz/common_utils.py +++ b/litellm/llms/topaz/common_utils.py @@ -29,3 +29,7 @@ class TopazModelInfo(BaseLLMModelInfo): return ( api_base or get_secret_str("TOPAZ_API_BASE") or "https://api.topazlabs.com" ) + + @staticmethod + def get_base_model(model: str) -> str: + return model diff --git a/litellm/llms/vertex_ai/gemini/vertex_and_google_ai_studio_gemini.py b/litellm/llms/vertex_ai/gemini/vertex_and_google_ai_studio_gemini.py index 294c815016..dff63ce148 100644 --- a/litellm/llms/vertex_ai/gemini/vertex_and_google_ai_studio_gemini.py +++ b/litellm/llms/vertex_ai/gemini/vertex_and_google_ai_studio_gemini.py @@ -179,7 +179,7 @@ class VertexGeminiConfig(VertexAIBaseConfig, BaseConfig): presence_penalty: Optional[float] = None, seed: Optional[int] = None, ) -> None: - locals_ = locals() + locals_ = locals().copy() for key, value in locals_.items(): if key != "self" and value is not None: setattr(self.__class__, key, value) diff --git a/litellm/llms/vertex_ai/vertex_ai_partner_models/ai21/transformation.py b/litellm/llms/vertex_ai/vertex_ai_partner_models/ai21/transformation.py index 7ddd1cf89f..d87b2e0311 100644 --- a/litellm/llms/vertex_ai/vertex_ai_partner_models/ai21/transformation.py +++ b/litellm/llms/vertex_ai/vertex_ai_partner_models/ai21/transformation.py @@ -17,7 +17,7 @@ class VertexAIAi21Config: self, max_tokens: Optional[int] = None, ) -> None: - locals_ = locals() + locals_ = locals().copy() for key, value in locals_.items(): if key != "self" and value is not None: setattr(self.__class__, key, value) diff --git a/litellm/llms/vertex_ai/vertex_ai_partner_models/llama3/transformation.py b/litellm/llms/vertex_ai/vertex_ai_partner_models/llama3/transformation.py index 331d378c84..0278d19806 100644 --- a/litellm/llms/vertex_ai/vertex_ai_partner_models/llama3/transformation.py +++ b/litellm/llms/vertex_ai/vertex_ai_partner_models/llama3/transformation.py @@ -21,7 +21,7 @@ class VertexAILlama3Config: self, max_tokens: Optional[int] = None, ) -> None: - locals_ = locals() + locals_ = locals().copy() for key, value in locals_.items(): if key == "max_tokens" and value is None: value = self.max_tokens diff --git a/litellm/llms/vertex_ai/vertex_embeddings/transformation.py b/litellm/llms/vertex_ai/vertex_embeddings/transformation.py index 41eb65be69..0e9c073f8d 100644 --- a/litellm/llms/vertex_ai/vertex_embeddings/transformation.py +++ 
b/litellm/llms/vertex_ai/vertex_embeddings/transformation.py @@ -48,7 +48,7 @@ class VertexAITextEmbeddingConfig(BaseModel): ] = None, title: Optional[str] = None, ) -> None: - locals_ = locals() + locals_ = locals().copy() for key, value in locals_.items(): if key != "self" and value is not None: setattr(self.__class__, key, value) diff --git a/litellm/llms/watsonx/completion/transformation.py b/litellm/llms/watsonx/completion/transformation.py index 7e6a8a525d..ebebbde021 100644 --- a/litellm/llms/watsonx/completion/transformation.py +++ b/litellm/llms/watsonx/completion/transformation.py @@ -108,7 +108,7 @@ class IBMWatsonXAIConfig(IBMWatsonXMixin, BaseConfig): stream: Optional[bool] = None, **kwargs, ) -> None: - locals_ = locals() + locals_ = locals().copy() for key, value in locals_.items(): if key != "self" and value is not None: setattr(self.__class__, key, value) diff --git a/litellm/main.py b/litellm/main.py index 403691464f..8326140fab 100644 --- a/litellm/main.py +++ b/litellm/main.py @@ -68,6 +68,7 @@ from litellm.litellm_core_utils.prompt_templates.common_utils import ( get_content_from_model_response, ) from litellm.llms.base_llm.chat.transformation import BaseConfig +from litellm.llms.bedrock.common_utils import BedrockModelInfo from litellm.llms.custom_httpx.http_handler import AsyncHTTPHandler, HTTPHandler from litellm.realtime_api.main import _realtime_health_check from litellm.secret_managers.main import get_secret_str @@ -1222,6 +1223,8 @@ def completion( # type: ignore # noqa: PLR0915 if extra_headers is not None: optional_params["extra_headers"] = extra_headers + if max_retries is not None: + optional_params["max_retries"] = max_retries if litellm.AzureOpenAIO1Config().is_o_series_model(model=model): @@ -2626,11 +2629,8 @@ def completion( # type: ignore # noqa: PLR0915 aws_bedrock_client.meta.region_name ) - base_model = litellm.AmazonConverseConfig()._get_base_model(model) - - if base_model in litellm.bedrock_converse_models or model.startswith( - "converse/" - ): + bedrock_route = BedrockModelInfo.get_bedrock_route(model) + if bedrock_route == "converse": model = model.replace("converse/", "") response = bedrock_converse_chat_completion.completion( model=model, @@ -2649,7 +2649,7 @@ def completion( # type: ignore # noqa: PLR0915 client=client, api_base=api_base, ) - elif "converse_like" in model: + elif bedrock_route == "converse_like": model = model.replace("converse_like/", "") response = base_llm_http_handler.completion( model=model, @@ -3947,6 +3947,7 @@ async def atext_completion( ), model=model, custom_llm_provider=custom_llm_provider, + stream_options=kwargs.get('stream_options'), ) else: ## OpenAI / Azure Text Completion Returns here diff --git a/litellm/model_prices_and_context_window_backup.json b/litellm/model_prices_and_context_window_backup.json index 71650cd42a..612e5ad1a1 100644 --- a/litellm/model_prices_and_context_window_backup.json +++ b/litellm/model_prices_and_context_window_backup.json @@ -1069,6 +1069,21 @@ "supports_prompt_caching": true, "supports_tool_choice": true }, + "azure/o1-2024-12-17": { + "max_tokens": 100000, + "max_input_tokens": 200000, + "max_output_tokens": 100000, + "input_cost_per_token": 0.000015, + "output_cost_per_token": 0.000060, + "cache_read_input_token_cost": 0.0000075, + "litellm_provider": "azure", + "mode": "chat", + "supports_function_calling": true, + "supports_parallel_function_calling": true, + "supports_vision": true, + "supports_prompt_caching": true, + "supports_tool_choice": true + }, "azure/o1-preview": { 
"max_tokens": 32768, "max_input_tokens": 128000, @@ -1397,6 +1412,19 @@ "deprecation_date": "2025-03-31", "supports_tool_choice": true }, + "azure/gpt-3.5-turbo-0125": { + "max_tokens": 4096, + "max_input_tokens": 16384, + "max_output_tokens": 4096, + "input_cost_per_token": 0.0000005, + "output_cost_per_token": 0.0000015, + "litellm_provider": "azure", + "mode": "chat", + "supports_function_calling": true, + "supports_parallel_function_calling": true, + "deprecation_date": "2025-03-31", + "supports_tool_choice": true + }, "azure/gpt-35-turbo-16k": { "max_tokens": 4096, "max_input_tokens": 16385, @@ -1418,6 +1446,17 @@ "supports_function_calling": true, "supports_tool_choice": true }, + "azure/gpt-3.5-turbo": { + "max_tokens": 4096, + "max_input_tokens": 4097, + "max_output_tokens": 4096, + "input_cost_per_token": 0.0000005, + "output_cost_per_token": 0.0000015, + "litellm_provider": "azure", + "mode": "chat", + "supports_function_calling": true, + "supports_tool_choice": true + }, "azure/gpt-3.5-turbo-instruct-0914": { "max_tokens": 4097, "max_input_tokens": 4097, @@ -2174,11 +2213,11 @@ "max_tokens": 8192, "max_input_tokens": 65536, "max_output_tokens": 8192, - "input_cost_per_token": 0.00000014, - "input_cost_per_token_cache_hit": 0.000000014, - "cache_read_input_token_cost": 0.000000014, + "input_cost_per_token": 0.00000027, + "input_cost_per_token_cache_hit": 0.00000007, + "cache_read_input_token_cost": 0.00000007, "cache_creation_input_token_cost": 0.0, - "output_cost_per_token": 0.00000028, + "output_cost_per_token": 0.0000011, "litellm_provider": "deepseek", "mode": "chat", "supports_function_calling": true, @@ -3650,9 +3689,34 @@ "supports_vision": true, "supports_response_schema": true, "supports_audio_output": true, - "source": "https://cloud.google.com/vertex-ai/generative-ai/docs/learn/models#gemini-2.0-flash", + "source": "https://cloud.google.com/vertex-ai/generative-ai/pricing", "supports_tool_choice": true }, + "gemini/gemini-2.0-flash": { + "max_tokens": 8192, + "max_input_tokens": 1048576, + "max_output_tokens": 8192, + "max_images_per_prompt": 3000, + "max_videos_per_prompt": 10, + "max_video_length": 1, + "max_audio_length_hours": 8.4, + "max_audio_per_prompt": 1, + "max_pdf_size_mb": 30, + "input_cost_per_audio_token": 0.0000007, + "input_cost_per_token": 0.0000001, + "output_cost_per_token": 0.0000004, + "litellm_provider": "gemini", + "mode": "chat", + "rpm": 10000, + "tpm": 10000000, + "supports_system_messages": true, + "supports_function_calling": true, + "supports_vision": true, + "supports_response_schema": true, + "supports_audio_output": true, + "supports_tool_choice": true, + "source": "https://ai.google.dev/pricing#2_0flash" + }, "gemini-2.0-flash-001": { "max_tokens": 8192, "max_input_tokens": 1048576, @@ -3663,9 +3727,9 @@ "max_audio_length_hours": 8.4, "max_audio_per_prompt": 1, "max_pdf_size_mb": 30, - "input_cost_per_audio_token": 0.001, - "input_cost_per_token": 0.00015, - "output_cost_per_token": 0.0006, + "input_cost_per_audio_token": 0.000001, + "input_cost_per_token": 0.00000015, + "output_cost_per_token": 0.0000006, "litellm_provider": "vertex_ai-language-models", "mode": "chat", "supports_system_messages": true, @@ -3674,7 +3738,7 @@ "supports_response_schema": true, "supports_audio_output": true, "supports_tool_choice": true, - "source": "https://cloud.google.com/vertex-ai/generative-ai/docs/learn/models#gemini-2.0-flash" + "source": "https://cloud.google.com/vertex-ai/generative-ai/pricing" }, "gemini-2.0-flash-thinking-exp": { "max_tokens": 
8192, @@ -3744,6 +3808,31 @@ "source": "https://cloud.google.com/vertex-ai/generative-ai/docs/learn/models#gemini-2.0-flash", "supports_tool_choice": true }, + "gemini/gemini-2.0-flash-001": { + "max_tokens": 8192, + "max_input_tokens": 1048576, + "max_output_tokens": 8192, + "max_images_per_prompt": 3000, + "max_videos_per_prompt": 10, + "max_video_length": 1, + "max_audio_length_hours": 8.4, + "max_audio_per_prompt": 1, + "max_pdf_size_mb": 30, + "input_cost_per_audio_token": 0.0000007, + "input_cost_per_token": 0.0000001, + "output_cost_per_token": 0.0000004, + "litellm_provider": "gemini", + "mode": "chat", + "rpm": 10000, + "tpm": 10000000, + "supports_system_messages": true, + "supports_function_calling": true, + "supports_vision": true, + "supports_response_schema": true, + "supports_audio_output": false, + "supports_tool_choice": true, + "source": "https://ai.google.dev/pricing#2_0flash" + }, "gemini/gemini-2.0-flash-exp": { "max_tokens": 8192, "max_input_tokens": 1048576, @@ -3780,6 +3869,31 @@ "source": "https://cloud.google.com/vertex-ai/generative-ai/docs/learn/models#gemini-2.0-flash", "supports_tool_choice": true }, + "gemini/gemini-2.0-flash-lite-preview-02-05": { + "max_tokens": 8192, + "max_input_tokens": 1048576, + "max_output_tokens": 8192, + "max_images_per_prompt": 3000, + "max_videos_per_prompt": 10, + "max_video_length": 1, + "max_audio_length_hours": 8.4, + "max_audio_per_prompt": 1, + "max_pdf_size_mb": 30, + "input_cost_per_audio_token": 0.000000075, + "input_cost_per_token": 0.000000075, + "output_cost_per_token": 0.0000003, + "litellm_provider": "gemini", + "mode": "chat", + "rpm": 60000, + "tpm": 10000000, + "supports_system_messages": true, + "supports_function_calling": true, + "supports_vision": true, + "supports_response_schema": true, + "supports_audio_output": false, + "supports_tool_choice": true, + "source": "https://cloud.google.com/vertex-ai/generative-ai/docs/learn/models#gemini-2.0-flash-lite" + }, "gemini/gemini-2.0-flash-thinking-exp": { "max_tokens": 8192, "max_input_tokens": 1048576, @@ -6026,7 +6140,8 @@ "litellm_provider": "bedrock_converse", "mode": "chat", "supports_function_calling": true, - "supports_prompt_caching": true + "supports_prompt_caching": true, + "supports_response_schema": true }, "us.amazon.nova-micro-v1:0": { "max_tokens": 4096, @@ -6051,7 +6166,8 @@ "supports_function_calling": true, "supports_vision": true, "supports_pdf_input": true, - "supports_prompt_caching": true + "supports_prompt_caching": true, + "supports_response_schema": true }, "us.amazon.nova-lite-v1:0": { "max_tokens": 4096, @@ -6064,7 +6180,8 @@ "supports_function_calling": true, "supports_vision": true, "supports_pdf_input": true, - "supports_prompt_caching": true + "supports_prompt_caching": true, + "supports_response_schema": true }, "amazon.nova-pro-v1:0": { "max_tokens": 4096, @@ -6077,7 +6194,8 @@ "supports_function_calling": true, "supports_vision": true, "supports_pdf_input": true, - "supports_prompt_caching": true + "supports_prompt_caching": true, + "supports_response_schema": true }, "us.amazon.nova-pro-v1:0": { "max_tokens": 4096, @@ -6090,7 +6208,8 @@ "supports_function_calling": true, "supports_vision": true, "supports_pdf_input": true, - "supports_prompt_caching": true + "supports_prompt_caching": true, + "supports_response_schema": true }, "anthropic.claude-3-sonnet-20240229-v1:0": { "max_tokens": 4096, @@ -6101,6 +6220,7 @@ "litellm_provider": "bedrock", "mode": "chat", "supports_function_calling": true, + "supports_response_schema": true, 
"supports_vision": true, "supports_tool_choice": true }, @@ -6113,6 +6233,7 @@ "litellm_provider": "bedrock", "mode": "chat", "supports_function_calling": true, + "supports_response_schema": true, "supports_vision": true, "supports_tool_choice": true }, @@ -6140,6 +6261,7 @@ "litellm_provider": "bedrock", "mode": "chat", "supports_function_calling": true, + "supports_response_schema": true, "supports_vision": true, "supports_tool_choice": true }, @@ -6153,6 +6275,7 @@ "mode": "chat", "supports_assistant_prefill": true, "supports_function_calling": true, + "supports_response_schema": true, "supports_prompt_caching": true, "supports_tool_choice": true }, @@ -6165,6 +6288,7 @@ "litellm_provider": "bedrock", "mode": "chat", "supports_function_calling": true, + "supports_response_schema": true, "supports_vision": true, "supports_tool_choice": true }, @@ -6177,6 +6301,7 @@ "litellm_provider": "bedrock", "mode": "chat", "supports_function_calling": true, + "supports_response_schema": true, "supports_vision": true, "supports_tool_choice": true }, @@ -6189,6 +6314,7 @@ "litellm_provider": "bedrock", "mode": "chat", "supports_function_calling": true, + "supports_response_schema": true, "supports_vision": true, "supports_tool_choice": true }, @@ -6216,6 +6342,7 @@ "litellm_provider": "bedrock", "mode": "chat", "supports_function_calling": true, + "supports_response_schema": true, "supports_vision": true, "supports_tool_choice": true }, @@ -6230,6 +6357,7 @@ "supports_assistant_prefill": true, "supports_function_calling": true, "supports_prompt_caching": true, + "supports_response_schema": true, "supports_tool_choice": true }, "us.anthropic.claude-3-opus-20240229-v1:0": { @@ -6241,6 +6369,7 @@ "litellm_provider": "bedrock", "mode": "chat", "supports_function_calling": true, + "supports_response_schema": true, "supports_vision": true, "supports_tool_choice": true }, @@ -6253,6 +6382,7 @@ "litellm_provider": "bedrock", "mode": "chat", "supports_function_calling": true, + "supports_response_schema": true, "supports_vision": true, "supports_tool_choice": true }, @@ -6265,6 +6395,7 @@ "litellm_provider": "bedrock", "mode": "chat", "supports_function_calling": true, + "supports_response_schema": true, "supports_vision": true, "supports_tool_choice": true }, @@ -6292,6 +6423,7 @@ "litellm_provider": "bedrock", "mode": "chat", "supports_function_calling": true, + "supports_response_schema": true, "supports_vision": true, "supports_tool_choice": true }, @@ -6318,6 +6450,7 @@ "litellm_provider": "bedrock", "mode": "chat", "supports_function_calling": true, + "supports_response_schema": true, "supports_vision": true, "supports_tool_choice": true }, @@ -8935,4 +9068,4 @@ "output_cost_per_second": 0.00, "litellm_provider": "assemblyai" } -} +} \ No newline at end of file diff --git a/litellm/proxy/_experimental/out/_next/static/chunks/225-72bee079fe8c7963.js b/litellm/proxy/_experimental/out/_next/static/chunks/225-72bee079fe8c7963.js new file mode 100644 index 0000000000..718591a99b --- /dev/null +++ b/litellm/proxy/_experimental/out/_next/static/chunks/225-72bee079fe8c7963.js @@ -0,0 +1,11 @@ +(self.webpackChunk_N_E=self.webpackChunk_N_E||[]).push([[225],{12660:function(e,t,n){"use strict";n.d(t,{Z:function(){return l}});var r=n(1119),o=n(2265),i={icon:{tag:"svg",attrs:{viewBox:"64 64 896 896",focusable:"false"},children:[{tag:"path",attrs:{d:"M917.7 148.8l-42.4-42.4c-1.6-1.6-3.6-2.3-5.7-2.3s-4.1.8-5.7 2.3l-76.1 76.1a199.27 199.27 0 00-112.1-34.3c-51.2 0-102.4 19.5-141.5 58.6L432.3 308.7a8.03 8.03 0 000 11.3L704 
c.createElement("g",{className:a},c.createElement("path",b({},(0,v.L6)(i,!0),{fill:"Z"===s.slice(-1)?i.fill:"none",stroke:"none",d:s})),l?c.createElement("path",b({},(0,v.L6)(i,!0),{fill:"none",d:E(t,o)})):null,l?c.createElement("path",b({},(0,v.L6)(i,!0),{fill:"none",d:E(r,o)})):null)}var u=E(t,o);return c.createElement("path",b({},(0,v.L6)(i,!0),{fill:"Z"===u.slice(-1)?i.fill:"none",className:a,d:u}))},j=n(58811),P=n(41637),M=n(39206);function A(e){return(A="function"==typeof Symbol&&"symbol"==typeof Symbol.iterator?function(e){return typeof e}:function(e){return e&&"function"==typeof Symbol&&e.constructor===Symbol&&e!==Symbol.prototype?"symbol":typeof e})(e)}function I(){return(I=Object.assign?Object.assign.bind():function(e){for(var t=1;t1e-5?"outer"===t?"start":"end":n<-.00001?"outer"===t?"end":"start":"middle"}},{key:"renderAxisLine",value:function(){var e=this.props,t=e.cx,n=e.cy,r=e.radius,o=e.axisLine,i=e.axisLineType,a=R(R({},(0,v.L6)(this.props,!1)),{},{fill:"none"},(0,v.L6)(o,!1));if("circle"===i)return c.createElement(m.o,I({className:"recharts-polar-angle-axis-line"},a,{cx:t,cy:n,r:r}));var l=this.props.ticks.map(function(e){return(0,M.op)(t,n,r,e.coordinate)});return c.createElement(C,I({className:"recharts-polar-angle-axis-line"},a,{points:l}))}},{key:"renderTicks",value:function(){var e=this,t=this.props,n=t.ticks,r=t.tick,o=t.tickLine,a=t.tickFormatter,l=t.stroke,s=(0,v.L6)(this.props,!1),u=(0,v.L6)(r,!1),d=R(R({},s),{},{fill:"none"},(0,v.L6)(o,!1)),f=n.map(function(t,n){var f=e.getTickLineCoord(t),p=R(R(R({textAnchor:e.getTickTextAnchor(t)},s),{},{stroke:"none",fill:l},u),{},{index:n,payload:t,x:f.x2,y:f.y2});return c.createElement(h.m,I({className:"recharts-polar-angle-axis-tick",key:"tick-".concat(t.coordinate)},(0,P.bw)(e.props,t,n)),o&&c.createElement("line",I({className:"recharts-polar-angle-axis-tick-line"},d,f)),r&&i.renderTickItem(r,p,a?a(t.value,n):t.value))});return c.createElement(h.m,{className:"recharts-polar-angle-axis-ticks"},f)}},{key:"render",value:function(){var e=this.props,t=e.ticks,n=e.radius,r=e.axisLine;return!(n<=0)&&t&&t.length?c.createElement(h.m,{className:"recharts-polar-angle-axis"},r&&this.renderAxisLine(),this.renderTicks()):null}}],r=[{key:"renderTickItem",value:function(e,t,n){return c.isValidElement(e)?c.cloneElement(e,t):p()(e)?e(t):c.createElement(j.x,I({},t,{className:"recharts-polar-angle-axis-tick-value"}),n)}}],n&&N(i.prototype,n),r&&N(i,r),Object.defineProperty(i,"prototype",{writable:!1}),i}(c.PureComponent);L(z,"displayName","PolarAngleAxis"),L(z,"axisType","angleAxis"),L(z,"defaultProps",{type:"category",angleAxisId:0,scale:"auto",cx:0,cy:0,orientation:"outer",axisLine:!0,tickLine:!0,tickSize:8,tick:!0,hide:!1,allowDuplicatedCategory:!0});var F=n(35802),H=n.n(F),q=n(37891),V=n.n(q),U=n(26680),W=["cx","cy","angle","ticks","axisLine"],K=["ticks","tick","angle","tickFormatter","stroke"];function $(e){return($="function"==typeof Symbol&&"symbol"==typeof Symbol.iterator?function(e){return typeof e}:function(e){return e&&"function"==typeof Symbol&&e.constructor===Symbol&&e!==Symbol.prototype?"symbol":typeof e})(e)}function G(){return(G=Object.assign?Object.assign.bind():function(e){for(var t=1;t=0||(o[n]=e[n]);return o}(e,t);if(Object.getOwnPropertySymbols){var i=Object.getOwnPropertySymbols(e);for(r=0;r=0)&&Object.prototype.propertyIsEnumerable.call(e,n)&&(o[n]=e[n])}return o}function J(e,t){for(var n=0;n0?el()(e,"paddingAngle",0):0;if(n){var 
l=(0,eg.k4)(n.endAngle-n.startAngle,e.endAngle-e.startAngle),c=eO(eO({},e),{},{startAngle:i+a,endAngle:i+l(r)+a});o.push(c),i=c.endAngle}else{var s=e.endAngle,d=e.startAngle,f=(0,eg.k4)(0,s-d)(r),p=eO(eO({},e),{},{startAngle:i+a,endAngle:i+f+a});o.push(p),i=p.endAngle}}),c.createElement(h.m,null,e.renderSectorsStatically(o))})}},{key:"attachKeyboardHandlers",value:function(e){var t=this;e.onkeydown=function(e){if(!e.altKey)switch(e.key){case"ArrowLeft":var n=++t.state.sectorToFocus%t.sectorRefs.length;t.sectorRefs[n].focus(),t.setState({sectorToFocus:n});break;case"ArrowRight":var r=--t.state.sectorToFocus<0?t.sectorRefs.length-1:t.state.sectorToFocus%t.sectorRefs.length;t.sectorRefs[r].focus(),t.setState({sectorToFocus:r});break;case"Escape":t.sectorRefs[t.state.sectorToFocus].blur(),t.setState({sectorToFocus:0})}}}},{key:"renderSectors",value:function(){var e=this.props,t=e.sectors,n=e.isAnimationActive,r=this.state.prevSectors;return n&&t&&t.length&&(!r||!es()(r,t))?this.renderSectorsWithAnimation():this.renderSectorsStatically(t)}},{key:"componentDidMount",value:function(){this.pieRef&&this.attachKeyboardHandlers(this.pieRef)}},{key:"render",value:function(){var e=this,t=this.props,n=t.hide,r=t.sectors,o=t.className,i=t.label,a=t.cx,l=t.cy,s=t.innerRadius,u=t.outerRadius,d=t.isAnimationActive,f=this.state.isAnimationFinished;if(n||!r||!r.length||!(0,eg.hj)(a)||!(0,eg.hj)(l)||!(0,eg.hj)(s)||!(0,eg.hj)(u))return null;var p=(0,g.Z)("recharts-pie",o);return c.createElement(h.m,{tabIndex:this.props.rootTabIndex,className:p,ref:function(t){e.pieRef=t}},this.renderSectors(),i&&this.renderLabels(r),U._.renderCallByParent(this.props,null,!1),(!d||f)&&ep.e.renderCallByParent(this.props,r,!1))}}],r=[{key:"getDerivedStateFromProps",value:function(e,t){return t.prevIsAnimationActive!==e.isAnimationActive?{prevIsAnimationActive:e.isAnimationActive,prevAnimationId:e.animationId,curSectors:e.sectors,prevSectors:[],isAnimationFinished:!0}:e.isAnimationActive&&e.animationId!==t.prevAnimationId?{prevAnimationId:e.animationId,curSectors:e.sectors,prevSectors:t.curSectors,isAnimationFinished:!0}:e.sectors!==t.curSectors?{curSectors:e.sectors,isAnimationFinished:!0}:null}},{key:"getTextAnchor",value:function(e,t){return e>t?"start":e=360?x:x-1)*u,S=i.reduce(function(e,t){var n=(0,ev.F$)(t,b,0);return e+((0,eg.hj)(n)?n:0)},0);return S>0&&(t=i.map(function(e,t){var r,o=(0,ev.F$)(e,b,0),i=(0,ev.F$)(e,f,t),a=((0,eg.hj)(o)?o:0)/S,s=(r=t?n.endAngle+(0,eg.uY)(v)*u*(0!==o?1:0):c)+(0,eg.uY)(v)*((0!==o?m:0)+a*w),d=(r+s)/2,p=(g.innerRadius+g.outerRadius)/2,y=[{name:i,value:o,payload:e,dataKey:b,type:h}],x=(0,M.op)(g.cx,g.cy,p,d);return n=eO(eO(eO({percent:a,cornerRadius:l,name:i,tooltipPayload:y,midAngle:d,middleRadius:p,tooltipPosition:x},e),g),{},{value:(0,ev.F$)(e,b),startAngle:r,endAngle:s,payload:e,paddingAngle:(0,eg.uY)(v)*u})})),eO(eO({},g),{},{sectors:t,data:i})});var eI=(0,d.z)({chartName:"PieChart",GraphicalChild:eA,validateTooltipEventTypes:["item"],defaultTooltipEventType:"item",legendContent:"children",axisComponents:[{axisType:"angleAxis",AxisComp:z},{axisType:"radiusAxis",AxisComp:eo}],formatAxisMap:M.t9,defaultProps:{layout:"centric",startAngle:0,endAngle:360,cx:"50%",cy:"50%",innerRadius:0,outerRadius:"80%"}}),eT=n(8147),eR=n(69448),eN=n(98593);let e_=e=>{let{active:t,payload:n,valueFormatter:r}=e;if(t&&(null==n?void 0:n[0])){let e=null==n?void 0:n[0];return c.createElement(eN.$B,null,c.createElement("div",{className:(0,a.q)("px-4 
py-2")},c.createElement(eN.zX,{value:r(e.value),name:e.name,color:e.payload.color})))}return null},eD=(e,t)=>e.map((e,n)=>{let r=ne||t((0,l.vP)(n.map(e=>e[r]))),eZ=e=>{let{cx:t,cy:n,innerRadius:r,outerRadius:o,startAngle:i,endAngle:a,className:l}=e;return c.createElement("g",null,c.createElement(s.L,{cx:t,cy:n,innerRadius:r,outerRadius:o,startAngle:i,endAngle:a,className:l,fill:"",opacity:.3,style:{outline:"none"}}))},eB=c.forwardRef((e,t)=>{let{data:n=[],category:s="value",index:d="name",colors:f=i.s,variant:p="donut",valueFormatter:h=l.Cj,label:m,showLabel:g=!0,animationDuration:v=900,showAnimation:y=!1,showTooltip:b=!0,noDataText:x,onValueChange:w,customTooltip:S,className:O}=e,E=(0,r._T)(e,["data","category","index","colors","variant","valueFormatter","label","showLabel","animationDuration","showAnimation","showTooltip","noDataText","onValueChange","customTooltip","className"]),k="donut"==p,C=eL(m,h,n,s),[j,P]=c.useState(void 0),M=!!w;return(0,c.useEffect)(()=>{let e=document.querySelectorAll(".recharts-pie-sector");e&&e.forEach(e=>{e.setAttribute("style","outline: none")})},[j]),c.createElement("div",Object.assign({ref:t,className:(0,a.q)("w-full h-40",O)},E),c.createElement(u.h,{className:"h-full w-full"},(null==n?void 0:n.length)?c.createElement(eI,{onClick:M&&j?()=>{P(void 0),null==w||w(null)}:void 0,margin:{top:0,left:0,right:0,bottom:0}},g&&k?c.createElement("text",{className:(0,a.q)("fill-tremor-content-emphasis","dark:fill-dark-tremor-content-emphasis"),x:"50%",y:"50%",textAnchor:"middle",dominantBaseline:"middle"},C):null,c.createElement(eA,{className:(0,a.q)("stroke-tremor-background dark:stroke-dark-tremor-background",w?"cursor-pointer":"cursor-default"),data:eD(n,f),cx:"50%",cy:"50%",startAngle:90,endAngle:-270,innerRadius:k?"75%":"0%",outerRadius:"100%",stroke:"",strokeLinejoin:"round",dataKey:s,nameKey:d,isAnimationActive:y,animationDuration:v,onClick:function(e,t,n){n.stopPropagation(),M&&(j===t?(P(void 0),null==w||w(null)):(P(t),null==w||w(Object.assign({eventType:"slice"},e.payload.payload))))},activeIndex:j,inactiveShape:eZ,style:{outline:"none"}}),c.createElement(eT.u,{wrapperStyle:{outline:"none"},isAnimationActive:!1,content:b?e=>{var t;let{active:n,payload:r}=e;return S?c.createElement(S,{payload:null==r?void 0:r.map(e=>{var t,n,i;return Object.assign(Object.assign({},e),{color:null!==(i=null===(n=null===(t=null==r?void 0:r[0])||void 0===t?void 0:t.payload)||void 0===n?void 0:n.color)&&void 0!==i?i:o.fr.Gray})}),active:n,label:null===(t=null==r?void 0:r[0])||void 0===t?void 0:t.name}):c.createElement(e_,{active:n,payload:r,valueFormatter:h})}:c.createElement(c.Fragment,null)})):c.createElement(eR.Z,{noDataText:x})))});eB.displayName="DonutChart"},59664:function(e,t,n){"use strict";n.d(t,{Z:function(){return E}});var r=n(5853),o=n(2265),i=n(47625),a=n(93765),l=n(54061),c=n(97059),s=n(62994),u=n(25311),d=(0,a.z)({chartName:"LineChart",GraphicalChild:l.x,axisComponents:[{axisType:"xAxis",AxisComp:c.K},{axisType:"yAxis",AxisComp:s.B}],formatAxisMap:u.t9}),f=n(56940),p=n(8147),h=n(22190),m=n(81889),g=n(65278),v=n(98593),y=n(69448),b=n(32644),x=n(7084),w=n(26898),S=n(65954),O=n(1153);let 
E=o.forwardRef((e,t)=>{let{data:n=[],categories:a=[],index:u,colors:E=w.s,valueFormatter:k=O.Cj,startEndOnly:C=!1,showXAxis:j=!0,showYAxis:P=!0,yAxisWidth:M=56,intervalType:A="equidistantPreserveStart",animationDuration:I=900,showAnimation:T=!1,showTooltip:R=!0,showLegend:N=!0,showGridLines:_=!0,autoMinValue:D=!1,curveType:L="linear",minValue:Z,maxValue:B,connectNulls:z=!1,allowDecimals:F=!0,noDataText:H,className:q,onValueChange:V,enableLegendSlider:U=!1,customTooltip:W,rotateLabelX:K,tickGap:$=5}=e,G=(0,r._T)(e,["data","categories","index","colors","valueFormatter","startEndOnly","showXAxis","showYAxis","yAxisWidth","intervalType","animationDuration","showAnimation","showTooltip","showLegend","showGridLines","autoMinValue","curveType","minValue","maxValue","connectNulls","allowDecimals","noDataText","className","onValueChange","enableLegendSlider","customTooltip","rotateLabelX","tickGap"]),Y=j||P?20:0,[X,Q]=(0,o.useState)(60),[J,ee]=(0,o.useState)(void 0),[et,en]=(0,o.useState)(void 0),er=(0,b.me)(a,E),eo=(0,b.i4)(D,Z,B),ei=!!V;function ea(e){ei&&(e===et&&!J||(0,b.FB)(n,e)&&J&&J.dataKey===e?(en(void 0),null==V||V(null)):(en(e),null==V||V({eventType:"category",categoryClicked:e})),ee(void 0))}return o.createElement("div",Object.assign({ref:t,className:(0,S.q)("w-full h-80",q)},G),o.createElement(i.h,{className:"h-full w-full"},(null==n?void 0:n.length)?o.createElement(d,{data:n,onClick:ei&&(et||J)?()=>{ee(void 0),en(void 0),null==V||V(null)}:void 0},_?o.createElement(f.q,{className:(0,S.q)("stroke-1","stroke-tremor-border","dark:stroke-dark-tremor-border"),horizontal:!0,vertical:!1}):null,o.createElement(c.K,{padding:{left:Y,right:Y},hide:!j,dataKey:u,interval:C?"preserveStartEnd":A,tick:{transform:"translate(0, 6)"},ticks:C?[n[0][u],n[n.length-1][u]]:void 0,fill:"",stroke:"",className:(0,S.q)("text-tremor-label","fill-tremor-content","dark:fill-dark-tremor-content"),tickLine:!1,axisLine:!1,minTickGap:$,angle:null==K?void 0:K.angle,dy:null==K?void 0:K.verticalShift,height:null==K?void 0:K.xAxisHeight}),o.createElement(s.B,{width:M,hide:!P,axisLine:!1,tickLine:!1,type:"number",domain:eo,tick:{transform:"translate(-3, 0)"},fill:"",stroke:"",className:(0,S.q)("text-tremor-label","fill-tremor-content","dark:fill-dark-tremor-content"),tickFormatter:k,allowDecimals:F}),o.createElement(p.u,{wrapperStyle:{outline:"none"},isAnimationActive:!1,cursor:{stroke:"#d1d5db",strokeWidth:1},content:R?e=>{let{active:t,payload:n,label:r}=e;return W?o.createElement(W,{payload:null==n?void 0:n.map(e=>{var t;return Object.assign(Object.assign({},e),{color:null!==(t=er.get(e.dataKey))&&void 0!==t?t:x.fr.Gray})}),active:t,label:r}):o.createElement(v.ZP,{active:t,payload:n,label:r,valueFormatter:k,categoryColors:er})}:o.createElement(o.Fragment,null),position:{y:0}}),N?o.createElement(h.D,{verticalAlign:"top",height:X,content:e=>{let{payload:t}=e;return(0,g.Z)({payload:t},er,Q,et,ei?e=>ea(e):void 0,U)}}):null,a.map(e=>{var t;return o.createElement(l.x,{className:(0,S.q)((0,O.bM)(null!==(t=er.get(e))&&void 0!==t?t:x.fr.Gray,w.K.text).strokeColor),strokeOpacity:J||et&&et!==e?.3:1,activeDot:e=>{var t;let{cx:r,cy:i,stroke:a,strokeLinecap:l,strokeLinejoin:c,strokeWidth:s,dataKey:u}=e;return o.createElement(m.o,{className:(0,S.q)("stroke-tremor-background dark:stroke-dark-tremor-background",V?"cursor-pointer":"",(0,O.bM)(null!==(t=er.get(u))&&void 
0!==t?t:x.fr.Gray,w.K.text).fillColor),cx:r,cy:i,r:5,fill:"",stroke:a,strokeLinecap:l,strokeLinejoin:c,strokeWidth:s,onClick:(t,r)=>{r.stopPropagation(),ei&&(e.index===(null==J?void 0:J.index)&&e.dataKey===(null==J?void 0:J.dataKey)||(0,b.FB)(n,e.dataKey)&&et&&et===e.dataKey?(en(void 0),ee(void 0),null==V||V(null)):(en(e.dataKey),ee({index:e.index,dataKey:e.dataKey}),null==V||V(Object.assign({eventType:"dot",categoryClicked:e.dataKey},e.payload))))}})},dot:t=>{var r;let{stroke:i,strokeLinecap:a,strokeLinejoin:l,strokeWidth:c,cx:s,cy:u,dataKey:d,index:f}=t;return(0,b.FB)(n,e)&&!(J||et&&et!==e)||(null==J?void 0:J.index)===f&&(null==J?void 0:J.dataKey)===e?o.createElement(m.o,{key:f,cx:s,cy:u,r:5,stroke:i,fill:"",strokeLinecap:a,strokeLinejoin:l,strokeWidth:c,className:(0,S.q)("stroke-tremor-background dark:stroke-dark-tremor-background",V?"cursor-pointer":"",(0,O.bM)(null!==(r=er.get(d))&&void 0!==r?r:x.fr.Gray,w.K.text).fillColor)}):o.createElement(o.Fragment,{key:f})},key:e,name:e,type:L,dataKey:e,stroke:"",strokeWidth:2,strokeLinejoin:"round",strokeLinecap:"round",isAnimationActive:T,animationDuration:I,connectNulls:z})}),V?a.map(e=>o.createElement(l.x,{className:(0,S.q)("cursor-pointer"),strokeOpacity:0,key:e,name:e,type:L,dataKey:e,stroke:"transparent",fill:"transparent",legendType:"none",tooltipType:"none",strokeWidth:12,connectNulls:z,onClick:(e,t)=>{t.stopPropagation();let{name:n}=e;ea(n)}})):null):o.createElement(y.Z,{noDataText:H})))});E.displayName="LineChart"},65278:function(e,t,n){"use strict";n.d(t,{Z:function(){return m}});var r=n(2265);let o=(e,t)=>{let[n,o]=(0,r.useState)(t);(0,r.useEffect)(()=>{let t=()=>{o(window.innerWidth),e()};return t(),window.addEventListener("resize",t),()=>window.removeEventListener("resize",t)},[e,n])};var i=n(5853),a=n(26898),l=n(65954),c=n(1153);let s=e=>{var t=(0,i._T)(e,[]);return r.createElement("svg",Object.assign({},t,{xmlns:"http://www.w3.org/2000/svg",viewBox:"0 0 24 24",fill:"currentColor"}),r.createElement("path",{d:"M8 12L14 6V18L8 12Z"}))},u=e=>{var t=(0,i._T)(e,[]);return r.createElement("svg",Object.assign({},t,{xmlns:"http://www.w3.org/2000/svg",viewBox:"0 0 24 24",fill:"currentColor"}),r.createElement("path",{d:"M16 12L10 18V6L16 12Z"}))},d=(0,c.fn)("Legend"),f=e=>{let{name:t,color:n,onClick:o,activeLegend:i}=e,s=!!o;return r.createElement("li",{className:(0,l.q)(d("legendItem"),"group inline-flex items-center px-2 py-0.5 rounded-tremor-small transition whitespace-nowrap",s?"cursor-pointer":"cursor-default","text-tremor-content",s?"hover:bg-tremor-background-subtle":"","dark:text-dark-tremor-content",s?"dark:hover:bg-dark-tremor-background-subtle":""),onClick:e=>{e.stopPropagation(),null==o||o(t,n)}},r.createElement("svg",{className:(0,l.q)("flex-none h-2 w-2 mr-1.5",(0,c.bM)(n,a.K.text).textColor,i&&i!==t?"opacity-40":"opacity-100"),fill:"currentColor",viewBox:"0 0 8 8"},r.createElement("circle",{cx:4,cy:4,r:4})),r.createElement("p",{className:(0,l.q)("whitespace-nowrap truncate text-tremor-default","text-tremor-content",s?"group-hover:text-tremor-content-emphasis":"","dark:text-dark-tremor-content",i&&i!==t?"opacity-40":"opacity-100",s?"dark:group-hover:text-dark-tremor-content-emphasis":"")},t))},p=e=>{let{icon:t,onClick:n,disabled:o}=e,[i,a]=r.useState(!1),c=r.useRef(null);return 
r.useEffect(()=>(i?c.current=setInterval(()=>{null==n||n()},300):clearInterval(c.current),()=>clearInterval(c.current)),[i,n]),(0,r.useEffect)(()=>{o&&(clearInterval(c.current),a(!1))},[o]),r.createElement("button",{type:"button",className:(0,l.q)(d("legendSliderButton"),"w-5 group inline-flex items-center truncate rounded-tremor-small transition",o?"cursor-not-allowed":"cursor-pointer",o?"text-tremor-content-subtle":"text-tremor-content hover:text-tremor-content-emphasis hover:bg-tremor-background-subtle",o?"dark:text-dark-tremor-subtle":"dark:text-dark-tremor dark:hover:text-tremor-content-emphasis dark:hover:bg-dark-tremor-background-subtle"),disabled:o,onClick:e=>{e.stopPropagation(),null==n||n()},onMouseDown:e=>{e.stopPropagation(),a(!0)},onMouseUp:e=>{e.stopPropagation(),a(!1)}},r.createElement(t,{className:"w-full"}))},h=r.forwardRef((e,t)=>{var n,o;let{categories:c,colors:h=a.s,className:m,onClickLegendItem:g,activeLegend:v,enableLegendSlider:y=!1}=e,b=(0,i._T)(e,["categories","colors","className","onClickLegendItem","activeLegend","enableLegendSlider"]),x=r.useRef(null),[w,S]=r.useState(null),[O,E]=r.useState(null),k=r.useRef(null),C=(0,r.useCallback)(()=>{let e=null==x?void 0:x.current;e&&S({left:e.scrollLeft>0,right:e.scrollWidth-e.clientWidth>e.scrollLeft})},[S]),j=(0,r.useCallback)(e=>{var t;let n=null==x?void 0:x.current,r=null!==(t=null==n?void 0:n.clientWidth)&&void 0!==t?t:0;n&&y&&(n.scrollTo({left:"left"===e?n.scrollLeft-r:n.scrollLeft+r,behavior:"smooth"}),setTimeout(()=>{C()},400))},[y,C]);r.useEffect(()=>{let e=e=>{"ArrowLeft"===e?j("left"):"ArrowRight"===e&&j("right")};return O?(e(O),k.current=setInterval(()=>{e(O)},300)):clearInterval(k.current),()=>clearInterval(k.current)},[O,j]);let P=e=>{e.stopPropagation(),"ArrowLeft"!==e.key&&"ArrowRight"!==e.key||(e.preventDefault(),E(e.key))},M=e=>{e.stopPropagation(),E(null)};return r.useEffect(()=>{let e=null==x?void 0:x.current;return y&&(C(),null==e||e.addEventListener("keydown",P),null==e||e.addEventListener("keyup",M)),()=>{null==e||e.removeEventListener("keydown",P),null==e||e.removeEventListener("keyup",M)}},[C,y]),r.createElement("ol",Object.assign({ref:t,className:(0,l.q)(d("root"),"relative overflow-hidden",m)},b),r.createElement("div",{ref:x,tabIndex:0,className:(0,l.q)("h-full flex",y?(null==w?void 0:w.right)||(null==w?void 0:w.left)?"pl-4 pr-12 items-center overflow-auto snap-mandatory [&::-webkit-scrollbar]:hidden [scrollbar-width:none]":"":"flex-wrap")},c.map((e,t)=>r.createElement(f,{key:"item-".concat(t),name:e,color:h[t],onClick:g,activeLegend:v}))),y&&((null==w?void 0:w.right)||(null==w?void 0:w.left))?r.createElement(r.Fragment,null,r.createElement("div",{className:(0,l.q)("from-tremor-background","dark:from-dark-tremor-background","absolute top-0 bottom-0 left-0 w-4 bg-gradient-to-r to-transparent pointer-events-none")}),r.createElement("div",{className:(0,l.q)("to-tremor-background","dark:to-dark-tremor-background","absolute top-0 bottom-0 right-10 w-4 bg-gradient-to-r from-transparent pointer-events-none")}),r.createElement("div",{className:(0,l.q)("bg-tremor-background","dark:bg-dark-tremor-background","absolute flex top-0 pr-1 bottom-0 right-0 items-center justify-center h-full")},r.createElement(p,{icon:s,onClick:()=>{E(null),j("left")},disabled:!(null==w?void 0:w.left)}),r.createElement(p,{icon:u,onClick:()=>{E(null),j("right")},disabled:!(null==w?void 0:w.right)}))):null)});h.displayName="Legend";let m=(e,t,n,i,a,l)=>{let{payload:c}=e,s=(0,r.useRef)(null);o(()=>{var 
e,t;n((t=null===(e=s.current)||void 0===e?void 0:e.clientHeight)?Number(t)+20:60)});let u=c.filter(e=>"none"!==e.type);return r.createElement("div",{ref:s,className:"flex items-center justify-end"},r.createElement(h,{categories:u.map(e=>e.value),colors:u.map(e=>t.get(e.value)),onClickLegendItem:a,activeLegend:i,enableLegendSlider:l}))}},98593:function(e,t,n){"use strict";n.d(t,{$B:function(){return c},ZP:function(){return u},zX:function(){return s}});var r=n(2265),o=n(7084),i=n(26898),a=n(65954),l=n(1153);let c=e=>{let{children:t}=e;return r.createElement("div",{className:(0,a.q)("rounded-tremor-default text-tremor-default border","bg-tremor-background shadow-tremor-dropdown border-tremor-border","dark:bg-dark-tremor-background dark:shadow-dark-tremor-dropdown dark:border-dark-tremor-border")},t)},s=e=>{let{value:t,name:n,color:o}=e;return r.createElement("div",{className:"flex items-center justify-between space-x-8"},r.createElement("div",{className:"flex items-center space-x-2"},r.createElement("span",{className:(0,a.q)("shrink-0 rounded-tremor-full border-2 h-3 w-3","border-tremor-background shadow-tremor-card","dark:border-dark-tremor-background dark:shadow-dark-tremor-card",(0,l.bM)(o,i.K.background).bgColor)}),r.createElement("p",{className:(0,a.q)("text-right whitespace-nowrap","text-tremor-content","dark:text-dark-tremor-content")},n)),r.createElement("p",{className:(0,a.q)("font-medium tabular-nums text-right whitespace-nowrap","text-tremor-content-emphasis","dark:text-dark-tremor-content-emphasis")},t))},u=e=>{let{active:t,payload:n,label:i,categoryColors:l,valueFormatter:u}=e;if(t&&n){let e=n.filter(e=>"none"!==e.type);return r.createElement(c,null,r.createElement("div",{className:(0,a.q)("border-tremor-border border-b px-4 py-2","dark:border-dark-tremor-border")},r.createElement("p",{className:(0,a.q)("font-medium","text-tremor-content-emphasis","dark:text-dark-tremor-content-emphasis")},i)),r.createElement("div",{className:(0,a.q)("px-4 py-2 space-y-1")},e.map((e,t)=>{var n;let{value:i,name:a}=e;return r.createElement(s,{key:"id-".concat(t),value:u(i),name:a,color:null!==(n=l.get(a))&&void 0!==n?n:o.fr.Blue})})))}return null}},69448:function(e,t,n){"use strict";n.d(t,{Z:function(){return f}});var r=n(65954),o=n(2265),i=n(5853);let a=(0,n(1153).fn)("Flex"),l={start:"justify-start",end:"justify-end",center:"justify-center",between:"justify-between",around:"justify-around",evenly:"justify-evenly"},c={start:"items-start",end:"items-end",center:"items-center",baseline:"items-baseline",stretch:"items-stretch"},s={row:"flex-row",col:"flex-col","row-reverse":"flex-row-reverse","col-reverse":"flex-col-reverse"},u=o.forwardRef((e,t)=>{let{flexDirection:n="row",justifyContent:u="between",alignItems:d="center",children:f,className:p}=e,h=(0,i._T)(e,["flexDirection","justifyContent","alignItems","children","className"]);return o.createElement("div",Object.assign({ref:t,className:(0,r.q)(a("root"),"flex w-full",s[n],l[u],c[d],p)},h),f)});u.displayName="Flex";var d=n(84264);let f=e=>{let{noDataText:t="No data"}=e;return o.createElement(u,{alignItems:"center",justifyContent:"center",className:(0,r.q)("w-full h-full border border-dashed rounded-tremor-default","border-tremor-border","dark:border-dark-tremor-border")},o.createElement(d.Z,{className:(0,r.q)("text-tremor-content","dark:text-dark-tremor-content")},t))}},32644:function(e,t,n){"use strict";n.d(t,{FB:function(){return i},i4:function(){return o},me:function(){return r},vZ:function(){return function 
e(t,n){if(t===n)return!0;if("object"!=typeof t||"object"!=typeof n||null===t||null===n)return!1;let r=Object.keys(t),o=Object.keys(n);if(r.length!==o.length)return!1;for(let i of r)if(!o.includes(i)||!e(t[i],n[i]))return!1;return!0}}});let r=(e,t)=>{let n=new Map;return e.forEach((e,r)=>{n.set(e,t[r])}),n},o=(e,t,n)=>[e?"auto":null!=t?t:0,null!=n?n:"auto"];function i(e,t){let n=[];for(let r of e)if(Object.prototype.hasOwnProperty.call(r,t)&&(n.push(r[t]),n.length>1))return!1;return!0}},41649:function(e,t,n){"use strict";n.d(t,{Z:function(){return p}});var r=n(5853),o=n(2265),i=n(1526),a=n(7084),l=n(26898),c=n(65954),s=n(1153);let u={xs:{paddingX:"px-2",paddingY:"py-0.5",fontSize:"text-xs"},sm:{paddingX:"px-2.5",paddingY:"py-0.5",fontSize:"text-sm"},md:{paddingX:"px-3",paddingY:"py-0.5",fontSize:"text-md"},lg:{paddingX:"px-3.5",paddingY:"py-0.5",fontSize:"text-lg"},xl:{paddingX:"px-4",paddingY:"py-1",fontSize:"text-xl"}},d={xs:{height:"h-4",width:"w-4"},sm:{height:"h-4",width:"w-4"},md:{height:"h-4",width:"w-4"},lg:{height:"h-5",width:"w-5"},xl:{height:"h-6",width:"w-6"}},f=(0,s.fn)("Badge"),p=o.forwardRef((e,t)=>{let{color:n,icon:p,size:h=a.u8.SM,tooltip:m,className:g,children:v}=e,y=(0,r._T)(e,["color","icon","size","tooltip","className","children"]),b=p||null,{tooltipProps:x,getReferenceProps:w}=(0,i.l)();return o.createElement("span",Object.assign({ref:(0,s.lq)([t,x.refs.setReference]),className:(0,c.q)(f("root"),"w-max flex-shrink-0 inline-flex justify-center items-center cursor-default rounded-tremor-full",n?(0,c.q)((0,s.bM)(n,l.K.background).bgColor,(0,s.bM)(n,l.K.text).textColor,"bg-opacity-20 dark:bg-opacity-25"):(0,c.q)("bg-tremor-brand-muted text-tremor-brand-emphasis","dark:bg-dark-tremor-brand-muted dark:text-dark-tremor-brand-emphasis"),u[h].paddingX,u[h].paddingY,u[h].fontSize,g)},w,y),o.createElement(i.Z,Object.assign({text:m},x)),b?o.createElement(b,{className:(0,c.q)(f("icon"),"shrink-0 -ml-1 mr-1.5",d[h].height,d[h].width)}):null,o.createElement("p",{className:(0,c.q)(f("text"),"text-sm whitespace-nowrap")},v))});p.displayName="Badge"},47323:function(e,t,n){"use strict";n.d(t,{Z:function(){return m}});var r=n(5853),o=n(2265),i=n(1526),a=n(7084),l=n(65954),c=n(1153),s=n(26898);let u={xs:{paddingX:"px-1.5",paddingY:"py-1.5"},sm:{paddingX:"px-1.5",paddingY:"py-1.5"},md:{paddingX:"px-2",paddingY:"py-2"},lg:{paddingX:"px-2",paddingY:"py-2"},xl:{paddingX:"px-2.5",paddingY:"py-2.5"}},d={xs:{height:"h-3",width:"w-3"},sm:{height:"h-5",width:"w-5"},md:{height:"h-5",width:"w-5"},lg:{height:"h-7",width:"w-7"},xl:{height:"h-9",width:"w-9"}},f={simple:{rounded:"",border:"",ring:"",shadow:""},light:{rounded:"rounded-tremor-default",border:"",ring:"",shadow:""},shadow:{rounded:"rounded-tremor-default",border:"border",ring:"",shadow:"shadow-tremor-card dark:shadow-dark-tremor-card"},solid:{rounded:"rounded-tremor-default",border:"border-2",ring:"ring-1",shadow:""},outlined:{rounded:"rounded-tremor-default",border:"border",ring:"ring-2",shadow:""}},p=(e,t)=>{switch(e){case"simple":return{textColor:t?(0,c.bM)(t,s.K.text).textColor:"text-tremor-brand dark:text-dark-tremor-brand",bgColor:"",borderColor:"",ringColor:""};case"light":return{textColor:t?(0,c.bM)(t,s.K.text).textColor:"text-tremor-brand dark:text-dark-tremor-brand",bgColor:t?(0,l.q)((0,c.bM)(t,s.K.background).bgColor,"bg-opacity-20"):"bg-tremor-brand-muted dark:bg-dark-tremor-brand-muted",borderColor:"",ringColor:""};case"shadow":return{textColor:t?(0,c.bM)(t,s.K.text).textColor:"text-tremor-brand 
dark:text-dark-tremor-brand",bgColor:t?(0,l.q)((0,c.bM)(t,s.K.background).bgColor,"bg-opacity-20"):"bg-tremor-background dark:bg-dark-tremor-background",borderColor:"border-tremor-border dark:border-dark-tremor-border",ringColor:""};case"solid":return{textColor:t?(0,c.bM)(t,s.K.text).textColor:"text-tremor-brand-inverted dark:text-dark-tremor-brand-inverted",bgColor:t?(0,l.q)((0,c.bM)(t,s.K.background).bgColor,"bg-opacity-20"):"bg-tremor-brand dark:bg-dark-tremor-brand",borderColor:"border-tremor-brand-inverted dark:border-dark-tremor-brand-inverted",ringColor:"ring-tremor-ring dark:ring-dark-tremor-ring"};case"outlined":return{textColor:t?(0,c.bM)(t,s.K.text).textColor:"text-tremor-brand dark:text-dark-tremor-brand",bgColor:t?(0,l.q)((0,c.bM)(t,s.K.background).bgColor,"bg-opacity-20"):"bg-tremor-background dark:bg-dark-tremor-background",borderColor:t?(0,c.bM)(t,s.K.ring).borderColor:"border-tremor-brand-subtle dark:border-dark-tremor-brand-subtle",ringColor:t?(0,l.q)((0,c.bM)(t,s.K.ring).ringColor,"ring-opacity-40"):"ring-tremor-brand-muted dark:ring-dark-tremor-brand-muted"}}},h=(0,c.fn)("Icon"),m=o.forwardRef((e,t)=>{let{icon:n,variant:s="simple",tooltip:m,size:g=a.u8.SM,color:v,className:y}=e,b=(0,r._T)(e,["icon","variant","tooltip","size","color","className"]),x=p(s,v),{tooltipProps:w,getReferenceProps:S}=(0,i.l)();return o.createElement("span",Object.assign({ref:(0,c.lq)([t,w.refs.setReference]),className:(0,l.q)(h("root"),"inline-flex flex-shrink-0 items-center",x.bgColor,x.textColor,x.borderColor,x.ringColor,f[s].rounded,f[s].border,f[s].shadow,f[s].ring,u[g].paddingX,u[g].paddingY,y)},S,b),o.createElement(i.Z,Object.assign({text:m},w)),o.createElement(n,{className:(0,l.q)(h("icon"),"shrink-0",d[g].height,d[g].width)}))});m.displayName="Icon"},53003:function(e,t,n){"use strict";let r,o,i;n.d(t,{Z:function(){return nF}});var a,l,c,s,u=n(5853),d=n(2265),f=n(54887),p=n(13323),h=n(64518),m=n(96822),g=n(40293);function v(){for(var e=arguments.length,t=Array(e),n=0;n(0,g.r)(...t),[...t])}var y=n(72238),b=n(93689);let x=(0,d.createContext)(!1);var w=n(61424),S=n(27847);let O=d.Fragment,E=d.Fragment,k=(0,d.createContext)(null),C=(0,d.createContext)(null);Object.assign((0,S.yV)(function(e,t){var n;let r,o,i=(0,d.useRef)(null),a=(0,b.T)((0,b.h)(e=>{i.current=e}),t),l=v(i),c=function(e){let t=(0,d.useContext)(x),n=(0,d.useContext)(k),r=v(e),[o,i]=(0,d.useState)(()=>{if(!t&&null!==n||w.O.isServer)return null;let e=null==r?void 0:r.getElementById("headlessui-portal-root");if(e)return e;if(null===r)return null;let o=r.createElement("div");return o.setAttribute("id","headlessui-portal-root"),r.body.appendChild(o)});return(0,d.useEffect)(()=>{null!==o&&(null!=r&&r.body.contains(o)||null==r||r.body.appendChild(o))},[o,r]),(0,d.useEffect)(()=>{t||null!==n&&i(n.current)},[n,i,t]),o}(i),[s]=(0,d.useState)(()=>{var e;return w.O.isServer?null:null!=(e=null==l?void 0:l.createElement("div"))?e:null}),u=(0,d.useContext)(C),g=(0,y.H)();return(0,h.e)(()=>{!c||!s||c.contains(s)||(s.setAttribute("data-headlessui-portal",""),c.appendChild(s))},[c,s]),(0,h.e)(()=>{if(s&&u)return u.register(s)},[u,s]),n=()=>{var e;c&&s&&(s instanceof 
Node&&c.contains(s)&&c.removeChild(s),c.childNodes.length<=0&&(null==(e=c.parentElement)||e.removeChild(c)))},r=(0,p.z)(n),o=(0,d.useRef)(!1),(0,d.useEffect)(()=>(o.current=!1,()=>{o.current=!0,(0,m.Y)(()=>{o.current&&r()})}),[r]),g&&c&&s?(0,f.createPortal)((0,S.sY)({ourProps:{ref:a},theirProps:e,defaultTag:O,name:"Portal"}),s):null}),{Group:(0,S.yV)(function(e,t){let{target:n,...r}=e,o={ref:(0,b.T)(t)};return d.createElement(k.Provider,{value:n},(0,S.sY)({ourProps:o,theirProps:r,defaultTag:E,name:"Popover.Group"}))})});var j=n(31948),P=n(17684),M=n(98505),A=n(80004),I=n(38198),T=n(3141),R=((r=R||{})[r.Forwards=0]="Forwards",r[r.Backwards=1]="Backwards",r);function N(){let e=(0,d.useRef)(0);return(0,T.s)("keydown",t=>{"Tab"===t.key&&(e.current=t.shiftKey?1:0)},!0),e}var _=n(37863),D=n(47634),L=n(37105),Z=n(24536),B=n(37388),z=((o=z||{})[o.Open=0]="Open",o[o.Closed=1]="Closed",o),F=((i=F||{})[i.TogglePopover=0]="TogglePopover",i[i.ClosePopover=1]="ClosePopover",i[i.SetButton=2]="SetButton",i[i.SetButtonId=3]="SetButtonId",i[i.SetPanel=4]="SetPanel",i[i.SetPanelId=5]="SetPanelId",i);let H={0:e=>{let t={...e,popoverState:(0,Z.E)(e.popoverState,{0:1,1:0})};return 0===t.popoverState&&(t.__demoMode=!1),t},1:e=>1===e.popoverState?e:{...e,popoverState:1},2:(e,t)=>e.button===t.button?e:{...e,button:t.button},3:(e,t)=>e.buttonId===t.buttonId?e:{...e,buttonId:t.buttonId},4:(e,t)=>e.panel===t.panel?e:{...e,panel:t.panel},5:(e,t)=>e.panelId===t.panelId?e:{...e,panelId:t.panelId}},q=(0,d.createContext)(null);function V(e){let t=(0,d.useContext)(q);if(null===t){let t=Error("<".concat(e," /> is missing a parent component."));throw Error.captureStackTrace&&Error.captureStackTrace(t,V),t}return t}q.displayName="PopoverContext";let U=(0,d.createContext)(null);function W(e){let t=(0,d.useContext)(U);if(null===t){let t=Error("<".concat(e," /> is missing a parent component."));throw Error.captureStackTrace&&Error.captureStackTrace(t,W),t}return t}U.displayName="PopoverAPIContext";let K=(0,d.createContext)(null);function $(){return(0,d.useContext)(K)}K.displayName="PopoverGroupContext";let G=(0,d.createContext)(null);function Y(e,t){return(0,Z.E)(t.type,H,e,t)}G.displayName="PopoverPanelContext";let X=S.AN.RenderStrategy|S.AN.Static,Q=S.AN.RenderStrategy|S.AN.Static,J=Object.assign((0,S.yV)(function(e,t){var n,r,o,i;let a,l,c,s,u,f;let{__demoMode:h=!1,...m}=e,g=(0,d.useRef)(null),y=(0,b.T)(t,(0,b.h)(e=>{g.current=e})),x=(0,d.useRef)([]),w=(0,d.useReducer)(Y,{__demoMode:h,popoverState:h?0:1,buttons:x,button:null,buttonId:null,panel:null,panelId:null,beforePanelSentinel:(0,d.createRef)(),afterPanelSentinel:(0,d.createRef)()}),[{popoverState:O,button:E,buttonId:k,panel:P,panelId:A,beforePanelSentinel:T,afterPanelSentinel:R},N]=w,D=v(null!=(n=g.current)?n:E),B=(0,d.useMemo)(()=>{if(!E||!P)return!1;for(let e of document.querySelectorAll("body > *"))if(Number(null==e?void 0:e.contains(E))^Number(null==e?void 0:e.contains(P)))return!0;let e=(0,L.GO)(),t=e.indexOf(E),n=(t+e.length-1)%e.length,r=(t+1)%e.length,o=e[n],i=e[r];return!P.contains(o)&&!P.contains(i)},[E,P]),z=(0,j.E)(k),F=(0,j.E)(A),H=(0,d.useMemo)(()=>({buttonId:z,panelId:F,close:()=>N({type:1})}),[z,F,N]),V=$(),W=null==V?void 0:V.registerPopover,K=(0,p.z)(()=>{var e;return null!=(e=null==V?void 0:V.isFocusWithinPopoverGroup())?e:(null==D?void 0:D.activeElement)&&((null==E?void 0:E.contains(D.activeElement))||(null==P?void 0:P.contains(D.activeElement)))});(0,d.useEffect)(()=>null==W?void 
0:W(H),[W,H]);let[X,Q]=(a=(0,d.useContext)(C),l=(0,d.useRef)([]),c=(0,p.z)(e=>(l.current.push(e),a&&a.register(e),()=>s(e))),s=(0,p.z)(e=>{let t=l.current.indexOf(e);-1!==t&&l.current.splice(t,1),a&&a.unregister(e)}),u=(0,d.useMemo)(()=>({register:c,unregister:s,portals:l}),[c,s,l]),[l,(0,d.useMemo)(()=>function(e){let{children:t}=e;return d.createElement(C.Provider,{value:u},t)},[u])]),J=function(){var e;let{defaultContainers:t=[],portals:n,mainTreeNodeRef:r}=arguments.length>0&&void 0!==arguments[0]?arguments[0]:{},o=(0,d.useRef)(null!=(e=null==r?void 0:r.current)?e:null),i=v(o),a=(0,p.z)(()=>{var e,r,a;let l=[];for(let e of t)null!==e&&(e instanceof HTMLElement?l.push(e):"current"in e&&e.current instanceof HTMLElement&&l.push(e.current));if(null!=n&&n.current)for(let e of n.current)l.push(e);for(let t of null!=(e=null==i?void 0:i.querySelectorAll("html > *, body > *"))?e:[])t!==document.body&&t!==document.head&&t instanceof HTMLElement&&"headlessui-portal-root"!==t.id&&(t.contains(o.current)||t.contains(null==(a=null==(r=o.current)?void 0:r.getRootNode())?void 0:a.host)||l.some(e=>t.contains(e))||l.push(t));return l});return{resolveContainers:a,contains:(0,p.z)(e=>a().some(t=>t.contains(e))),mainTreeNodeRef:o,MainTreeNode:(0,d.useMemo)(()=>function(){return null!=r?null:d.createElement(I._,{features:I.A.Hidden,ref:o})},[o,r])}}({mainTreeNodeRef:null==V?void 0:V.mainTreeNodeRef,portals:X,defaultContainers:[E,P]});r=null==D?void 0:D.defaultView,o="focus",i=e=>{var t,n,r,o;e.target!==window&&e.target instanceof HTMLElement&&0===O&&(K()||E&&P&&(J.contains(e.target)||null!=(n=null==(t=T.current)?void 0:t.contains)&&n.call(t,e.target)||null!=(o=null==(r=R.current)?void 0:r.contains)&&o.call(r,e.target)||N({type:1})))},f=(0,j.E)(i),(0,d.useEffect)(()=>{function e(e){f.current(e)}return(r=null!=r?r:window).addEventListener(o,e,!0),()=>r.removeEventListener(o,e,!0)},[r,o,!0]),(0,M.O)(J.resolveContainers,(e,t)=>{N({type:1}),(0,L.sP)(t,L.tJ.Loose)||(e.preventDefault(),null==E||E.focus())},0===O);let ee=(0,p.z)(e=>{N({type:1});let t=e?e instanceof HTMLElement?e:"current"in e&&e.current instanceof HTMLElement?e.current:E:E;null==t||t.focus()}),et=(0,d.useMemo)(()=>({close:ee,isPortalled:B}),[ee,B]),en=(0,d.useMemo)(()=>({open:0===O,close:ee}),[O,ee]);return d.createElement(G.Provider,{value:null},d.createElement(q.Provider,{value:w},d.createElement(U.Provider,{value:et},d.createElement(_.up,{value:(0,Z.E)(O,{0:_.ZM.Open,1:_.ZM.Closed})},d.createElement(Q,null,(0,S.sY)({ourProps:{ref:y},theirProps:m,slot:en,defaultTag:"div",name:"Popover"}),d.createElement(J.MainTreeNode,null))))))}),{Button:(0,S.yV)(function(e,t){let n=(0,P.M)(),{id:r="headlessui-popover-button-".concat(n),...o}=e,[i,a]=V("Popover.Button"),{isPortalled:l}=W("Popover.Button"),c=(0,d.useRef)(null),s="headlessui-focus-sentinel-".concat((0,P.M)()),u=$(),f=null==u?void 0:u.closeOthers,h=null!==(0,d.useContext)(G);(0,d.useEffect)(()=>{if(!h)return a({type:3,buttonId:r}),()=>{a({type:3,buttonId:null})}},[h,r,a]);let[m]=(0,d.useState)(()=>Symbol()),g=(0,b.T)(c,t,h?null:e=>{if(e)i.buttons.current.push(m);else{let e=i.buttons.current.indexOf(m);-1!==e&&i.buttons.current.splice(e,1)}i.buttons.current.length>1&&console.warn("You are already using a but only 1 is supported."),e&&a({type:2,button:e})}),y=(0,b.T)(c,t),x=v(c),w=(0,p.z)(e=>{var t,n,r;if(h){if(1===i.popoverState)return;switch(e.key){case B.R.Space:case B.R.Enter:e.preventDefault(),null==(n=(t=e.target).click)||n.call(t),a({type:1}),null==(r=i.button)||r.focus()}}else 
switch(e.key){case B.R.Space:case B.R.Enter:e.preventDefault(),e.stopPropagation(),1===i.popoverState&&(null==f||f(i.buttonId)),a({type:0});break;case B.R.Escape:if(0!==i.popoverState)return null==f?void 0:f(i.buttonId);if(!c.current||null!=x&&x.activeElement&&!c.current.contains(x.activeElement))return;e.preventDefault(),e.stopPropagation(),a({type:1})}}),O=(0,p.z)(e=>{h||e.key===B.R.Space&&e.preventDefault()}),E=(0,p.z)(t=>{var n,r;(0,D.P)(t.currentTarget)||e.disabled||(h?(a({type:1}),null==(n=i.button)||n.focus()):(t.preventDefault(),t.stopPropagation(),1===i.popoverState&&(null==f||f(i.buttonId)),a({type:0}),null==(r=i.button)||r.focus()))}),k=(0,p.z)(e=>{e.preventDefault(),e.stopPropagation()}),C=0===i.popoverState,j=(0,d.useMemo)(()=>({open:C}),[C]),M=(0,A.f)(e,c),T=h?{ref:y,type:M,onKeyDown:w,onClick:E}:{ref:g,id:i.buttonId,type:M,"aria-expanded":0===i.popoverState,"aria-controls":i.panel?i.panelId:void 0,onKeyDown:w,onKeyUp:O,onClick:E,onMouseDown:k},_=N(),z=(0,p.z)(()=>{let e=i.panel;e&&(0,Z.E)(_.current,{[R.Forwards]:()=>(0,L.jA)(e,L.TO.First),[R.Backwards]:()=>(0,L.jA)(e,L.TO.Last)})===L.fE.Error&&(0,L.jA)((0,L.GO)().filter(e=>"true"!==e.dataset.headlessuiFocusGuard),(0,Z.E)(_.current,{[R.Forwards]:L.TO.Next,[R.Backwards]:L.TO.Previous}),{relativeTo:i.button})});return d.createElement(d.Fragment,null,(0,S.sY)({ourProps:T,theirProps:o,slot:j,defaultTag:"button",name:"Popover.Button"}),C&&!h&&l&&d.createElement(I._,{id:s,features:I.A.Focusable,"data-headlessui-focus-guard":!0,as:"button",type:"button",onFocus:z}))}),Overlay:(0,S.yV)(function(e,t){let n=(0,P.M)(),{id:r="headlessui-popover-overlay-".concat(n),...o}=e,[{popoverState:i},a]=V("Popover.Overlay"),l=(0,b.T)(t),c=(0,_.oJ)(),s=null!==c?(c&_.ZM.Open)===_.ZM.Open:0===i,u=(0,p.z)(e=>{if((0,D.P)(e.currentTarget))return e.preventDefault();a({type:1})}),f=(0,d.useMemo)(()=>({open:0===i}),[i]);return(0,S.sY)({ourProps:{ref:l,id:r,"aria-hidden":!0,onClick:u},theirProps:o,slot:f,defaultTag:"div",features:X,visible:s,name:"Popover.Overlay"})}),Panel:(0,S.yV)(function(e,t){let n=(0,P.M)(),{id:r="headlessui-popover-panel-".concat(n),focus:o=!1,...i}=e,[a,l]=V("Popover.Panel"),{close:c,isPortalled:s}=W("Popover.Panel"),u="headlessui-focus-sentinel-before-".concat((0,P.M)()),f="headlessui-focus-sentinel-after-".concat((0,P.M)()),m=(0,d.useRef)(null),g=(0,b.T)(m,t,e=>{l({type:4,panel:e})}),y=v(m),x=(0,S.Y2)();(0,h.e)(()=>(l({type:5,panelId:r}),()=>{l({type:5,panelId:null})}),[r,l]);let w=(0,_.oJ)(),O=null!==w?(w&_.ZM.Open)===_.ZM.Open:0===a.popoverState,E=(0,p.z)(e=>{var t;if(e.key===B.R.Escape){if(0!==a.popoverState||!m.current||null!=y&&y.activeElement&&!m.current.contains(y.activeElement))return;e.preventDefault(),e.stopPropagation(),l({type:1}),null==(t=a.button)||t.focus()}});(0,d.useEffect)(()=>{var t;e.static||1===a.popoverState&&(null==(t=e.unmount)||t)&&l({type:4,panel:null})},[a.popoverState,e.unmount,e.static,l]),(0,d.useEffect)(()=>{if(a.__demoMode||!o||0!==a.popoverState||!m.current)return;let e=null==y?void 0:y.activeElement;m.current.contains(e)||(0,L.jA)(m.current,L.TO.First)},[a.__demoMode,o,m,a.popoverState]);let k=(0,d.useMemo)(()=>({open:0===a.popoverState,close:c}),[a,c]),C={ref:g,id:r,onKeyDown:E,onBlur:o&&0===a.popoverState?e=>{var t,n,r,o,i;let c=e.relatedTarget;c&&m.current&&(null!=(t=m.current)&&t.contains(c)||(l({type:1}),(null!=(r=null==(n=a.beforePanelSentinel.current)?void 0:n.contains)&&r.call(n,c)||null!=(i=null==(o=a.afterPanelSentinel.current)?void 
0:o.contains)&&i.call(o,c))&&c.focus({preventScroll:!0})))}:void 0,tabIndex:-1},j=N(),M=(0,p.z)(()=>{let e=m.current;e&&(0,Z.E)(j.current,{[R.Forwards]:()=>{var t;(0,L.jA)(e,L.TO.First)===L.fE.Error&&(null==(t=a.afterPanelSentinel.current)||t.focus())},[R.Backwards]:()=>{var e;null==(e=a.button)||e.focus({preventScroll:!0})}})}),A=(0,p.z)(()=>{let e=m.current;e&&(0,Z.E)(j.current,{[R.Forwards]:()=>{var e;if(!a.button)return;let t=(0,L.GO)(),n=t.indexOf(a.button),r=t.slice(0,n+1),o=[...t.slice(n+1),...r];for(let t of o.slice())if("true"===t.dataset.headlessuiFocusGuard||null!=(e=a.panel)&&e.contains(t)){let e=o.indexOf(t);-1!==e&&o.splice(e,1)}(0,L.jA)(o,L.TO.First,{sorted:!1})},[R.Backwards]:()=>{var t;(0,L.jA)(e,L.TO.Previous)===L.fE.Error&&(null==(t=a.button)||t.focus())}})});return d.createElement(G.Provider,{value:r},O&&s&&d.createElement(I._,{id:u,ref:a.beforePanelSentinel,features:I.A.Focusable,"data-headlessui-focus-guard":!0,as:"button",type:"button",onFocus:M}),(0,S.sY)({mergeRefs:x,ourProps:C,theirProps:i,slot:k,defaultTag:"div",features:Q,visible:O,name:"Popover.Panel"}),O&&s&&d.createElement(I._,{id:f,ref:a.afterPanelSentinel,features:I.A.Focusable,"data-headlessui-focus-guard":!0,as:"button",type:"button",onFocus:A}))}),Group:(0,S.yV)(function(e,t){let n;let r=(0,d.useRef)(null),o=(0,b.T)(r,t),[i,a]=(0,d.useState)([]),l={mainTreeNodeRef:n=(0,d.useRef)(null),MainTreeNode:(0,d.useMemo)(()=>function(){return d.createElement(I._,{features:I.A.Hidden,ref:n})},[n])},c=(0,p.z)(e=>{a(t=>{let n=t.indexOf(e);if(-1!==n){let e=t.slice();return e.splice(n,1),e}return t})}),s=(0,p.z)(e=>(a(t=>[...t,e]),()=>c(e))),u=(0,p.z)(()=>{var e;let t=(0,g.r)(r);if(!t)return!1;let n=t.activeElement;return!!(null!=(e=r.current)&&e.contains(n))||i.some(e=>{var r,o;return(null==(r=t.getElementById(e.buttonId.current))?void 0:r.contains(n))||(null==(o=t.getElementById(e.panelId.current))?void 0:o.contains(n))})}),f=(0,p.z)(e=>{for(let t of i)t.buttonId.current!==e&&t.close()}),h=(0,d.useMemo)(()=>({registerPopover:s,unregisterPopover:c,isFocusWithinPopoverGroup:u,closeOthers:f,mainTreeNodeRef:l.mainTreeNodeRef}),[s,c,u,f,l.mainTreeNodeRef]),m=(0,d.useMemo)(()=>({}),[]);return d.createElement(K.Provider,{value:h},(0,S.sY)({ourProps:{ref:o},theirProps:e,slot:m,defaultTag:"div",name:"Popover.Group"}),d.createElement(l.MainTreeNode,null))})});var ee=n(33044),et=n(28517);let en=e=>{var t=(0,u._T)(e,[]);return d.createElement("svg",Object.assign({},t,{xmlns:"http://www.w3.org/2000/svg",viewBox:"0 0 20 20",fill:"currentColor"}),d.createElement("path",{fillRule:"evenodd",d:"M6 2a1 1 0 00-1 1v1H4a2 2 0 00-2 2v10a2 2 0 002 2h12a2 2 0 002-2V6a2 2 0 00-2-2h-1V3a1 1 0 10-2 0v1H7V3a1 1 0 00-1-1zm0 5a1 1 0 000 2h8a1 1 0 100-2H6z",clipRule:"evenodd"}))};var er=n(4537),eo=n(99735),ei=n(7656);function ea(e){(0,ei.Z)(1,arguments);var t=(0,eo.Z)(e);return t.setHours(0,0,0,0),t}function el(){return ea(Date.now())}function ec(e){(0,ei.Z)(1,arguments);var t=(0,eo.Z)(e);return t.setDate(1),t.setHours(0,0,0,0),t}var es=n(65954),eu=n(96398),ed=n(41154);function ef(e){var t,n;if((0,ei.Z)(1,arguments),e&&"function"==typeof e.forEach)t=e;else{if("object"!==(0,ed.Z)(e)||null===e)return new Date(NaN);t=Array.prototype.slice.call(e)}return t.forEach(function(e){var t=(0,eo.Z)(e);(void 0===n||nt||isNaN(t.getDate()))&&(n=t)}),n||new Date(NaN)}var eh=n(25721),em=n(47869);function eg(e,t){(0,ei.Z)(2,arguments);var n=(0,em.Z)(t);return(0,eh.Z)(e,-n)}var ev=n(55463);function ey(e,t){if((0,ei.Z)(2,arguments),!t||"object"!==(0,ed.Z)(t))return 
new Date(NaN);var n=t.years?(0,em.Z)(t.years):0,r=t.months?(0,em.Z)(t.months):0,o=t.weeks?(0,em.Z)(t.weeks):0,i=t.days?(0,em.Z)(t.days):0,a=t.hours?(0,em.Z)(t.hours):0,l=t.minutes?(0,em.Z)(t.minutes):0,c=t.seconds?(0,em.Z)(t.seconds):0;return new Date(eg(function(e,t){(0,ei.Z)(2,arguments);var n=(0,em.Z)(t);return(0,ev.Z)(e,-n)}(e,r+12*n),i+7*o).getTime()-1e3*(c+60*(l+60*a)))}function eb(e){(0,ei.Z)(1,arguments);var t=(0,eo.Z)(e),n=new Date(0);return n.setFullYear(t.getFullYear(),0,1),n.setHours(0,0,0,0),n}function ex(e){return(0,ei.Z)(1,arguments),e instanceof Date||"object"===(0,ed.Z)(e)&&"[object Date]"===Object.prototype.toString.call(e)}function ew(e){(0,ei.Z)(1,arguments);var t=(0,eo.Z)(e),n=t.getUTCDay();return t.setUTCDate(t.getUTCDate()-((n<1?7:0)+n-1)),t.setUTCHours(0,0,0,0),t}function eS(e){(0,ei.Z)(1,arguments);var t=(0,eo.Z)(e),n=t.getUTCFullYear(),r=new Date(0);r.setUTCFullYear(n+1,0,4),r.setUTCHours(0,0,0,0);var o=ew(r),i=new Date(0);i.setUTCFullYear(n,0,4),i.setUTCHours(0,0,0,0);var a=ew(i);return t.getTime()>=o.getTime()?n+1:t.getTime()>=a.getTime()?n:n-1}var eO={};function eE(e,t){(0,ei.Z)(1,arguments);var n,r,o,i,a,l,c,s,u=(0,em.Z)(null!==(n=null!==(r=null!==(o=null!==(i=null==t?void 0:t.weekStartsOn)&&void 0!==i?i:null==t?void 0:null===(a=t.locale)||void 0===a?void 0:null===(l=a.options)||void 0===l?void 0:l.weekStartsOn)&&void 0!==o?o:eO.weekStartsOn)&&void 0!==r?r:null===(c=eO.locale)||void 0===c?void 0:null===(s=c.options)||void 0===s?void 0:s.weekStartsOn)&&void 0!==n?n:0);if(!(u>=0&&u<=6))throw RangeError("weekStartsOn must be between 0 and 6 inclusively");var d=(0,eo.Z)(e),f=d.getUTCDay();return d.setUTCDate(d.getUTCDate()-((f=1&&f<=7))throw RangeError("firstWeekContainsDate must be between 1 and 7 inclusively");var p=new Date(0);p.setUTCFullYear(d+1,0,f),p.setUTCHours(0,0,0,0);var h=eE(p,t),m=new Date(0);m.setUTCFullYear(d,0,f),m.setUTCHours(0,0,0,0);var g=eE(m,t);return u.getTime()>=h.getTime()?d+1:u.getTime()>=g.getTime()?d:d-1}function eC(e,t){for(var n=Math.abs(e).toString();n.length0?n:1-n;return eC("yy"===t?r%100:r,t.length)},M:function(e,t){var n=e.getUTCMonth();return"M"===t?String(n+1):eC(n+1,2)},d:function(e,t){return eC(e.getUTCDate(),t.length)},h:function(e,t){return eC(e.getUTCHours()%12||12,t.length)},H:function(e,t){return eC(e.getUTCHours(),t.length)},m:function(e,t){return eC(e.getUTCMinutes(),t.length)},s:function(e,t){return eC(e.getUTCSeconds(),t.length)},S:function(e,t){var n=t.length;return eC(Math.floor(e.getUTCMilliseconds()*Math.pow(10,n-3)),t.length)}},eP={midnight:"midnight",noon:"noon",morning:"morning",afternoon:"afternoon",evening:"evening",night:"night"};function eM(e,t){var n=e>0?"-":"+",r=Math.abs(e),o=Math.floor(r/60),i=r%60;return 0===i?n+String(o):n+String(o)+(t||"")+eC(i,2)}function eA(e,t){return e%60==0?(e>0?"-":"+")+eC(Math.abs(e)/60,2):eI(e,t)}function eI(e,t){var n=Math.abs(e);return(e>0?"-":"+")+eC(Math.floor(n/60),2)+(t||"")+eC(n%60,2)}var eT={G:function(e,t,n){var r=e.getUTCFullYear()>0?1:0;switch(t){case"G":case"GG":case"GGG":return n.era(r,{width:"abbreviated"});case"GGGGG":return n.era(r,{width:"narrow"});default:return n.era(r,{width:"wide"})}},y:function(e,t,n){if("yo"===t){var r=e.getUTCFullYear();return n.ordinalNumber(r>0?r:1-r,{unit:"year"})}return ej.y(e,t)},Y:function(e,t,n,r){var o=ek(e,r),i=o>0?o:1-o;return"YY"===t?eC(i%100,2):"Yo"===t?n.ordinalNumber(i,{unit:"year"}):eC(i,t.length)},R:function(e,t){return eC(eS(e),t.length)},u:function(e,t){return eC(e.getUTCFullYear(),t.length)},Q:function(e,t,n){var 
r=Math.ceil((e.getUTCMonth()+1)/3);switch(t){case"Q":return String(r);case"QQ":return eC(r,2);case"Qo":return n.ordinalNumber(r,{unit:"quarter"});case"QQQ":return n.quarter(r,{width:"abbreviated",context:"formatting"});case"QQQQQ":return n.quarter(r,{width:"narrow",context:"formatting"});default:return n.quarter(r,{width:"wide",context:"formatting"})}},q:function(e,t,n){var r=Math.ceil((e.getUTCMonth()+1)/3);switch(t){case"q":return String(r);case"qq":return eC(r,2);case"qo":return n.ordinalNumber(r,{unit:"quarter"});case"qqq":return n.quarter(r,{width:"abbreviated",context:"standalone"});case"qqqqq":return n.quarter(r,{width:"narrow",context:"standalone"});default:return n.quarter(r,{width:"wide",context:"standalone"})}},M:function(e,t,n){var r=e.getUTCMonth();switch(t){case"M":case"MM":return ej.M(e,t);case"Mo":return n.ordinalNumber(r+1,{unit:"month"});case"MMM":return n.month(r,{width:"abbreviated",context:"formatting"});case"MMMMM":return n.month(r,{width:"narrow",context:"formatting"});default:return n.month(r,{width:"wide",context:"formatting"})}},L:function(e,t,n){var r=e.getUTCMonth();switch(t){case"L":return String(r+1);case"LL":return eC(r+1,2);case"Lo":return n.ordinalNumber(r+1,{unit:"month"});case"LLL":return n.month(r,{width:"abbreviated",context:"standalone"});case"LLLLL":return n.month(r,{width:"narrow",context:"standalone"});default:return n.month(r,{width:"wide",context:"standalone"})}},w:function(e,t,n,r){var o=function(e,t){(0,ei.Z)(1,arguments);var n=(0,eo.Z)(e);return Math.round((eE(n,t).getTime()-(function(e,t){(0,ei.Z)(1,arguments);var n,r,o,i,a,l,c,s,u=(0,em.Z)(null!==(n=null!==(r=null!==(o=null!==(i=null==t?void 0:t.firstWeekContainsDate)&&void 0!==i?i:null==t?void 0:null===(a=t.locale)||void 0===a?void 0:null===(l=a.options)||void 0===l?void 0:l.firstWeekContainsDate)&&void 0!==o?o:eO.firstWeekContainsDate)&&void 0!==r?r:null===(c=eO.locale)||void 0===c?void 0:null===(s=c.options)||void 0===s?void 0:s.firstWeekContainsDate)&&void 0!==n?n:1),d=ek(e,t),f=new Date(0);return f.setUTCFullYear(d,0,u),f.setUTCHours(0,0,0,0),eE(f,t)})(n,t).getTime())/6048e5)+1}(e,r);return"wo"===t?n.ordinalNumber(o,{unit:"week"}):eC(o,t.length)},I:function(e,t,n){var r=function(e){(0,ei.Z)(1,arguments);var t=(0,eo.Z)(e);return Math.round((ew(t).getTime()-(function(e){(0,ei.Z)(1,arguments);var t=eS(e),n=new Date(0);return n.setUTCFullYear(t,0,4),n.setUTCHours(0,0,0,0),ew(n)})(t).getTime())/6048e5)+1}(e);return"Io"===t?n.ordinalNumber(r,{unit:"week"}):eC(r,t.length)},d:function(e,t,n){return"do"===t?n.ordinalNumber(e.getUTCDate(),{unit:"date"}):ej.d(e,t)},D:function(e,t,n){var r=function(e){(0,ei.Z)(1,arguments);var t=(0,eo.Z)(e),n=t.getTime();return t.setUTCMonth(0,1),t.setUTCHours(0,0,0,0),Math.floor((n-t.getTime())/864e5)+1}(e);return"Do"===t?n.ordinalNumber(r,{unit:"dayOfYear"}):eC(r,t.length)},E:function(e,t,n){var r=e.getUTCDay();switch(t){case"E":case"EE":case"EEE":return n.day(r,{width:"abbreviated",context:"formatting"});case"EEEEE":return n.day(r,{width:"narrow",context:"formatting"});case"EEEEEE":return n.day(r,{width:"short",context:"formatting"});default:return n.day(r,{width:"wide",context:"formatting"})}},e:function(e,t,n,r){var o=e.getUTCDay(),i=(o-r.weekStartsOn+8)%7||7;switch(t){case"e":return String(i);case"ee":return eC(i,2);case"eo":return n.ordinalNumber(i,{unit:"day"});case"eee":return n.day(o,{width:"abbreviated",context:"formatting"});case"eeeee":return n.day(o,{width:"narrow",context:"formatting"});case"eeeeee":return 
n.day(o,{width:"short",context:"formatting"});default:return n.day(o,{width:"wide",context:"formatting"})}},c:function(e,t,n,r){var o=e.getUTCDay(),i=(o-r.weekStartsOn+8)%7||7;switch(t){case"c":return String(i);case"cc":return eC(i,t.length);case"co":return n.ordinalNumber(i,{unit:"day"});case"ccc":return n.day(o,{width:"abbreviated",context:"standalone"});case"ccccc":return n.day(o,{width:"narrow",context:"standalone"});case"cccccc":return n.day(o,{width:"short",context:"standalone"});default:return n.day(o,{width:"wide",context:"standalone"})}},i:function(e,t,n){var r=e.getUTCDay(),o=0===r?7:r;switch(t){case"i":return String(o);case"ii":return eC(o,t.length);case"io":return n.ordinalNumber(o,{unit:"day"});case"iii":return n.day(r,{width:"abbreviated",context:"formatting"});case"iiiii":return n.day(r,{width:"narrow",context:"formatting"});case"iiiiii":return n.day(r,{width:"short",context:"formatting"});default:return n.day(r,{width:"wide",context:"formatting"})}},a:function(e,t,n){var r=e.getUTCHours()/12>=1?"pm":"am";switch(t){case"a":case"aa":return n.dayPeriod(r,{width:"abbreviated",context:"formatting"});case"aaa":return n.dayPeriod(r,{width:"abbreviated",context:"formatting"}).toLowerCase();case"aaaaa":return n.dayPeriod(r,{width:"narrow",context:"formatting"});default:return n.dayPeriod(r,{width:"wide",context:"formatting"})}},b:function(e,t,n){var r,o=e.getUTCHours();switch(r=12===o?eP.noon:0===o?eP.midnight:o/12>=1?"pm":"am",t){case"b":case"bb":return n.dayPeriod(r,{width:"abbreviated",context:"formatting"});case"bbb":return n.dayPeriod(r,{width:"abbreviated",context:"formatting"}).toLowerCase();case"bbbbb":return n.dayPeriod(r,{width:"narrow",context:"formatting"});default:return n.dayPeriod(r,{width:"wide",context:"formatting"})}},B:function(e,t,n){var r,o=e.getUTCHours();switch(r=o>=17?eP.evening:o>=12?eP.afternoon:o>=4?eP.morning:eP.night,t){case"B":case"BB":case"BBB":return n.dayPeriod(r,{width:"abbreviated",context:"formatting"});case"BBBBB":return n.dayPeriod(r,{width:"narrow",context:"formatting"});default:return n.dayPeriod(r,{width:"wide",context:"formatting"})}},h:function(e,t,n){if("ho"===t){var r=e.getUTCHours()%12;return 0===r&&(r=12),n.ordinalNumber(r,{unit:"hour"})}return ej.h(e,t)},H:function(e,t,n){return"Ho"===t?n.ordinalNumber(e.getUTCHours(),{unit:"hour"}):ej.H(e,t)},K:function(e,t,n){var r=e.getUTCHours()%12;return"Ko"===t?n.ordinalNumber(r,{unit:"hour"}):eC(r,t.length)},k:function(e,t,n){var r=e.getUTCHours();return(0===r&&(r=24),"ko"===t)?n.ordinalNumber(r,{unit:"hour"}):eC(r,t.length)},m:function(e,t,n){return"mo"===t?n.ordinalNumber(e.getUTCMinutes(),{unit:"minute"}):ej.m(e,t)},s:function(e,t,n){return"so"===t?n.ordinalNumber(e.getUTCSeconds(),{unit:"second"}):ej.s(e,t)},S:function(e,t){return ej.S(e,t)},X:function(e,t,n,r){var o=(r._originalDate||e).getTimezoneOffset();if(0===o)return"Z";switch(t){case"X":return eA(o);case"XXXX":case"XX":return eI(o);default:return eI(o,":")}},x:function(e,t,n,r){var o=(r._originalDate||e).getTimezoneOffset();switch(t){case"x":return eA(o);case"xxxx":case"xx":return eI(o);default:return eI(o,":")}},O:function(e,t,n,r){var o=(r._originalDate||e).getTimezoneOffset();switch(t){case"O":case"OO":case"OOO":return"GMT"+eM(o,":");default:return"GMT"+eI(o,":")}},z:function(e,t,n,r){var o=(r._originalDate||e).getTimezoneOffset();switch(t){case"z":case"zz":case"zzz":return"GMT"+eM(o,":");default:return"GMT"+eI(o,":")}},t:function(e,t,n,r){return 
eC(Math.floor((r._originalDate||e).getTime()/1e3),t.length)},T:function(e,t,n,r){return eC((r._originalDate||e).getTime(),t.length)}},eR=function(e,t){switch(e){case"P":return t.date({width:"short"});case"PP":return t.date({width:"medium"});case"PPP":return t.date({width:"long"});default:return t.date({width:"full"})}},eN=function(e,t){switch(e){case"p":return t.time({width:"short"});case"pp":return t.time({width:"medium"});case"ppp":return t.time({width:"long"});default:return t.time({width:"full"})}},e_={p:eN,P:function(e,t){var n,r=e.match(/(P+)(p+)?/)||[],o=r[1],i=r[2];if(!i)return eR(e,t);switch(o){case"P":n=t.dateTime({width:"short"});break;case"PP":n=t.dateTime({width:"medium"});break;case"PPP":n=t.dateTime({width:"long"});break;default:n=t.dateTime({width:"full"})}return n.replace("{{date}}",eR(o,t)).replace("{{time}}",eN(i,t))}};function eD(e){var t=new Date(Date.UTC(e.getFullYear(),e.getMonth(),e.getDate(),e.getHours(),e.getMinutes(),e.getSeconds(),e.getMilliseconds()));return t.setUTCFullYear(e.getFullYear()),e.getTime()-t.getTime()}var eL=["D","DD"],eZ=["YY","YYYY"];function eB(e,t,n){if("YYYY"===e)throw RangeError("Use `yyyy` instead of `YYYY` (in `".concat(t,"`) for formatting years to the input `").concat(n,"`; see: https://github.com/date-fns/date-fns/blob/master/docs/unicodeTokens.md"));if("YY"===e)throw RangeError("Use `yy` instead of `YY` (in `".concat(t,"`) for formatting years to the input `").concat(n,"`; see: https://github.com/date-fns/date-fns/blob/master/docs/unicodeTokens.md"));if("D"===e)throw RangeError("Use `d` instead of `D` (in `".concat(t,"`) for formatting days of the month to the input `").concat(n,"`; see: https://github.com/date-fns/date-fns/blob/master/docs/unicodeTokens.md"));if("DD"===e)throw RangeError("Use `dd` instead of `DD` (in `".concat(t,"`) for formatting days of the month to the input `").concat(n,"`; see: https://github.com/date-fns/date-fns/blob/master/docs/unicodeTokens.md"))}var ez={lessThanXSeconds:{one:"less than a second",other:"less than {{count}} seconds"},xSeconds:{one:"1 second",other:"{{count}} seconds"},halfAMinute:"half a minute",lessThanXMinutes:{one:"less than a minute",other:"less than {{count}} minutes"},xMinutes:{one:"1 minute",other:"{{count}} minutes"},aboutXHours:{one:"about 1 hour",other:"about {{count}} hours"},xHours:{one:"1 hour",other:"{{count}} hours"},xDays:{one:"1 day",other:"{{count}} days"},aboutXWeeks:{one:"about 1 week",other:"about {{count}} weeks"},xWeeks:{one:"1 week",other:"{{count}} weeks"},aboutXMonths:{one:"about 1 month",other:"about {{count}} months"},xMonths:{one:"1 month",other:"{{count}} months"},aboutXYears:{one:"about 1 year",other:"about {{count}} years"},xYears:{one:"1 year",other:"{{count}} years"},overXYears:{one:"over 1 year",other:"over {{count}} years"},almostXYears:{one:"almost 1 year",other:"almost {{count}} years"}};function eF(e){return function(){var t=arguments.length>0&&void 0!==arguments[0]?arguments[0]:{},n=t.width?String(t.width):e.defaultWidth;return e.formats[n]||e.formats[e.defaultWidth]}}var eH={date:eF({formats:{full:"EEEE, MMMM do, y",long:"MMMM do, y",medium:"MMM d, y",short:"MM/dd/yyyy"},defaultWidth:"full"}),time:eF({formats:{full:"h:mm:ss a zzzz",long:"h:mm:ss a z",medium:"h:mm:ss a",short:"h:mm a"},defaultWidth:"full"}),dateTime:eF({formats:{full:"{{date}} 'at' {{time}}",long:"{{date}} 'at' {{time}}",medium:"{{date}}, {{time}}",short:"{{date}}, {{time}}"},defaultWidth:"full"})},eq={lastWeek:"'last' eeee 'at' p",yesterday:"'yesterday at' p",today:"'today at' 
p",tomorrow:"'tomorrow at' p",nextWeek:"eeee 'at' p",other:"P"};function eV(e){return function(t,n){var r;if("formatting"===(null!=n&&n.context?String(n.context):"standalone")&&e.formattingValues){var o=e.defaultFormattingWidth||e.defaultWidth,i=null!=n&&n.width?String(n.width):o;r=e.formattingValues[i]||e.formattingValues[o]}else{var a=e.defaultWidth,l=null!=n&&n.width?String(n.width):e.defaultWidth;r=e.values[l]||e.values[a]}return r[e.argumentCallback?e.argumentCallback(t):t]}}function eU(e){return function(t){var n,r=arguments.length>1&&void 0!==arguments[1]?arguments[1]:{},o=r.width,i=o&&e.matchPatterns[o]||e.matchPatterns[e.defaultMatchWidth],a=t.match(i);if(!a)return null;var l=a[0],c=o&&e.parsePatterns[o]||e.parsePatterns[e.defaultParseWidth],s=Array.isArray(c)?function(e,t){for(var n=0;n0?"in "+r:r+" ago":r},formatLong:eH,formatRelative:function(e,t,n,r){return eq[e]},localize:{ordinalNumber:function(e,t){var n=Number(e),r=n%100;if(r>20||r<10)switch(r%10){case 1:return n+"st";case 2:return n+"nd";case 3:return n+"rd"}return n+"th"},era:eV({values:{narrow:["B","A"],abbreviated:["BC","AD"],wide:["Before Christ","Anno Domini"]},defaultWidth:"wide"}),quarter:eV({values:{narrow:["1","2","3","4"],abbreviated:["Q1","Q2","Q3","Q4"],wide:["1st quarter","2nd quarter","3rd quarter","4th quarter"]},defaultWidth:"wide",argumentCallback:function(e){return e-1}}),month:eV({values:{narrow:["J","F","M","A","M","J","J","A","S","O","N","D"],abbreviated:["Jan","Feb","Mar","Apr","May","Jun","Jul","Aug","Sep","Oct","Nov","Dec"],wide:["January","February","March","April","May","June","July","August","September","October","November","December"]},defaultWidth:"wide"}),day:eV({values:{narrow:["S","M","T","W","T","F","S"],short:["Su","Mo","Tu","We","Th","Fr","Sa"],abbreviated:["Sun","Mon","Tue","Wed","Thu","Fri","Sat"],wide:["Sunday","Monday","Tuesday","Wednesday","Thursday","Friday","Saturday"]},defaultWidth:"wide"}),dayPeriod:eV({values:{narrow:{am:"a",pm:"p",midnight:"mi",noon:"n",morning:"morning",afternoon:"afternoon",evening:"evening",night:"night"},abbreviated:{am:"AM",pm:"PM",midnight:"midnight",noon:"noon",morning:"morning",afternoon:"afternoon",evening:"evening",night:"night"},wide:{am:"a.m.",pm:"p.m.",midnight:"midnight",noon:"noon",morning:"morning",afternoon:"afternoon",evening:"evening",night:"night"}},defaultWidth:"wide",formattingValues:{narrow:{am:"a",pm:"p",midnight:"mi",noon:"n",morning:"in the morning",afternoon:"in the afternoon",evening:"in the evening",night:"at night"},abbreviated:{am:"AM",pm:"PM",midnight:"midnight",noon:"noon",morning:"in the morning",afternoon:"in the afternoon",evening:"in the evening",night:"at night"},wide:{am:"a.m.",pm:"p.m.",midnight:"midnight",noon:"noon",morning:"in the morning",afternoon:"in the afternoon",evening:"in the evening",night:"at night"}},defaultFormattingWidth:"wide"})},match:{ordinalNumber:(a={matchPattern:/^(\d+)(th|st|nd|rd)?/i,parsePattern:/\d+/i,valueCallback:function(e){return parseInt(e,10)}},function(e){var t=arguments.length>1&&void 0!==arguments[1]?arguments[1]:{},n=e.match(a.matchPattern);if(!n)return null;var r=n[0],o=e.match(a.parsePattern);if(!o)return null;var i=a.valueCallback?a.valueCallback(o[0]):o[0];return{value:i=t.valueCallback?t.valueCallback(i):i,rest:e.slice(r.length)}}),era:eU({matchPatterns:{narrow:/^(b|a)/i,abbreviated:/^(b\.?\s?c\.?|b\.?\s?c\.?\s?e\.?|a\.?\s?d\.?|c\.?\s?e\.?)/i,wide:/^(before christ|before common era|anno domini|common 
era)/i},defaultMatchWidth:"wide",parsePatterns:{any:[/^b/i,/^(a|c)/i]},defaultParseWidth:"any"}),quarter:eU({matchPatterns:{narrow:/^[1234]/i,abbreviated:/^q[1234]/i,wide:/^[1234](th|st|nd|rd)? quarter/i},defaultMatchWidth:"wide",parsePatterns:{any:[/1/i,/2/i,/3/i,/4/i]},defaultParseWidth:"any",valueCallback:function(e){return e+1}}),month:eU({matchPatterns:{narrow:/^[jfmasond]/i,abbreviated:/^(jan|feb|mar|apr|may|jun|jul|aug|sep|oct|nov|dec)/i,wide:/^(january|february|march|april|may|june|july|august|september|october|november|december)/i},defaultMatchWidth:"wide",parsePatterns:{narrow:[/^j/i,/^f/i,/^m/i,/^a/i,/^m/i,/^j/i,/^j/i,/^a/i,/^s/i,/^o/i,/^n/i,/^d/i],any:[/^ja/i,/^f/i,/^mar/i,/^ap/i,/^may/i,/^jun/i,/^jul/i,/^au/i,/^s/i,/^o/i,/^n/i,/^d/i]},defaultParseWidth:"any"}),day:eU({matchPatterns:{narrow:/^[smtwf]/i,short:/^(su|mo|tu|we|th|fr|sa)/i,abbreviated:/^(sun|mon|tue|wed|thu|fri|sat)/i,wide:/^(sunday|monday|tuesday|wednesday|thursday|friday|saturday)/i},defaultMatchWidth:"wide",parsePatterns:{narrow:[/^s/i,/^m/i,/^t/i,/^w/i,/^t/i,/^f/i,/^s/i],any:[/^su/i,/^m/i,/^tu/i,/^w/i,/^th/i,/^f/i,/^sa/i]},defaultParseWidth:"any"}),dayPeriod:eU({matchPatterns:{narrow:/^(a|p|mi|n|(in the|at) (morning|afternoon|evening|night))/i,any:/^([ap]\.?\s?m\.?|midnight|noon|(in the|at) (morning|afternoon|evening|night))/i},defaultMatchWidth:"any",parsePatterns:{any:{am:/^a/i,pm:/^p/i,midnight:/^mi/i,noon:/^no/i,morning:/morning/i,afternoon:/afternoon/i,evening:/evening/i,night:/night/i}},defaultParseWidth:"any"})},options:{weekStartsOn:0,firstWeekContainsDate:1}},eK=/[yYQqMLwIdDecihHKkms]o|(\w)\1*|''|'(''|[^'])+('|$)|./g,e$=/P+p+|P+|p+|''|'(''|[^'])+('|$)|./g,eG=/^'([^]*?)'?$/,eY=/''/g,eX=/[a-zA-Z]/;function eQ(e,t,n){(0,ei.Z)(2,arguments);var r,o,i,a,l,c,s,u,d,f,p,h,m,g,v,y,b,x,w=String(t),S=null!==(r=null!==(o=null==n?void 0:n.locale)&&void 0!==o?o:eO.locale)&&void 0!==r?r:eW,O=(0,em.Z)(null!==(i=null!==(a=null!==(l=null!==(c=null==n?void 0:n.firstWeekContainsDate)&&void 0!==c?c:null==n?void 0:null===(s=n.locale)||void 0===s?void 0:null===(u=s.options)||void 0===u?void 0:u.firstWeekContainsDate)&&void 0!==l?l:eO.firstWeekContainsDate)&&void 0!==a?a:null===(d=eO.locale)||void 0===d?void 0:null===(f=d.options)||void 0===f?void 0:f.firstWeekContainsDate)&&void 0!==i?i:1);if(!(O>=1&&O<=7))throw RangeError("firstWeekContainsDate must be between 1 and 7 inclusively");var E=(0,em.Z)(null!==(p=null!==(h=null!==(m=null!==(g=null==n?void 0:n.weekStartsOn)&&void 0!==g?g:null==n?void 0:null===(v=n.locale)||void 0===v?void 0:null===(y=v.options)||void 0===y?void 0:y.weekStartsOn)&&void 0!==m?m:eO.weekStartsOn)&&void 0!==h?h:null===(b=eO.locale)||void 0===b?void 0:null===(x=b.options)||void 0===x?void 0:x.weekStartsOn)&&void 0!==p?p:0);if(!(E>=0&&E<=6))throw RangeError("weekStartsOn must be between 0 and 6 inclusively");if(!S.localize)throw RangeError("locale must contain localize property");if(!S.formatLong)throw RangeError("locale must contain formatLong property");var k=(0,eo.Z)(e);if(!function(e){return(0,ei.Z)(1,arguments),(!!ex(e)||"number"==typeof e)&&!isNaN(Number((0,eo.Z)(e)))}(k))throw RangeError("Invalid time value");var C=eD(k),j=function(e,t){return(0,ei.Z)(2,arguments),function(e,t){return(0,ei.Z)(2,arguments),new Date((0,eo.Z)(e).getTime()+(0,em.Z)(t))}(e,-(0,em.Z)(t))}(k,C),P={firstWeekContainsDate:O,weekStartsOn:E,locale:S,_originalDate:k};return w.match(e$).map(function(e){var t=e[0];return"p"===t||"P"===t?(0,e_[t])(e,S.formatLong):e}).join("").match(eK).map(function(r){if("''"===r)return"'";var 
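/* Continuation of the bundled date-fns format() implementation (token substitution and long-format expansion); the Tremor DateRangePicker helpers appear to follow, including preset ranges such as "Today", "Last 7 days", and "Month to Date" plus the display-string formatting used for the picker label. */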
o,i=r[0];if("'"===i)return(o=r.match(eG))?o[1].replace(eY,"'"):r;var a=eT[i];if(a)return null!=n&&n.useAdditionalWeekYearTokens||-1===eZ.indexOf(r)||eB(r,t,String(e)),null!=n&&n.useAdditionalDayOfYearTokens||-1===eL.indexOf(r)||eB(r,t,String(e)),a(j,r,S.localize,P);if(i.match(eX))throw RangeError("Format string contains an unescaped latin alphabet character `"+i+"`");return r}).join("")}var eJ=n(1153);let e0=(0,eJ.fn)("DateRangePicker"),e1=(e,t,n,r)=>{var o;if(n&&(e=null===(o=r.get(n))||void 0===o?void 0:o.from),e)return ea(e&&!t?e:ef([e,t]))},e2=(e,t,n,r)=>{var o,i;if(n&&(e=ea(null!==(i=null===(o=r.get(n))||void 0===o?void 0:o.to)&&void 0!==i?i:el())),e)return ea(e&&!t?e:ep([e,t]))},e6=[{value:"tdy",text:"Today",from:el()},{value:"w",text:"Last 7 days",from:ey(el(),{days:7})},{value:"t",text:"Last 30 days",from:ey(el(),{days:30})},{value:"m",text:"Month to Date",from:ec(el())},{value:"y",text:"Year to Date",from:eb(el())}],e5=(e,t,n,r)=>{let o=(null==n?void 0:n.code)||"en-US";if(!e&&!t)return"";if(e&&!t)return r?eQ(e,r):e.toLocaleDateString(o,{year:"numeric",month:"short",day:"numeric"});if(e&&t){if(function(e,t){(0,ei.Z)(2,arguments);var n=(0,eo.Z)(e),r=(0,eo.Z)(t);return n.getTime()===r.getTime()}(e,t))return r?eQ(e,r):e.toLocaleDateString(o,{year:"numeric",month:"short",day:"numeric"});if(e.getMonth()===t.getMonth()&&e.getFullYear()===t.getFullYear())return r?"".concat(eQ(e,r)," - ").concat(eQ(t,r)):"".concat(e.toLocaleDateString(o,{month:"short",day:"numeric"})," - \n ").concat(t.getDate(),", ").concat(t.getFullYear());{if(r)return"".concat(eQ(e,r)," - ").concat(eQ(t,r));let n={year:"numeric",month:"short",day:"numeric"};return"".concat(e.toLocaleDateString(o,n)," - \n ").concat(t.toLocaleDateString(o,n))}}return""};function e3(e){(0,ei.Z)(1,arguments);var t=(0,eo.Z)(e),n=t.getMonth();return t.setFullYear(t.getFullYear(),n+1,0),t.setHours(23,59,59,999),t}function e4(e,t){(0,ei.Z)(2,arguments);var n=(0,eo.Z)(e),r=(0,em.Z)(t),o=n.getFullYear(),i=n.getDate(),a=new Date(0);a.setFullYear(o,r,15),a.setHours(0,0,0,0);var l=function(e){(0,ei.Z)(1,arguments);var t=(0,eo.Z)(e),n=t.getFullYear(),r=t.getMonth(),o=new Date(0);return o.setFullYear(n,r+1,0),o.setHours(0,0,0,0),o.getDate()}(a);return n.setMonth(r,Math.min(i,l)),n}function e8(e,t){(0,ei.Z)(2,arguments);var n=(0,eo.Z)(e),r=(0,em.Z)(t);return isNaN(n.getTime())?new Date(NaN):(n.setFullYear(r),n)}function e7(e,t){(0,ei.Z)(2,arguments);var n=(0,eo.Z)(e),r=(0,eo.Z)(t);return 12*(n.getFullYear()-r.getFullYear())+(n.getMonth()-r.getMonth())}function e9(e,t){(0,ei.Z)(2,arguments);var n=(0,eo.Z)(e),r=(0,eo.Z)(t);return n.getFullYear()===r.getFullYear()&&n.getMonth()===r.getMonth()}function te(e,t){(0,ei.Z)(2,arguments);var n=(0,eo.Z)(e),r=(0,eo.Z)(t);return n.getTime()=0&&u<=6))throw RangeError("weekStartsOn must be between 0 and 6 inclusively");var d=(0,eo.Z)(e),f=d.getDay();return d.setDate(d.getDate()-((fr.getTime()}function ti(e,t){(0,ei.Z)(2,arguments);var n=ea(e),r=ea(t);return Math.round((n.getTime()-eD(n)-(r.getTime()-eD(r)))/864e5)}function ta(e,t){(0,ei.Z)(2,arguments);var n=(0,em.Z)(t);return(0,eh.Z)(e,7*n)}function tl(e,t){(0,ei.Z)(2,arguments);var n=(0,em.Z)(t);return(0,ev.Z)(e,12*n)}function tc(e,t){(0,ei.Z)(1,arguments);var n,r,o,i,a,l,c,s,u=(0,em.Z)(null!==(n=null!==(r=null!==(o=null!==(i=null==t?void 0:t.weekStartsOn)&&void 0!==i?i:null==t?void 0:null===(a=t.locale)||void 0===a?void 0:null===(l=a.options)||void 0===l?void 0:l.weekStartsOn)&&void 0!==o?o:eO.weekStartsOn)&&void 0!==r?r:null===(c=eO.locale)||void 0===c?void 
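/* Bundled react-day-picker internals appear to begin here: week start/end helpers, month navigation state, caption dropdowns and nav buttons, and the table head/footer rendering used by the calendar. */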
0:null===(s=c.options)||void 0===s?void 0:s.weekStartsOn)&&void 0!==n?n:0);if(!(u>=0&&u<=6))throw RangeError("weekStartsOn must be between 0 and 6 inclusively");var d=(0,eo.Z)(e),f=d.getDay();return d.setDate(d.getDate()+((fe7(l,a)&&(a=(0,ev.Z)(l,-1*((void 0===s?1:s)-1))),c&&0>e7(a,c)&&(a=c),u=ec(a),f=t.month,h=(p=(0,d.useState)(u))[0],m=[void 0===f?h:f,p[1]])[0],v=m[1],[g,function(e){if(!t.disableNavigation){var n,r=ec(e);v(r),null===(n=t.onMonthChange)||void 0===n||n.call(t,r)}}]),x=b[0],w=b[1],S=function(e,t){for(var n=t.reverseMonths,r=t.numberOfMonths,o=ec(e),i=e7(ec((0,ev.Z)(o,r)),o),a=[],l=0;l=e7(i,n)))return(0,ev.Z)(i,-(r?void 0===o?1:o:1))}}(x,y),k=function(e){return S.some(function(t){return e9(e,t)})};return th.jsx(tM.Provider,{value:{currentMonth:x,displayMonths:S,goToMonth:w,goToDate:function(e,t){k(e)||(t&&te(e,t)?w((0,ev.Z)(e,1+-1*y.numberOfMonths)):w(e))},previousMonth:E,nextMonth:O,isDateDisplayed:k},children:e.children})}function tI(){var e=(0,d.useContext)(tM);if(!e)throw Error("useNavigation must be used within a NavigationProvider");return e}function tT(e){var t,n=tO(),r=n.classNames,o=n.styles,i=n.components,a=tI().goToMonth,l=function(t){a((0,ev.Z)(t,e.displayIndex?-e.displayIndex:0))},c=null!==(t=null==i?void 0:i.CaptionLabel)&&void 0!==t?t:tE,s=th.jsx(c,{id:e.id,displayMonth:e.displayMonth});return th.jsxs("div",{className:r.caption_dropdowns,style:o.caption_dropdowns,children:[th.jsx("div",{className:r.vhidden,children:s}),th.jsx(tj,{onChange:l,displayMonth:e.displayMonth}),th.jsx(tP,{onChange:l,displayMonth:e.displayMonth})]})}function tR(e){return th.jsx("svg",tu({width:"16px",height:"16px",viewBox:"0 0 120 120"},e,{children:th.jsx("path",{d:"M69.490332,3.34314575 C72.6145263,0.218951416 77.6798462,0.218951416 80.8040405,3.34314575 C83.8617626,6.40086786 83.9268205,11.3179931 80.9992143,14.4548388 L80.8040405,14.6568542 L35.461,60 L80.8040405,105.343146 C83.8617626,108.400868 83.9268205,113.317993 80.9992143,116.454839 L80.8040405,116.656854 C77.7463184,119.714576 72.8291931,119.779634 69.6923475,116.852028 L69.490332,116.656854 L18.490332,65.6568542 C15.4326099,62.5991321 15.367552,57.6820069 18.2951583,54.5451612 L18.490332,54.3431458 L69.490332,3.34314575 Z",fill:"currentColor",fillRule:"nonzero"})}))}function tN(e){return th.jsx("svg",tu({width:"16px",height:"16px",viewBox:"0 0 120 120"},e,{children:th.jsx("path",{d:"M49.8040405,3.34314575 C46.6798462,0.218951416 41.6145263,0.218951416 38.490332,3.34314575 C35.4326099,6.40086786 35.367552,11.3179931 38.2951583,14.4548388 L38.490332,14.6568542 L83.8333725,60 L38.490332,105.343146 C35.4326099,108.400868 35.367552,113.317993 38.2951583,116.454839 L38.490332,116.656854 C41.5480541,119.714576 46.4651794,119.779634 49.602025,116.852028 L49.8040405,116.656854 L100.804041,65.6568542 C103.861763,62.5991321 103.926821,57.6820069 100.999214,54.5451612 L100.804041,54.3431458 L49.8040405,3.34314575 Z",fill:"currentColor"})}))}var t_=(0,d.forwardRef)(function(e,t){var n=tO(),r=n.classNames,o=n.styles,i=[r.button_reset,r.button];e.className&&i.push(e.className);var a=i.join(" "),l=tu(tu({},o.button_reset),o.button);return e.style&&Object.assign(l,e.style),th.jsx("button",tu({},e,{ref:t,type:"button",className:a,style:l}))});function tD(e){var t,n,r=tO(),o=r.dir,i=r.locale,a=r.classNames,l=r.styles,c=r.labels,s=c.labelPrevious,u=c.labelNext,d=r.components;if(!e.nextMonth&&!e.previousMonth)return th.jsx(th.Fragment,{});var f=s(e.previousMonth,{locale:i}),p=[a.nav_button,a.nav_button_previous].join(" 
"),h=u(e.nextMonth,{locale:i}),m=[a.nav_button,a.nav_button_next].join(" "),g=null!==(t=null==d?void 0:d.IconRight)&&void 0!==t?t:tN,v=null!==(n=null==d?void 0:d.IconLeft)&&void 0!==n?n:tR;return th.jsxs("div",{className:a.nav,style:l.nav,children:[!e.hidePrevious&&th.jsx(t_,{name:"previous-month","aria-label":f,className:p,style:l.nav_button_previous,disabled:!e.previousMonth,onClick:e.onPreviousClick,children:"rtl"===o?th.jsx(g,{className:a.nav_icon,style:l.nav_icon}):th.jsx(v,{className:a.nav_icon,style:l.nav_icon})}),!e.hideNext&&th.jsx(t_,{name:"next-month","aria-label":h,className:m,style:l.nav_button_next,disabled:!e.nextMonth,onClick:e.onNextClick,children:"rtl"===o?th.jsx(v,{className:a.nav_icon,style:l.nav_icon}):th.jsx(g,{className:a.nav_icon,style:l.nav_icon})})]})}function tL(e){var t=tO().numberOfMonths,n=tI(),r=n.previousMonth,o=n.nextMonth,i=n.goToMonth,a=n.displayMonths,l=a.findIndex(function(t){return e9(e.displayMonth,t)}),c=0===l,s=l===a.length-1;return th.jsx(tD,{displayMonth:e.displayMonth,hideNext:t>1&&(c||!s),hidePrevious:t>1&&(s||!c),nextMonth:o,previousMonth:r,onPreviousClick:function(){r&&i(r)},onNextClick:function(){o&&i(o)}})}function tZ(e){var t,n,r=tO(),o=r.classNames,i=r.disableNavigation,a=r.styles,l=r.captionLayout,c=r.components,s=null!==(t=null==c?void 0:c.CaptionLabel)&&void 0!==t?t:tE;return n=i?th.jsx(s,{id:e.id,displayMonth:e.displayMonth}):"dropdown"===l?th.jsx(tT,{displayMonth:e.displayMonth,id:e.id}):"dropdown-buttons"===l?th.jsxs(th.Fragment,{children:[th.jsx(tT,{displayMonth:e.displayMonth,displayIndex:e.displayIndex,id:e.id}),th.jsx(tL,{displayMonth:e.displayMonth,displayIndex:e.displayIndex,id:e.id})]}):th.jsxs(th.Fragment,{children:[th.jsx(s,{id:e.id,displayMonth:e.displayMonth,displayIndex:e.displayIndex}),th.jsx(tL,{displayMonth:e.displayMonth,id:e.id})]}),th.jsx("div",{className:o.caption,style:a.caption,children:n})}function tB(e){var t=tO(),n=t.footer,r=t.styles,o=t.classNames.tfoot;return n?th.jsx("tfoot",{className:o,style:r.tfoot,children:th.jsx("tr",{children:th.jsx("td",{colSpan:8,children:n})})}):th.jsx(th.Fragment,{})}function tz(){var e=tO(),t=e.classNames,n=e.styles,r=e.showWeekNumber,o=e.locale,i=e.weekStartsOn,a=e.ISOWeek,l=e.formatters.formatWeekdayName,c=e.labels.labelWeekday,s=function(e,t,n){for(var r=n?tn(new Date):tt(new Date,{locale:e,weekStartsOn:t}),o=[],i=0;i<7;i++){var a=(0,eh.Z)(r,i);o.push(a)}return o}(o,i,a);return th.jsxs("tr",{style:n.head_row,className:t.head_row,children:[r&&th.jsx("td",{style:n.head_cell,className:t.head_cell}),s.map(function(e,r){return th.jsx("th",{scope:"col",className:t.head_cell,style:n.head_cell,"aria-label":c(e,{locale:o}),children:l(e,{locale:o})},r)})]})}function tF(){var e,t=tO(),n=t.classNames,r=t.styles,o=t.components,i=null!==(e=null==o?void 0:o.HeadRow)&&void 0!==e?e:tz;return th.jsx("thead",{style:r.head,className:n.head,children:th.jsx(i,{})})}function tH(e){var t=tO(),n=t.locale,r=t.formatters.formatDay;return th.jsx(th.Fragment,{children:r(e.date,{locale:n})})}var tq=(0,d.createContext)(void 0);function tV(e){return tm(e.initialProps)?th.jsx(tU,{initialProps:e.initialProps,children:e.children}):th.jsx(tq.Provider,{value:{selected:void 0,modifiers:{disabled:[]}},children:e.children})}function tU(e){var t=e.initialProps,n=e.children,r=t.selected,o=t.min,i=t.max,a={disabled:[]};return r&&a.disabled.push(function(e){var t=i&&r.length>i-1,n=r.some(function(t){return 
tr(t,e)});return!!(t&&!n)}),th.jsx(tq.Provider,{value:{selected:r,onDayClick:function(e,n,a){if(null===(l=t.onDayClick)||void 0===l||l.call(t,e,n,a),(!n.selected||!o||(null==r?void 0:r.length)!==o)&&(n.selected||!i||(null==r?void 0:r.length)!==i)){var l,c,s=r?td([],r,!0):[];if(n.selected){var u=s.findIndex(function(t){return tr(e,t)});s.splice(u,1)}else s.push(e);null===(c=t.onSelect)||void 0===c||c.call(t,s,e,n,a)}},modifiers:a},children:n})}function tW(){var e=(0,d.useContext)(tq);if(!e)throw Error("useSelectMultiple must be used within a SelectMultipleProvider");return e}var tK=(0,d.createContext)(void 0);function t$(e){return tg(e.initialProps)?th.jsx(tG,{initialProps:e.initialProps,children:e.children}):th.jsx(tK.Provider,{value:{selected:void 0,modifiers:{range_start:[],range_end:[],range_middle:[],disabled:[]}},children:e.children})}function tG(e){var t=e.initialProps,n=e.children,r=t.selected,o=r||{},i=o.from,a=o.to,l=t.min,c=t.max,s={range_start:[],range_end:[],range_middle:[],disabled:[]};if(i?(s.range_start=[i],a?(s.range_end=[a],tr(i,a)||(s.range_middle=[{after:i,before:a}])):s.range_end=[i]):a&&(s.range_start=[a],s.range_end=[a]),l&&(i&&!a&&s.disabled.push({after:eg(i,l-1),before:(0,eh.Z)(i,l-1)}),i&&a&&s.disabled.push({after:i,before:(0,eh.Z)(i,l-1)}),!i&&a&&s.disabled.push({after:eg(a,l-1),before:(0,eh.Z)(a,l-1)})),c){if(i&&!a&&(s.disabled.push({before:(0,eh.Z)(i,-c+1)}),s.disabled.push({after:(0,eh.Z)(i,c-1)})),i&&a){var u=c-(ti(a,i)+1);s.disabled.push({before:eg(i,u)}),s.disabled.push({after:(0,eh.Z)(a,u)})}!i&&a&&(s.disabled.push({before:(0,eh.Z)(a,-c+1)}),s.disabled.push({after:(0,eh.Z)(a,c-1)}))}return th.jsx(tK.Provider,{value:{selected:r,onDayClick:function(e,n,o){null===(c=t.onDayClick)||void 0===c||c.call(t,e,n,o);var i,a,l,c,s,u=(a=(i=r||{}).from,l=i.to,a&&l?tr(l,e)&&tr(a,e)?void 0:tr(l,e)?{from:l,to:void 0}:tr(a,e)?void 0:to(a,e)?{from:e,to:l}:{from:a,to:e}:l?to(e,l)?{from:l,to:e}:{from:e,to:l}:a?te(e,a)?{from:e,to:a}:{from:a,to:e}:{from:e,to:void 0});null===(s=t.onSelect)||void 0===s||s.call(t,u,e,n,o)},modifiers:s},children:n})}function tY(){var e=(0,d.useContext)(tK);if(!e)throw Error("useSelectRange must be used within a SelectRangeProvider");return e}function tX(e){return Array.isArray(e)?td([],e,!0):void 0!==e?[e]:[]}(l=s||(s={})).Outside="outside",l.Disabled="disabled",l.Selected="selected",l.Hidden="hidden",l.Today="today",l.RangeStart="range_start",l.RangeEnd="range_end",l.RangeMiddle="range_middle";var tQ=s.Selected,tJ=s.Disabled,t0=s.Hidden,t1=s.Today,t2=s.RangeEnd,t6=s.RangeMiddle,t5=s.RangeStart,t3=s.Outside,t4=(0,d.createContext)(void 0);function t8(e){var t,n,r,o=tO(),i=tW(),a=tY(),l=((t={})[tQ]=tX(o.selected),t[tJ]=tX(o.disabled),t[t0]=tX(o.hidden),t[t1]=[o.today],t[t2]=[],t[t6]=[],t[t5]=[],t[t3]=[],o.fromDate&&t[tJ].push({before:o.fromDate}),o.toDate&&t[tJ].push({after:o.toDate}),tm(o)?t[tJ]=t[tJ].concat(i.modifiers[tJ]):tg(o)&&(t[tJ]=t[tJ].concat(a.modifiers[tJ]),t[t5]=a.modifiers[t5],t[t6]=a.modifiers[t6],t[t2]=a.modifiers[t2]),t),c=(n=o.modifiers,r={},Object.entries(n).forEach(function(e){var t=e[0],n=e[1];r[t]=tX(n)}),r),s=tu(tu({},l),c);return th.jsx(t4.Provider,{value:s,children:e.children})}function t7(){var e=(0,d.useContext)(t4);if(!e)throw Error("useModifiers must be used within a ModifiersProvider");return e}function t9(e,t,n){var r=Object.keys(t).reduce(function(n,r){return t[r].some(function(t){if("boolean"==typeof t)return t;if(ex(t))return tr(e,t);if(Array.isArray(t)&&t.every(ex))return t.includes(e);if(t&&"object"==typeof 
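/* Modifier matching (before/after, day-of-week, and date-range matchers), focus-management context for keyboard navigation, and the single/multiple/range selection providers (minified). */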
t&&"from"in t)return r=t.from,o=t.to,r&&o?(0>ti(o,r)&&(r=(n=[o,r])[0],o=n[1]),ti(e,r)>=0&&ti(o,e)>=0):o?tr(o,e):!!r&&tr(r,e);if(t&&"object"==typeof t&&"dayOfWeek"in t)return t.dayOfWeek.includes(e.getDay());if(t&&"object"==typeof t&&"before"in t&&"after"in t){var n,r,o,i=ti(t.before,e),a=ti(t.after,e),l=i>0,c=a<0;return to(t.before,t.after)?c&&l:l||c}return t&&"object"==typeof t&&"after"in t?ti(e,t.after)>0:t&&"object"==typeof t&&"before"in t?ti(t.before,e)>0:"function"==typeof t&&t(e)})&&n.push(r),n},[]),o={};return r.forEach(function(e){return o[e]=!0}),n&&!e9(e,n)&&(o.outside=!0),o}var ne=(0,d.createContext)(void 0);function nt(e){var t=tI(),n=t7(),r=(0,d.useState)(),o=r[0],i=r[1],a=(0,d.useState)(),l=a[0],c=a[1],s=function(e,t){for(var n,r,o=ec(e[0]),i=e3(e[e.length-1]),a=o;a<=i;){var l=t9(a,t);if(!(!l.disabled&&!l.hidden)){a=(0,eh.Z)(a,1);continue}if(l.selected)return a;l.today&&!r&&(r=a),n||(n=a),a=(0,eh.Z)(a,1)}return r||n}(t.displayMonths,n),u=(null!=o?o:l&&t.isDateDisplayed(l))?l:s,f=function(e){i(e)},p=tO(),h=function(e,r){if(o){var i=function e(t,n){var r=n.moveBy,o=n.direction,i=n.context,a=n.modifiers,l=n.retry,c=void 0===l?{count:0,lastFocused:t}:l,s=i.weekStartsOn,u=i.fromDate,d=i.toDate,f=i.locale,p=({day:eh.Z,week:ta,month:ev.Z,year:tl,startOfWeek:function(e){return i.ISOWeek?tn(e):tt(e,{locale:f,weekStartsOn:s})},endOfWeek:function(e){return i.ISOWeek?ts(e):tc(e,{locale:f,weekStartsOn:s})}})[r](t,"after"===o?1:-1);"before"===o&&u?p=ef([u,p]):"after"===o&&d&&(p=ep([d,p]));var h=!0;if(a){var m=t9(p,a);h=!m.disabled&&!m.hidden}return h?p:c.count>365?c.lastFocused:e(p,{moveBy:r,direction:o,context:i,modifiers:a,retry:tu(tu({},c),{count:c.count+1})})}(o,{moveBy:e,direction:r,context:p,modifiers:n});tr(o,i)||(t.goToDate(i,o),f(i))}};return th.jsx(ne.Provider,{value:{focusedDay:o,focusTarget:u,blur:function(){c(o),i(void 0)},focus:f,focusDayAfter:function(){return h("day","after")},focusDayBefore:function(){return h("day","before")},focusWeekAfter:function(){return h("week","after")},focusWeekBefore:function(){return h("week","before")},focusMonthBefore:function(){return h("month","before")},focusMonthAfter:function(){return h("month","after")},focusYearBefore:function(){return h("year","before")},focusYearAfter:function(){return h("year","after")},focusStartOfWeek:function(){return h("startOfWeek","before")},focusEndOfWeek:function(){return h("endOfWeek","after")}},children:e.children})}function nn(){var e=(0,d.useContext)(ne);if(!e)throw Error("useFocusContext must be used within a FocusProvider");return e}var nr=(0,d.createContext)(void 0);function no(e){return tv(e.initialProps)?th.jsx(ni,{initialProps:e.initialProps,children:e.children}):th.jsx(nr.Provider,{value:{selected:void 0},children:e.children})}function ni(e){var t=e.initialProps,n=e.children,r={selected:t.selected,onDayClick:function(e,n,r){var o,i,a;if(null===(o=t.onDayClick)||void 0===o||o.call(t,e,n,r),n.selected&&!t.required){null===(i=t.onSelect)||void 0===i||i.call(t,void 0,e,n,r);return}null===(a=t.onSelect)||void 0===a||a.call(t,e,e,n,r)}};return th.jsx(nr.Provider,{value:r,children:n})}function na(){var e=(0,d.useContext)(nr);if(!e)throw Error("useSelectSingle must be used within a SelectSingleProvider");return e}function nl(e){var 
t,n,r,o,i,a,l,c,u,f,p,h,m,g,v,y,b,x,w,S,O,E,k,C,j,P,M,A,I,T,R,N,_,D,L,Z,B,z,F,H,q,V,U=(0,d.useRef)(null),W=(t=e.date,n=e.displayMonth,a=tO(),l=nn(),c=t9(t,t7(),n),u=tO(),f=na(),p=tW(),h=tY(),g=(m=nn()).focusDayAfter,v=m.focusDayBefore,y=m.focusWeekAfter,b=m.focusWeekBefore,x=m.blur,w=m.focus,S=m.focusMonthBefore,O=m.focusMonthAfter,E=m.focusYearBefore,k=m.focusYearAfter,C=m.focusStartOfWeek,j=m.focusEndOfWeek,P={onClick:function(e){var n,r,o,i;tv(u)?null===(n=f.onDayClick)||void 0===n||n.call(f,t,c,e):tm(u)?null===(r=p.onDayClick)||void 0===r||r.call(p,t,c,e):tg(u)?null===(o=h.onDayClick)||void 0===o||o.call(h,t,c,e):null===(i=u.onDayClick)||void 0===i||i.call(u,t,c,e)},onFocus:function(e){var n;w(t),null===(n=u.onDayFocus)||void 0===n||n.call(u,t,c,e)},onBlur:function(e){var n;x(),null===(n=u.onDayBlur)||void 0===n||n.call(u,t,c,e)},onKeyDown:function(e){var n;switch(e.key){case"ArrowLeft":e.preventDefault(),e.stopPropagation(),"rtl"===u.dir?g():v();break;case"ArrowRight":e.preventDefault(),e.stopPropagation(),"rtl"===u.dir?v():g();break;case"ArrowDown":e.preventDefault(),e.stopPropagation(),y();break;case"ArrowUp":e.preventDefault(),e.stopPropagation(),b();break;case"PageUp":e.preventDefault(),e.stopPropagation(),e.shiftKey?E():S();break;case"PageDown":e.preventDefault(),e.stopPropagation(),e.shiftKey?k():O();break;case"Home":e.preventDefault(),e.stopPropagation(),C();break;case"End":e.preventDefault(),e.stopPropagation(),j()}null===(n=u.onDayKeyDown)||void 0===n||n.call(u,t,c,e)},onKeyUp:function(e){var n;null===(n=u.onDayKeyUp)||void 0===n||n.call(u,t,c,e)},onMouseEnter:function(e){var n;null===(n=u.onDayMouseEnter)||void 0===n||n.call(u,t,c,e)},onMouseLeave:function(e){var n;null===(n=u.onDayMouseLeave)||void 0===n||n.call(u,t,c,e)},onPointerEnter:function(e){var n;null===(n=u.onDayPointerEnter)||void 0===n||n.call(u,t,c,e)},onPointerLeave:function(e){var n;null===(n=u.onDayPointerLeave)||void 0===n||n.call(u,t,c,e)},onTouchCancel:function(e){var n;null===(n=u.onDayTouchCancel)||void 0===n||n.call(u,t,c,e)},onTouchEnd:function(e){var n;null===(n=u.onDayTouchEnd)||void 0===n||n.call(u,t,c,e)},onTouchMove:function(e){var n;null===(n=u.onDayTouchMove)||void 0===n||n.call(u,t,c,e)},onTouchStart:function(e){var n;null===(n=u.onDayTouchStart)||void 0===n||n.call(u,t,c,e)}},M=tO(),A=na(),I=tW(),T=tY(),R=tv(M)?A.selected:tm(M)?I.selected:tg(M)?T.selected:void 0,N=!!(a.onDayClick||"default"!==a.mode),(0,d.useEffect)(function(){var e;!c.outside&&l.focusedDay&&N&&tr(l.focusedDay,t)&&(null===(e=U.current)||void 0===e||e.focus())},[l.focusedDay,t,U,N,c.outside]),D=(_=[a.classNames.day],Object.keys(c).forEach(function(e){var t=a.modifiersClassNames[e];if(t)_.push(t);else if(Object.values(s).includes(e)){var n=a.classNames["day_".concat(e)];n&&_.push(n)}}),_).join(" "),L=tu({},a.styles.day),Object.keys(c).forEach(function(e){var t;L=tu(tu({},L),null===(t=a.modifiersStyles)||void 0===t?void 0:t[e])}),Z=L,B=!!(c.outside&&!a.showOutsideDays||c.hidden),z=null!==(i=null===(o=a.components)||void 0===o?void 0:o.DayContent)&&void 0!==i?i:tH,F={style:Z,className:D,children:th.jsx(z,{date:t,displayMonth:n,activeModifiers:c}),role:"gridcell"},H=l.focusTarget&&tr(l.focusTarget,t)&&!c.outside,q=l.focusedDay&&tr(l.focusedDay,t),V=tu(tu(tu({},F),((r={disabled:c.disabled,role:"gridcell"})["aria-selected"]=c.selected,r.tabIndex=q||H?0:-1,r)),P),{isButton:N,isHidden:B,activeModifiers:c,selectedDays:R,buttonProps:V,divProps:F});return 
W.isHidden?th.jsx("div",{role:"gridcell"}):W.isButton?th.jsx(t_,tu({name:"day",ref:U},W.buttonProps)):th.jsx("div",tu({},W.divProps))}function nc(e){var t=e.number,n=e.dates,r=tO(),o=r.onWeekNumberClick,i=r.styles,a=r.classNames,l=r.locale,c=r.labels.labelWeekNumber,s=(0,r.formatters.formatWeekNumber)(Number(t),{locale:l});if(!o)return th.jsx("span",{className:a.weeknumber,style:i.weeknumber,children:s});var u=c(Number(t),{locale:l});return th.jsx(t_,{name:"week-number","aria-label":u,className:a.weeknumber,style:i.weeknumber,onClick:function(e){o(t,n,e)},children:s})}function ns(e){var t,n,r,o=tO(),i=o.styles,a=o.classNames,l=o.showWeekNumber,c=o.components,s=null!==(t=null==c?void 0:c.Day)&&void 0!==t?t:nl,u=null!==(n=null==c?void 0:c.WeekNumber)&&void 0!==n?n:nc;return l&&(r=th.jsx("td",{className:a.cell,style:i.cell,children:th.jsx(u,{number:e.weekNumber,dates:e.dates})})),th.jsxs("tr",{className:a.row,style:i.row,children:[r,e.dates.map(function(t){return th.jsx("td",{className:a.cell,style:i.cell,role:"presentation",children:th.jsx(s,{displayMonth:e.displayMonth,date:t})},function(e){return(0,ei.Z)(1,arguments),Math.floor(function(e){return(0,ei.Z)(1,arguments),(0,eo.Z)(e).getTime()}(e)/1e3)}(t))})]})}function nu(e,t,n){for(var r=(null==n?void 0:n.ISOWeek)?ts(t):tc(t,n),o=(null==n?void 0:n.ISOWeek)?tn(e):tt(e,n),i=ti(r,o),a=[],l=0;l<=i;l++)a.push((0,eh.Z)(o,l));return a.reduce(function(e,t){var r=(null==n?void 0:n.ISOWeek)?function(e){(0,ei.Z)(1,arguments);var t=(0,eo.Z)(e);return Math.round((tn(t).getTime()-(function(e){(0,ei.Z)(1,arguments);var t=function(e){(0,ei.Z)(1,arguments);var t=(0,eo.Z)(e),n=t.getFullYear(),r=new Date(0);r.setFullYear(n+1,0,4),r.setHours(0,0,0,0);var o=tn(r),i=new Date(0);i.setFullYear(n,0,4),i.setHours(0,0,0,0);var a=tn(i);return t.getTime()>=o.getTime()?n+1:t.getTime()>=a.getTime()?n:n-1}(e),n=new Date(0);return n.setFullYear(t,0,4),n.setHours(0,0,0,0),tn(n)})(t).getTime())/6048e5)+1}(t):function(e,t){(0,ei.Z)(1,arguments);var n=(0,eo.Z)(e);return Math.round((tt(n,t).getTime()-(function(e,t){(0,ei.Z)(1,arguments);var n,r,o,i,a,l,c,s,u=(0,em.Z)(null!==(n=null!==(r=null!==(o=null!==(i=null==t?void 0:t.firstWeekContainsDate)&&void 0!==i?i:null==t?void 0:null===(a=t.locale)||void 0===a?void 0:null===(l=a.options)||void 0===l?void 0:l.firstWeekContainsDate)&&void 0!==o?o:eO.firstWeekContainsDate)&&void 0!==r?r:null===(c=eO.locale)||void 0===c?void 0:null===(s=c.options)||void 0===s?void 0:s.firstWeekContainsDate)&&void 0!==n?n:1),d=function(e,t){(0,ei.Z)(1,arguments);var n,r,o,i,a,l,c,s,u=(0,eo.Z)(e),d=u.getFullYear(),f=(0,em.Z)(null!==(n=null!==(r=null!==(o=null!==(i=null==t?void 0:t.firstWeekContainsDate)&&void 0!==i?i:null==t?void 0:null===(a=t.locale)||void 0===a?void 0:null===(l=a.options)||void 0===l?void 0:l.firstWeekContainsDate)&&void 0!==o?o:eO.firstWeekContainsDate)&&void 0!==r?r:null===(c=eO.locale)||void 0===c?void 0:null===(s=c.options)||void 0===s?void 0:s.firstWeekContainsDate)&&void 0!==n?n:1);if(!(f>=1&&f<=7))throw RangeError("firstWeekContainsDate must be between 1 and 7 inclusively");var p=new Date(0);p.setFullYear(d+1,0,f),p.setHours(0,0,0,0);var h=tt(p,t),m=new Date(0);m.setFullYear(d,0,f),m.setHours(0,0,0,0);var g=tt(m,t);return u.getTime()>=h.getTime()?d+1:u.getTime()>=g.getTime()?d:d-1}(e,t),f=new Date(0);return f.setFullYear(d,0,u),f.setHours(0,0,0,0),tt(f,t)})(n,t).getTime())/6048e5)+1}(t,n),o=e.find(function(e){return e.weekNumber===r});return o?o.dates.push(t):e.push({weekNumber:r,dates:[t]}),e},[])}function nd(e){var 
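/* Month-grid table construction and the DayPicker root composition (minified), followed by the Tremor pieces built on top of it: chevron icons, BadgeDelta, the Calendar wrapper, and the DateRangePicker/MultiSelect components used by the dashboard UI. */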
t,n,r,o=tO(),i=o.locale,a=o.classNames,l=o.styles,c=o.hideHead,s=o.fixedWeeks,u=o.components,d=o.weekStartsOn,f=o.firstWeekContainsDate,p=o.ISOWeek,h=function(e,t){var n=nu(ec(e),e3(e),t);if(null==t?void 0:t.useFixedWeeks){var r=function(e,t){return(0,ei.Z)(1,arguments),function(e,t,n){(0,ei.Z)(2,arguments);var r=tt(e,n),o=tt(t,n);return Math.round((r.getTime()-eD(r)-(o.getTime()-eD(o)))/6048e5)}(function(e){(0,ei.Z)(1,arguments);var t=(0,eo.Z)(e),n=t.getMonth();return t.setFullYear(t.getFullYear(),n+1,0),t.setHours(0,0,0,0),t}(e),ec(e),t)+1}(e,t);if(r<6){var o=n[n.length-1],i=o.dates[o.dates.length-1],a=ta(i,6-r),l=nu(ta(i,1),a,t);n.push.apply(n,l)}}return n}(e.displayMonth,{useFixedWeeks:!!s,ISOWeek:p,locale:i,weekStartsOn:d,firstWeekContainsDate:f}),m=null!==(t=null==u?void 0:u.Head)&&void 0!==t?t:tF,g=null!==(n=null==u?void 0:u.Row)&&void 0!==n?n:ns,v=null!==(r=null==u?void 0:u.Footer)&&void 0!==r?r:tB;return th.jsxs("table",{id:e.id,className:a.table,style:l.table,role:"grid","aria-labelledby":e["aria-labelledby"],children:[!c&&th.jsx(m,{}),th.jsx("tbody",{className:a.tbody,style:l.tbody,children:h.map(function(t){return th.jsx(g,{displayMonth:e.displayMonth,dates:t.dates,weekNumber:t.weekNumber},t.weekNumber)})}),th.jsx(v,{displayMonth:e.displayMonth})]})}var nf="undefined"!=typeof window&&window.document&&window.document.createElement?d.useLayoutEffect:d.useEffect,np=!1,nh=0;function nm(){return"react-day-picker-".concat(++nh)}function ng(e){var t,n,r,o,i,a,l,c,s=tO(),u=s.dir,f=s.classNames,p=s.styles,h=s.components,m=tI().displayMonths,g=(r=null!=(t=s.id?"".concat(s.id,"-").concat(e.displayIndex):void 0)?t:np?nm():null,i=(o=(0,d.useState)(r))[0],a=o[1],nf(function(){null===i&&a(nm())},[]),(0,d.useEffect)(function(){!1===np&&(np=!0)},[]),null!==(n=null!=t?t:i)&&void 0!==n?n:void 0),v=s.id?"".concat(s.id,"-grid-").concat(e.displayIndex):void 0,y=[f.month],b=p.month,x=0===e.displayIndex,w=e.displayIndex===m.length-1,S=!x&&!w;"rtl"===u&&(w=(l=[x,w])[0],x=l[1]),x&&(y.push(f.caption_start),b=tu(tu({},b),p.caption_start)),w&&(y.push(f.caption_end),b=tu(tu({},b),p.caption_end)),S&&(y.push(f.caption_between),b=tu(tu({},b),p.caption_between));var O=null!==(c=null==h?void 0:h.Caption)&&void 0!==c?c:tZ;return th.jsxs("div",{className:y.join(" "),style:b,children:[th.jsx(O,{id:g,displayMonth:e.displayMonth,displayIndex:e.displayIndex}),th.jsx(nd,{id:v,"aria-labelledby":g,displayMonth:e.displayMonth})]},e.displayIndex)}function nv(e){var t=tO(),n=t.classNames,r=t.styles;return th.jsx("div",{className:n.months,style:r.months,children:e.children})}function ny(e){var t,n,r=e.initialProps,o=tO(),i=nn(),a=tI(),l=(0,d.useState)(!1),c=l[0],s=l[1];(0,d.useEffect)(function(){o.initialFocus&&i.focusTarget&&(c||(i.focus(i.focusTarget),s(!0)))},[o.initialFocus,c,i.focus,i.focusTarget,i]);var u=[o.classNames.root,o.className];o.numberOfMonths>1&&u.push(o.classNames.multiple_months),o.showWeekNumber&&u.push(o.classNames.with_weeknumber);var f=tu(tu({},o.styles.root),o.style),p=Object.keys(r).filter(function(e){return e.startsWith("data-")}).reduce(function(e,t){var n;return tu(tu({},e),((n={})[t]=r[t],n))},{}),h=null!==(n=null===(t=r.components)||void 0===t?void 0:t.Months)&&void 0!==n?n:nv;return th.jsx("div",tu({className:u.join(" "),style:f,dir:o.dir,id:o.id,nonce:r.nonce,title:r.title,lang:r.lang},p,{children:th.jsx(h,{children:a.displayMonths.map(function(e,t){return th.jsx(ng,{displayIndex:t,displayMonth:e},t)})})}))}function nb(e){var t=e.children,n=function(e,t){var n={};for(var r in 
e)Object.prototype.hasOwnProperty.call(e,r)&&0>t.indexOf(r)&&(n[r]=e[r]);if(null!=e&&"function"==typeof Object.getOwnPropertySymbols)for(var o=0,r=Object.getOwnPropertySymbols(e);ot.indexOf(r[o])&&Object.prototype.propertyIsEnumerable.call(e,r[o])&&(n[r[o]]=e[r[o]]);return n}(e,["children"]);return th.jsx(tS,{initialProps:n,children:th.jsx(tA,{children:th.jsx(no,{initialProps:n,children:th.jsx(tV,{initialProps:n,children:th.jsx(t$,{initialProps:n,children:th.jsx(t8,{children:th.jsx(nt,{children:t})})})})})})})}function nx(e){return th.jsx(nb,tu({},e,{children:th.jsx(ny,{initialProps:e})}))}let nw=e=>{var t=(0,u._T)(e,[]);return d.createElement("svg",Object.assign({xmlns:"http://www.w3.org/2000/svg",viewBox:"0 0 24 24",fill:"currentColor"},t),d.createElement("path",{d:"M10.8284 12.0007L15.7782 16.9504L14.364 18.3646L8 12.0007L14.364 5.63672L15.7782 7.05093L10.8284 12.0007Z"}))},nS=e=>{var t=(0,u._T)(e,[]);return d.createElement("svg",Object.assign({xmlns:"http://www.w3.org/2000/svg",viewBox:"0 0 24 24",fill:"currentColor"},t),d.createElement("path",{d:"M13.1717 12.0007L8.22192 7.05093L9.63614 5.63672L16.0001 12.0007L9.63614 18.3646L8.22192 16.9504L13.1717 12.0007Z"}))},nO=e=>{var t=(0,u._T)(e,[]);return d.createElement("svg",Object.assign({xmlns:"http://www.w3.org/2000/svg",viewBox:"0 0 24 24",fill:"currentColor"},t),d.createElement("path",{d:"M4.83582 12L11.0429 18.2071L12.4571 16.7929L7.66424 12L12.4571 7.20712L11.0429 5.79291L4.83582 12ZM10.4857 12L16.6928 18.2071L18.107 16.7929L13.3141 12L18.107 7.20712L16.6928 5.79291L10.4857 12Z"}))},nE=e=>{var t=(0,u._T)(e,[]);return d.createElement("svg",Object.assign({xmlns:"http://www.w3.org/2000/svg",viewBox:"0 0 24 24",fill:"currentColor"},t),d.createElement("path",{d:"M19.1642 12L12.9571 5.79291L11.5429 7.20712L16.3358 12L11.5429 16.7929L12.9571 18.2071L19.1642 12ZM13.5143 12L7.30722 5.79291L5.89301 7.20712L10.6859 12L5.89301 16.7929L7.30722 18.2071L13.5143 12Z"}))};var nk=n(84264);n(41649);var nC=n(1526),nj=n(7084),nP=n(26898);let nM={xs:{paddingX:"px-2",paddingY:"py-0.5",fontSize:"text-xs"},sm:{paddingX:"px-2.5",paddingY:"py-1",fontSize:"text-sm"},md:{paddingX:"px-3",paddingY:"py-1.5",fontSize:"text-md"},lg:{paddingX:"px-3.5",paddingY:"py-1.5",fontSize:"text-lg"},xl:{paddingX:"px-3.5",paddingY:"py-1.5",fontSize:"text-xl"}},nA={xs:{paddingX:"px-2",paddingY:"py-0.5",fontSize:"text-xs"},sm:{paddingX:"px-2.5",paddingY:"py-0.5",fontSize:"text-sm"},md:{paddingX:"px-3",paddingY:"py-0.5",fontSize:"text-md"},lg:{paddingX:"px-3.5",paddingY:"py-0.5",fontSize:"text-lg"},xl:{paddingX:"px-4",paddingY:"py-1",fontSize:"text-xl"}},nI={xs:{height:"h-4",width:"w-4"},sm:{height:"h-4",width:"w-4"},md:{height:"h-4",width:"w-4"},lg:{height:"h-5",width:"w-5"},xl:{height:"h-6",width:"w-6"}},nT={[nj.wu.Increase]:{bgColor:(0,eJ.bM)(nj.fr.Emerald,nP.K.background).bgColor,textColor:(0,eJ.bM)(nj.fr.Emerald,nP.K.text).textColor},[nj.wu.ModerateIncrease]:{bgColor:(0,eJ.bM)(nj.fr.Emerald,nP.K.background).bgColor,textColor:(0,eJ.bM)(nj.fr.Emerald,nP.K.text).textColor},[nj.wu.Decrease]:{bgColor:(0,eJ.bM)(nj.fr.Rose,nP.K.background).bgColor,textColor:(0,eJ.bM)(nj.fr.Rose,nP.K.text).textColor},[nj.wu.ModerateDecrease]:{bgColor:(0,eJ.bM)(nj.fr.Rose,nP.K.background).bgColor,textColor:(0,eJ.bM)(nj.fr.Rose,nP.K.text).textColor},[nj.wu.Unchanged]:{bgColor:(0,eJ.bM)(nj.fr.Orange,nP.K.background).bgColor,textColor:(0,eJ.bM)(nj.fr.Orange,nP.K.text).textColor}},nR={[nj.wu.Increase]:e=>{var t=(0,u._T)(e,[]);return 
d.createElement("svg",Object.assign({xmlns:"http://www.w3.org/2000/svg",viewBox:"0 0 24 24",fill:"currentColor"},t),d.createElement("path",{d:"M13.0001 7.82843V20H11.0001V7.82843L5.63614 13.1924L4.22192 11.7782L12.0001 4L19.7783 11.7782L18.3641 13.1924L13.0001 7.82843Z"}))},[nj.wu.ModerateIncrease]:e=>{var t=(0,u._T)(e,[]);return d.createElement("svg",Object.assign({xmlns:"http://www.w3.org/2000/svg",viewBox:"0 0 24 24",fill:"currentColor"},t),d.createElement("path",{d:"M16.0037 9.41421L7.39712 18.0208L5.98291 16.6066L14.5895 8H7.00373V6H18.0037V17H16.0037V9.41421Z"}))},[nj.wu.Decrease]:e=>{var t=(0,u._T)(e,[]);return d.createElement("svg",Object.assign({xmlns:"http://www.w3.org/2000/svg",viewBox:"0 0 24 24",fill:"currentColor"},t),d.createElement("path",{d:"M13.0001 16.1716L18.3641 10.8076L19.7783 12.2218L12.0001 20L4.22192 12.2218L5.63614 10.8076L11.0001 16.1716V4H13.0001V16.1716Z"}))},[nj.wu.ModerateDecrease]:e=>{var t=(0,u._T)(e,[]);return d.createElement("svg",Object.assign({xmlns:"http://www.w3.org/2000/svg",viewBox:"0 0 24 24",fill:"currentColor"},t),d.createElement("path",{d:"M14.5895 16.0032L5.98291 7.39664L7.39712 5.98242L16.0037 14.589V7.00324H18.0037V18.0032H7.00373V16.0032H14.5895Z"}))},[nj.wu.Unchanged]:e=>{var t=(0,u._T)(e,[]);return d.createElement("svg",Object.assign({xmlns:"http://www.w3.org/2000/svg",viewBox:"0 0 24 24",fill:"currentColor"},t),d.createElement("path",{d:"M16.1716 10.9999L10.8076 5.63589L12.2218 4.22168L20 11.9999L12.2218 19.778L10.8076 18.3638L16.1716 12.9999H4V10.9999H16.1716Z"}))}},nN=(0,eJ.fn)("BadgeDelta");d.forwardRef((e,t)=>{let{deltaType:n=nj.wu.Increase,isIncreasePositive:r=!0,size:o=nj.u8.SM,tooltip:i,children:a,className:l}=e,c=(0,u._T)(e,["deltaType","isIncreasePositive","size","tooltip","children","className"]),s=nR[n],f=(0,eJ.Fo)(n,r),p=a?nA:nM,{tooltipProps:h,getReferenceProps:m}=(0,nC.l)();return d.createElement("span",Object.assign({ref:(0,eJ.lq)([t,h.refs.setReference]),className:(0,es.q)(nN("root"),"w-max flex-shrink-0 inline-flex justify-center items-center cursor-default rounded-tremor-full bg-opacity-20 dark:bg-opacity-25",nT[f].bgColor,nT[f].textColor,p[o].paddingX,p[o].paddingY,p[o].fontSize,l)},m,c),d.createElement(nC.Z,Object.assign({text:i},h)),d.createElement(s,{className:(0,es.q)(nN("icon"),"shrink-0",a?(0,es.q)("-ml-1 mr-1.5"):nI[o].height,nI[o].width)}),a?d.createElement("p",{className:(0,es.q)(nN("text"),"text-sm whitespace-nowrap")},a):null)}).displayName="BadgeDelta";var n_=n(47323);let nD=e=>{var{onClick:t,icon:n}=e,r=(0,u._T)(e,["onClick","icon"]);return d.createElement("button",Object.assign({type:"button",className:(0,es.q)("flex items-center justify-center p-1 h-7 w-7 outline-none focus:ring-2 transition duration-100 border border-tremor-border dark:border-dark-tremor-border hover:bg-tremor-background-muted dark:hover:bg-dark-tremor-background-muted rounded-tremor-small focus:border-tremor-brand-subtle select-none dark:focus:border-dark-tremor-brand-subtle focus:ring-tremor-brand-muted dark:focus:ring-dark-tremor-brand-muted text-tremor-content-subtle dark:text-dark-tremor-content-subtle hover:text-tremor-content dark:hover:text-dark-tremor-content")},r),d.createElement(n_.Z,{onClick:t,icon:n,variant:"simple",color:"slate",size:"sm"}))};function nL(e){var{mode:t,defaultMonth:n,selected:r,onSelect:o,locale:i,disabled:a,enableYearNavigation:l,classNames:c,weekStartsOn:s=0}=e,f=(0,u._T)(e,["mode","defaultMonth","selected","onSelect","locale","disabled","enableYearNavigation","classNames","weekStartsOn"]);return 
d.createElement(nx,Object.assign({showOutsideDays:!0,mode:t,defaultMonth:n,selected:r,onSelect:o,locale:i,disabled:a,weekStartsOn:s,classNames:Object.assign({months:"flex flex-col sm:flex-row space-y-4 sm:space-x-4 sm:space-y-0",month:"space-y-4",caption:"flex justify-center pt-2 relative items-center",caption_label:"text-tremor-default text-tremor-content-emphasis dark:text-dark-tremor-content-emphasis font-medium",nav:"space-x-1 flex items-center",nav_button:"flex items-center justify-center p-1 h-7 w-7 outline-none focus:ring-2 transition duration-100 border border-tremor-border dark:border-dark-tremor-border hover:bg-tremor-background-muted dark:hover:bg-dark-tremor-background-muted rounded-tremor-small focus:border-tremor-brand-subtle dark:focus:border-dark-tremor-brand-subtle focus:ring-tremor-brand-muted dark:focus:ring-dark-tremor-brand-muted text-tremor-content-subtle dark:text-dark-tremor-content-subtle hover:text-tremor-content dark:hover:text-dark-tremor-content",nav_button_previous:"absolute left-1",nav_button_next:"absolute right-1",table:"w-full border-collapse space-y-1",head_row:"flex",head_cell:"w-9 font-normal text-center text-tremor-content-subtle dark:text-dark-tremor-content-subtle",row:"flex w-full mt-0.5",cell:"text-center p-0 relative focus-within:relative text-tremor-default text-tremor-content-emphasis dark:text-dark-tremor-content-emphasis",day:"h-9 w-9 p-0 hover:bg-tremor-background-subtle dark:hover:bg-dark-tremor-background-subtle outline-tremor-brand dark:outline-dark-tremor-brand rounded-tremor-default",day_today:"font-bold",day_selected:"aria-selected:bg-tremor-background-emphasis aria-selected:text-tremor-content-inverted dark:aria-selected:bg-dark-tremor-background-emphasis dark:aria-selected:text-dark-tremor-content-inverted ",day_disabled:"text-tremor-content-subtle dark:text-dark-tremor-content-subtle disabled:hover:bg-transparent",day_outside:"text-tremor-content-subtle dark:text-dark-tremor-content-subtle"},c),components:{IconLeft:e=>{var t=(0,u._T)(e,[]);return d.createElement(nw,Object.assign({className:"h-4 w-4"},t))},IconRight:e=>{var t=(0,u._T)(e,[]);return d.createElement(nS,Object.assign({className:"h-4 w-4"},t))},Caption:e=>{var t=(0,u._T)(e,[]);let{goToMonth:n,nextMonth:r,previousMonth:o,currentMonth:a}=tI();return d.createElement("div",{className:"flex justify-between items-center"},d.createElement("div",{className:"flex items-center space-x-1"},l&&d.createElement(nD,{onClick:()=>a&&n(tl(a,-1)),icon:nO}),d.createElement(nD,{onClick:()=>o&&n(o),icon:nw})),d.createElement(nk.Z,{className:"text-tremor-default tabular-nums capitalize text-tremor-content-emphasis dark:text-dark-tremor-content-emphasis font-medium"},eQ(t.displayMonth,"LLLL yyy",{locale:i})),d.createElement("div",{className:"flex items-center space-x-1"},d.createElement(nD,{onClick:()=>r&&n(r),icon:nS}),l&&d.createElement(nD,{onClick:()=>a&&n(tl(a,1)),icon:nE})))}}},f))}nL.displayName="DateRangePicker",n(27281);var nZ=n(57365),nB=n(44140);let nz=el(),nF=d.forwardRef((e,t)=>{var n,r;let{value:o,defaultValue:i,onValueChange:a,enableSelect:l=!0,minDate:c,maxDate:s,placeholder:f="Select range",selectPlaceholder:p="Select 
range",disabled:h=!1,locale:m=eW,enableClear:g=!0,displayFormat:v,children:y,className:b,enableYearNavigation:x=!1,weekStartsOn:w=0,disabledDates:S}=e,O=(0,u._T)(e,["value","defaultValue","onValueChange","enableSelect","minDate","maxDate","placeholder","selectPlaceholder","disabled","locale","enableClear","displayFormat","children","className","enableYearNavigation","weekStartsOn","disabledDates"]),[E,k]=(0,nB.Z)(i,o),[C,j]=(0,d.useState)(!1),[P,M]=(0,d.useState)(!1),A=(0,d.useMemo)(()=>{let e=[];return c&&e.push({before:c}),s&&e.push({after:s}),[...e,...null!=S?S:[]]},[c,s,S]),I=(0,d.useMemo)(()=>{let e=new Map;return y?d.Children.forEach(y,t=>{var n;e.set(t.props.value,{text:null!==(n=(0,eu.qg)(t))&&void 0!==n?n:t.props.value,from:t.props.from,to:t.props.to})}):e6.forEach(t=>{e.set(t.value,{text:t.text,from:t.from,to:nz})}),e},[y]),T=(0,d.useMemo)(()=>{if(y)return(0,eu.sl)(y);let e=new Map;return e6.forEach(t=>e.set(t.value,t.text)),e},[y]),R=(null==E?void 0:E.selectValue)||"",N=e1(null==E?void 0:E.from,c,R,I),_=e2(null==E?void 0:E.to,s,R,I),D=N||_?e5(N,_,m,v):f,L=ec(null!==(r=null!==(n=null!=_?_:N)&&void 0!==n?n:s)&&void 0!==r?r:nz),Z=g&&!h;return d.createElement("div",Object.assign({ref:t,className:(0,es.q)("w-full min-w-[10rem] relative flex justify-between text-tremor-default max-w-sm shadow-tremor-input dark:shadow-dark-tremor-input rounded-tremor-default",b)},O),d.createElement(J,{as:"div",className:(0,es.q)("w-full",l?"rounded-l-tremor-default":"rounded-tremor-default",C&&"ring-2 ring-tremor-brand-muted dark:ring-dark-tremor-brand-muted z-10")},d.createElement("div",{className:"relative w-full"},d.createElement(J.Button,{onFocus:()=>j(!0),onBlur:()=>j(!1),disabled:h,className:(0,es.q)("w-full outline-none text-left whitespace-nowrap truncate focus:ring-2 transition duration-100 rounded-l-tremor-default flex flex-nowrap border pl-3 py-2","rounded-l-tremor-default border-tremor-border text-tremor-content-emphasis focus:border-tremor-brand-subtle focus:ring-tremor-brand-muted","dark:border-dark-tremor-border dark:text-dark-tremor-content-emphasis dark:focus:border-dark-tremor-brand-subtle dark:focus:ring-dark-tremor-brand-muted",l?"rounded-l-tremor-default":"rounded-tremor-default",Z?"pr-8":"pr-4",(0,eu.um)((0,eu.Uh)(N||_),h))},d.createElement(en,{className:(0,es.q)(e0("calendarIcon"),"flex-none shrink-0 h-5 w-5 -ml-0.5 mr-2","text-tremor-content-subtle","dark:text-dark-tremor-content-subtle"),"aria-hidden":"true"}),d.createElement("p",{className:"truncate"},D)),Z&&N?d.createElement("button",{type:"button",className:(0,es.q)("absolute outline-none inset-y-0 right-0 flex items-center transition duration-100 mr-4"),onClick:e=>{e.preventDefault(),null==a||a({}),k({})}},d.createElement(er.Z,{className:(0,es.q)(e0("clearIcon"),"flex-none h-4 w-4","text-tremor-content-subtle","dark:text-dark-tremor-content-subtle")})):null),d.createElement(ee.u,{className:"absolute z-10 min-w-min left-0",enter:"transition ease duration-100 transform",enterFrom:"opacity-0 -translate-y-4",enterTo:"opacity-100 translate-y-0",leave:"transition ease duration-100 transform",leaveFrom:"opacity-100 translate-y-0",leaveTo:"opacity-0 -translate-y-4"},d.createElement(J.Panel,{focus:!0,className:(0,es.q)("divide-y overflow-y-auto outline-none rounded-tremor-default p-3 border my-1","bg-tremor-background border-tremor-border divide-tremor-border shadow-tremor-dropdown","dark:bg-dark-tremor-background dark:border-dark-tremor-border dark:divide-dark-tremor-border 
dark:shadow-dark-tremor-dropdown")},d.createElement(nL,Object.assign({mode:"range",showOutsideDays:!0,defaultMonth:L,selected:{from:N,to:_},onSelect:e=>{null==a||a({from:null==e?void 0:e.from,to:null==e?void 0:e.to}),k({from:null==e?void 0:e.from,to:null==e?void 0:e.to})},locale:m,disabled:A,enableYearNavigation:x,classNames:{day_range_middle:(0,es.q)("!rounded-none aria-selected:!bg-tremor-background-subtle aria-selected:dark:!bg-dark-tremor-background-subtle aria-selected:!text-tremor-content aria-selected:dark:!bg-dark-tremor-background-subtle"),day_range_start:"rounded-r-none rounded-l-tremor-small aria-selected:text-tremor-brand-inverted dark:aria-selected:text-dark-tremor-brand-inverted",day_range_end:"rounded-l-none rounded-r-tremor-small aria-selected:text-tremor-brand-inverted dark:aria-selected:text-dark-tremor-brand-inverted"},weekStartsOn:w},e))))),l&&d.createElement(et.R,{as:"div",className:(0,es.q)("w-48 -ml-px rounded-r-tremor-default",P&&"ring-2 ring-tremor-brand-muted dark:ring-dark-tremor-brand-muted z-10"),value:R,onChange:e=>{let{from:t,to:n}=I.get(e),r=null!=n?n:nz;null==a||a({from:t,to:r,selectValue:e}),k({from:t,to:r,selectValue:e})},disabled:h},e=>{var t;let{value:n}=e;return d.createElement(d.Fragment,null,d.createElement(et.R.Button,{onFocus:()=>M(!0),onBlur:()=>M(!1),className:(0,es.q)("w-full outline-none text-left whitespace-nowrap truncate rounded-r-tremor-default transition duration-100 border px-4 py-2","border-tremor-border shadow-tremor-input text-tremor-content-emphasis focus:border-tremor-brand-subtle","dark:border-dark-tremor-border dark:shadow-dark-tremor-input dark:text-dark-tremor-content-emphasis dark:focus:border-dark-tremor-brand-subtle",(0,eu.um)((0,eu.Uh)(n),h))},n&&null!==(t=T.get(n))&&void 0!==t?t:p),d.createElement(ee.u,{className:"absolute z-10 w-full inset-x-0 right-0",enter:"transition ease duration-100 transform",enterFrom:"opacity-0 -translate-y-4",enterTo:"opacity-100 translate-y-0",leave:"transition ease duration-100 transform",leaveFrom:"opacity-100 translate-y-0",leaveTo:"opacity-0 -translate-y-4"},d.createElement(et.R.Options,{className:(0,es.q)("divide-y overflow-y-auto outline-none border my-1","shadow-tremor-dropdown bg-tremor-background border-tremor-border divide-tremor-border rounded-tremor-default","dark:shadow-dark-tremor-dropdown dark:bg-dark-tremor-background dark:border-dark-tremor-border dark:divide-dark-tremor-border")},null!=y?y:e6.map(e=>d.createElement(nZ.Z,{key:e.value,value:e.value},e.text)))))}))});nF.displayName="DateRangePicker"},92414:function(e,t,n){"use strict";n.d(t,{Z:function(){return v}});var r=n(5853),o=n(2265);n(42698),n(64016),n(8710);var i=n(33232),a=n(44140),l=n(58747);let c=e=>{var t=(0,r._T)(e,[]);return o.createElement("svg",Object.assign({xmlns:"http://www.w3.org/2000/svg",viewBox:"0 0 24 24",fill:"currentColor"},t),o.createElement("path",{d:"M18.031 16.6168L22.3137 20.8995L20.8995 22.3137L16.6168 18.031C15.0769 19.263 13.124 20 11 20C6.032 20 2 15.968 2 11C2 6.032 6.032 2 11 2C15.968 2 20 6.032 20 11C20 13.124 19.263 15.0769 18.031 16.6168ZM16.0247 15.8748C17.2475 14.6146 18 12.8956 18 11C18 7.1325 14.8675 4 11 4C7.1325 4 4 7.1325 4 11C4 14.8675 7.1325 18 11 18C12.8956 18 14.6146 17.2475 15.8748 16.0247L16.0247 15.8748Z"}))};var s=n(4537),u=n(28517),d=n(33044);let f=e=>{var t=(0,r._T)(e,[]);return o.createElement("svg",Object.assign({xmlns:"http://www.w3.org/2000/svg",width:"100%",height:"100%",fill:"none",viewBox:"0 0 24 
24",stroke:"currentColor",strokeWidth:"2",strokeLinecap:"round",strokeLinejoin:"round"},t),o.createElement("line",{x1:"18",y1:"6",x2:"6",y2:"18"}),o.createElement("line",{x1:"6",y1:"6",x2:"18",y2:"18"}))};var p=n(65954),h=n(1153),m=n(96398);let g=(0,h.fn)("MultiSelect"),v=o.forwardRef((e,t)=>{let{defaultValue:n,value:h,onValueChange:v,placeholder:y="Select...",placeholderSearch:b="Search",disabled:x=!1,icon:w,children:S,className:O}=e,E=(0,r._T)(e,["defaultValue","value","onValueChange","placeholder","placeholderSearch","disabled","icon","children","className"]),[k,C]=(0,a.Z)(n,h),{reactElementChildren:j,optionsAvailable:P}=(0,o.useMemo)(()=>{let e=o.Children.toArray(S).filter(o.isValidElement);return{reactElementChildren:e,optionsAvailable:(0,m.n0)("",e)}},[S]),[M,A]=(0,o.useState)(""),I=(null!=k?k:[]).length>0,T=(0,o.useMemo)(()=>M?(0,m.n0)(M,j):P,[M,j,P]),R=()=>{A("")};return o.createElement(u.R,Object.assign({as:"div",ref:t,defaultValue:k,value:k,onChange:e=>{null==v||v(e),C(e)},disabled:x,className:(0,p.q)("w-full min-w-[10rem] relative text-tremor-default",O)},E,{multiple:!0}),e=>{let{value:t}=e;return o.createElement(o.Fragment,null,o.createElement(u.R.Button,{className:(0,p.q)("w-full outline-none text-left whitespace-nowrap truncate rounded-tremor-default focus:ring-2 transition duration-100 border pr-8 py-1.5","border-tremor-border shadow-tremor-input focus:border-tremor-brand-subtle focus:ring-tremor-brand-muted","dark:border-dark-tremor-border dark:shadow-dark-tremor-input dark:focus:border-dark-tremor-brand-subtle dark:focus:ring-dark-tremor-brand-muted",w?"pl-11 -ml-0.5":"pl-3",(0,m.um)(t.length>0,x))},w&&o.createElement("span",{className:(0,p.q)("absolute inset-y-0 left-0 flex items-center ml-px pl-2.5")},o.createElement(w,{className:(0,p.q)(g("Icon"),"flex-none h-5 w-5","text-tremor-content-subtle","dark:text-dark-tremor-content-subtle")})),o.createElement("div",{className:"h-6 flex items-center"},t.length>0?o.createElement("div",{className:"flex flex-nowrap overflow-x-scroll [&::-webkit-scrollbar]:hidden [scrollbar-width:none] gap-x-1 mr-5 -ml-1.5 relative"},P.filter(e=>t.includes(e.props.value)).map((e,n)=>{var r;return o.createElement("div",{key:n,className:(0,p.q)("max-w-[100px] lg:max-w-[200px] flex justify-center items-center pl-2 pr-1.5 py-1 font-medium","rounded-tremor-small","bg-tremor-background-muted dark:bg-dark-tremor-background-muted","bg-tremor-background-subtle dark:bg-dark-tremor-background-subtle","text-tremor-content-default dark:text-dark-tremor-content-default","text-tremor-content-emphasis dark:text-dark-tremor-content-emphasis")},o.createElement("div",{className:"text-xs truncate "},null!==(r=e.props.children)&&void 0!==r?r:e.props.value),o.createElement("div",{onClick:n=>{n.preventDefault();let r=t.filter(t=>t!==e.props.value);null==v||v(r),C(r)}},o.createElement(f,{className:(0,p.q)(g("clearIconItem"),"cursor-pointer rounded-tremor-full w-3.5 h-3.5 ml-2","text-tremor-content-subtle hover:text-tremor-content","dark:text-dark-tremor-content-subtle dark:hover:text-tremor-content")})))})):o.createElement("span",null,y)),o.createElement("span",{className:(0,p.q)("absolute inset-y-0 right-0 flex items-center mr-2.5")},o.createElement(l.Z,{className:(0,p.q)(g("arrowDownIcon"),"flex-none h-5 w-5","text-tremor-content-subtle","dark:text-dark-tremor-content-subtle")}))),I&&!x?o.createElement("button",{type:"button",className:(0,p.q)("absolute inset-y-0 right-0 flex items-center 
mr-8"),onClick:e=>{e.preventDefault(),C([]),null==v||v([])}},o.createElement(s.Z,{className:(0,p.q)(g("clearIconAllItems"),"flex-none h-4 w-4","text-tremor-content-subtle","dark:text-dark-tremor-content-subtle")})):null,o.createElement(d.u,{className:"absolute z-10 w-full",enter:"transition ease duration-100 transform",enterFrom:"opacity-0 -translate-y-4",enterTo:"opacity-100 translate-y-0",leave:"transition ease duration-100 transform",leaveFrom:"opacity-100 translate-y-0",leaveTo:"opacity-0 -translate-y-4"},o.createElement(u.R.Options,{className:(0,p.q)("divide-y overflow-y-auto outline-none rounded-tremor-default max-h-[228px] left-0 border my-1","bg-tremor-background border-tremor-border divide-tremor-border shadow-tremor-dropdown","dark:bg-dark-tremor-background dark:border-dark-tremor-border dark:divide-dark-tremor-border dark:shadow-dark-tremor-dropdown")},o.createElement("div",{className:(0,p.q)("flex items-center w-full px-2.5","bg-tremor-background-muted","dark:bg-dark-tremor-background-muted")},o.createElement("span",null,o.createElement(c,{className:(0,p.q)("flex-none w-4 h-4 mr-2","text-tremor-content-subtle","dark:text-dark-tremor-content-subtle")})),o.createElement("input",{name:"search",type:"input",autoComplete:"off",placeholder:b,className:(0,p.q)("w-full focus:outline-none focus:ring-none bg-transparent text-tremor-default py-2","text-tremor-content-emphasis","dark:text-dark-tremor-content-emphasis"),onKeyDown:e=>{"Space"===e.code&&""!==e.target.value&&e.stopPropagation()},onChange:e=>A(e.target.value),value:M})),o.createElement(i.Z.Provider,Object.assign({},{onBlur:{handleResetSearch:R}},{value:{selectedValue:t}}),T))))})});v.displayName="MultiSelect"},46030:function(e,t,n){"use strict";n.d(t,{Z:function(){return u}});var r=n(5853);n(42698),n(64016),n(8710);var o=n(33232),i=n(2265),a=n(65954),l=n(1153),c=n(28517);let s=(0,l.fn)("MultiSelectItem"),u=i.forwardRef((e,t)=>{let{value:n,className:u,children:d}=e,f=(0,r._T)(e,["value","className","children"]),{selectedValue:p}=(0,i.useContext)(o.Z),h=(0,l.NZ)(n,p);return i.createElement(c.R.Option,Object.assign({className:(0,a.q)(s("root"),"flex justify-start items-center cursor-default text-tremor-default p-2.5","ui-active:bg-tremor-background-muted ui-active:text-tremor-content-strong ui-selected:text-tremor-content-strong text-tremor-content-emphasis","dark:ui-active:bg-dark-tremor-background-muted dark:ui-active:text-dark-tremor-content-strong dark:ui-selected:text-dark-tremor-content-strong dark:ui-selected:bg-dark-tremor-background-muted dark:text-dark-tremor-content-emphasis",u),ref:t,key:n,value:n},f),i.createElement("input",{type:"checkbox",className:(0,a.q)(s("checkbox"),"flex-none focus:ring-none focus:outline-none cursor-pointer mr-2.5","accent-tremor-brand","dark:accent-dark-tremor-brand"),checked:h,readOnly:!0}),i.createElement("span",{className:"whitespace-nowrap truncate"},null!=d?d:n))});u.displayName="MultiSelectItem"},27281:function(e,t,n){"use strict";n.d(t,{Z:function(){return h}});var r=n(5853),o=n(2265),i=n(58747),a=n(4537),l=n(65954),c=n(1153),s=n(96398),u=n(28517),d=n(33044),f=n(44140);let p=(0,c.fn)("Select"),h=o.forwardRef((e,t)=>{let{defaultValue:n,value:c,onValueChange:h,placeholder:m="Select...",disabled:g=!1,icon:v,enableClear:y=!0,children:b,className:x}=e,w=(0,r._T)(e,["defaultValue","value","onValueChange","placeholder","disabled","icon","enableClear","children","className"]),[S,O]=(0,f.Z)(n,c),E=(0,o.useMemo)(()=>{let 
e=o.Children.toArray(b).filter(o.isValidElement);return(0,s.sl)(e)},[b]);return o.createElement(u.R,Object.assign({as:"div",ref:t,defaultValue:S,value:S,onChange:e=>{null==h||h(e),O(e)},disabled:g,className:(0,l.q)("w-full min-w-[10rem] relative text-tremor-default",x)},w),e=>{var t;let{value:n}=e;return o.createElement(o.Fragment,null,o.createElement(u.R.Button,{className:(0,l.q)("w-full outline-none text-left whitespace-nowrap truncate rounded-tremor-default focus:ring-2 transition duration-100 border pr-8 py-2","border-tremor-border shadow-tremor-input focus:border-tremor-brand-subtle focus:ring-tremor-brand-muted","dark:border-dark-tremor-border dark:shadow-dark-tremor-input dark:focus:border-dark-tremor-brand-subtle dark:focus:ring-dark-tremor-brand-muted",v?"pl-10":"pl-3",(0,s.um)((0,s.Uh)(n),g))},v&&o.createElement("span",{className:(0,l.q)("absolute inset-y-0 left-0 flex items-center ml-px pl-2.5")},o.createElement(v,{className:(0,l.q)(p("Icon"),"flex-none h-5 w-5","text-tremor-content-subtle","dark:text-dark-tremor-content-subtle")})),o.createElement("span",{className:"w-[90%] block truncate"},n&&null!==(t=E.get(n))&&void 0!==t?t:m),o.createElement("span",{className:(0,l.q)("absolute inset-y-0 right-0 flex items-center mr-3")},o.createElement(i.Z,{className:(0,l.q)(p("arrowDownIcon"),"flex-none h-5 w-5","text-tremor-content-subtle","dark:text-dark-tremor-content-subtle")}))),y&&S?o.createElement("button",{type:"button",className:(0,l.q)("absolute inset-y-0 right-0 flex items-center mr-8"),onClick:e=>{e.preventDefault(),O(""),null==h||h("")}},o.createElement(a.Z,{className:(0,l.q)(p("clearIcon"),"flex-none h-4 w-4","text-tremor-content-subtle","dark:text-dark-tremor-content-subtle")})):null,o.createElement(d.u,{className:"absolute z-10 w-full",enter:"transition ease duration-100 transform",enterFrom:"opacity-0 -translate-y-4",enterTo:"opacity-100 translate-y-0",leave:"transition ease duration-100 transform",leaveFrom:"opacity-100 translate-y-0",leaveTo:"opacity-0 -translate-y-4"},o.createElement(u.R.Options,{className:(0,l.q)("divide-y overflow-y-auto outline-none rounded-tremor-default max-h-[228px] left-0 border my-1","bg-tremor-background border-tremor-border divide-tremor-border shadow-tremor-dropdown","dark:bg-dark-tremor-background dark:border-dark-tremor-border dark:divide-dark-tremor-border dark:shadow-dark-tremor-dropdown")},b)))})});h.displayName="Select"},57365:function(e,t,n){"use strict";n.d(t,{Z:function(){return c}});var r=n(5853),o=n(2265),i=n(28517),a=n(65954);let l=(0,n(1153).fn)("SelectItem"),c=o.forwardRef((e,t)=>{let{value:n,icon:c,className:s,children:u}=e,d=(0,r._T)(e,["value","icon","className","children"]);return o.createElement(i.R.Option,Object.assign({className:(0,a.q)(l("root"),"flex justify-start items-center cursor-default text-tremor-default px-2.5 py-2.5","ui-active:bg-tremor-background-muted ui-active:text-tremor-content-strong ui-selected:text-tremor-content-strong ui-selected:bg-tremor-background-muted text-tremor-content-emphasis","dark:ui-active:bg-dark-tremor-background-muted dark:ui-active:text-dark-tremor-content-strong dark:ui-selected:text-dark-tremor-content-strong dark:ui-selected:bg-dark-tremor-background-muted dark:text-dark-tremor-content-emphasis",s),ref:t,key:n,value:n},d),c&&o.createElement(c,{className:(0,a.q)(l("icon"),"flex-none w-5 h-5 mr-1.5","text-tremor-content-subtle","dark:text-dark-tremor-content-subtle")}),o.createElement("span",{className:"whitespace-nowrap 
truncate"},null!=u?u:n))});c.displayName="SelectItem"},92858:function(e,t,n){"use strict";n.d(t,{Z:function(){return A}});var r=n(5853),o=n(2265),i=n(62963),a=n(90945),l=n(13323),c=n(17684),s=n(80004),u=n(93689),d=n(38198),f=n(47634),p=n(56314),h=n(27847),m=n(64518);let g=(0,o.createContext)(null),v=Object.assign((0,h.yV)(function(e,t){let n=(0,c.M)(),{id:r="headlessui-description-".concat(n),...i}=e,a=function e(){let t=(0,o.useContext)(g);if(null===t){let t=Error("You used a component, but it is not inside a relevant parent.");throw Error.captureStackTrace&&Error.captureStackTrace(t,e),t}return t}(),l=(0,u.T)(t);(0,m.e)(()=>a.register(r),[r,a.register]);let s={ref:l,...a.props,id:r};return(0,h.sY)({ourProps:s,theirProps:i,slot:a.slot||{},defaultTag:"p",name:a.name||"Description"})}),{});var y=n(37388);let b=(0,o.createContext)(null),x=Object.assign((0,h.yV)(function(e,t){let n=(0,c.M)(),{id:r="headlessui-label-".concat(n),passive:i=!1,...a}=e,l=function e(){let t=(0,o.useContext)(b);if(null===t){let t=Error("You used a