diff --git a/.circleci/config.yml b/.circleci/config.yml index acf8612eac..ecae22f872 100644 --- a/.circleci/config.yml +++ b/.circleci/config.yml @@ -1,6 +1,8 @@ version: 2.1 orbs: codecov: codecov/codecov@4.0.1 + node: circleci/node@5.1.0 # Add this line to declare the node orb + jobs: local_testing: @@ -70,6 +72,7 @@ jobs: pip install "jsonschema==4.22.0" pip install "pytest-xdist==3.6.1" pip install "websockets==10.4" + pip uninstall posthog -y - save_cache: paths: - ./venv @@ -415,6 +418,56 @@ jobs: paths: - litellm_router_coverage.xml - litellm_router_coverage + litellm_proxy_security_tests: + docker: + - image: cimg/python:3.11 + auth: + username: ${DOCKERHUB_USERNAME} + password: ${DOCKERHUB_PASSWORD} + working_directory: ~/project + steps: + - checkout + - run: + name: Show git commit hash + command: | + echo "Git commit hash: $CIRCLE_SHA1" + - run: + name: Install Dependencies + command: | + python -m pip install --upgrade pip + python -m pip install -r requirements.txt + pip install "pytest==7.3.1" + pip install "pytest-retry==1.6.3" + pip install "pytest-asyncio==0.21.1" + pip install "pytest-cov==5.0.0" + - run: + name: Run prisma ./docker/entrypoint.sh + command: | + set +e + chmod +x docker/entrypoint.sh + ./docker/entrypoint.sh + set -e + # Run pytest and generate JUnit XML report + - run: + name: Run tests + command: | + pwd + ls + python -m pytest tests/proxy_security_tests --cov=litellm --cov-report=xml -vv -x -v --junitxml=test-results/junit.xml --durations=5 + no_output_timeout: 120m + - run: + name: Rename the coverage files + command: | + mv coverage.xml litellm_proxy_security_tests_coverage.xml + mv .coverage litellm_proxy_security_tests_coverage + # Store test results + - store_test_results: + path: test-results + - persist_to_workspace: + root: . 
+ paths: + - litellm_proxy_security_tests_coverage.xml + - litellm_proxy_security_tests_coverage litellm_proxy_unit_testing: # Runs all tests with the "proxy", "key", "jwt" filenames docker: - image: cimg/python:3.11 @@ -625,6 +678,50 @@ jobs: paths: - llm_translation_coverage.xml - llm_translation_coverage + litellm_mapped_tests: + docker: + - image: cimg/python:3.11 + auth: + username: ${DOCKERHUB_USERNAME} + password: ${DOCKERHUB_PASSWORD} + working_directory: ~/project + + steps: + - checkout + - run: + name: Install Dependencies + command: | + python -m pip install --upgrade pip + python -m pip install -r requirements.txt + pip install "pytest-mock==3.12.0" + pip install "pytest==7.3.1" + pip install "pytest-retry==1.6.3" + pip install "pytest-cov==5.0.0" + pip install "pytest-asyncio==0.21.1" + pip install "respx==0.21.1" + pip install "hypercorn==0.17.3" + # Run pytest and generate JUnit XML report + - run: + name: Run tests + command: | + pwd + ls + python -m pytest -vv tests/litellm --cov=litellm --cov-report=xml -x -s -v --junitxml=test-results/junit.xml --durations=5 + no_output_timeout: 120m + - run: + name: Rename the coverage files + command: | + mv coverage.xml litellm_mapped_tests_coverage.xml + mv .coverage litellm_mapped_tests_coverage + + # Store test results + - store_test_results: + path: test-results + - persist_to_workspace: + root: . + paths: + - litellm_mapped_tests_coverage.xml + - litellm_mapped_tests_coverage batches_testing: docker: - image: cimg/python:3.11 @@ -691,6 +788,7 @@ jobs: pip install "pytest-cov==5.0.0" pip install "google-generativeai==0.3.2" pip install "google-cloud-aiplatform==1.43.0" + pip install numpydoc # Run pytest and generate JUnit XML report - run: name: Run tests @@ -986,21 +1084,26 @@ jobs: pip install ruff pip install pylint pip install pyright + pip install beautifulsoup4 pip install . 
curl https://raw.githubusercontent.com/helm/helm/main/scripts/get-helm-3 | bash - run: python -c "from litellm import *" || (echo '🚨 import failed, this means you introduced unprotected imports! 🚨'; exit 1) - run: ruff check ./litellm # - run: python ./tests/documentation_tests/test_general_setting_keys.py + - run: python ./tests/code_coverage_tests/check_licenses.py - run: python ./tests/code_coverage_tests/router_code_coverage.py + - run: python ./tests/code_coverage_tests/callback_manager_test.py - run: python ./tests/code_coverage_tests/recursive_detector.py - run: python ./tests/code_coverage_tests/test_router_strategy_async.py - run: python ./tests/code_coverage_tests/litellm_logging_code_coverage.py + - run: python ./tests/code_coverage_tests/bedrock_pricing.py - run: python ./tests/documentation_tests/test_env_keys.py - run: python ./tests/documentation_tests/test_router_settings.py - run: python ./tests/documentation_tests/test_api_docs.py - run: python ./tests/code_coverage_tests/ensure_async_clients_test.py - run: python ./tests/code_coverage_tests/enforce_llms_folder_style.py - run: python ./tests/documentation_tests/test_circular_imports.py + - run: python ./tests/code_coverage_tests/prevent_key_leaks_in_exceptions.py - run: helm lint ./deploy/charts/litellm-helm db_migration_disable_update_check: @@ -1010,6 +1113,23 @@ jobs: working_directory: ~/project steps: - checkout + - run: + name: Install Python 3.9 + command: | + curl https://repo.anaconda.com/miniconda/Miniconda3-latest-Linux-x86_64.sh --output miniconda.sh + bash miniconda.sh -b -p $HOME/miniconda + export PATH="$HOME/miniconda/bin:$PATH" + conda init bash + source ~/.bashrc + conda create -n myenv python=3.9 -y + conda activate myenv + python --version + - run: + name: Install Dependencies + command: | + pip install "pytest==7.3.1" + pip install "pytest-asyncio==0.21.1" + pip install aiohttp - run: name: Build Docker image command: | @@ -1017,29 +1137,48 @@ jobs: - run: name: Run Docker 
container command: | - docker run --name my-app \ + docker run -d \ -p 4000:4000 \ -e DATABASE_URL=$PROXY_DATABASE_URL \ -e DISABLE_SCHEMA_UPDATE="True" \ -v $(pwd)/litellm/proxy/example_config_yaml/bad_schema.prisma:/app/schema.prisma \ -v $(pwd)/litellm/proxy/example_config_yaml/bad_schema.prisma:/app/litellm/proxy/schema.prisma \ -v $(pwd)/litellm/proxy/example_config_yaml/disable_schema_update.yaml:/app/config.yaml \ + --name my-app \ myapp:latest \ --config /app/config.yaml \ - --port 4000 > docker_output.log 2>&1 || true + --port 4000 - run: - name: Display Docker logs - command: cat docker_output.log - - run: - name: Check for expected error + name: Install curl and dockerize command: | - if grep -q "prisma schema out of sync with db. Consider running these sql_commands to sync the two" docker_output.log; then - echo "Expected error found. Test passed." + sudo apt-get update + sudo apt-get install -y curl + sudo wget https://github.com/jwilder/dockerize/releases/download/v0.6.1/dockerize-linux-amd64-v0.6.1.tar.gz + sudo tar -C /usr/local/bin -xzvf dockerize-linux-amd64-v0.6.1.tar.gz + sudo rm dockerize-linux-amd64-v0.6.1.tar.gz + + - run: + name: Wait for container to be ready + command: dockerize -wait http://localhost:4000 -timeout 1m + - run: + name: Check container logs for expected message + command: | + echo "=== Printing Full Container Startup Logs ===" + docker logs my-app + echo "=== End of Full Container Startup Logs ===" + + if docker logs my-app 2>&1 | grep -q "prisma schema out of sync with db. Consider running these sql_commands to sync the two"; then + echo "Expected message found in logs. Test passed." else - echo "Expected error not found. Test failed." - cat docker_output.log + echo "Expected message not found in logs. Test failed." 
exit 1 fi + - run: + name: Run Basic Proxy Startup Tests (Health Readiness and Chat Completion) + command: | + python -m pytest -vv tests/basic_proxy_startup_tests -x --junitxml=test-results/junit-2.xml --durations=5 + no_output_timeout: 120m + build_and_test: machine: @@ -1460,6 +1599,199 @@ jobs: # Store test results - store_test_results: path: test-results + + proxy_multi_instance_tests: + machine: + image: ubuntu-2204:2023.10.1 + resource_class: xlarge + working_directory: ~/project + steps: + - checkout + - run: + name: Install Docker CLI (In case it's not already installed) + command: | + sudo apt-get update + sudo apt-get install -y docker-ce docker-ce-cli containerd.io + - run: + name: Install Python 3.9 + command: | + curl https://repo.anaconda.com/miniconda/Miniconda3-latest-Linux-x86_64.sh --output miniconda.sh + bash miniconda.sh -b -p $HOME/miniconda + export PATH="$HOME/miniconda/bin:$PATH" + conda init bash + source ~/.bashrc + conda create -n myenv python=3.9 -y + conda activate myenv + python --version + - run: + name: Install Dependencies + command: | + pip install "pytest==7.3.1" + pip install "pytest-asyncio==0.21.1" + pip install aiohttp + python -m pip install --upgrade pip + python -m pip install -r requirements.txt + pip install "pytest==7.3.1" + pip install "pytest-retry==1.6.3" + pip install "pytest-mock==3.12.0" + pip install "pytest-asyncio==0.21.1" + - run: + name: Build Docker image + command: docker build -t my-app:latest -f ./docker/Dockerfile.database . 
+ - run: + name: Run Docker container 1 + # intentionally give bad redis credentials here + # the OTEL test - should get this as a trace + command: | + docker run -d \ + -p 4000:4000 \ + -e DATABASE_URL=$PROXY_DATABASE_URL \ + -e REDIS_HOST=$REDIS_HOST \ + -e REDIS_PASSWORD=$REDIS_PASSWORD \ + -e REDIS_PORT=$REDIS_PORT \ + -e LITELLM_MASTER_KEY="sk-1234" \ + -e LITELLM_LICENSE=$LITELLM_LICENSE \ + -e USE_DDTRACE=True \ + -e DD_API_KEY=$DD_API_KEY \ + -e DD_SITE=$DD_SITE \ + --name my-app \ + -v $(pwd)/litellm/proxy/example_config_yaml/multi_instance_simple_config.yaml:/app/config.yaml \ + my-app:latest \ + --config /app/config.yaml \ + --port 4000 \ + --detailed_debug \ + - run: + name: Run Docker container 2 + command: | + docker run -d \ + -p 4001:4001 \ + -e DATABASE_URL=$PROXY_DATABASE_URL \ + -e REDIS_HOST=$REDIS_HOST \ + -e REDIS_PASSWORD=$REDIS_PASSWORD \ + -e REDIS_PORT=$REDIS_PORT \ + -e LITELLM_MASTER_KEY="sk-1234" \ + -e LITELLM_LICENSE=$LITELLM_LICENSE \ + -e USE_DDTRACE=True \ + -e DD_API_KEY=$DD_API_KEY \ + -e DD_SITE=$DD_SITE \ + --name my-app-2 \ + -v $(pwd)/litellm/proxy/example_config_yaml/multi_instance_simple_config.yaml:/app/config.yaml \ + my-app:latest \ + --config /app/config.yaml \ + --port 4001 \ + --detailed_debug + - run: + name: Install curl and dockerize + command: | + sudo apt-get update + sudo apt-get install -y curl + sudo wget https://github.com/jwilder/dockerize/releases/download/v0.6.1/dockerize-linux-amd64-v0.6.1.tar.gz + sudo tar -C /usr/local/bin -xzvf dockerize-linux-amd64-v0.6.1.tar.gz + sudo rm dockerize-linux-amd64-v0.6.1.tar.gz + - run: + name: Start outputting logs + command: docker logs -f my-app + background: true + - run: + name: Wait for instance 1 to be ready + command: dockerize -wait http://localhost:4000 -timeout 5m + - run: + name: Wait for instance 2 to be ready + command: dockerize -wait http://localhost:4001 -timeout 5m + - run: + name: Run tests + command: | + pwd + ls + python -m pytest -vv 
tests/multi_instance_e2e_tests -x --junitxml=test-results/junit.xml --durations=5 + no_output_timeout: + 120m + # Clean up first container + # Store test results + - store_test_results: + path: test-results + + proxy_store_model_in_db_tests: + machine: + image: ubuntu-2204:2023.10.1 + resource_class: xlarge + working_directory: ~/project + steps: + - checkout + - run: + name: Install Docker CLI (In case it's not already installed) + command: | + sudo apt-get update + sudo apt-get install -y docker-ce docker-ce-cli containerd.io + - run: + name: Install Python 3.9 + command: | + curl https://repo.anaconda.com/miniconda/Miniconda3-latest-Linux-x86_64.sh --output miniconda.sh + bash miniconda.sh -b -p $HOME/miniconda + export PATH="$HOME/miniconda/bin:$PATH" + conda init bash + source ~/.bashrc + conda create -n myenv python=3.9 -y + conda activate myenv + python --version + - run: + name: Install Dependencies + command: | + pip install "pytest==7.3.1" + pip install "pytest-asyncio==0.21.1" + pip install aiohttp + python -m pip install --upgrade pip + python -m pip install -r requirements.txt + pip install "pytest==7.3.1" + pip install "pytest-retry==1.6.3" + pip install "pytest-mock==3.12.0" + pip install "pytest-asyncio==0.21.1" + pip install "assemblyai==0.37.0" + - run: + name: Build Docker image + command: docker build -t my-app:latest -f ./docker/Dockerfile.database . 
+ - run: + name: Run Docker container + # intentionally give bad redis credentials here + # the OTEL test - should get this as a trace + command: | + docker run -d \ + -p 4000:4000 \ + -e DATABASE_URL=$PROXY_DATABASE_URL \ + -e STORE_MODEL_IN_DB="True" \ + -e LITELLM_MASTER_KEY="sk-1234" \ + -e LITELLM_LICENSE=$LITELLM_LICENSE \ + --name my-app \ + -v $(pwd)/litellm/proxy/example_config_yaml/store_model_db_config.yaml:/app/config.yaml \ + my-app:latest \ + --config /app/config.yaml \ + --port 4000 \ + --detailed_debug \ + - run: + name: Install curl and dockerize + command: | + sudo apt-get update + sudo apt-get install -y curl + sudo wget https://github.com/jwilder/dockerize/releases/download/v0.6.1/dockerize-linux-amd64-v0.6.1.tar.gz + sudo tar -C /usr/local/bin -xzvf dockerize-linux-amd64-v0.6.1.tar.gz + sudo rm dockerize-linux-amd64-v0.6.1.tar.gz + - run: + name: Start outputting logs + command: docker logs -f my-app + background: true + - run: + name: Wait for app to be ready + command: dockerize -wait http://localhost:4000 -timeout 5m + - run: + name: Run tests + command: | + pwd + ls + python -m pytest -vv tests/store_model_in_db_tests -x --junitxml=test-results/junit.xml --durations=5 + no_output_timeout: + 120m + # Clean up first container + proxy_build_from_pip_tests: # Change from docker to machine executor machine: @@ -1590,6 +1922,7 @@ jobs: pip install "google-cloud-aiplatform==1.43.0" pip install aiohttp pip install "openai==1.54.0 " + pip install "assemblyai==0.37.0" python -m pip install --upgrade pip pip install "pydantic==2.7.1" pip install "pytest==7.3.1" @@ -1602,12 +1935,12 @@ jobs: pip install prisma pip install fastapi pip install jsonschema - pip install "httpx==0.24.1" + pip install "httpx==0.27.0" pip install "anyio==3.7.1" pip install "asyncio==3.4.3" pip install "PyGithub==1.59.1" pip install "google-cloud-aiplatform==1.59.0" - pip install anthropic + pip install "anthropic==0.49.0" # Run pytest and generate JUnit XML report - run: 
name: Build Docker image @@ -1622,6 +1955,7 @@ jobs: -e OPENAI_API_KEY=$OPENAI_API_KEY \ -e GEMINI_API_KEY=$GEMINI_API_KEY \ -e ANTHROPIC_API_KEY=$ANTHROPIC_API_KEY \ + -e ASSEMBLYAI_API_KEY=$ASSEMBLYAI_API_KEY \ -e USE_DDTRACE=True \ -e DD_API_KEY=$DD_API_KEY \ -e DD_SITE=$DD_SITE \ @@ -1648,11 +1982,44 @@ jobs: - run: name: Wait for app to be ready command: dockerize -wait http://localhost:4000 -timeout 5m + # Add Ruby installation and testing before the existing Node.js and Python tests + - run: + name: Install Ruby and Bundler + command: | + # Import GPG keys first + gpg --keyserver hkp://keyserver.ubuntu.com --recv-keys 409B6B1796C275462A1703113804BB82D39DC0E3 7D2BAF1CF37B13E2069D6956105BD0E739499BDB || { + curl -sSL https://rvm.io/mpapis.asc | gpg --import - + curl -sSL https://rvm.io/pkuczynski.asc | gpg --import - + } + + # Install Ruby version manager (RVM) + curl -sSL https://get.rvm.io | bash -s stable + + # Source RVM from the correct location + source $HOME/.rvm/scripts/rvm + + # Install Ruby 3.2.2 + rvm install 3.2.2 + rvm use 3.2.2 --default + + # Install latest Bundler + gem install bundler + + - run: + name: Run Ruby tests + command: | + source $HOME/.rvm/scripts/rvm + cd tests/pass_through_tests/ruby_passthrough_tests + bundle install + bundle exec rspec + no_output_timeout: 30m # New steps to run Node.js test - run: name: Install Node.js command: | + export DEBIAN_FRONTEND=noninteractive curl -fsSL https://deb.nodesource.com/setup_18.x | sudo -E bash - + sudo apt-get update sudo apt-get install -y nodejs node --version npm --version @@ -1701,7 +2068,7 @@ jobs: python -m venv venv . 
venv/bin/activate pip install coverage - coverage combine llm_translation_coverage logging_coverage litellm_router_coverage local_testing_coverage litellm_assistants_api_coverage auth_ui_unit_tests_coverage langfuse_coverage caching_coverage litellm_proxy_unit_tests_coverage image_gen_coverage pass_through_unit_tests_coverage batches_coverage + coverage combine llm_translation_coverage logging_coverage litellm_router_coverage local_testing_coverage litellm_assistants_api_coverage auth_ui_unit_tests_coverage langfuse_coverage caching_coverage litellm_proxy_unit_tests_coverage image_gen_coverage pass_through_unit_tests_coverage batches_coverage litellm_proxy_security_tests_coverage coverage xml - codecov/upload: file: ./coverage.xml @@ -1765,7 +2132,7 @@ jobs: circleci step halt fi - run: - name: Trigger Github Action for new Docker Container + Trigger Stable Release Testing + name: Trigger Github Action for new Docker Container + Trigger Load Testing command: | echo "Install TOML package." 
python3 -m pip install toml @@ -1775,9 +2142,9 @@ jobs: -H "Accept: application/vnd.github.v3+json" \ -H "Authorization: Bearer $GITHUB_TOKEN" \ "https://api.github.com/repos/BerriAI/litellm/actions/workflows/ghcr_deploy.yml/dispatches" \ - -d "{\"ref\":\"main\", \"inputs\":{\"tag\":\"v${VERSION}\", \"commit_hash\":\"$CIRCLE_SHA1\"}}" - echo "triggering stable release server for version ${VERSION} and commit ${CIRCLE_SHA1}" - curl -X POST "https://proxyloadtester-production.up.railway.app/start/load/test?version=${VERSION}&commit_hash=${CIRCLE_SHA1}" + -d "{\"ref\":\"main\", \"inputs\":{\"tag\":\"v${VERSION}-nightly\", \"commit_hash\":\"$CIRCLE_SHA1\"}}" + echo "triggering load testing server for version ${VERSION} and commit ${CIRCLE_SHA1}" + curl -X POST "https://proxyloadtester-production.up.railway.app/start/load/test?version=${VERSION}&commit_hash=${CIRCLE_SHA1}&release_type=nightly" e2e_ui_testing: machine: @@ -1786,6 +2153,25 @@ jobs: working_directory: ~/project steps: - checkout + - run: + name: Build UI + command: | + # Set up nvm + export NVM_DIR="/opt/circleci/.nvm" + source "$NVM_DIR/nvm.sh" + source "$NVM_DIR/bash_completion" + + # Install and use Node version + nvm install v18.17.0 + nvm use v18.17.0 + + cd ui/litellm-dashboard + + # Install dependencies first + npm install + + # Now source the build script + source ./build_ui.sh - run: name: Install Docker CLI (In case it's not already installed) command: | @@ -1830,6 +2216,7 @@ jobs: name: Install Playwright Browsers command: | npx playwright install + - run: name: Build Docker image command: docker build -t my-app:latest -f ./docker/Dockerfile.database . 
@@ -1958,6 +2345,12 @@ workflows: only: - main - /litellm_.*/ + - litellm_proxy_security_tests: + filters: + branches: + only: + - main + - /litellm_.*/ - litellm_assistants_api_testing: filters: branches: @@ -2006,6 +2399,18 @@ workflows: only: - main - /litellm_.*/ + - proxy_multi_instance_tests: + filters: + branches: + only: + - main + - /litellm_.*/ + - proxy_store_model_in_db_tests: + filters: + branches: + only: + - main + - /litellm_.*/ - proxy_build_from_pip_tests: filters: branches: @@ -2024,6 +2429,12 @@ workflows: only: - main - /litellm_.*/ + - litellm_mapped_tests: + filters: + branches: + only: + - main + - /litellm_.*/ - batches_testing: filters: branches: @@ -2057,6 +2468,7 @@ workflows: - upload-coverage: requires: - llm_translation_testing + - litellm_mapped_tests - batches_testing - litellm_utils_testing - pass_through_unit_testing @@ -2065,6 +2477,7 @@ workflows: - litellm_router_testing - caching_unit_tests - litellm_proxy_unit_testing + - litellm_proxy_security_tests - langfuse_logging_unit_tests - local_testing - litellm_assistants_api_testing @@ -2113,6 +2526,7 @@ workflows: - load_testing - test_bad_database_url - llm_translation_testing + - litellm_mapped_tests - batches_testing - litellm_utils_testing - pass_through_unit_testing @@ -2126,9 +2540,12 @@ workflows: - db_migration_disable_update_check - e2e_ui_testing - litellm_proxy_unit_testing + - litellm_proxy_security_tests - installing_litellm_on_python - installing_litellm_on_python_3_13 - proxy_logging_guardrails_model_info_tests + - proxy_multi_instance_tests + - proxy_store_model_in_db_tests - proxy_build_from_pip_tests - proxy_pass_through_endpoint_tests - check_code_and_doc_quality diff --git a/.env.example b/.env.example index c87c2ef8fd..82b09ca25e 100644 --- a/.env.example +++ b/.env.example @@ -20,3 +20,8 @@ REPLICATE_API_TOKEN = "" ANTHROPIC_API_KEY = "" # Infisical INFISICAL_TOKEN = "" + +# Development Configs +LITELLM_MASTER_KEY = "sk-1234" +DATABASE_URL = 
"postgresql://llmproxy:dbpassword9090@db:5432/litellm" +STORE_MODEL_IN_DB = "True" \ No newline at end of file diff --git a/.github/pull_request_template.md b/.github/pull_request_template.md index 3615d030bf..d50aefa8bb 100644 --- a/.github/pull_request_template.md +++ b/.github/pull_request_template.md @@ -6,6 +6,16 @@ +## Pre-Submission checklist + +**Please complete all items before asking a LiteLLM maintainer to review your PR** + +- [ ] I have Added testing in the `tests/litellm/` directory, **Adding at least 1 test is a hard requirement** - [see details](https://docs.litellm.ai/docs/extras/contributing_code) +- [ ] I have added a screenshot of my new test passing locally +- [ ] My PR passes all unit tests on (`make test-unit`)[https://docs.litellm.ai/docs/extras/contributing_code] +- [ ] My PR's scope is as isolated as possible, it only solves 1 specific problem + + ## Type @@ -20,10 +30,4 @@ ## Changes - - -## [REQUIRED] Testing - Attach a screenshot of any new tests passing locally -If UI changes, send a screenshot/GIF of working UI fixes - - diff --git a/.github/workflows/interpret_load_test.py b/.github/workflows/interpret_load_test.py index b1a28e069b..6b5e6535d7 100644 --- a/.github/workflows/interpret_load_test.py +++ b/.github/workflows/interpret_load_test.py @@ -52,6 +52,41 @@ def interpret_results(csv_file): return markdown_table +def _get_docker_run_command_stable_release(release_version): + return f""" +\n\n +## Docker Run LiteLLM Proxy + +``` +docker run \\ +-e STORE_MODEL_IN_DB=True \\ +-p 4000:4000 \\ +ghcr.io/berriai/litellm:litellm_stable_release_branch-{release_version} +``` + """ + + +def _get_docker_run_command(release_version): + return f""" +\n\n +## Docker Run LiteLLM Proxy + +``` +docker run \\ +-e STORE_MODEL_IN_DB=True \\ +-p 4000:4000 \\ +ghcr.io/berriai/litellm:main-{release_version} +``` + """ + + +def get_docker_run_command(release_version): + if "stable" in release_version: + return 
_get_docker_run_command_stable_release(release_version) + else: + return _get_docker_run_command(release_version) + + if __name__ == "__main__": csv_file = "load_test_stats.csv" # Change this to the path of your CSV file markdown_table = interpret_results(csv_file) @@ -79,17 +114,7 @@ if __name__ == "__main__": start_index = latest_release.body.find("Load Test LiteLLM Proxy Results") existing_release_body = latest_release.body[:start_index] - docker_run_command = f""" -\n\n -## Docker Run LiteLLM Proxy - -``` -docker run \\ --e STORE_MODEL_IN_DB=True \\ --p 4000:4000 \\ -ghcr.io/berriai/litellm:main-{release_version} -``` - """ + docker_run_command = get_docker_run_command(release_version) print("docker run command: ", docker_run_command) new_release_body = ( diff --git a/.github/workflows/locustfile.py b/.github/workflows/locustfile.py index 96dd8e1990..36dbeee9c4 100644 --- a/.github/workflows/locustfile.py +++ b/.github/workflows/locustfile.py @@ -8,7 +8,7 @@ class MyUser(HttpUser): def chat_completion(self): headers = { "Content-Type": "application/json", - "Authorization": "Bearer sk-ZoHqrLIs2-5PzJrqBaviAA", + "Authorization": "Bearer sk-8N1tLOOyH8TIxwOLahhIVg", # Include any additional headers you may need for authentication, etc. } diff --git a/.github/workflows/stale.yml b/.github/workflows/stale.yml new file mode 100644 index 0000000000..5a9b19fc9c --- /dev/null +++ b/.github/workflows/stale.yml @@ -0,0 +1,20 @@ +name: "Stale Issue Management" + +on: + schedule: + - cron: '0 0 * * *' # Runs daily at midnight UTC + workflow_dispatch: + +jobs: + stale: + runs-on: ubuntu-latest + steps: + - uses: actions/stale@v8 + with: + repo-token: "${{ secrets.GITHUB_TOKEN }}" + stale-issue-message: "This issue has been automatically marked as stale because it has not had recent activity. It will be closed if no further activity occurs." + stale-pr-message: "This pull request has been automatically marked as stale because it has not had recent activity. 
It will be closed if no further activity occurs." + days-before-stale: 90 # Revert to 60 days + days-before-close: 7 # Revert to 7 days + stale-issue-label: "stale" + operations-per-run: 1000 \ No newline at end of file diff --git a/.gitignore b/.gitignore index 6f745a350e..d35923f7c3 100644 --- a/.gitignore +++ b/.gitignore @@ -48,7 +48,7 @@ deploy/charts/litellm/charts/* deploy/charts/*.tgz litellm/proxy/vertex_key.json **/.vim/ -/node_modules +**/node_modules kub.yaml loadtest_kub.yaml litellm/proxy/_new_secret_config.yaml @@ -71,3 +71,11 @@ tests/local_testing/log.txt .codegpt litellm/proxy/_new_new_secret_config.yaml +litellm/proxy/custom_guardrail.py +litellm/proxy/_experimental/out/404.html +litellm/proxy/_experimental/out/404.html +litellm/proxy/_experimental/out/model_hub.html +.mypy_cache/* +litellm/proxy/application.log +tests/llm_translation/vertex_test_account.json +tests/llm_translation/test_vertex_key.json diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml index b8567fce76..fb37f32524 100644 --- a/.pre-commit-config.yaml +++ b/.pre-commit-config.yaml @@ -22,7 +22,7 @@ repos: rev: 7.0.0 # The version of flake8 to use hooks: - id: flake8 - exclude: ^litellm/tests/|^litellm/proxy/tests/ + exclude: ^litellm/tests/|^litellm/proxy/tests/|^litellm/tests/litellm/|^tests/litellm/ additional_dependencies: [flake8-print] files: litellm/.*\.py # - id: flake8 diff --git a/Makefile b/Makefile new file mode 100644 index 0000000000..6bd3cb57d4 --- /dev/null +++ b/Makefile @@ -0,0 +1,21 @@ +# LiteLLM Makefile +# Simple Makefile for running tests and basic development tasks + +.PHONY: help test test-unit test-integration + +# Default target +help: + @echo "Available commands:" + @echo " make test - Run all tests" + @echo " make test-unit - Run unit tests" + @echo " make test-integration - Run integration tests" + +# Testing +test: + poetry run pytest tests/ + +test-unit: + poetry run pytest tests/litellm/ + +test-integration: + poetry run pytest tests/ -k 
"not litellm" \ No newline at end of file diff --git a/README.md b/README.md index c7ea44cf46..2d2f71e4d1 100644 --- a/README.md +++ b/README.md @@ -40,7 +40,7 @@ LiteLLM manages: [**Jump to LiteLLM Proxy (LLM Gateway) Docs**](https://github.com/BerriAI/litellm?tab=readme-ov-file#openai-proxy---docs) [**Jump to Supported LLM Providers**](https://github.com/BerriAI/litellm?tab=readme-ov-file#supported-providers-docs) -🚨 **Stable Release:** Use docker images with the `-stable` tag. These have undergone 12 hour load tests, before being published. +🚨 **Stable Release:** Use docker images with the `-stable` tag. These have undergone 12 hour load tests, before being published. [More information about the release cycle here](https://docs.litellm.ai/docs/proxy/release_cycle) Support for more providers. Missing a provider or LLM Platform, raise a [feature request](https://github.com/BerriAI/litellm/issues/new?assignees=&labels=enhancement&projects=&template=feature_request.yml&title=%5BFeature%5D%3A+). 
@@ -64,7 +64,7 @@ import os ## set ENV variables os.environ["OPENAI_API_KEY"] = "your-openai-key" -os.environ["ANTHROPIC_API_KEY"] = "your-cohere-key" +os.environ["ANTHROPIC_API_KEY"] = "your-anthropic-key" messages = [{ "content": "Hello, how are you?","role": "user"}] @@ -187,13 +187,13 @@ os.environ["LANGFUSE_PUBLIC_KEY"] = "" os.environ["LANGFUSE_SECRET_KEY"] = "" os.environ["ATHINA_API_KEY"] = "your-athina-api-key" -os.environ["OPENAI_API_KEY"] +os.environ["OPENAI_API_KEY"] = "your-openai-key" # set callbacks litellm.success_callback = ["lunary", "mlflow", "langfuse", "athina", "helicone"] # log input/output to lunary, langfuse, supabase, athina, helicone etc #openai call -response = completion(model="anthropic/claude-3-sonnet-20240229", messages=[{"role": "user", "content": "Hi 👋 - i'm openai"}]) +response = completion(model="openai/gpt-4o", messages=[{"role": "user", "content": "Hi 👋 - i'm openai"}]) ``` # LiteLLM Proxy Server (LLM Gateway) - ([Docs](https://docs.litellm.ai/docs/simple_proxy)) @@ -303,6 +303,7 @@ curl 'http://0.0.0.0:4000/key/generate' \ |-------------------------------------------------------------------------------------|---------------------------------------------------------|---------------------------------------------------------------------------------|-------------------------------------------------------------------------------------|-----------------------------------------------------------------------------------|-------------------------------------------------------------------------------|-------------------------------------------------------------------------| | [openai](https://docs.litellm.ai/docs/providers/openai) | ✅ | ✅ | ✅ | ✅ | ✅ | ✅ | | [azure](https://docs.litellm.ai/docs/providers/azure) | ✅ | ✅ | ✅ | ✅ | ✅ | ✅ | +| [AI/ML API](https://docs.litellm.ai/docs/providers/aiml) | ✅ | ✅ | ✅ | ✅ | ✅ | ✅ | | [aws - sagemaker](https://docs.litellm.ai/docs/providers/aws_sagemaker) | ✅ | ✅ | ✅ | ✅ | ✅ | | | [aws - 
bedrock](https://docs.litellm.ai/docs/providers/bedrock) | ✅ | ✅ | ✅ | ✅ | ✅ | | | [google - vertex_ai](https://docs.litellm.ai/docs/providers/vertex) | ✅ | ✅ | ✅ | ✅ | ✅ | ✅ | @@ -339,64 +340,7 @@ curl 'http://0.0.0.0:4000/key/generate' \ ## Contributing -To contribute: Clone the repo locally -> Make a change -> Submit a PR with the change. - -Here's how to modify the repo locally: -Step 1: Clone the repo - -``` -git clone https://github.com/BerriAI/litellm.git -``` - -Step 2: Navigate into the project, and install dependencies: - -``` -cd litellm -poetry install -E extra_proxy -E proxy -``` - -Step 3: Test your change: - -``` -cd tests # pwd: Documents/litellm/litellm/tests -poetry run flake8 -poetry run pytest . -``` - -Step 4: Submit a PR with your changes! 🚀 - -- push your fork to your GitHub repo -- submit a PR from there - -### Building LiteLLM Docker Image - -Follow these instructions if you want to build / run the LiteLLM Docker Image yourself. - -Step 1: Clone the repo - -``` -git clone https://github.com/BerriAI/litellm.git -``` - -Step 2: Build the Docker Image - -Build using Dockerfile.non_root -``` -docker build -f docker/Dockerfile.non_root -t litellm_test_image . -``` - -Step 3: Run the Docker Image - -Make sure config.yaml is present in the root directory. This is your litellm proxy config file. -``` -docker run \ - -v $(pwd)/proxy_config.yaml:/app/config.yaml \ - -e DATABASE_URL="postgresql://xxxxxxxx" \ - -e LITELLM_MASTER_KEY="sk-1234" \ - -p 4000:4000 \ - litellm_test_image \ - --config /app/config.yaml --detailed_debug -``` +Interested in contributing? Contributions to LiteLLM Python SDK, Proxy Server, and contributing LLM integrations are both accepted and highly encouraged! 
[See our Contribution Guide for more details](https://docs.litellm.ai/docs/extras/contributing_code) # Enterprise For companies that need better security, user management and professional support @@ -450,3 +394,20 @@ If you have suggestions on how to improve the code quality feel free to open an + + +## Run in Developer mode +### Services +1. Setup .env file in root +2. Run dependent services `docker-compose up db prometheus` + +### Backend +1. (In root) create virtual environment `python -m venv .venv` +2. Activate virtual environment `source .venv/bin/activate` +3. Install dependencies `pip install -e ".[all]"` +4. Start proxy backend `uvicorn litellm.proxy.proxy_server:app --host localhost --port 4000 --reload` + +### Frontend +1. Navigate to `ui/litellm-dashboard` +2. Install dependencies `npm install` +3. Run `npm run dev` to start the dashboard diff --git a/cookbook/logging_observability/LiteLLM_Arize.ipynb b/cookbook/logging_observability/LiteLLM_Arize.ipynb new file mode 100644 index 0000000000..72a082f874 --- /dev/null +++ b/cookbook/logging_observability/LiteLLM_Arize.ipynb @@ -0,0 +1,172 @@ +{ + "cells": [ + { + "cell_type": "markdown", + "metadata": { + "id": "4FbDOmcj2VkM" + }, + "source": [ + "## Use LiteLLM with Arize\n", + "https://docs.litellm.ai/docs/observability/arize_integration\n", + "\n", + "This method uses the litellm proxy to send the data to Arize. The callback is set in the litellm config below, instead of using OpenInference tracing."
+ ] + }, + { + "cell_type": "markdown", + "metadata": { + "id": "21W8Woog26Ns" + }, + "source": [ + "## Install Dependencies" + ] + }, + { + "cell_type": "code", + "execution_count": 1, + "metadata": { + "id": "xrjKLBxhxu2L" + }, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "Requirement already satisfied: litellm in /Users/ericxiao/Documents/arize/.venv/lib/python3.11/site-packages (1.54.1)\n", + "Requirement already satisfied: aiohttp in /Users/ericxiao/Documents/arize/.venv/lib/python3.11/site-packages (from litellm) (3.11.10)\n", + "Requirement already satisfied: click in /Users/ericxiao/Documents/arize/.venv/lib/python3.11/site-packages (from litellm) (8.1.7)\n", + "Requirement already satisfied: httpx<0.28.0,>=0.23.0 in /Users/ericxiao/Documents/arize/.venv/lib/python3.11/site-packages (from litellm) (0.27.2)\n", + "Requirement already satisfied: importlib-metadata>=6.8.0 in /Users/ericxiao/Documents/arize/.venv/lib/python3.11/site-packages (from litellm) (8.5.0)\n", + "Requirement already satisfied: jinja2<4.0.0,>=3.1.2 in /Users/ericxiao/Documents/arize/.venv/lib/python3.11/site-packages (from litellm) (3.1.4)\n", + "Requirement already satisfied: jsonschema<5.0.0,>=4.22.0 in /Users/ericxiao/Documents/arize/.venv/lib/python3.11/site-packages (from litellm) (4.23.0)\n", + "Requirement already satisfied: openai>=1.55.3 in /Users/ericxiao/Documents/arize/.venv/lib/python3.11/site-packages (from litellm) (1.57.1)\n", + "Requirement already satisfied: pydantic<3.0.0,>=2.0.0 in /Users/ericxiao/Documents/arize/.venv/lib/python3.11/site-packages (from litellm) (2.10.3)\n", + "Requirement already satisfied: python-dotenv>=0.2.0 in /Users/ericxiao/Documents/arize/.venv/lib/python3.11/site-packages (from litellm) (1.0.1)\n", + "Requirement already satisfied: requests<3.0.0,>=2.31.0 in /Users/ericxiao/Documents/arize/.venv/lib/python3.11/site-packages (from litellm) (2.32.3)\n", + "Requirement already satisfied: tiktoken>=0.7.0 in 
/Users/ericxiao/Documents/arize/.venv/lib/python3.11/site-packages (from litellm) (0.7.0)\n", + "Requirement already satisfied: tokenizers in /Users/ericxiao/Documents/arize/.venv/lib/python3.11/site-packages (from litellm) (0.21.0)\n", + "Requirement already satisfied: anyio in /Users/ericxiao/Documents/arize/.venv/lib/python3.11/site-packages (from httpx<0.28.0,>=0.23.0->litellm) (4.7.0)\n", + "Requirement already satisfied: certifi in /Users/ericxiao/Documents/arize/.venv/lib/python3.11/site-packages (from httpx<0.28.0,>=0.23.0->litellm) (2024.8.30)\n", + "Requirement already satisfied: httpcore==1.* in /Users/ericxiao/Documents/arize/.venv/lib/python3.11/site-packages (from httpx<0.28.0,>=0.23.0->litellm) (1.0.7)\n", + "Requirement already satisfied: idna in /Users/ericxiao/Documents/arize/.venv/lib/python3.11/site-packages (from httpx<0.28.0,>=0.23.0->litellm) (3.10)\n", + "Requirement already satisfied: sniffio in /Users/ericxiao/Documents/arize/.venv/lib/python3.11/site-packages (from httpx<0.28.0,>=0.23.0->litellm) (1.3.1)\n", + "Requirement already satisfied: h11<0.15,>=0.13 in /Users/ericxiao/Documents/arize/.venv/lib/python3.11/site-packages (from httpcore==1.*->httpx<0.28.0,>=0.23.0->litellm) (0.14.0)\n", + "Requirement already satisfied: zipp>=3.20 in /Users/ericxiao/Documents/arize/.venv/lib/python3.11/site-packages (from importlib-metadata>=6.8.0->litellm) (3.21.0)\n", + "Requirement already satisfied: MarkupSafe>=2.0 in /Users/ericxiao/Documents/arize/.venv/lib/python3.11/site-packages (from jinja2<4.0.0,>=3.1.2->litellm) (3.0.2)\n", + "Requirement already satisfied: attrs>=22.2.0 in /Users/ericxiao/Documents/arize/.venv/lib/python3.11/site-packages (from jsonschema<5.0.0,>=4.22.0->litellm) (24.2.0)\n", + "Requirement already satisfied: jsonschema-specifications>=2023.03.6 in /Users/ericxiao/Documents/arize/.venv/lib/python3.11/site-packages (from jsonschema<5.0.0,>=4.22.0->litellm) (2024.10.1)\n", + "Requirement already satisfied: 
referencing>=0.28.4 in /Users/ericxiao/Documents/arize/.venv/lib/python3.11/site-packages (from jsonschema<5.0.0,>=4.22.0->litellm) (0.35.1)\n", + "Requirement already satisfied: rpds-py>=0.7.1 in /Users/ericxiao/Documents/arize/.venv/lib/python3.11/site-packages (from jsonschema<5.0.0,>=4.22.0->litellm) (0.22.3)\n", + "Requirement already satisfied: distro<2,>=1.7.0 in /Users/ericxiao/Documents/arize/.venv/lib/python3.11/site-packages (from openai>=1.55.3->litellm) (1.9.0)\n", + "Requirement already satisfied: jiter<1,>=0.4.0 in /Users/ericxiao/Documents/arize/.venv/lib/python3.11/site-packages (from openai>=1.55.3->litellm) (0.6.1)\n", + "Requirement already satisfied: tqdm>4 in /Users/ericxiao/Documents/arize/.venv/lib/python3.11/site-packages (from openai>=1.55.3->litellm) (4.67.1)\n", + "Requirement already satisfied: typing-extensions<5,>=4.11 in /Users/ericxiao/Documents/arize/.venv/lib/python3.11/site-packages (from openai>=1.55.3->litellm) (4.12.2)\n", + "Requirement already satisfied: annotated-types>=0.6.0 in /Users/ericxiao/Documents/arize/.venv/lib/python3.11/site-packages (from pydantic<3.0.0,>=2.0.0->litellm) (0.7.0)\n", + "Requirement already satisfied: pydantic-core==2.27.1 in /Users/ericxiao/Documents/arize/.venv/lib/python3.11/site-packages (from pydantic<3.0.0,>=2.0.0->litellm) (2.27.1)\n", + "Requirement already satisfied: charset-normalizer<4,>=2 in /Users/ericxiao/Documents/arize/.venv/lib/python3.11/site-packages (from requests<3.0.0,>=2.31.0->litellm) (3.4.0)\n", + "Requirement already satisfied: urllib3<3,>=1.21.1 in /Users/ericxiao/Documents/arize/.venv/lib/python3.11/site-packages (from requests<3.0.0,>=2.31.0->litellm) (2.0.7)\n", + "Requirement already satisfied: regex>=2022.1.18 in /Users/ericxiao/Documents/arize/.venv/lib/python3.11/site-packages (from tiktoken>=0.7.0->litellm) (2024.11.6)\n", + "Requirement already satisfied: aiohappyeyeballs>=2.3.0 in /Users/ericxiao/Documents/arize/.venv/lib/python3.11/site-packages (from 
aiohttp->litellm) (2.4.4)\n", + "Requirement already satisfied: aiosignal>=1.1.2 in /Users/ericxiao/Documents/arize/.venv/lib/python3.11/site-packages (from aiohttp->litellm) (1.3.1)\n", + "Requirement already satisfied: frozenlist>=1.1.1 in /Users/ericxiao/Documents/arize/.venv/lib/python3.11/site-packages (from aiohttp->litellm) (1.5.0)\n", + "Requirement already satisfied: multidict<7.0,>=4.5 in /Users/ericxiao/Documents/arize/.venv/lib/python3.11/site-packages (from aiohttp->litellm) (6.1.0)\n", + "Requirement already satisfied: propcache>=0.2.0 in /Users/ericxiao/Documents/arize/.venv/lib/python3.11/site-packages (from aiohttp->litellm) (0.2.1)\n", + "Requirement already satisfied: yarl<2.0,>=1.17.0 in /Users/ericxiao/Documents/arize/.venv/lib/python3.11/site-packages (from aiohttp->litellm) (1.18.3)\n", + "Requirement already satisfied: huggingface-hub<1.0,>=0.16.4 in /Users/ericxiao/Documents/arize/.venv/lib/python3.11/site-packages (from tokenizers->litellm) (0.26.5)\n", + "Requirement already satisfied: filelock in /Users/ericxiao/Documents/arize/.venv/lib/python3.11/site-packages (from huggingface-hub<1.0,>=0.16.4->tokenizers->litellm) (3.16.1)\n", + "Requirement already satisfied: fsspec>=2023.5.0 in /Users/ericxiao/Documents/arize/.venv/lib/python3.11/site-packages (from huggingface-hub<1.0,>=0.16.4->tokenizers->litellm) (2024.10.0)\n", + "Requirement already satisfied: packaging>=20.9 in /Users/ericxiao/Documents/arize/.venv/lib/python3.11/site-packages (from huggingface-hub<1.0,>=0.16.4->tokenizers->litellm) (24.2)\n", + "Requirement already satisfied: pyyaml>=5.1 in /Users/ericxiao/Documents/arize/.venv/lib/python3.11/site-packages (from huggingface-hub<1.0,>=0.16.4->tokenizers->litellm) (6.0.2)\n" + ] + } + ], + "source": [ + "!pip install litellm" + ] + }, + { + "cell_type": "markdown", + "metadata": { + "id": "jHEu-TjZ29PJ" + }, + "source": [ + "## Set Env Variables" + ] + }, + { + "cell_type": "code", + "execution_count": 5, + "metadata": { + 
"id": "QWd9rTysxsWO" + }, + "outputs": [], + "source": [ + "import litellm\n", + "import os\n", + "from getpass import getpass\n", + "\n", + "os.environ[\"ARIZE_SPACE_KEY\"] = getpass(\"Enter your Arize space key: \")\n", + "os.environ[\"ARIZE_API_KEY\"] = getpass(\"Enter your Arize API key: \")\n", + "os.environ['OPENAI_API_KEY']= getpass(\"Enter your OpenAI API key: \")" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "Let's run a completion call and see the traces in Arize" + ] + }, + { + "cell_type": "code", + "execution_count": 8, + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "Hello! Nice to meet you, OpenAI. How can I assist you today?\n" + ] + } + ], + "source": [ + "# set arize as a callback, litellm will send the data to arize\n", + "litellm.callbacks = [\"arize\"]\n", + " \n", + "# openai call\n", + "response = litellm.completion(\n", + " model=\"gpt-3.5-turbo\",\n", + " messages=[\n", + " {\"role\": \"user\", \"content\": \"Hi 👋 - i'm openai\"}\n", + " ]\n", + ")\n", + "print(response.choices[0].message.content)" + ] + } + ], + "metadata": { + "colab": { + "provenance": [] + }, + "kernelspec": { + "display_name": ".venv", + "language": "python", + "name": "python3" + }, + "language_info": { + "codemirror_mode": { + "name": "ipython", + "version": 3 + }, + "file_extension": ".py", + "mimetype": "text/x-python", + "name": "python", + "nbconvert_exporter": "python", + "pygments_lexer": "ipython3", + "version": "3.11.6" + } + }, + "nbformat": 4, + "nbformat_minor": 0 +} diff --git a/cookbook/logging_observability/LiteLLM_Proxy_Langfuse.ipynb b/cookbook/logging_observability/LiteLLM_Proxy_Langfuse.ipynb new file mode 100644 index 0000000000..0baaab3f49 --- /dev/null +++ b/cookbook/logging_observability/LiteLLM_Proxy_Langfuse.ipynb @@ -0,0 +1,252 @@ +{ + "cells": [ + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "## LLM Ops Stack - LiteLLM Proxy + Langfuse \n", + 
"\n", + "This notebook demonstrates how to use LiteLLM Proxy with Langfuse \n", + "- Use LiteLLM Proxy for calling 100+ LLMs in OpenAI format\n", + "- Use Langfuse for viewing request / response traces \n", + "\n", + "\n", + "In this notebook we will setup LiteLLM Proxy to make requests to OpenAI, Anthropic, Bedrock and automatically log traces to Langfuse." + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "## 1. Setup LiteLLM Proxy\n", + "\n", + "### 1.1 Define .env variables \n", + "Define .env variables on the container that litellm proxy is running on.\n", + "```bash\n", + "## LLM API Keys\n", + "OPENAI_API_KEY=sk-proj-1234567890\n", + "ANTHROPIC_API_KEY=sk-ant-api03-1234567890\n", + "AWS_ACCESS_KEY_ID=1234567890\n", + "AWS_SECRET_ACCESS_KEY=1234567890\n", + "\n", + "## Langfuse Logging \n", + "LANGFUSE_PUBLIC_KEY=\"pk-lf-xxxx9\"\n", + "LANGFUSE_SECRET_KEY=\"sk-lf-xxxx9\"\n", + "LANGFUSE_HOST=\"https://us.cloud.langfuse.com\"\n", + "```\n", + "\n", + "\n", + "### 1.1 Setup LiteLLM Proxy Config yaml \n", + "```yaml\n", + "model_list:\n", + " - model_name: gpt-4o\n", + " litellm_params:\n", + " model: openai/gpt-4o\n", + " api_key: os.environ/OPENAI_API_KEY\n", + " - model_name: claude-3-5-sonnet-20241022\n", + " litellm_params:\n", + " model: anthropic/claude-3-5-sonnet-20241022\n", + " api_key: os.environ/ANTHROPIC_API_KEY\n", + " - model_name: us.amazon.nova-micro-v1:0\n", + " litellm_params:\n", + " model: bedrock/us.amazon.nova-micro-v1:0\n", + " aws_access_key_id: os.environ/AWS_ACCESS_KEY_ID\n", + " aws_secret_access_key: os.environ/AWS_SECRET_ACCESS_KEY\n", + "\n", + "litellm_settings:\n", + " callbacks: [\"langfuse\"]\n", + "\n", + "\n", + "```" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "## 2. 
Make LLM Requests to LiteLLM Proxy\n", + "\n", + "Now we will make our first LLM request to LiteLLM Proxy" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "### 2.1 Setup Client Side Variables to point to LiteLLM Proxy\n", + "Set `LITELLM_PROXY_BASE_URL` to the base url of the LiteLLM Proxy and `LITELLM_VIRTUAL_KEY` to the virtual key you want to use for Authentication to LiteLLM Proxy. (Note: In this initial setup you can)" + ] + }, + { + "cell_type": "code", + "execution_count": 23, + "metadata": {}, + "outputs": [], + "source": [ + "\n", + "LITELLM_PROXY_BASE_URL=\"http://0.0.0.0:4000\"\n", + "LITELLM_VIRTUAL_KEY=\"sk-oXXRa1xxxxxxxxxxx\"" + ] + }, + { + "cell_type": "code", + "execution_count": 22, + "metadata": {}, + "outputs": [ + { + "data": { + "text/plain": [ + "ChatCompletion(id='chatcmpl-B0sq6QkOKNMJ0dwP3x7OoMqk1jZcI', choices=[Choice(finish_reason='stop', index=0, logprobs=None, message=ChatCompletionMessage(content='Langfuse is a platform designed to monitor, observe, and troubleshoot AI and large language model (LLM) applications. It provides features that help developers gain insights into how their AI systems are performing, make debugging easier, and optimize the deployment of models. Langfuse allows for tracking of model interactions, collecting telemetry, and visualizing data, which is crucial for understanding the behavior of AI models in production environments. 
This kind of tool is particularly useful for developers working with language models who need to ensure reliability and efficiency in their applications.', refusal=None, role='assistant', audio=None, function_call=None, tool_calls=None))], created=1739550502, model='gpt-4o-2024-08-06', object='chat.completion', service_tier='default', system_fingerprint='fp_523b9b6e5f', usage=CompletionUsage(completion_tokens=109, prompt_tokens=13, total_tokens=122, completion_tokens_details=CompletionTokensDetails(accepted_prediction_tokens=0, audio_tokens=0, reasoning_tokens=0, rejected_prediction_tokens=0), prompt_tokens_details=PromptTokensDetails(audio_tokens=0, cached_tokens=0)))" + ] + }, + "execution_count": 22, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "import openai\n", + "client = openai.OpenAI(\n", + " api_key=LITELLM_VIRTUAL_KEY,\n", + " base_url=LITELLM_PROXY_BASE_URL\n", + ")\n", + "\n", + "response = client.chat.completions.create(\n", + " model=\"gpt-4o\",\n", + " messages = [\n", + " {\n", + " \"role\": \"user\",\n", + " \"content\": \"what is Langfuse?\"\n", + " }\n", + " ],\n", + ")\n", + "\n", + "response" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "### 2.3 View Traces on Langfuse\n", + "LiteLLM will send the request / response, model, tokens (input + output), cost to Langfuse.\n", + "\n", + "" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "### 2.4 Call Anthropic, Bedrock models \n", + "\n", + "Now we can call `us.amazon.nova-micro-v1:0` and `claude-3-5-sonnet-20241022` models defined on your config.yaml both in the OpenAI request / response format." 
+ ] + }, + { + "cell_type": "code", + "execution_count": 24, + "metadata": {}, + "outputs": [ + { + "data": { + "text/plain": [ + "ChatCompletion(id='chatcmpl-7756e509-e61f-4f5e-b5ae-b7a41013522a', choices=[Choice(finish_reason='stop', index=0, logprobs=None, message=ChatCompletionMessage(content=\"Langfuse is an observability tool designed specifically for machine learning models and applications built with natural language processing (NLP) and large language models (LLMs). It focuses on providing detailed insights into how these models perform in real-world scenarios. Here are some key features and purposes of Langfuse:\\n\\n1. **Real-time Monitoring**: Langfuse allows developers to monitor the performance of their NLP and LLM applications in real time. This includes tracking the inputs and outputs of the models, as well as any errors or issues that arise during operation.\\n\\n2. **Error Tracking**: It helps in identifying and tracking errors in the models' outputs. By analyzing incorrect or unexpected responses, developers can pinpoint where and why errors occur, facilitating more effective debugging and improvement.\\n\\n3. **Performance Metrics**: Langfuse provides various performance metrics, such as latency, throughput, and error rates. These metrics help developers understand how well their models are performing under different conditions and workloads.\\n\\n4. **Traceability**: It offers detailed traceability of requests and responses, allowing developers to follow the path of a request through the system and see how it is processed by the model at each step.\\n\\n5. **User Feedback Integration**: Langfuse can integrate user feedback to provide context for model outputs. This helps in understanding how real users are interacting with the model and how its outputs align with user expectations.\\n\\n6. **Customizable Dashboards**: Users can create custom dashboards to visualize the data collected by Langfuse. 
These dashboards can be tailored to highlight the most important metrics and insights for a specific application or team.\\n\\n7. **Alerting and Notifications**: It can set up alerts for specific conditions or errors, notifying developers when something goes wrong or when performance metrics fall outside of acceptable ranges.\\n\\nBy providing comprehensive observability for NLP and LLM applications, Langfuse helps developers to build more reliable, accurate, and user-friendly models and services.\", refusal=None, role='assistant', audio=None, function_call=None, tool_calls=None))], created=1739554005, model='us.amazon.nova-micro-v1:0', object='chat.completion', service_tier=None, system_fingerprint=None, usage=CompletionUsage(completion_tokens=380, prompt_tokens=5, total_tokens=385, completion_tokens_details=None, prompt_tokens_details=None))" + ] + }, + "execution_count": 24, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "import openai\n", + "client = openai.OpenAI(\n", + " api_key=LITELLM_VIRTUAL_KEY,\n", + " base_url=LITELLM_PROXY_BASE_URL\n", + ")\n", + "\n", + "response = client.chat.completions.create(\n", + " model=\"us.amazon.nova-micro-v1:0\",\n", + " messages = [\n", + " {\n", + " \"role\": \"user\",\n", + " \"content\": \"what is Langfuse?\"\n", + " }\n", + " ],\n", + ")\n", + "\n", + "response" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "## 3. Advanced - Set Langfuse Trace ID, Tags, Metadata \n", + "\n", + "Here is an example of how you can set Langfuse specific params on your client side request. 
See full list of supported langfuse params [here](https://docs.litellm.ai/docs/observability/langfuse_integration)\n", + "\n", + "You can view the logged trace of this request [here](https://us.cloud.langfuse.com/project/clvlhdfat0007vwb74m9lvfvi/traces/567890?timestamp=2025-02-14T17%3A30%3A26.709Z)" + ] + }, + { + "cell_type": "code", + "execution_count": 27, + "metadata": {}, + "outputs": [ + { + "data": { + "text/plain": [ + "ChatCompletion(id='chatcmpl-789babd5-c064-4939-9093-46e4cd2e208a', choices=[Choice(finish_reason='stop', index=0, logprobs=None, message=ChatCompletionMessage(content=\"Langfuse is an observability platform designed specifically for monitoring and improving the performance of natural language processing (NLP) models and applications. It provides developers with tools to track, analyze, and optimize how their language models interact with users and handle natural language inputs.\\n\\nHere are some key features and benefits of Langfuse:\\n\\n1. **Real-Time Monitoring**: Langfuse allows developers to monitor their NLP applications in real time. This includes tracking user interactions, model responses, and overall performance metrics.\\n\\n2. **Error Tracking**: It helps in identifying and tracking errors in the model's responses. This can include incorrect, irrelevant, or unsafe outputs.\\n\\n3. **User Feedback Integration**: Langfuse enables the collection of user feedback directly within the platform. This feedback can be used to identify areas for improvement in the model's performance.\\n\\n4. **Performance Metrics**: The platform provides detailed metrics and analytics on model performance, including latency, throughput, and accuracy.\\n\\n5. **Alerts and Notifications**: Developers can set up alerts to notify them of any significant issues or anomalies in model performance.\\n\\n6. 
**Debugging Tools**: Langfuse offers tools to help developers debug and refine their models by providing insights into how the model processes different types of inputs.\\n\\n7. **Integration with Development Workflows**: It integrates seamlessly with various development environments and CI/CD pipelines, making it easier to incorporate observability into the development process.\\n\\n8. **Customizable Dashboards**: Users can create custom dashboards to visualize the data in a way that best suits their needs.\\n\\nLangfuse aims to help developers build more reliable, accurate, and user-friendly NLP applications by providing them with the tools to observe and improve how their models perform in real-world scenarios.\", refusal=None, role='assistant', audio=None, function_call=None, tool_calls=None))], created=1739554281, model='us.amazon.nova-micro-v1:0', object='chat.completion', service_tier=None, system_fingerprint=None, usage=CompletionUsage(completion_tokens=346, prompt_tokens=5, total_tokens=351, completion_tokens_details=None, prompt_tokens_details=None))" + ] + }, + "execution_count": 27, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "import openai\n", + "client = openai.OpenAI(\n", + " api_key=LITELLM_VIRTUAL_KEY,\n", + " base_url=LITELLM_PROXY_BASE_URL\n", + ")\n", + "\n", + "response = client.chat.completions.create(\n", + " model=\"us.amazon.nova-micro-v1:0\",\n", + " messages = [\n", + " {\n", + " \"role\": \"user\",\n", + " \"content\": \"what is Langfuse?\"\n", + " }\n", + " ],\n", + " extra_body={\n", + " \"metadata\": {\n", + " \"generation_id\": \"1234567890\",\n", + " \"trace_id\": \"567890\",\n", + " \"trace_user_id\": \"user_1234567890\",\n", + " \"tags\": [\"tag1\", \"tag2\"]\n", + " }\n", + " }\n", + ")\n", + "\n", + "response" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "## " + ] + } + ], + "metadata": { + "language_info": { + "name": "python" + } + }, + "nbformat": 4, + 
"nbformat_minor": 2 +} diff --git a/cookbook/logging_observability/litellm_proxy_langfuse.png b/cookbook/logging_observability/litellm_proxy_langfuse.png new file mode 100644 index 0000000000..6b0691e6a5 Binary files /dev/null and b/cookbook/logging_observability/litellm_proxy_langfuse.png differ diff --git a/db_scripts/create_views.py b/db_scripts/create_views.py index 43226db23c..3027b38958 100644 --- a/db_scripts/create_views.py +++ b/db_scripts/create_views.py @@ -168,11 +168,11 @@ async def check_view_exists(): # noqa: PLR0915 print("MonthlyGlobalSpendPerUserPerKey Created!") # noqa try: - await db.query_raw("""SELECT 1 FROM DailyTagSpend LIMIT 1""") + await db.query_raw("""SELECT 1 FROM "DailyTagSpend" LIMIT 1""") print("DailyTagSpend Exists!") # noqa except Exception: sql_query = """ - CREATE OR REPLACE VIEW DailyTagSpend AS + CREATE OR REPLACE VIEW "DailyTagSpend" AS SELECT jsonb_array_elements_text(request_tags) AS individual_request_tag, DATE(s."startTime") AS spend_date, diff --git a/deploy/charts/litellm-helm/Chart.yaml b/deploy/charts/litellm-helm/Chart.yaml index 6232a2320d..f1f2fd8d64 100644 --- a/deploy/charts/litellm-helm/Chart.yaml +++ b/deploy/charts/litellm-helm/Chart.yaml @@ -18,7 +18,7 @@ type: application # This is the chart version. This version number should be incremented each time you make changes # to the chart and its templates, including the app version. # Versions are expected to follow Semantic Versioning (https://semver.org/) -version: 0.3.0 +version: 0.4.1 # This is the version number of the application being deployed. This version number should be # incremented each time you make changes to the application. 
Versions are not expected to diff --git a/deploy/charts/litellm-helm/templates/migrations-job.yaml b/deploy/charts/litellm-helm/templates/migrations-job.yaml index 381e9e5433..e994c45548 100644 --- a/deploy/charts/litellm-helm/templates/migrations-job.yaml +++ b/deploy/charts/litellm-helm/templates/migrations-job.yaml @@ -48,6 +48,23 @@ spec: {{- end }} - name: DISABLE_SCHEMA_UPDATE value: "false" # always run the migration from the Helm PreSync hook, override the value set + {{- with .Values.volumeMounts }} + volumeMounts: + {{- toYaml . | nindent 12 }} + {{- end }} + {{- with .Values.volumes }} + volumes: + {{- toYaml . | nindent 8 }} + {{- end }} restartPolicy: OnFailure + {{- with .Values.affinity }} + affinity: + {{- toYaml . | nindent 8 }} + {{- end }} + {{- with .Values.tolerations }} + tolerations: + {{- toYaml . | nindent 8 }} + {{- end }} + ttlSecondsAfterFinished: {{ .Values.migrationJob.ttlSecondsAfterFinished }} backoffLimit: {{ .Values.migrationJob.backoffLimit }} {{- end }} diff --git a/deploy/charts/litellm-helm/values.yaml b/deploy/charts/litellm-helm/values.yaml index 19cbf72321..9f21fc40ad 100644 --- a/deploy/charts/litellm-helm/values.yaml +++ b/deploy/charts/litellm-helm/values.yaml @@ -187,6 +187,7 @@ migrationJob: backoffLimit: 4 # Backoff limit for Job restarts disableSchemaUpdate: false # Skip schema migrations for specific environments. When True, the job will exit with code 0. 
annotations: {} + ttlSecondsAfterFinished: 120 # Additional environment variables to be added to the deployment envVars: { diff --git a/docker-compose.yml b/docker-compose.yml index 1508bd375c..78044c03b8 100644 --- a/docker-compose.yml +++ b/docker-compose.yml @@ -29,6 +29,8 @@ services: POSTGRES_DB: litellm POSTGRES_USER: llmproxy POSTGRES_PASSWORD: dbpassword9090 + ports: + - "5432:5432" healthcheck: test: ["CMD-SHELL", "pg_isready -d litellm -U llmproxy"] interval: 1s diff --git a/docker/Dockerfile.alpine b/docker/Dockerfile.alpine index 70ab9cac01..cc0c434013 100644 --- a/docker/Dockerfile.alpine +++ b/docker/Dockerfile.alpine @@ -11,9 +11,7 @@ FROM $LITELLM_BUILD_IMAGE AS builder WORKDIR /app # Install build dependencies -RUN apk update && \ - apk add --no-cache gcc python3-dev musl-dev && \ - rm -rf /var/cache/apk/* +RUN apk add --no-cache gcc python3-dev musl-dev RUN pip install --upgrade pip && \ pip install build diff --git a/docs/my-website/docs/anthropic_unified.md b/docs/my-website/docs/anthropic_unified.md new file mode 100644 index 0000000000..71b9203399 --- /dev/null +++ b/docs/my-website/docs/anthropic_unified.md @@ -0,0 +1,92 @@ +import Tabs from '@theme/Tabs'; +import TabItem from '@theme/TabItem'; + +# [BETA] `/v1/messages` + +LiteLLM provides a BETA endpoint in the spec of Anthropic's `/v1/messages` endpoint. + +This currently just supports the Anthropic API. + +| Feature | Supported | Notes | +|-------|-------|-------| +| Cost Tracking | ✅ | | +| Logging | ✅ | works across all integrations | +| End-user Tracking | ✅ | | +| Streaming | ✅ | | +| Fallbacks | ✅ | between anthropic models | +| Loadbalancing | ✅ | between anthropic models | + +Planned improvement: +- Vertex AI Anthropic support +- Bedrock Anthropic support + +## Usage + + + + +1. Setup config.yaml + +```yaml +model_list: + - model_name: anthropic-claude + litellm_params: + model: claude-3-7-sonnet-latest +``` + +2. 
Start proxy + +```bash +litellm --config /path/to/config.yaml +``` + +3. Test it! + +```bash +curl -L -X POST 'http://0.0.0.0:4000/v1/messages' \ +-H 'content-type: application/json' \ +-H 'x-api-key: $LITELLM_API_KEY' \ +-H 'anthropic-version: 2023-06-01' \ +-d '{ + "model": "anthropic-claude", + "messages": [ + { + "role": "user", + "content": [ + { + "type": "text", + "text": "List 5 important events in the XIX century" + } + ] + } + ], + "max_tokens": 4096 +}' +``` + + + +```python +from litellm.llms.anthropic.experimental_pass_through.messages.handler import anthropic_messages +import asyncio +import os + +# set env +os.environ["ANTHROPIC_API_KEY"] = "my-api-key" + +messages = [{"role": "user", "content": "Hello, can you tell me a short joke?"}] + +# Call the handler +async def call(): + response = await anthropic_messages( + messages=messages, + api_key=os.environ["ANTHROPIC_API_KEY"], + model="claude-3-haiku-20240307", + max_tokens=100, + ) + +asyncio.run(call()) +``` + + + \ No newline at end of file diff --git a/docs/my-website/docs/completion/function_call.md b/docs/my-website/docs/completion/function_call.md index 514e8cda1a..f10df68bf6 100644 --- a/docs/my-website/docs/completion/function_call.md +++ b/docs/my-website/docs/completion/function_call.md @@ -8,6 +8,7 @@ Use `litellm.supports_function_calling(model="")` -> returns `True` if model sup assert litellm.supports_function_calling(model="gpt-3.5-turbo") == True assert litellm.supports_function_calling(model="azure/gpt-4-1106-preview") == True assert litellm.supports_function_calling(model="palm/chat-bison") == False +assert litellm.supports_function_calling(model="xai/grok-2-latest") == True assert litellm.supports_function_calling(model="ollama/llama2") == False ``` diff --git a/docs/my-website/docs/completion/input.md b/docs/my-website/docs/completion/input.md index 67738a7f1c..a8aa79b8cb 100644 --- a/docs/my-website/docs/completion/input.md +++ b/docs/my-website/docs/completion/input.md @@ -44,6 +44,7 @@ Use
`litellm.get_supported_openai_params()` for an updated list of params for ea |Anthropic| ✅ | ✅ | ✅ |✅ | ✅ | ✅ | ✅ | | | | | | |✅ | ✅ | | ✅ | ✅ | | | ✅ | |OpenAI| ✅ | ✅ | ✅ | ✅ | ✅ | ✅ | ✅ | ✅ | ✅ | ✅ | ✅ | ✅ |✅ | ✅ | ✅ | ✅ |✅ | ✅ | ✅ | ✅ | ✅ | |Azure OpenAI| ✅ | ✅ | ✅ | ✅ | ✅ | ✅ | ✅ | ✅ | ✅ | ✅ | ✅ | ✅ |✅ | ✅ | ✅ | ✅ |✅ | ✅ | | | ✅ | +|xAI| ✅ | | ✅ | ✅ | ✅ | ✅ | ✅ | ✅ | ✅ | ✅ | | | ✅ | ✅ | ✅ | ✅ | ✅ | ✅ | ✅ | ✅ | | |Replicate | ✅ | ✅ | ✅ | ✅ | ✅ | ✅ | | | | | | |Anyscale | ✅ | ✅ | ✅ | ✅ | ✅ | ✅ | |Cohere| ✅ | ✅ | ✅ | ✅ | ✅ | ✅ | ✅ | ✅ | ✅ | | | diff --git a/docs/my-website/docs/completion/json_mode.md b/docs/my-website/docs/completion/json_mode.md index 0c3a930764..ec140ce582 100644 --- a/docs/my-website/docs/completion/json_mode.md +++ b/docs/my-website/docs/completion/json_mode.md @@ -89,6 +89,7 @@ response_format: { "type": "json_schema", "json_schema": … , "strict": true } Works for: - OpenAI models - Azure OpenAI models +- xAI models (Grok-2 or later) - Google AI Studio - Gemini models - Vertex AI models (Gemini + Anthropic) - Bedrock Models diff --git a/docs/my-website/docs/completion/reliable_completions.md b/docs/my-website/docs/completion/reliable_completions.md index 94102e1944..f38917fe53 100644 --- a/docs/my-website/docs/completion/reliable_completions.md +++ b/docs/my-website/docs/completion/reliable_completions.md @@ -46,7 +46,7 @@ from litellm import completion fallback_dict = {"gpt-3.5-turbo": "gpt-3.5-turbo-16k"} messages = [{"content": "how does a court case get to the Supreme Court?" 
* 500, "role": "user"}] -completion(model="gpt-3.5-turbo", messages=messages, context_window_fallback_dict=ctx_window_fallback_dict) +completion(model="gpt-3.5-turbo", messages=messages, context_window_fallback_dict=fallback_dict) ``` ### Fallbacks - Switch Models/API Keys/API Bases (SDK) diff --git a/docs/my-website/docs/completion/vision.md b/docs/my-website/docs/completion/vision.md index 0880d0ec49..1e18109b3b 100644 --- a/docs/my-website/docs/completion/vision.md +++ b/docs/my-website/docs/completion/vision.md @@ -118,9 +118,11 @@ response = client.chat.completions.create( Use `litellm.supports_vision(model="")` -> returns `True` if model supports `vision` and `False` if not ```python -assert litellm.supports_vision(model="gpt-4-vision-preview") == True -assert litellm.supports_vision(model="gemini-1.0-pro-vision") == True -assert litellm.supports_vision(model="gpt-3.5-turbo") == False +assert litellm.supports_vision(model="openai/gpt-4-vision-preview") == True +assert litellm.supports_vision(model="vertex_ai/gemini-1.0-pro-vision") == True +assert litellm.supports_vision(model="openai/gpt-3.5-turbo") == False +assert litellm.supports_vision(model="xai/grok-2-vision-latest") == True +assert litellm.supports_vision(model="xai/grok-2-latest") == False ``` @@ -187,4 +189,138 @@ Expected Response ``` - \ No newline at end of file + + + +## Explicitly specify image type + +If you have images without a mime-type, or if litellm is incorrectly inferring the mime type of your image (e.g. calling `gs://` url's with vertex ai), you can set this explicitly via the `format` param. + +```python +"image_url": { + "url": "gs://my-gs-image", + "format": "image/jpeg" +} +``` + +LiteLLM will use this for any API endpoint, which supports specifying mime-type (e.g. anthropic/bedrock/vertex ai). + +For others (e.g. openai), it will be ignored.
+ + + + +```python +import os +from litellm import completion + +os.environ["ANTHROPIC_API_KEY"] = "your-api-key" + +# openai call +response = completion( + model = "claude-3-7-sonnet-latest", + messages=[ + { + "role": "user", + "content": [ + { + "type": "text", + "text": "What’s in this image?" + }, + { + "type": "image_url", + "image_url": { + "url": "https://upload.wikimedia.org/wikipedia/commons/thumb/d/dd/Gfp-wisconsin-madison-the-nature-boardwalk.jpg/2560px-Gfp-wisconsin-madison-the-nature-boardwalk.jpg", + "format": "image/jpeg" + } + } + ] + } + ], +) + +``` + + + + +1. Define vision models on config.yaml + +```yaml +model_list: + - model_name: gpt-4-vision-preview # OpenAI gpt-4-vision-preview + litellm_params: + model: openai/gpt-4-vision-preview + api_key: os.environ/OPENAI_API_KEY + - model_name: llava-hf # Custom OpenAI compatible model + litellm_params: + model: openai/llava-hf/llava-v1.6-vicuna-7b-hf + api_base: http://localhost:8000 + api_key: fake-key + model_info: + supports_vision: True # set supports_vision to True so /model/info returns this attribute as True + +``` + +2. Run proxy server + +```bash +litellm --config config.yaml +``` + +3. Test it using the OpenAI Python SDK + + +```python +import os +from openai import OpenAI + +client = OpenAI( + api_key="sk-1234", # your litellm proxy api key +) + +response = client.chat.completions.create( + model = "gpt-4-vision-preview", # use model="llava-hf" to test your custom OpenAI endpoint + messages=[ + { + "role": "user", + "content": [ + { + "type": "text", + "text": "What’s in this image?" 
+ }, + { + "type": "image_url", + "image_url": { + "url": "https://upload.wikimedia.org/wikipedia/commons/thumb/d/dd/Gfp-wisconsin-madison-the-nature-boardwalk.jpg/2560px-Gfp-wisconsin-madison-the-nature-boardwalk.jpg", + "format": "image/jpeg" + } + } + ] + } + ], +) + +``` + + + + + + + + + +## Spec + +``` +"image_url": str + +OR + +"image_url": { + "url": "url OR base64 encoded str", + "detail": "openai-only param", + "format": "specify mime-type of image" +} +``` \ No newline at end of file diff --git a/docs/my-website/docs/data_security.md b/docs/my-website/docs/data_security.md index 13cde26d5d..30128760f2 100644 --- a/docs/my-website/docs/data_security.md +++ b/docs/my-website/docs/data_security.md @@ -46,7 +46,7 @@ For security inquiries, please contact us at support@berri.ai |-------------------|-------------------------------------------------------------------------------------------------| | SOC 2 Type I | Certified. Report available upon request on Enterprise plan. | | SOC 2 Type II | In progress. Certificate available by April 15th, 2025 | -| ISO27001 | In progress. Certificate available by February 7th, 2025 | +| ISO 27001 | Certified. Report available upon request on Enterprise | ## Supported Data Regions for LiteLLM Cloud @@ -137,7 +137,7 @@ Point of contact email address for general security-related questions: krrish@be Has the Vendor been audited / certified? - SOC 2 Type I. Certified. Report available upon request on Enterprise plan. - SOC 2 Type II. In progress. Certificate available by April 15th, 2025. -- ISO27001. In progress. Certificate available by February 7th, 2025. +- ISO 27001. Certified. Report available upon request on Enterprise plan. Has an information security management system been implemented? - Yes - [CodeQL](https://codeql.github.com/) and a comprehensive ISMS covering multiple security domains. 
diff --git a/docs/my-website/docs/debugging/local_debugging.md b/docs/my-website/docs/debugging/local_debugging.md index a9409bfab0..8a56d6c34a 100644 --- a/docs/my-website/docs/debugging/local_debugging.md +++ b/docs/my-website/docs/debugging/local_debugging.md @@ -1,5 +1,5 @@ # Local Debugging -There's 2 ways to do local debugging - `litellm.set_verbose=True` and by passing in a custom function `completion(...logger_fn=)`. Warning: Make sure to not use `set_verbose` in production. It logs API keys, which might end up in log files. +There's 2 ways to do local debugging - `litellm._turn_on_debug()` and by passing in a custom function `completion(...logger_fn=)`. Warning: Make sure to not use `_turn_on_debug()` in production. It logs API keys, which might end up in log files. ## Set Verbose @@ -8,7 +8,7 @@ This is good for getting print statements for everything litellm is doing. import litellm from litellm import completion -litellm.set_verbose=True # 👈 this is the 1-line change you need to make +litellm._turn_on_debug() # 👈 this is the 1-line change you need to make ## set ENV variables os.environ["OPENAI_API_KEY"] = "openai key" diff --git a/docs/my-website/docs/embedding/supported_embedding.md b/docs/my-website/docs/embedding/supported_embedding.md index 1f877ecc37..d0cb59b46e 100644 --- a/docs/my-website/docs/embedding/supported_embedding.md +++ b/docs/my-website/docs/embedding/supported_embedding.md @@ -323,6 +323,40 @@ response = embedding( | embed-english-light-v2.0 | `embedding(model="embed-english-light-v2.0", input=["good morning from litellm", "this is another item"])` | | embed-multilingual-v2.0 | `embedding(model="embed-multilingual-v2.0", input=["good morning from litellm", "this is another item"])` | +## NVIDIA NIM Embedding Models + +### API keys +This can be set as env variables or passed as **params to litellm.embedding()** +```python +import os +os.environ["NVIDIA_NIM_API_KEY"] = "" # api key +os.environ["NVIDIA_NIM_API_BASE"] = "" # nim endpoint 
url +``` + +### Usage +```python +from litellm import embedding +import os +os.environ['NVIDIA_NIM_API_KEY'] = "" +response = embedding( + model='nvidia_nim/', + input=["good morning from litellm"] +) +``` +All models listed [here](https://build.nvidia.com/explore/retrieval) are supported: + +| Model Name | Function Call | +| :--- | :--- | +| NV-Embed-QA | `embedding(model="nvidia_nim/NV-Embed-QA", input)` | +| nvidia/nv-embed-v1 | `embedding(model="nvidia_nim/nvidia/nv-embed-v1", input)` | +| nvidia/nv-embedqa-mistral-7b-v2 | `embedding(model="nvidia_nim/nvidia/nv-embedqa-mistral-7b-v2", input)` | +| nvidia/nv-embedqa-e5-v5 | `embedding(model="nvidia_nim/nvidia/nv-embedqa-e5-v5", input)` | +| nvidia/embed-qa-4 | `embedding(model="nvidia_nim/nvidia/embed-qa-4", input)` | +| nvidia/llama-3.2-nv-embedqa-1b-v1 | `embedding(model="nvidia_nim/nvidia/llama-3.2-nv-embedqa-1b-v1", input)` | +| nvidia/llama-3.2-nv-embedqa-1b-v2 | `embedding(model="nvidia_nim/nvidia/llama-3.2-nv-embedqa-1b-v2", input)` | +| snowflake/arctic-embed-l | `embedding(model="nvidia_nim/snowflake/arctic-embed-l", input)` | +| baai/bge-m3 | `embedding(model="nvidia_nim/baai/bge-m3", input)` | + ## HuggingFace Embedding Models LiteLLM supports all Feature-Extraction + Sentence Similarity Embedding models: https://huggingface.co/models?pipeline_tag=feature-extraction diff --git a/docs/my-website/docs/extras/contributing_code.md b/docs/my-website/docs/extras/contributing_code.md new file mode 100644 index 0000000000..0fe7675ead --- /dev/null +++ b/docs/my-website/docs/extras/contributing_code.md @@ -0,0 +1,96 @@ +# Contributing Code + +## **Checklist before submitting a PR** + +Here are the core requirements for any PR submitted to LiteLLM + + +- [ ] Add testing, **Adding at least 1 test is a hard requirement** - [see details](#2-adding-testing-to-your-pr) +- [ ] Ensure your PR passes the following tests: + - [ ] [Unit Tests](#3-running-unit-tests) + - [ ] Formatting / Linting Tests +- [ ] Keep scope as 
isolated as possible. As a general rule, your changes should address 1 specific problem at a time + + + +## Quick start + +## 1. Setup your local dev environment + + +Here's how to modify the repo locally: + +Step 1: Clone the repo + +```shell +git clone https://github.com/BerriAI/litellm.git +``` + +Step 2: Install dev dependencies: + +```shell +poetry install --with dev --extras proxy +``` + +That's it, your local dev environment is ready! + +## 2. Adding Testing to your PR + +- Add your test to the [`tests/litellm/` directory](https://github.com/BerriAI/litellm/tree/main/tests/litellm) + +- This directory 1:1 maps to the `litellm/` directory, and can only contain mocked tests. +- Do not add real llm api calls to this directory. + +### 2.1 File Naming Convention for `tests/litellm/` + +The `tests/litellm/` directory follows the same directory structure as `litellm/`. + +- `litellm/proxy/test_caching_routes.py` maps to `litellm/proxy/caching_routes.py` + +- `test_(unknown).py` maps to `litellm/(unknown).py` + +## 3. Running Unit Tests + +run the following command on the root of the litellm directory + +```shell +make test-unit +``` + +## 4. Submit a PR with your changes! + +- push your fork to your GitHub repo +- submit a PR from there + + +## Advanced +### Building LiteLLM Docker Image + +Some people might want to build the LiteLLM docker image themselves. Follow these instructions if you want to build / run the LiteLLM Docker Image yourself. + +Step 1: Clone the repo + +```shell +git clone https://github.com/BerriAI/litellm.git +``` + +Step 2: Build the Docker Image + +Build using Dockerfile.non_root + +```shell +docker build -f docker/Dockerfile.non_root -t litellm_test_image . +``` + +Step 3: Run the Docker Image + +Make sure config.yaml is present in the root directory. This is your litellm proxy config file.
+ +```shell +docker run \ + -v $(pwd)/proxy_config.yaml:/app/config.yaml \ + -e DATABASE_URL="postgresql://xxxxxxxx" \ + -e LITELLM_MASTER_KEY="sk-1234" \ + -p 4000:4000 \ + litellm_test_image \ + --config /app/config.yaml --detailed_debug +``` diff --git a/docs/my-website/docs/image_variations.md b/docs/my-website/docs/image_variations.md new file mode 100644 index 0000000000..23c7d8cb16 --- /dev/null +++ b/docs/my-website/docs/image_variations.md @@ -0,0 +1,31 @@ +# [BETA] Image Variations + +OpenAI's `/image/variations` endpoint is now supported. + +## Quick Start + +```python +from litellm import image_variation +import os + +# set env vars +os.environ["OPENAI_API_KEY"] = "" +os.environ["TOPAZ_API_KEY"] = "" + +# openai call +response = image_variation( + model="dall-e-2", image=image_url +) + +# topaz call +response = image_variation( + model="topaz/Standard V2", image=image_url +) + +print(response) +``` + +## Supported Providers + +- OpenAI +- Topaz diff --git a/docs/my-website/docs/index.md b/docs/my-website/docs/index.md index 0f5c8b84a5..dd3be587b5 100644 --- a/docs/my-website/docs/index.md +++ b/docs/my-website/docs/index.md @@ -89,7 +89,21 @@ response = completion( ``` + +```python +from litellm import completion +import os + +## set ENV variables +os.environ["XAI_API_KEY"] = "your-api-key" + +response = completion( + model="xai/grok-2-latest", + messages=[{ "content": "Hello, how are you?","role": "user"}] +) +``` + ```python @@ -108,6 +122,24 @@ response = completion( + + +```python +from litellm import completion +import os + +## set ENV variables +os.environ["NVIDIA_NIM_API_KEY"] = "nvidia_api_key" +os.environ["NVIDIA_NIM_API_BASE"] = "nvidia_nim_endpoint_url" + +response = completion( + model="nvidia_nim/", + messages=[{ "content": "Hello, how are you?","role": "user"}] +) +``` + + + ```python @@ -254,7 +286,22 @@ response = completion( ``` + +```python +from litellm import completion +import os + +## set ENV variables +os.environ["XAI_API_KEY"] = 
"your-api-key" + +response = completion( + model="xai/grok-2-latest", + messages=[{ "content": "Hello, how are you?","role": "user"}], + stream=True, +) +``` + ```python @@ -274,6 +321,24 @@ response = completion( + + +```python +from litellm import completion +import os + +## set ENV variables +os.environ["NVIDIA_NIM_API_KEY"] = "nvidia_api_key" +os.environ["NVIDIA_NIM_API_BASE"] = "nvidia_nim_endpoint_url" + +response = completion( + model="nvidia_nim/", + messages=[{ "content": "Hello, how are you?","role": "user"}], + stream=True, +) +``` + + ```python diff --git a/docs/my-website/docs/observability/arize_integration.md b/docs/my-website/docs/observability/arize_integration.md index a69d32e5b3..1cd36a1111 100644 --- a/docs/my-website/docs/observability/arize_integration.md +++ b/docs/my-website/docs/observability/arize_integration.md @@ -19,6 +19,7 @@ Make an account on [Arize AI](https://app.arize.com/auth/login) ## Quick Start Use just 2 lines of code, to instantly log your responses **across all providers** with arize +You can also use the instrumentor option instead of the callback, which you can find [here](https://docs.arize.com/arize/llm-tracing/tracing-integrations-auto/litellm). ```python litellm.callbacks = ["arize"] @@ -28,7 +29,7 @@ import litellm import os os.environ["ARIZE_SPACE_KEY"] = "" -os.environ["ARIZE_API_KEY"] = "" # defaults to litellm-completion +os.environ["ARIZE_API_KEY"] = "" # LLM API Keys os.environ['OPENAI_API_KEY']="" diff --git a/docs/my-website/docs/observability/athina_integration.md b/docs/my-website/docs/observability/athina_integration.md index f7c99a4a9c..ba93ea4c98 100644 --- a/docs/my-website/docs/observability/athina_integration.md +++ b/docs/my-website/docs/observability/athina_integration.md @@ -78,7 +78,10 @@ Following are the allowed fields in metadata, their types, and their descriptions * `context: Optional[Union[dict, str]]` - This is the context used as information for the prompt.
For RAG applications, this is the "retrieved" data. You may log context as a string or as an object (dictionary). * `expected_response: Optional[str]` - This is the reference response to compare against for evaluation purposes. This is useful for segmenting inference calls by expected response. * `user_query: Optional[str]` - This is the user's query. For conversational applications, this is the user's last message. - +* `tags: Optional[list]` - This is a list of tags. This is useful for segmenting inference calls by tags. +* `user_feedback: Optional[str]` - The end user’s feedback. +* `model_options: Optional[dict]` - This is a dictionary of model options. This is useful for getting insights into how model behavior affects your end users. +* `custom_attributes: Optional[dict]` - This is a dictionary of custom attributes. This is useful for additional information about the inference. ## Using a self hosted deployment of Athina diff --git a/docs/my-website/docs/observability/custom_callback.md b/docs/my-website/docs/observability/custom_callback.md index 373b4a96c0..cc586b2e5d 100644 --- a/docs/my-website/docs/observability/custom_callback.md +++ b/docs/my-website/docs/observability/custom_callback.md @@ -20,9 +20,7 @@ class MyCustomHandler(CustomLogger): def log_post_api_call(self, kwargs, response_obj, start_time, end_time): print(f"Post-API Call") - def log_stream_event(self, kwargs, response_obj, start_time, end_time): - print(f"On Stream") - + def log_success_event(self, kwargs, response_obj, start_time, end_time): print(f"On Success") @@ -30,9 +28,6 @@ class MyCustomHandler(CustomLogger): print(f"On Failure") #### ASYNC #### - for acompletion/aembeddings - - async def async_log_stream_event(self, kwargs, response_obj, start_time, end_time): - print(f"On Async Streaming") async def async_log_success_event(self, kwargs, response_obj, start_time, end_time): print(f"On Async Success") @@ -127,8 +122,7 @@ from litellm import acompletion class 
MyCustomHandler(CustomLogger): #### ASYNC #### - async def async_log_stream_event(self, kwargs, response_obj, start_time, end_time): - print(f"On Async Streaming") + async def async_log_success_event(self, kwargs, response_obj, start_time, end_time): print(f"On Async Success") diff --git a/docs/my-website/docs/observability/opik_integration.md b/docs/my-website/docs/observability/opik_integration.md index d8075c70e3..b4bcef5393 100644 --- a/docs/my-website/docs/observability/opik_integration.md +++ b/docs/my-website/docs/observability/opik_integration.md @@ -1,3 +1,5 @@ +import Tabs from '@theme/Tabs'; +import TabItem from '@theme/TabItem'; import Image from '@theme/IdealImage'; # Comet Opik - Logging + Evals @@ -21,17 +23,16 @@ Use just 4 lines of code, to instantly log your responses **across all providers Get your Opik API Key by signing up [here](https://www.comet.com/signup?utm_source=litelllm&utm_medium=docs&utm_content=api_key_cell)! ```python -from litellm.integrations.opik.opik import OpikLogger import litellm - -opik_logger = OpikLogger() -litellm.callbacks = [opik_logger] +litellm.callbacks = ["opik"] ``` Full examples: + + + ```python -from litellm.integrations.opik.opik import OpikLogger import litellm import os @@ -43,8 +44,7 @@ os.environ["OPIK_WORKSPACE"] = "" os.environ["OPENAI_API_KEY"] = "" # set "opik" as a callback, litellm will send the data to an Opik server (such as comet.com) -opik_logger = OpikLogger() -litellm.callbacks = [opik_logger] +litellm.callbacks = ["opik"] # openai call response = litellm.completion( @@ -55,18 +55,16 @@ response = litellm.completion( ) ``` -If you are liteLLM within a function tracked using Opik's `@track` decorator, +If you are using liteLLM within a function tracked using Opik's `@track` decorator, you will need provide the `current_span_data` field in the metadata attribute so that the LLM call is assigned to the correct trace: ```python from opik import track from opik.opik_context import 
get_current_span_data -from litellm.integrations.opik.opik import OpikLogger import litellm -opik_logger = OpikLogger() -litellm.callbacks = [opik_logger] +litellm.callbacks = ["opik"] @track() def streaming_function(input): @@ -87,6 +85,126 @@ response = streaming_function("Why is tracking and evaluation of LLMs important? chunks = list(response) ``` + + + +1. Setup config.yaml + +```yaml +model_list: + - model_name: gpt-3.5-turbo-testing + litellm_params: + model: gpt-3.5-turbo + api_key: os.environ/OPENAI_API_KEY + +litellm_settings: + callbacks: ["opik"] + +environment_variables: + OPIK_API_KEY: "" + OPIK_WORKSPACE: "" +``` + +2. Run proxy + +```bash +litellm --config config.yaml +``` + +3. Test it! + +```bash +curl -L -X POST 'http://0.0.0.0:4000/v1/chat/completions' \ +-H 'Content-Type: application/json' \ +-H 'Authorization: Bearer sk-1234' \ +-d '{ + "model": "gpt-3.5-turbo-testing", + "messages": [ + { + "role": "user", + "content": "What's the weather like in Boston today?" + } + ] +}' +``` + + + + +## Opik-Specific Parameters + +These can be passed inside metadata with the `opik` key. + +### Fields + +- `project_name` - Name of the Opik project to send data to. +- `current_span_data` - The current span data to be used for tracing. +- `tags` - Tags to be used for tracing. 
+ +### Usage + + + + +```python +from opik import track +from opik.opik_context import get_current_span_data +import litellm + +litellm.callbacks = ["opik"] + +messages = [{"role": "user", "content": input}] +response = litellm.completion( + model="gpt-3.5-turbo", + messages=messages, + metadata = { + "opik": { + "current_span_data": get_current_span_data(), + "tags": ["streaming-test"], + }, + } +) +return response +``` + + + +```bash +curl -L -X POST 'http://0.0.0.0:4000/v1/chat/completions' \ +-H 'Content-Type: application/json' \ +-H 'Authorization: Bearer sk-1234' \ +-d '{ + "model": "gpt-3.5-turbo-testing", + "messages": [ + { + "role": "user", + "content": "What's the weather like in Boston today?" + } + ], + "metadata": { + "opik": { + "current_span_data": "...", + "tags": ["streaming-test"], + }, + } +}' +``` + + + + + + + + + + + + + + + + ## Support & Talk to Founders - [Schedule Demo 👋](https://calendly.com/d/4mp-gd3-k5k/berriai-1-1-onboarding-litellm-hosted-version) diff --git a/docs/my-website/docs/observability/phoenix_integration.md b/docs/my-website/docs/observability/phoenix_integration.md new file mode 100644 index 0000000000..d6974adeca --- /dev/null +++ b/docs/my-website/docs/observability/phoenix_integration.md @@ -0,0 +1,75 @@ +import Image from '@theme/IdealImage'; + +# Phoenix OSS + +Open source tracing and evaluation platform + +:::tip + +This is community maintained, Please make an issue if you run into a bug +https://github.com/BerriAI/litellm + +::: + + +## Pre-Requisites +Make an account on [Phoenix OSS](https://phoenix.arize.com) +OR self-host your own instance of [Phoenix](https://docs.arize.com/phoenix/deployment) + +## Quick Start +Use just 2 lines of code, to instantly log your responses **across all providers** with Phoenix + +You can also use the instrumentor option instead of the callback, which you can find [here](https://docs.arize.com/phoenix/tracing/integrations-tracing/litellm). 
+ +```python +litellm.callbacks = ["arize_phoenix"] +``` +```python +import litellm +import os + +os.environ["PHOENIX_API_KEY"] = "" # Necessary only using Phoenix Cloud +os.environ["PHOENIX_COLLECTOR_HTTP_ENDPOINT"] = "" # The URL of your Phoenix OSS instance +# This defaults to https://app.phoenix.arize.com/v1/traces for Phoenix Cloud + +# LLM API Keys +os.environ['OPENAI_API_KEY']="" + +# set arize as a callback, litellm will send the data to arize +litellm.callbacks = ["phoenix"] + +# openai call +response = litellm.completion( + model="gpt-3.5-turbo", + messages=[ + {"role": "user", "content": "Hi 👋 - i'm openai"} + ] +) +``` + +### Using with LiteLLM Proxy + + +```yaml +model_list: + - model_name: gpt-4o + litellm_params: + model: openai/fake + api_key: fake-key + api_base: https://exampleopenaiendpoint-production.up.railway.app/ + +litellm_settings: + callbacks: ["arize_phoenix"] + +environment_variables: + PHOENIX_API_KEY: "d0*****" + PHOENIX_COLLECTOR_ENDPOINT: "https://app.phoenix.arize.com/v1/traces" # OPTIONAL, for setting the GRPC endpoint + PHOENIX_COLLECTOR_HTTP_ENDPOINT: "https://app.phoenix.arize.com/v1/traces" # OPTIONAL, for setting the HTTP endpoint +``` + +## Support & Talk to Founders + +- [Schedule Demo 👋](https://calendly.com/d/4mp-gd3-k5k/berriai-1-1-onboarding-litellm-hosted-version) +- [Community Discord 💭](https://discord.gg/wuPM9dRgDw) +- Our numbers 📞 +1 (770) 8783-106 / +1 (412) 618-6238 +- Our emails ✉️ ishaan@berri.ai / krrish@berri.ai diff --git a/docs/my-website/docs/pass_through/assembly_ai.md b/docs/my-website/docs/pass_through/assembly_ai.md new file mode 100644 index 0000000000..4606640c5c --- /dev/null +++ b/docs/my-website/docs/pass_through/assembly_ai.md @@ -0,0 +1,85 @@ +# Assembly AI + +Pass-through endpoints for Assembly AI - call Assembly AI endpoints, in native format (no translation). 
+ +| Feature | Supported | Notes | +|-------|-------|-------| +| Cost Tracking | ✅ | works across all integrations | +| Logging | ✅ | works across all integrations | + + +Supports **ALL** Assembly AI Endpoints + +[**See All Assembly AI Endpoints**](https://www.assemblyai.com/docs/api-reference) + + + + +## Quick Start + +Let's call the Assembly AI [`/v2/transcripts` endpoint](https://www.assemblyai.com/docs/api-reference/transcripts) + +1. Add Assembly AI API Key to your environment + +```bash +export ASSEMBLYAI_API_KEY="" +``` + +2. Start LiteLLM Proxy + +```bash +litellm + +# RUNNING on http://0.0.0.0:4000 +``` + +3. Test it! + +Let's call the Assembly AI `/v2/transcripts` endpoint + +```python +import assemblyai as aai + +LITELLM_VIRTUAL_KEY = "sk-1234" # +LITELLM_PROXY_BASE_URL = "http://0.0.0.0:4000/assemblyai" # /assemblyai + +aai.settings.api_key = f"Bearer {LITELLM_VIRTUAL_KEY}" +aai.settings.base_url = LITELLM_PROXY_BASE_URL + +# URL of the file to transcribe +FILE_URL = "https://assembly.ai/wildfires.mp3" + +# You can also transcribe a local file by passing in a file path +# FILE_URL = './path/to/file.mp3' + +transcriber = aai.Transcriber() +transcript = transcriber.transcribe(FILE_URL) +print(transcript) +print(transcript.id) +``` + +## Calling Assembly AI EU endpoints + +If you want to send your request to the Assembly AI EU endpoint, you can do so by setting the `LITELLM_PROXY_BASE_URL` to `/eu.assemblyai` + + +```python +import assemblyai as aai + +LITELLM_VIRTUAL_KEY = "sk-1234" # +LITELLM_PROXY_BASE_URL = "http://0.0.0.0:4000/eu.assemblyai" # /eu.assemblyai + +aai.settings.api_key = f"Bearer {LITELLM_VIRTUAL_KEY}" +aai.settings.base_url = LITELLM_PROXY_BASE_URL + +# URL of the file to transcribe +FILE_URL = "https://assembly.ai/wildfires.mp3" + +# You can also transcribe a local file by passing in a file path +# FILE_URL = './path/to/file.mp3' + +transcriber = aai.Transcriber() +transcript = transcriber.transcribe(FILE_URL) +print(transcript) 
+print(transcript.id) +``` diff --git a/docs/my-website/docs/pass_through/openai_passthrough.md b/docs/my-website/docs/pass_through/openai_passthrough.md new file mode 100644 index 0000000000..2712369575 --- /dev/null +++ b/docs/my-website/docs/pass_through/openai_passthrough.md @@ -0,0 +1,95 @@ +# OpenAI Passthrough + +Pass-through endpoints for `/openai` + +## Overview + +| Feature | Supported | Notes | +|-------|-------|-------| +| Cost Tracking | ❌ | Not supported | +| Logging | ✅ | Works across all integrations | +| Streaming | ✅ | Fully supported | + +### When to use this? + +- For 90% of your use cases, you should use the [native LiteLLM OpenAI Integration](https://docs.litellm.ai/docs/providers/openai) (`/chat/completions`, `/embeddings`, `/completions`, `/images`, `/batches`, etc.) +- Use this passthrough to call less popular or newer OpenAI endpoints that LiteLLM doesn't fully support yet, such as `/assistants`, `/threads`, `/vector_stores` + +Simply replace `https://api.openai.com` with `LITELLM_PROXY_BASE_URL/openai` + +## Usage Examples + +### Assistants API + +#### Create OpenAI Client + +Make sure you do the following: +- Point `base_url` to your `LITELLM_PROXY_BASE_URL/openai` +- Use your `LITELLM_API_KEY` as the `api_key` + +```python +import openai + +client = openai.OpenAI( + base_url="http://0.0.0.0:4000/openai", # /openai + api_key="sk-anything" # +) +``` + +#### Create an Assistant + +```python +# Create an assistant +assistant = client.beta.assistants.create( + name="Math Tutor", + instructions="You are a math tutor. 
Help solve equations.", + model="gpt-4o", +) +``` + +#### Create a Thread +```python +# Create a thread +thread = client.beta.threads.create() +``` + +#### Add a Message to the Thread +```python +# Add a message +message = client.beta.threads.messages.create( + thread_id=thread.id, + role="user", + content="Solve 3x + 11 = 14", +) +``` + +#### Run the Assistant +```python +# Create a run to get the assistant's response +run = client.beta.threads.runs.create( + thread_id=thread.id, + assistant_id=assistant.id, +) + +# Check run status +run_status = client.beta.threads.runs.retrieve( + thread_id=thread.id, + run_id=run.id +) +``` + +#### Retrieve Messages +```python +# List messages after the run completes +messages = client.beta.threads.messages.list( + thread_id=thread.id +) +``` + +#### Delete the Assistant + +```python +# Delete the assistant when done +client.beta.assistants.delete(assistant.id) +``` + diff --git a/docs/my-website/docs/projects/Elroy.md b/docs/my-website/docs/projects/Elroy.md new file mode 100644 index 0000000000..07652f577a --- /dev/null +++ b/docs/my-website/docs/projects/Elroy.md @@ -0,0 +1,14 @@ +# 🐕 Elroy + +Elroy is a scriptable AI assistant that remembers and sets goals. + +Interact through the command line, share memories via MCP, or build your own tools using Python. 
+ + +[![Static Badge][github-shield]][github-url] +[![Discord][discord-shield]][discord-url] + +[github-shield]: https://img.shields.io/badge/Github-repo-white?logo=github +[github-url]: https://github.com/elroy-bot/elroy +[discord-shield]: https://img.shields.io/discord/1200684659277832293?color=7289DA&label=Discord&logo=discord&logoColor=white +[discord-url]: https://discord.gg/5PJUY4eMce diff --git a/docs/my-website/docs/projects/PDL.md b/docs/my-website/docs/projects/PDL.md new file mode 100644 index 0000000000..5d6fd77555 --- /dev/null +++ b/docs/my-website/docs/projects/PDL.md @@ -0,0 +1,5 @@ +PDL - A YAML-based approach to prompt programming + +GitHub: https://github.com/IBM/prompt-declaration-language + +PDL is a declarative approach to prompt programming, helping users to accumulate messages implicitly, with support for model chaining and tool use. \ No newline at end of file diff --git a/docs/my-website/docs/projects/pgai.md b/docs/my-website/docs/projects/pgai.md new file mode 100644 index 0000000000..bece5baf6a --- /dev/null +++ b/docs/my-website/docs/projects/pgai.md @@ -0,0 +1,9 @@ +# pgai + +[pgai](https://github.com/timescale/pgai) is a suite of tools to develop RAG, semantic search, and other AI applications more easily with PostgreSQL. + +If you don't know what pgai is yet, check out the [README](https://github.com/timescale/pgai)! + +If you're already familiar with pgai, you can find LiteLLM-specific docs here: +- LiteLLM for [model calling](https://github.com/timescale/pgai/blob/main/docs/model_calling/litellm.md) in pgai +- Use the [LiteLLM provider](https://github.com/timescale/pgai/blob/main/docs/vectorizer/api-reference.md#aiembedding_litellm) to automatically create embeddings for your data via the pgai vectorizer. 
diff --git a/docs/my-website/docs/projects/smolagents.md b/docs/my-website/docs/projects/smolagents.md new file mode 100644 index 0000000000..9e6ba7b07f --- /dev/null +++ b/docs/my-website/docs/projects/smolagents.md @@ -0,0 +1,8 @@ + +# 🤗 Smolagents + +`smolagents` is a barebones library for agents. Agents write python code to call tools and orchestrate other agents. + +- [Github](https://github.com/huggingface/smolagents) +- [Docs](https://huggingface.co/docs/smolagents/index) +- [Build your agent](https://huggingface.co/docs/smolagents/guided_tour) \ No newline at end of file diff --git a/docs/my-website/docs/providers/aiml.md b/docs/my-website/docs/providers/aiml.md new file mode 100644 index 0000000000..1343cbf8d8 --- /dev/null +++ b/docs/my-website/docs/providers/aiml.md @@ -0,0 +1,160 @@ +# AI/ML API + +Getting started with the AI/ML API is simple. Follow these steps to set up your integration: + +### 1. Get Your API Key +To begin, you need an API key. You can obtain yours here: +🔑 [Get Your API Key](https://aimlapi.com/app/keys/?utm_source=aimlapi&utm_medium=github&utm_campaign=integration) + +### 2. Explore Available Models +Looking for a different model? Browse the full list of supported models: +📚 [Full List of Models](https://docs.aimlapi.com/api-overview/model-database/text-models?utm_source=aimlapi&utm_medium=github&utm_campaign=integration) + +### 3. Read the Documentation +For detailed setup instructions and usage guidelines, check out the official documentation: +📖 [AI/ML API Docs](https://docs.aimlapi.com/quickstart/setting-up?utm_source=aimlapi&utm_medium=github&utm_campaign=integration) + +### 4. Need Help? +If you have any questions, feel free to reach out. We’re happy to assist! 🚀 [Discord](https://discord.gg/hvaUsJpVJf) + +## Usage +You can choose from LLama, Qwen, Flux, and 200+ other open and closed-source models on aimlapi.com/models. 
For example: + +```python +import litellm + +response = litellm.completion( + model="openai/meta-llama/Meta-Llama-3.1-405B-Instruct-Turbo", # The model name must include prefix "openai" + the model name from ai/ml api + api_key="", # your aiml api-key + api_base="https://api.aimlapi.com/v2", + messages=[ + { + "role": "user", + "content": "Hey, how's it going?", + } + ], +) +``` + +## Streaming + +```python +import litellm + +response = litellm.completion( + model="openai/Qwen/Qwen2-72B-Instruct", # The model name must include prefix "openai" + the model name from ai/ml api + api_key="", # your aiml api-key + api_base="https://api.aimlapi.com/v2", + messages=[ + { + "role": "user", + "content": "Hey, how's it going?", + } + ], + stream=True, +) +for chunk in response: + print(chunk) +``` + +## Async Completion + +```python +import asyncio + +import litellm + + +async def main(): + response = await litellm.acompletion( + model="openai/anthropic/claude-3-5-haiku", # The model name must include prefix "openai" + the model name from ai/ml api + api_key="", # your aiml api-key + api_base="https://api.aimlapi.com/v2", + messages=[ + { + "role": "user", + "content": "Hey, how's it going?", + } + ], + ) + print(response) + + +if __name__ == "__main__": + asyncio.run(main()) +``` + +## Async Streaming + +```python +import asyncio +import traceback + +import litellm + + +async def main(): + try: + print("test acompletion + streaming") + response = await litellm.acompletion( + model="openai/nvidia/Llama-3.1-Nemotron-70B-Instruct-HF", # The model name must include prefix "openai" + the model name from ai/ml api + api_key="", # your aiml api-key + api_base="https://api.aimlapi.com/v2", + messages=[{"content": "Hey, how's it going?", "role": "user"}], + stream=True, + ) + print(f"response: {response}") + async for chunk in response: + print(chunk) + except: + print(f"error occurred: {traceback.format_exc()}") + pass + + +if __name__ == "__main__": + asyncio.run(main()) +``` + 
+## Async Embedding + +```python +import asyncio + +import litellm + + +async def main(): + response = await litellm.aembedding( + model="openai/text-embedding-3-small", # The model name must include prefix "openai" + the model name from ai/ml api + api_key="", # your aiml api-key + api_base="https://api.aimlapi.com/v1", # 👈 the URL has changed from v2 to v1 + input="Your text string", + ) + print(response) + + +if __name__ == "__main__": + asyncio.run(main()) +``` + +## Async Image Generation + +```python +import asyncio + +import litellm + + +async def main(): + response = await litellm.aimage_generation( + model="openai/dall-e-3", # The model name must include prefix "openai" + the model name from ai/ml api + api_key="", # your aiml api-key + api_base="https://api.aimlapi.com/v1", # 👈 the URL has changed from v2 to v1 + prompt="A cute baby sea otter", + ) + print(response) + + +if __name__ == "__main__": + asyncio.run(main()) +``` \ No newline at end of file diff --git a/docs/my-website/docs/providers/anthropic.md b/docs/my-website/docs/providers/anthropic.md index b3bfe333cc..55e9ba10d3 100644 --- a/docs/my-website/docs/providers/anthropic.md +++ b/docs/my-website/docs/providers/anthropic.md @@ -819,6 +819,114 @@ resp = litellm.completion( print(f"\nResponse: {resp}") ``` +## Usage - Thinking / `reasoning_content` + + + + +```python +from litellm import completion + +resp = completion( + model="anthropic/claude-3-7-sonnet-20250219", + messages=[{"role": "user", "content": "What is the capital of France?"}], + thinking={"type": "enabled", "budget_tokens": 1024}, +) + +``` + + + + + +1. Setup config.yaml + +```yaml +- model_name: claude-3-7-sonnet-20250219 + litellm_params: + model: anthropic/claude-3-7-sonnet-20250219 + api_key: os.environ/ANTHROPIC_API_KEY +``` + +2. Start proxy + +```bash +litellm --config /path/to/config.yaml +``` + +3. Test it! 
+ +```bash +curl http://0.0.0.0:4000/v1/chat/completions \ + -H "Content-Type: application/json" \ + -H "Authorization: Bearer " \ + -d '{ + "model": "claude-3-7-sonnet-20250219", + "messages": [{"role": "user", "content": "What is the capital of France?"}], + "thinking": {"type": "enabled", "budget_tokens": 1024} + }' +``` + + + + + +**Expected Response** + +```python +ModelResponse( + id='chatcmpl-c542d76d-f675-4e87-8e5f-05855f5d0f5e', + created=1740470510, + model='claude-3-7-sonnet-20250219', + object='chat.completion', + system_fingerprint=None, + choices=[ + Choices( + finish_reason='stop', + index=0, + message=Message( + content="The capital of France is Paris.", + role='assistant', + tool_calls=None, + function_call=None, + provider_specific_fields={ + 'citations': None, + 'thinking_blocks': [ + { + 'type': 'thinking', + 'thinking': 'The capital of France is Paris. This is a very straightforward factual question.', + 'signature': 'EuYBCkQYAiJAy6...' + } + ] + } + ), + thinking_blocks=[ + { + 'type': 'thinking', + 'thinking': 'The capital of France is Paris. This is a very straightforward factual question.', + 'signature': 'EuYBCkQYAiJAy6AGB...' + } + ], + reasoning_content='The capital of France is Paris. This is a very straightforward factual question.' + ) + ], + usage=Usage( + completion_tokens=68, + prompt_tokens=42, + total_tokens=110, + completion_tokens_details=None, + prompt_tokens_details=PromptTokensDetailsWrapper( + audio_tokens=None, + cached_tokens=0, + text_tokens=None, + image_tokens=None + ), + cache_creation_input_tokens=0, + cache_read_input_tokens=0 + ) +) +``` + ## **Passing Extra Headers to Anthropic API** Pass `extra_headers: dict` to `litellm.completion` @@ -987,6 +1095,106 @@ curl http://0.0.0.0:4000/v1/chat/completions \ +## [BETA] Citations API + +Pass `citations: {"enabled": true}` to Anthropic, to get citations on your document responses. + +Note: This interface is in BETA. 
If you have feedback on how citations should be returned, please [tell us here](https://github.com/BerriAI/litellm/issues/7970#issuecomment-2644437943) + + + + +```python +from litellm import completion + +resp = completion( + model="claude-3-5-sonnet-20241022", + messages=[ + { + "role": "user", + "content": [ + { + "type": "document", + "source": { + "type": "text", + "media_type": "text/plain", + "data": "The grass is green. The sky is blue.", + }, + "title": "My Document", + "context": "This is a trustworthy document.", + "citations": {"enabled": True}, + }, + { + "type": "text", + "text": "What color is the grass and sky?", + }, + ], + } + ], +) + +citations = resp.choices[0].message.provider_specific_fields["citations"] + +assert citations is not None +``` + + + + +1. Setup config.yaml + +```yaml +model_list: + - model_name: anthropic-claude + litellm_params: + model: anthropic/claude-3-5-sonnet-20241022 + api_key: os.environ/ANTHROPIC_API_KEY +``` + +2. Start proxy + +```bash +litellm --config /path/to/config.yaml + +# RUNNING on http://0.0.0.0:4000 +``` + +3. Test it! + +```bash +curl -L -X POST 'http://0.0.0.0:4000/v1/chat/completions' \ +-H 'Content-Type: application/json' \ +-H 'Authorization: Bearer sk-1234' \ +-d '{ + "model": "anthropic-claude", + "messages": [ + { + "role": "user", + "content": [ + { + "type": "document", + "source": { + "type": "text", + "media_type": "text/plain", + "data": "The grass is green. The sky is blue.", + }, + "title": "My Document", + "context": "This is a trustworthy document.", + "citations": {"enabled": True}, + }, + { + "type": "text", + "text": "What color is the grass and sky?", + }, + ], + } + ] +}' +``` + + + + ## Usage - passing 'user_id' to Anthropic LiteLLM translates the OpenAI `user` param to Anthropic's `metadata[user_id]` param. 
@@ -1035,3 +1243,4 @@ curl http://0.0.0.0:4000/v1/chat/completions \ + diff --git a/docs/my-website/docs/providers/azure.md b/docs/my-website/docs/providers/azure.md index 05ea02302d..111738a449 100644 --- a/docs/my-website/docs/providers/azure.md +++ b/docs/my-website/docs/providers/azure.md @@ -10,7 +10,7 @@ import TabItem from '@theme/TabItem'; | Property | Details | |-------|-------| | Description | Azure OpenAI Service provides REST API access to OpenAI's powerful language models including o1, o1-mini, GPT-4o, GPT-4o mini, GPT-4 Turbo with Vision, GPT-4, GPT-3.5-Turbo, and Embeddings model series | -| Provider Route on LiteLLM | `azure/` | +| Provider Route on LiteLLM | `azure/`, [`azure/o_series/`](#azure-o-series-models) | | Supported Operations | [`/chat/completions`](#azure-openai-chat-completion-models), [`/completions`](#azure-instruct-models), [`/embeddings`](../embedding/supported_embedding#azure-openai-embedding-models), [`/audio/speech`](#azure-text-to-speech-tts), [`/audio/transcriptions`](../audio_transcription), `/fine_tuning`, [`/batches`](#azure-batches-api), `/files`, [`/images`](../image_generation#azure-openai-image-generation-models) | | Link to Provider Doc | [Azure OpenAI ↗](https://learn.microsoft.com/en-us/azure/ai-services/openai/overview) @@ -948,6 +948,65 @@ Expected Response: {"data":[{"id":"batch_R3V...} ``` +## O-Series Models + +Azure OpenAI O-Series models are supported on LiteLLM. + +LiteLLM routes any deployment name with `o1` or `o3` in the model name, to the O-Series [transformation](https://github.com/BerriAI/litellm/blob/91ed05df2962b8eee8492374b048d27cc144d08c/litellm/llms/azure/chat/o1_transformation.py#L4) logic. + +To set this explicitly, set `model` to `azure/o_series/`. 
+ +**Automatic Routing** + + + + +```python +import litellm + +litellm.completion(model="azure/my-o3-deployment", messages=[{"role": "user", "content": "Hello, world!"}]) # 👈 Note: 'o3' in the deployment name +``` + + + +```yaml +model_list: + - model_name: o3-mini + litellm_params: + model: azure/o3-model + api_base: os.environ/AZURE_API_BASE + api_key: os.environ/AZURE_API_KEY +``` + + + + +**Explicit Routing** + + + + +```python +import litellm + +litellm.completion(model="azure/o_series/my-random-deployment-name", messages=[{"role": "user", "content": "Hello, world!"}]) # 👈 Note: 'o_series/' in the deployment name +``` + + + +```yaml +model_list: + - model_name: o3-mini + litellm_params: + model: azure/o_series/my-random-deployment-name + api_base: os.environ/AZURE_API_BASE + api_key: os.environ/AZURE_API_KEY +``` + + + + + ## Advanced ### Azure API Load-Balancing diff --git a/docs/my-website/docs/providers/bedrock.md b/docs/my-website/docs/providers/bedrock.md index cf87f0b157..bd2d4be1a4 100644 --- a/docs/my-website/docs/providers/bedrock.md +++ b/docs/my-website/docs/providers/bedrock.md @@ -2,7 +2,17 @@ import Tabs from '@theme/Tabs'; import TabItem from '@theme/TabItem'; # AWS Bedrock -ALL Bedrock models (Anthropic, Meta, Mistral, Amazon, etc.) are Supported +ALL Bedrock models (Anthropic, Meta, Deepseek, Mistral, Amazon, etc.) are Supported + +| Property | Details | +|-------|-------| +| Description | Amazon Bedrock is a fully managed service that offers a choice of high-performing foundation models (FMs). 
| +| Provider Route on LiteLLM | `bedrock/`, [`bedrock/converse/`](#set-converse--invoke-route), [`bedrock/invoke/`](#set-invoke-route), [`bedrock/converse_like/`](#calling-via-internal-proxy), [`bedrock/llama/`](#deepseek-not-r1), [`bedrock/deepseek_r1/`](#deepseek-r1) | +| Provider Doc | [Amazon Bedrock ↗](https://docs.aws.amazon.com/bedrock/latest/userguide/what-is-bedrock.html) | +| Supported OpenAI Endpoints | `/chat/completions`, `/completions`, `/embeddings`, `/images/generations` | +| Rerank Endpoint | `/rerank` | +| Pass-through Endpoint | [Supported](../pass_through/bedrock.md) | + LiteLLM requires `boto3` to be installed on your system for Bedrock requests ```shell @@ -276,9 +286,12 @@ print(response) -## Usage - Function Calling +## Usage - Function Calling / Tool calling -LiteLLM uses Bedrock's Converse API for making tool calls +LiteLLM supports tool calling via Bedrock's Converse and Invoke API's. + + + ```python from litellm import completion @@ -323,6 +336,69 @@ assert isinstance( response.choices[0].message.tool_calls[0].function.arguments, str ) ``` + + + +1. Setup config.yaml + +```yaml +model_list: + - model_name: bedrock-claude-3-7 + litellm_params: + model: bedrock/us.anthropic.claude-3-7-sonnet-20250219-v1:0 # for bedrock invoke, specify `bedrock/invoke/` +``` + +2. Start proxy + +```bash +litellm --config /path/to/config.yaml +``` + +3. Test it! + +```bash +curl http://0.0.0.0:4000/v1/chat/completions \ +-H "Content-Type: application/json" \ +-H "Authorization: Bearer $LITELLM_API_KEY" \ +-d '{ + "model": "bedrock-claude-3-7", + "messages": [ + { + "role": "user", + "content": "What'\''s the weather like in Boston today?" + } + ], + "tools": [ + { + "type": "function", + "function": { + "name": "get_current_weather", + "description": "Get the current weather in a given location", + "parameters": { + "type": "object", + "properties": { + "location": { + "type": "string", + "description": "The city and state, e.g. 
San Francisco, CA" + }, + "unit": { + "type": "string", + "enum": ["celsius", "fahrenheit"] + } + }, + "required": ["location"] + } + } + } + ], + "tool_choice": "auto" +}' + +``` + + + + ## Usage - Vision @@ -367,6 +443,226 @@ print(f"\nResponse: {resp}") ``` +## Usage - 'thinking' / 'reasoning content' + +This is currently only supported for Anthropic's Claude 3.7 Sonnet + Deepseek R1. + +Works on v1.61.20+. + +Returns 2 new fields in `message` and `delta` object: +- `reasoning_content` - string - The reasoning content of the response +- `thinking_blocks` - list of objects (Anthropic only) - The thinking blocks of the response + +Each object has the following fields: +- `type` - Literal["thinking"] - The type of thinking block +- `thinking` - string - The thinking of the response. Also returned in `reasoning_content` +- `signature` - string - A base64 encoded string, returned by Anthropic. + +The `signature` is required by Anthropic on subsequent calls, if 'thinking' content is passed in (only required to use `thinking` with tool calling). [Learn more](https://docs.anthropic.com/en/docs/build-with-claude/extended-thinking#understanding-thinking-blocks) + + + + +```python +from litellm import completion + +# set env +os.environ["AWS_ACCESS_KEY_ID"] = "" +os.environ["AWS_SECRET_ACCESS_KEY"] = "" +os.environ["AWS_REGION_NAME"] = "" + + +resp = completion( + model="bedrock/us.anthropic.claude-3-7-sonnet-20250219-v1:0", + messages=[{"role": "user", "content": "What is the capital of France?"}], + thinking={"type": "enabled", "budget_tokens": 1024}, +) + +print(resp) +``` + + + +1. Setup config.yaml + +```yaml +model_list: + - model_name: bedrock-claude-3-7 + litellm_params: + model: bedrock/us.anthropic.claude-3-7-sonnet-20250219-v1:0 + thinking: {"type": "enabled", "budget_tokens": 1024} # 👈 EITHER HERE OR ON REQUEST +``` + +2. Start proxy + +```bash +litellm --config /path/to/config.yaml +``` + +3. Test it! 
+ +```bash +curl http://0.0.0.0:4000/v1/chat/completions \ + -H "Content-Type: application/json" \ + -H "Authorization: Bearer " \ + -d '{ + "model": "bedrock-claude-3-7", + "messages": [{"role": "user", "content": "What is the capital of France?"}], + "thinking": {"type": "enabled", "budget_tokens": 1024} # 👈 EITHER HERE OR ON CONFIG.YAML + }' +``` + + + + + +**Expected Response** + +Same as [Anthropic API response](../providers/anthropic#usage---thinking--reasoning_content). + +```python +{ + "id": "chatcmpl-c661dfd7-7530-49c9-b0cc-d5018ba4727d", + "created": 1740640366, + "model": "us.anthropic.claude-3-7-sonnet-20250219-v1:0", + "object": "chat.completion", + "system_fingerprint": null, + "choices": [ + { + "finish_reason": "stop", + "index": 0, + "message": { + "content": "The capital of France is Paris. It's not only the capital city but also the largest city in France, serving as the country's major cultural, economic, and political center.", + "role": "assistant", + "tool_calls": null, + "function_call": null, + "reasoning_content": "The capital of France is Paris. This is a straightforward factual question.", + "thinking_blocks": [ + { + "type": "thinking", + "thinking": "The capital of France is Paris. 
This is a straightforward factual question.", + "signature": "EqoBCkgIARABGAIiQL2UoU0b1OHYi+yCHpBY7U6FQW8/FcoLewocJQPa2HnmLM+NECy50y44F/kD4SULFXi57buI9fAvyBwtyjlOiO0SDE3+r3spdg6PLOo9PBoMma2ku5OTAoR46j9VIjDRlvNmBvff7YW4WI9oU8XagaOBSxLPxElrhyuxppEn7m6bfT40dqBSTDrfiw4FYB4qEPETTI6TA6wtjGAAqmFqKTo=" + } + ] + } + } + ], + "usage": { + "completion_tokens": 64, + "prompt_tokens": 42, + "total_tokens": 106, + "completion_tokens_details": null, + "prompt_tokens_details": null + } +} +``` + + +## Usage - Structured Output / JSON mode + + + + +```python +from litellm import completion +import os +from pydantic import BaseModel + +# set env +os.environ["AWS_ACCESS_KEY_ID"] = "" +os.environ["AWS_SECRET_ACCESS_KEY"] = "" +os.environ["AWS_REGION_NAME"] = "" + +class CalendarEvent(BaseModel): + name: str + date: str + participants: list[str] + +class EventsList(BaseModel): + events: list[CalendarEvent] + +response = completion( + model="bedrock/anthropic.claude-3-7-sonnet-20250219-v1:0", # specify invoke via `bedrock/invoke/anthropic.claude-3-7-sonnet-20250219-v1:0` + response_format=EventsList, + messages=[ + {"role": "system", "content": "You are a helpful assistant designed to output JSON."}, + {"role": "user", "content": "Who won the world series in 2020?"} + ], +) +print(response.choices[0].message.content) +``` + + + +1. Setup config.yaml + +```yaml +model_list: + - model_name: bedrock-claude-3-7 + litellm_params: + model: bedrock/us.anthropic.claude-3-7-sonnet-20250219-v1:0 # specify invoke via `bedrock/invoke/` + aws_access_key_id: os.environ/CUSTOM_AWS_ACCESS_KEY_ID + aws_secret_access_key: os.environ/CUSTOM_AWS_SECRET_ACCESS_KEY + aws_region_name: os.environ/CUSTOM_AWS_REGION_NAME +``` + +2. Start proxy + +```bash +litellm --config /path/to/config.yaml +``` + +3. Test it! 
+ +```bash +curl http://0.0.0.0:4000/v1/chat/completions \ + -H "Content-Type: application/json" \ + -H "Authorization: Bearer $LITELLM_KEY" \ + -d '{ + "model": "bedrock-claude-3-7", + "messages": [ + { + "role": "system", + "content": "You are a helpful assistant designed to output JSON." + }, + { + "role": "user", + "content": "Who won the worlde series in 2020?" + } + ], + "response_format": { + "type": "json_schema", + "json_schema": { + "name": "math_reasoning", + "description": "reason about maths", + "schema": { + "type": "object", + "properties": { + "steps": { + "type": "array", + "items": { + "type": "object", + "properties": { + "explanation": { "type": "string" }, + "output": { "type": "string" } + }, + "required": ["explanation", "output"], + "additionalProperties": false + } + }, + "final_answer": { "type": "string" } + }, + "required": ["steps", "final_answer"], + "additionalProperties": false + }, + "strict": true + } + } + }' +``` + + + ## Usage - Bedrock Guardrails Example of using [Bedrock Guardrails with LiteLLM](https://docs.aws.amazon.com/bedrock/latest/userguide/guardrails-use-converse-api.html) @@ -792,6 +1088,16 @@ curl -X POST 'http://0.0.0.0:4000/chat/completions' \ LiteLLM supports Document Understanding for Bedrock models - [AWS Bedrock Docs](https://docs.aws.amazon.com/nova/latest/userguide/modalities-document.html). +:::info + +LiteLLM supports ALL Bedrock document types - + +E.g.: "pdf", "csv", "doc", "docx", "xls", "xlsx", "html", "txt", "md" + +You can also pass these as either `image_url` or `base64` + +::: + ### url @@ -1191,6 +1497,209 @@ response = completion( aws_bedrock_client=bedrock, ) ``` +## Calling via Internal Proxy + +Use the `bedrock/converse_like/model` endpoint to call bedrock converse model via your internal proxy. 
+ + + + +```python +from litellm import completion + +response = completion( + model="bedrock/converse_like/some-model", + messages=[{"role": "user", "content": "What's AWS?"}], + api_key="sk-1234", + api_base="https://some-api-url/models", + extra_headers={"test": "hello world"}, +) +``` + + + + +1. Setup config.yaml + +```yaml +model_list: + - model_name: anthropic-claude + litellm_params: + model: bedrock/converse_like/some-model + api_base: https://some-api-url/models +``` + +2. Start proxy server + +```bash +litellm --config config.yaml + +# RUNNING on http://0.0.0.0:4000 +``` + +3. Test it! + +```bash +curl -X POST 'http://0.0.0.0:4000/chat/completions' \ +-H 'Content-Type: application/json' \ +-H 'Authorization: Bearer sk-1234' \ +-d '{ + "model": "anthropic-claude", + "messages": [ + { + "role": "system", + "content": "You are a helpful math tutor. Guide the user through the solution step by step." + }, + { "content": "Hello, how are you?", "role": "user" } + ] +}' +``` + + + + +**Expected Output URL** + +```bash +https://some-api-url/models +``` + +## Bedrock Imported Models (Deepseek, Deepseek R1) + +### Deepseek R1 + +This is a separate route, as the chat template is different. + +| Property | Details | +|----------|---------| +| Provider Route | `bedrock/deepseek_r1/{model_arn}` | +| Provider Documentation | [Bedrock Imported Models](https://docs.aws.amazon.com/bedrock/latest/userguide/model-customization-import-model.html), [Deepseek Bedrock Imported Model](https://aws.amazon.com/blogs/machine-learning/deploy-deepseek-r1-distilled-llama-models-with-amazon-bedrock-custom-model-import/) | + + + + +```python +from litellm import completion +import os + +response = completion( + model="bedrock/deepseek_r1/arn:aws:bedrock:us-east-1:086734376398:imported-model/r4c4kewx2s0n", # bedrock/deepseek_r1/{your-model-arn} + messages=[{"role": "user", "content": "Tell me a joke"}], +) +``` + + + + + + +**1. 
Add to config** + +```yaml +model_list: + - model_name: DeepSeek-R1-Distill-Llama-70B + litellm_params: + model: bedrock/deepseek_r1/arn:aws:bedrock:us-east-1:086734376398:imported-model/r4c4kewx2s0n + +``` + +**2. Start proxy** + +```bash +litellm --config /path/to/config.yaml + +# RUNNING at http://0.0.0.0:4000 +``` + +**3. Test it!** + +```bash +curl --location 'http://0.0.0.0:4000/chat/completions' \ + --header 'Authorization: Bearer sk-1234' \ + --header 'Content-Type: application/json' \ + --data '{ + "model": "DeepSeek-R1-Distill-Llama-70B", # 👈 the 'model_name' in config + "messages": [ + { + "role": "user", + "content": "what llm are you" + } + ], + }' +``` + + + + + +### Deepseek (not R1) + +| Property | Details | +|----------|---------| +| Provider Route | `bedrock/llama/{model_arn}` | +| Provider Documentation | [Bedrock Imported Models](https://docs.aws.amazon.com/bedrock/latest/userguide/model-customization-import-model.html), [Deepseek Bedrock Imported Model](https://aws.amazon.com/blogs/machine-learning/deploy-deepseek-r1-distilled-llama-models-with-amazon-bedrock-custom-model-import/) | + + + +Use this route to call Bedrock Imported Models that follow the `llama` Invoke Request / Response spec + + + + + +```python +from litellm import completion +import os + +response = completion( + model="bedrock/llama/arn:aws:bedrock:us-east-1:086734376398:imported-model/r4c4kewx2s0n", # bedrock/llama/{your-model-arn} + messages=[{"role": "user", "content": "Tell me a joke"}], +) +``` + + + + + + +**1. Add to config** + +```yaml +model_list: + - model_name: DeepSeek-R1-Distill-Llama-70B + litellm_params: + model: bedrock/llama/arn:aws:bedrock:us-east-1:086734376398:imported-model/r4c4kewx2s0n + +``` + +**2. Start proxy** + +```bash +litellm --config /path/to/config.yaml + +# RUNNING at http://0.0.0.0:4000 +``` + +**3. 
Test it!** + +```bash +curl --location 'http://0.0.0.0:4000/chat/completions' \ + --header 'Authorization: Bearer sk-1234' \ + --header 'Content-Type: application/json' \ + --data '{ + "model": "DeepSeek-R1-Distill-Llama-70B", # 👈 the 'model_name' in config + "messages": [ + { + "role": "user", + "content": "what llm are you" + } + ], + }' +``` + + + + ## Provisioned throughput models @@ -1405,4 +1914,6 @@ curl http://0.0.0.0:4000/rerank \ ``` - \ No newline at end of file + + + diff --git a/docs/my-website/docs/providers/cerebras.md b/docs/my-website/docs/providers/cerebras.md index 4fabeb31cb..33bef5e107 100644 --- a/docs/my-website/docs/providers/cerebras.md +++ b/docs/my-website/docs/providers/cerebras.md @@ -23,14 +23,16 @@ import os os.environ['CEREBRAS_API_KEY'] = "" response = completion( - model="cerebras/meta/llama3-70b-instruct", + model="cerebras/llama3-70b-instruct", messages=[ { "role": "user", - "content": "What's the weather like in Boston today in Fahrenheit?", + "content": "What's the weather like in Boston today in Fahrenheit? (Write in JSON)", } ], max_tokens=10, + + # The prompt should include JSON if 'json_object' is selected; otherwise, you will get error code 400. response_format={ "type": "json_object" }, seed=123, stop=["\n\n"], @@ -50,16 +52,18 @@ import os os.environ['CEREBRAS_API_KEY'] = "" response = completion( - model="cerebras/meta/llama3-70b-instruct", + model="cerebras/llama3-70b-instruct", messages=[ { "role": "user", - "content": "What's the weather like in Boston today in Fahrenheit?", + "content": "What's the weather like in Boston today in Fahrenheit? (Write in JSON)", } ], stream=True, max_tokens=10, - response_format={ "type": "json_object" }, + + # The prompt should include JSON if 'json_object' is selected; otherwise, you will get error code 400. 
+ response_format={ "type": "json_object" }, seed=123, stop=["\n\n"], temperature=0.2, diff --git a/docs/my-website/docs/providers/cohere.md b/docs/my-website/docs/providers/cohere.md index 1154dc3c4e..6b7a4743ec 100644 --- a/docs/my-website/docs/providers/cohere.md +++ b/docs/my-website/docs/providers/cohere.md @@ -108,7 +108,7 @@ response = embedding( ### Usage - +LiteLLM supports the v1 and v2 clients for Cohere rerank. By default, the `rerank` endpoint uses the v2 client, but you can specify the v1 client by explicitly calling `v1/rerank` diff --git a/docs/my-website/docs/providers/deepseek.md b/docs/my-website/docs/providers/deepseek.md index 9f48e87123..31efb36c21 100644 --- a/docs/my-website/docs/providers/deepseek.md +++ b/docs/my-website/docs/providers/deepseek.md @@ -76,7 +76,7 @@ resp = completion( ) print( - resp.choices[0].message.provider_specific_fields["reasoning_content"] + resp.choices[0].message.reasoning_content ) ``` diff --git a/docs/my-website/docs/providers/gemini.md b/docs/my-website/docs/providers/gemini.md index 0588200465..4a6cfdf1a3 100644 --- a/docs/my-website/docs/providers/gemini.md +++ b/docs/my-website/docs/providers/gemini.md @@ -688,7 +688,9 @@ response = litellm.completion( |-----------------------|--------------------------------------------------------|--------------------------------| | gemini-pro | `completion(model='gemini/gemini-pro', messages)` | `os.environ['GEMINI_API_KEY']` | | gemini-1.5-pro-latest | `completion(model='gemini/gemini-1.5-pro-latest', messages)` | `os.environ['GEMINI_API_KEY']` | -| gemini-pro-vision | `completion(model='gemini/gemini-pro-vision', messages)` | `os.environ['GEMINI_API_KEY']` | +| gemini-2.0-flash | `completion(model='gemini/gemini-2.0-flash', messages)` | `os.environ['GEMINI_API_KEY']` | +| gemini-2.0-flash-exp | `completion(model='gemini/gemini-2.0-flash-exp', messages)` | `os.environ['GEMINI_API_KEY']` | +| gemini-2.0-flash-lite-preview-02-05 | 
`completion(model='gemini/gemini-2.0-flash-lite-preview-02-05', messages)` | `os.environ['GEMINI_API_KEY']` | diff --git a/docs/my-website/docs/providers/infinity.md b/docs/my-website/docs/providers/infinity.md index dd6986dfef..091503bf18 100644 --- a/docs/my-website/docs/providers/infinity.md +++ b/docs/my-website/docs/providers/infinity.md @@ -1,3 +1,6 @@ +import Tabs from '@theme/Tabs'; +import TabItem from '@theme/TabItem'; + # Infinity | Property | Details | @@ -12,6 +15,9 @@ ```python from litellm import rerank +import os + +os.environ["INFINITY_API_BASE"] = "http://localhost:8080" response = rerank( model="infinity/rerank", @@ -65,3 +71,114 @@ curl http://0.0.0.0:4000/rerank \ ``` +## Supported Cohere Rerank API Params + +| Param | Type | Description | +|-------|-------|-------| +| `query` | `str` | The query to rerank the documents against | +| `documents` | `list[str]` | The documents to rerank | +| `top_n` | `int` | The number of documents to return | +| `return_documents` | `bool` | Whether to return the documents in the response | + +### Usage - Return Documents + + + + +```python +response = rerank( + model="infinity/rerank", + query="What is the capital of France?", + documents=["Paris", "London", "Berlin", "Madrid"], + return_documents=True, +) +``` + + + + + +```bash +curl http://0.0.0.0:4000/rerank \ + -H "Authorization: Bearer sk-1234" \ + -H "Content-Type: application/json" \ + -d '{ + "model": "custom-infinity-rerank", + "query": "What is the capital of France?", + "documents": [ + "Paris", + "London", + "Berlin", + "Madrid" + ], + "return_documents": True, + }' +``` + + + + +## Pass Provider-specific Params + +Any unmapped params will be passed to the provider as-is. 
+ + + +```python +from litellm import rerank +import os + +os.environ["INFINITY_API_BASE"] = "http://localhost:8080" + +response = rerank( + model="infinity/rerank", + query="What is the capital of France?", + documents=["Paris", "London", "Berlin", "Madrid"], + raw_scores=True, # 👈 PROVIDER-SPECIFIC PARAM +) +``` + + + + +1. Setup config.yaml + +```yaml +model_list: + - model_name: custom-infinity-rerank + litellm_params: + model: infinity/rerank + api_base: http://localhost:8080 + raw_scores: True # 👈 EITHER SET PROVIDER-SPECIFIC PARAMS HERE OR IN REQUEST BODY +``` + +2. Start litellm + +```bash +litellm --config /path/to/config.yaml + +# RUNNING on http://0.0.0.0:4000 +``` + +3. Test it! + +```bash +curl http://0.0.0.0:4000/rerank \ + -H "Authorization: Bearer sk-1234" \ + -H "Content-Type: application/json" \ + -d '{ + "model": "custom-infinity-rerank", + "query": "What is the capital of the United States?", + "documents": [ + "Carson City is the capital city of the American state of Nevada.", + "The Commonwealth of the Northern Mariana Islands is a group of islands in the Pacific Ocean. Its capital is Saipan.", + "Washington, D.C. is the capital of the United States.", + "Capital punishment has existed in the United States since before it was a country." 
+ ], + "raw_scores": true # 👈 PROVIDER-SPECIFIC PARAM + }' +``` + + + diff --git a/docs/my-website/docs/providers/litellm_proxy.md b/docs/my-website/docs/providers/litellm_proxy.md index 69377b27f1..e204caba0a 100644 --- a/docs/my-website/docs/providers/litellm_proxy.md +++ b/docs/my-website/docs/providers/litellm_proxy.md @@ -3,13 +3,15 @@ import TabItem from '@theme/TabItem'; # LiteLLM Proxy (LLM Gateway) -:::tip -[LiteLLM Providers a **self hosted** proxy server (AI Gateway)](../simple_proxy) to call all the LLMs in the OpenAI format +| Property | Details | +|-------|-------| +| Description | LiteLLM Proxy is an OpenAI-compatible gateway that allows you to interact with multiple LLM providers through a unified API. Simply use the `litellm_proxy/` prefix before the model name to route your requests through the proxy. | +| Provider Route on LiteLLM | `litellm_proxy/` (add this prefix to the model name, to route any requests to litellm_proxy - e.g. `litellm_proxy/your-model-name`) | +| Setup LiteLLM Gateway | [LiteLLM Gateway ↗](../simple_proxy) | +| Supported Endpoints |`/chat/completions`, `/completions`, `/embeddings`, `/audio/speech`, `/audio/transcriptions`, `/images`, `/rerank` | -::: -**[LiteLLM Proxy](../simple_proxy) is OpenAI compatible**, you just need the `litellm_proxy/` prefix before the model ## Required Variables @@ -83,7 +85,76 @@ for chunk in response: print(chunk) ``` +## Embeddings +```python +import litellm + +response = litellm.embedding( + model="litellm_proxy/your-embedding-model", + input="Hello world", + api_base="your-litellm-proxy-url", + api_key="your-litellm-proxy-api-key" +) +``` + +## Image Generation + +```python +import litellm + +response = litellm.image_generation( + model="litellm_proxy/dall-e-3", + prompt="A beautiful sunset over mountains", + api_base="your-litellm-proxy-url", + api_key="your-litellm-proxy-api-key" +) +``` + +## Audio Transcription + +```python +import litellm + +response = litellm.transcription( + 
model="litellm_proxy/whisper-1", + file="your-audio-file", + api_base="your-litellm-proxy-url", + api_key="your-litellm-proxy-api-key" +) +``` + +## Text to Speech + +```python +import litellm + +response = litellm.speech( + model="litellm_proxy/tts-1", + input="Hello world", + api_base="your-litellm-proxy-url", + api_key="your-litellm-proxy-api-key" +) +``` + +## Rerank + +```python +import litellm + +response = litellm.rerank( + model="litellm_proxy/rerank-english-v2.0", + query="What is machine learning?", + documents=[ + "Machine learning is a field of study in artificial intelligence", + "Biology is the study of living organisms" + ], + api_base="your-litellm-proxy-url", + api_key="your-litellm-proxy-api-key" +) +``` ## **Usage with Langchain, LLamaindex, OpenAI Js, Anthropic SDK, Instructor** #### [Follow this doc to see how to use litellm proxy with langchain, llamaindex, anthropic etc](../proxy/user_keys) \ No newline at end of file diff --git a/docs/my-website/docs/providers/lm_studio.md b/docs/my-website/docs/providers/lm_studio.md index ace138a532..45c546ada6 100644 --- a/docs/my-website/docs/providers/lm_studio.md +++ b/docs/my-website/docs/providers/lm_studio.md @@ -69,7 +69,7 @@ for chunk in response: ## Usage with LiteLLM Proxy Server -Here's how to call a XAI model with the LiteLLM Proxy Server +Here's how to call a LM Studio model with the LiteLLM Proxy Server 1. 
Modify the config.yaml diff --git a/docs/my-website/docs/providers/ollama.md b/docs/my-website/docs/providers/ollama.md index d850b8ae6e..848be2beb7 100644 --- a/docs/my-website/docs/providers/ollama.md +++ b/docs/my-website/docs/providers/ollama.md @@ -238,6 +238,76 @@ Ollama supported models: https://github.com/ollama/ollama | Nous-Hermes 13B | `completion(model='ollama/nous-hermes:13b', messages, api_base="http://localhost:11434", stream=True)` | | Wizard Vicuna Uncensored | `completion(model='ollama/wizard-vicuna', messages, api_base="http://localhost:11434", stream=True)` | + +### JSON Schema support + + + + +```python +from litellm import completion + +response = completion( + model="ollama_chat/deepseek-r1", + messages=[{ "content": "respond in 20 words. who are you?","role": "user"}], + response_format={"type": "json_schema", "json_schema": {"schema": {"type": "object", "properties": {"name": {"type": "string"}}}}}, +) +print(response) +``` + + + +1. Setup config.yaml + +```yaml +model_list: + - model_name: "deepseek-r1" + litellm_params: + model: "ollama_chat/deepseek-r1" + api_base: "http://localhost:11434" +``` + +2. Start proxy + +```bash +litellm --config /path/to/config.yaml + +# RUNNING ON http://0.0.0.0:4000 +``` + +3. Test it! + +```python +from pydantic import BaseModel +from openai import OpenAI + +client = OpenAI( + api_key="anything", # 👈 PROXY KEY (can be anything, if master_key not set) + base_url="http://0.0.0.0:4000" # 👈 PROXY BASE URL +) + +class Step(BaseModel): + explanation: str + output: str + +class MathReasoning(BaseModel): + steps: list[Step] + final_answer: str + +completion = client.beta.chat.completions.parse( + model="deepseek-r1", + messages=[ + {"role": "system", "content": "You are a helpful math tutor. 
Guide the user through the solution step by step."}, + {"role": "user", "content": "how can I solve 8x + 7 = -23"} + ], + response_format=MathReasoning, +) + +math_reasoning = completion.choices[0].message.parsed +``` + + + ## Ollama Vision Models | Model Name | Function Call | |------------------|--------------------------------------| diff --git a/docs/my-website/docs/providers/perplexity.md b/docs/my-website/docs/providers/perplexity.md index 446f22b1f2..620a7640ad 100644 --- a/docs/my-website/docs/providers/perplexity.md +++ b/docs/my-website/docs/providers/perplexity.md @@ -64,71 +64,7 @@ All models listed here https://docs.perplexity.ai/docs/model-cards are supported -## Return citations - -Perplexity supports returning citations via `return_citations=True`. [Perplexity Docs](https://docs.perplexity.ai/reference/post_chat_completions). Note: Perplexity has this feature in **closed beta**, so you need them to grant you access to get citations from their API. - -If perplexity returns citations, LiteLLM will pass it straight through. - :::info -For passing more provider-specific, [go here](../completion/provider_specific_params.md) +For more information about passing provider-specific parameters, [go here](../completion/provider_specific_params.md) ::: - - - - -```python -from litellm import completion -import os - -os.environ['PERPLEXITYAI_API_KEY'] = "" -response = completion( - model="perplexity/mistral-7b-instruct", - messages=messages, - return_citations=True -) -print(response) -``` - - - - -1. Add perplexity to config.yaml - -```yaml -model_list: - - model_name: "perplexity-model" - litellm_params: - model: "llama-3.1-sonar-small-128k-online" - api_key: os.environ/PERPLEXITY_API_KEY -``` - -2. Start proxy - -```bash -litellm --config /path/to/config.yaml -``` - -3. Test it! 
- -```bash -curl -L -X POST 'http://0.0.0.0:4000/chat/completions' \ --H 'Content-Type: application/json' \ --H 'Authorization: Bearer sk-1234' \ --d '{ - "model": "perplexity-model", - "messages": [ - { - "role": "user", - "content": "Who won the world cup in 2022?" - } - ], - "return_citations": true -}' -``` - -[**Call w/ OpenAI SDK, Langchain, Instructor, etc.**](../proxy/user_keys.md#chatcompletions) - - - diff --git a/docs/my-website/docs/providers/sambanova.md b/docs/my-website/docs/providers/sambanova.md index 9fa6ce8b60..7dd837e1b0 100644 --- a/docs/my-website/docs/providers/sambanova.md +++ b/docs/my-website/docs/providers/sambanova.md @@ -2,11 +2,11 @@ import Tabs from '@theme/Tabs'; import TabItem from '@theme/TabItem'; # Sambanova -https://community.sambanova.ai/t/create-chat-completion-api/ +https://cloud.sambanova.ai/ :::tip -**We support ALL Sambanova models, just set `model=sambanova/` as a prefix when sending litellm requests. For the complete supported model list, visit https://sambanova.ai/technology/models ** +**We support ALL Sambanova models, just set `model=sambanova/` as a prefix when sending litellm requests. For the complete supported model list, visit https://docs.sambanova.ai/cloud/docs/get-started/supported-models ** ::: @@ -27,12 +27,11 @@ response = completion( messages=[ { "role": "user", - "content": "What do you know about sambanova.ai", + "content": "What do you know about sambanova.ai. Give your response in json format", } ], max_tokens=10, response_format={ "type": "json_object" }, - seed=123, stop=["\n\n"], temperature=0.2, top_p=0.9, @@ -54,13 +53,12 @@ response = completion( messages=[ { "role": "user", - "content": "What do you know about sambanova.ai", + "content": "What do you know about sambanova.ai. 
Give your response in json format", } ], stream=True, max_tokens=10, response_format={ "type": "json_object" }, - seed=123, stop=["\n\n"], temperature=0.2, top_p=0.9, diff --git a/docs/my-website/docs/providers/topaz.md b/docs/my-website/docs/providers/topaz.md new file mode 100644 index 0000000000..018d269684 --- /dev/null +++ b/docs/my-website/docs/providers/topaz.md @@ -0,0 +1,27 @@ +# Topaz + +| Property | Details | +|-------|-------| +| Description | Professional-grade photo and video editing powered by AI. | +| Provider Route on LiteLLM | `topaz/` | +| Provider Doc | [Topaz ↗](https://www.topazlabs.com/enhance-api) | +| API Endpoint for Provider | https://api.topazlabs.com | +| Supported OpenAI Endpoints | `/image/variations` | + + +## Quick Start + +```python +from litellm import image_variation +import os + +os.environ["TOPAZ_API_KEY"] = "" +response = image_variation( + model="topaz/Standard V2", image=image_url +) +``` + +## Supported OpenAI Params + +- `response_format` +- `size` (widthxheight) diff --git a/docs/my-website/docs/providers/vertex.md b/docs/my-website/docs/providers/vertex.md index cb8c031c06..10ac13ecaf 100644 --- a/docs/my-website/docs/providers/vertex.md +++ b/docs/my-website/docs/providers/vertex.md @@ -404,14 +404,16 @@ curl http://localhost:4000/v1/chat/completions \ If this was your initial VertexAI Grounding code, ```python -import vertexai +import vertexai +from vertexai.generative_models import GenerativeModel, GenerationConfig, Tool, grounding + vertexai.init(project=project_id, location="us-central1") model = GenerativeModel("gemini-1.5-flash-001") # Use Google Search for grounding -tool = Tool.from_google_search_retrieval(grounding.GoogleSearchRetrieval(disable_attributon=False)) +tool = Tool.from_google_search_retrieval(grounding.GoogleSearchRetrieval()) prompt = "When is the next total solar eclipse in US?" 
response = model.generate_content( @@ -428,7 +430,7 @@ print(response) then, this is what it looks like now ```python -from litellm import completion +from litellm import completion # !gcloud auth application-default login - run this to add vertex credentials to your env @@ -852,6 +854,7 @@ litellm.vertex_location = "us-central1 # Your Location | claude-3-5-sonnet@20240620 | `completion('vertex_ai/claude-3-5-sonnet@20240620', messages)` | | claude-3-sonnet@20240229 | `completion('vertex_ai/claude-3-sonnet@20240229', messages)` | | claude-3-haiku@20240307 | `completion('vertex_ai/claude-3-haiku@20240307', messages)` | +| claude-3-7-sonnet@20250219 | `completion('vertex_ai/claude-3-7-sonnet@20250219', messages)` | ### Usage @@ -926,6 +929,119 @@ curl --location 'http://0.0.0.0:4000/chat/completions' \ + +### Usage - `thinking` / `reasoning_content` + + + + + +```python +from litellm import completion + +resp = completion( + model="vertex_ai/claude-3-7-sonnet-20250219", + messages=[{"role": "user", "content": "What is the capital of France?"}], + thinking={"type": "enabled", "budget_tokens": 1024}, +) + +``` + + + + + +1. Setup config.yaml + +```yaml +- model_name: claude-3-7-sonnet-20250219 + litellm_params: + model: vertex_ai/claude-3-7-sonnet-20250219 + vertex_ai_project: "my-test-project" + vertex_ai_location: "us-west-1" +``` + +2. Start proxy + +```bash +litellm --config /path/to/config.yaml +``` + +3. Test it! 
+ +```bash +curl http://0.0.0.0:4000/v1/chat/completions \ + -H "Content-Type: application/json" \ + -H "Authorization: Bearer " \ + -d '{ + "model": "claude-3-7-sonnet-20250219", + "messages": [{"role": "user", "content": "What is the capital of France?"}], + "thinking": {"type": "enabled", "budget_tokens": 1024} + }' +``` + + + + + +**Expected Response** + +```python +ModelResponse( + id='chatcmpl-c542d76d-f675-4e87-8e5f-05855f5d0f5e', + created=1740470510, + model='claude-3-7-sonnet-20250219', + object='chat.completion', + system_fingerprint=None, + choices=[ + Choices( + finish_reason='stop', + index=0, + message=Message( + content="The capital of France is Paris.", + role='assistant', + tool_calls=None, + function_call=None, + provider_specific_fields={ + 'citations': None, + 'thinking_blocks': [ + { + 'type': 'thinking', + 'thinking': 'The capital of France is Paris. This is a very straightforward factual question.', + 'signature': 'EuYBCkQYAiJAy6...' + } + ] + } + ), + thinking_blocks=[ + { + 'type': 'thinking', + 'thinking': 'The capital of France is Paris. This is a very straightforward factual question.', + 'signature': 'EuYBCkQYAiJAy6AGB...' + } + ], + reasoning_content='The capital of France is Paris. This is a very straightforward factual question.' + ) + ], + usage=Usage( + completion_tokens=68, + prompt_tokens=42, + total_tokens=110, + completion_tokens_details=None, + prompt_tokens_details=PromptTokensDetailsWrapper( + audio_tokens=None, + cached_tokens=0, + text_tokens=None, + image_tokens=None + ), + cache_creation_input_tokens=0, + cache_read_input_tokens=0 + ) +) +``` + + + ## Llama 3 API | Model Name | Function Call | @@ -1572,6 +1688,14 @@ assert isinstance( Pass any file supported by Vertex AI, through LiteLLM. 
+LiteLLM Supports the following image types passed in url + +``` +Images with Cloud Storage URIs - gs://cloud-samples-data/generative-ai/image/boats.jpeg +Images with direct links - https://storage.googleapis.com/github-repo/img/gemini/intro/landmark3.jpg +Videos with direct links - https://storage.googleapis.com/github-repo/img/gemini/multimodality_usecases_overview/pixel8.mp4 +Base64 Encoded Local Images +``` diff --git a/docs/my-website/docs/providers/vllm.md b/docs/my-website/docs/providers/vllm.md index 9cc0ad487e..b5987167ec 100644 --- a/docs/my-website/docs/providers/vllm.md +++ b/docs/my-website/docs/providers/vllm.md @@ -157,6 +157,98 @@ curl -L -X POST 'http://0.0.0.0:4000/embeddings' \ +## Send Video URL to VLLM + +Example Implementation from VLLM [here](https://github.com/vllm-project/vllm/pull/10020) + +There are two ways to send a video url to VLLM: + +1. Pass the video url directly + +``` +{"type": "video_url", "video_url": {"url": video_url}}, +``` + +2. Pass the video data as base64 + +``` +{"type": "video_url", "video_url": {"url": f"data:video/mp4;base64,{video_data_base64}"}} +``` + + + + +```python +from litellm import completion + +response = completion( + model="hosted_vllm/qwen", # pass the vllm model name + messages=[ + { + "role": "user", + "content": [ + { + "type": "text", + "text": "Summarize the following video" + }, + { + "type": "video_url", + "video_url": { + "url": "https://www.youtube.com/watch?v=dQw4w9WgXcQ" + } + } + ] + } + ], + api_base="https://hosted-vllm-api.co") + +print(response) +``` + + + + +1. Setup config.yaml + +```yaml +model_list: + - model_name: my-model + litellm_params: + model: hosted_vllm/qwen # add hosted_vllm/ prefix to route as OpenAI provider + api_base: https://hosted-vllm-api.co # add api base for OpenAI compatible provider +``` + +2. Start the proxy + +```bash +$ litellm --config /path/to/config.yaml + +# RUNNING on http://0.0.0.0:4000 +``` + +3. Test it! 
+ +```bash +curl -X POST http://0.0.0.0:4000/chat/completions \ +-H "Authorization: Bearer sk-1234" \ +-H "Content-Type: application/json" \ +-d '{ + "model": "my-model", + "messages": [ + {"role": "user", "content": + [ + {"type": "text", "text": "Summarize the following video"}, + {"type": "video_url", "video_url": {"url": "https://www.youtube.com/watch?v=dQw4w9WgXcQ"}} + ] + } + ] +}' +``` + + + + + ## (Deprecated) for `vllm pip package` ### Using - `litellm.completion` diff --git a/docs/my-website/docs/providers/voyage.md b/docs/my-website/docs/providers/voyage.md index a56a1408ea..6ab6b1846f 100644 --- a/docs/my-website/docs/providers/voyage.md +++ b/docs/my-website/docs/providers/voyage.md @@ -14,7 +14,7 @@ import os os.environ['VOYAGE_API_KEY'] = "" response = embedding( - model="voyage/voyage-01", + model="voyage/voyage-3-large", input=["good morning from litellm"], ) print(response) @@ -23,13 +23,20 @@ print(response) ## Supported Models All models listed here https://docs.voyageai.com/embeddings/#models-and-specifics are supported -| Model Name | Function Call | -|--------------------------|------------------------------------------------------------------------------------------------------------------------------------------------------------------| -| voyage-2 | `embedding(model="voyage/voyage-2", input)` | -| voyage-large-2 | `embedding(model="voyage/voyage-large-2", input)` | -| voyage-law-2 | `embedding(model="voyage/voyage-law-2", input)` | -| voyage-code-2 | `embedding(model="voyage/voyage-code-2", input)` | +| Model Name | Function Call | +|-------------------------|------------------------------------------------------------| +| voyage-3-large | `embedding(model="voyage/voyage-3-large", input)` | +| voyage-3 | `embedding(model="voyage/voyage-3", input)` | +| voyage-3-lite | `embedding(model="voyage/voyage-3-lite", input)` | +| voyage-code-3 | `embedding(model="voyage/voyage-code-3", input)` | +| voyage-finance-2 | 
`embedding(model="voyage/voyage-finance-2", input)` | +| voyage-law-2 | `embedding(model="voyage/voyage-law-2", input)` | +| voyage-code-2 | `embedding(model="voyage/voyage-code-2", input)` | +| voyage-multilingual-2 | `embedding(model="voyage/voyage-multilingual-2 ", input)` | +| voyage-large-2-instruct | `embedding(model="voyage/voyage-large-2-instruct", input)` | +| voyage-large-2 | `embedding(model="voyage/voyage-large-2", input)` | +| voyage-2 | `embedding(model="voyage/voyage-2", input)` | | voyage-lite-02-instruct | `embedding(model="voyage/voyage-lite-02-instruct", input)` | -| voyage-01 | `embedding(model="voyage/voyage-01", input)` | -| voyage-lite-01 | `embedding(model="voyage/voyage-lite-01", input)` | -| voyage-lite-01-instruct | `embedding(model="voyage/voyage-lite-01-instruct", input)` | \ No newline at end of file +| voyage-01 | `embedding(model="voyage/voyage-01", input)` | +| voyage-lite-01 | `embedding(model="voyage/voyage-lite-01", input)` | +| voyage-lite-01-instruct | `embedding(model="voyage/voyage-lite-01-instruct", input)` | diff --git a/docs/my-website/docs/providers/xai.md b/docs/my-website/docs/providers/xai.md index 131c02b3dc..3faf7d1052 100644 --- a/docs/my-website/docs/providers/xai.md +++ b/docs/my-website/docs/providers/xai.md @@ -1,13 +1,13 @@ import Tabs from '@theme/Tabs'; import TabItem from '@theme/TabItem'; -# XAI +# xAI https://docs.x.ai/docs :::tip -**We support ALL XAI models, just set `model=xai/` as a prefix when sending litellm requests** +**We support ALL xAI models, just set `model=xai/` as a prefix when sending litellm requests** ::: @@ -24,7 +24,7 @@ import os os.environ['XAI_API_KEY'] = "" response = completion( - model="xai/grok-beta", + model="xai/grok-2-latest", messages=[ { "role": "user", @@ -51,7 +51,7 @@ import os os.environ['XAI_API_KEY'] = "" response = completion( - model="xai/grok-beta", + model="xai/grok-2-latest", messages=[ { "role": "user", @@ -74,6 +74,35 @@ for chunk in response: print(chunk) ``` 
+## Sample Usage - Vision +```python +import os +from litellm import completion + +os.environ["XAI_API_KEY"] = "your-api-key" + +response = completion( + model="xai/grok-2-latest", + messages=[ + { + "role": "user", + "content": [ + { + "type": "image_url", + "image_url": { + "url": "https://science.nasa.gov/wp-content/uploads/2023/09/web-first-images-release.png", + "detail": "high", + }, + }, + { + "type": "text", + "text": "What's in this image?", + }, + ], + }, + ], +) +``` ## Usage with LiteLLM Proxy Server diff --git a/docs/my-website/docs/proxy/architecture.md b/docs/my-website/docs/proxy/architecture.md index 832fd266b6..2b83583ed9 100644 --- a/docs/my-website/docs/proxy/architecture.md +++ b/docs/my-website/docs/proxy/architecture.md @@ -36,7 +36,7 @@ import TabItem from '@theme/TabItem'; - Virtual Key Rate Limit - User Rate Limit - Team Limit - - The `_PROXY_track_cost_callback` updates spend / usage in the LiteLLM database. [Here is everything tracked in the DB per request](https://github.com/BerriAI/litellm/blob/ba41a72f92a9abf1d659a87ec880e8e319f87481/schema.prisma#L172) + - The `_ProxyDBLogger` updates spend / usage in the LiteLLM database. 
[Here is everything tracked in the DB per request](https://github.com/BerriAI/litellm/blob/ba41a72f92a9abf1d659a87ec880e8e319f87481/schema.prisma#L172) ## Frequently Asked Questions diff --git a/docs/my-website/docs/proxy/bucket.md b/docs/my-website/docs/proxy/bucket.md deleted file mode 100644 index d1b9e60769..0000000000 --- a/docs/my-website/docs/proxy/bucket.md +++ /dev/null @@ -1,154 +0,0 @@ - -import Image from '@theme/IdealImage'; -import Tabs from '@theme/Tabs'; -import TabItem from '@theme/TabItem'; - -# Logging GCS, s3 Buckets - -LiteLLM Supports Logging to the following Cloud Buckets -- (Enterprise) ✨ [Google Cloud Storage Buckets](#logging-proxy-inputoutput-to-google-cloud-storage-buckets) -- (Free OSS) [Amazon s3 Buckets](#logging-proxy-inputoutput---s3-buckets) - -## Google Cloud Storage Buckets - -Log LLM Logs to [Google Cloud Storage Buckets](https://cloud.google.com/storage?hl=en) - -:::info - -✨ This is an Enterprise only feature [Get Started with Enterprise here](https://calendly.com/d/4mp-gd3-k5k/litellm-1-1-onboarding-chat) - -::: - - -| Property | Details | -|----------|---------| -| Description | Log LLM Input/Output to cloud storage buckets | -| Load Test Benchmarks | [Benchmarks](https://docs.litellm.ai/docs/benchmarks) | -| Google Docs on Cloud Storage | [Google Cloud Storage](https://cloud.google.com/storage?hl=en) | - - - -### Usage - -1. Add `gcs_bucket` to LiteLLM Config.yaml -```yaml -model_list: -- litellm_params: - api_base: https://openai-function-calling-workers.tasslexyz.workers.dev/ - api_key: my-fake-key - model: openai/my-fake-model - model_name: fake-openai-endpoint - -litellm_settings: - callbacks: ["gcs_bucket"] # 👈 KEY CHANGE # 👈 KEY CHANGE -``` - -2. Set required env variables - -```shell -GCS_BUCKET_NAME="" -GCS_PATH_SERVICE_ACCOUNT="/Users/ishaanjaffer/Downloads/adroit-crow-413218-a956eef1a2a8.json" # Add path to service account.json -``` - -3. Start Proxy - -``` -litellm --config /path/to/config.yaml -``` - -4. 
Test it! - -```bash -curl --location 'http://0.0.0.0:4000/chat/completions' \ ---header 'Content-Type: application/json' \ ---data ' { - "model": "fake-openai-endpoint", - "messages": [ - { - "role": "user", - "content": "what llm are you" - } - ], - } -' -``` - - -### Expected Logs on GCS Buckets - - - -### Fields Logged on GCS Buckets - -[**The standard logging object is logged on GCS Bucket**](../proxy/logging) - - -### Getting `service_account.json` from Google Cloud Console - -1. Go to [Google Cloud Console](https://console.cloud.google.com/) -2. Search for IAM & Admin -3. Click on Service Accounts -4. Select a Service Account -5. Click on 'Keys' -> Add Key -> Create New Key -> JSON -6. Save the JSON file and add the path to `GCS_PATH_SERVICE_ACCOUNT` - - -## s3 Buckets - -We will use the `--config` to set - -- `litellm.success_callback = ["s3"]` - -This will log all successfull LLM calls to s3 Bucket - -**Step 1** Set AWS Credentials in .env - -```shell -AWS_ACCESS_KEY_ID = "" -AWS_SECRET_ACCESS_KEY = "" -AWS_REGION_NAME = "" -``` - -**Step 2**: Create a `config.yaml` file and set `litellm_settings`: `success_callback` - -```yaml -model_list: - - model_name: gpt-3.5-turbo - litellm_params: - model: gpt-3.5-turbo -litellm_settings: - success_callback: ["s3"] - s3_callback_params: - s3_bucket_name: logs-bucket-litellm # AWS Bucket Name for S3 - s3_region_name: us-west-2 # AWS Region Name for S3 - s3_aws_access_key_id: os.environ/AWS_ACCESS_KEY_ID # us os.environ/ to pass environment variables. 
This is AWS Access Key ID for S3 - s3_aws_secret_access_key: os.environ/AWS_SECRET_ACCESS_KEY # AWS Secret Access Key for S3 - s3_path: my-test-path # [OPTIONAL] set path in bucket you want to write logs to - s3_endpoint_url: https://s3.amazonaws.com # [OPTIONAL] S3 endpoint URL, if you want to use Backblaze/cloudflare s3 buckets -``` - -**Step 3**: Start the proxy, make a test request - -Start proxy - -```shell -litellm --config config.yaml --debug -``` - -Test Request - -```shell -curl --location 'http://0.0.0.0:4000/chat/completions' \ - --header 'Content-Type: application/json' \ - --data ' { - "model": "Azure OpenAI GPT-4 East", - "messages": [ - { - "role": "user", - "content": "what llm are you" - } - ] - }' -``` - -Your logs should be available on the specified s3 Bucket diff --git a/docs/my-website/docs/proxy/caching.md b/docs/my-website/docs/proxy/caching.md index 3f5342c7e6..b60b9966ba 100644 --- a/docs/my-website/docs/proxy/caching.md +++ b/docs/my-website/docs/proxy/caching.md @@ -2,7 +2,6 @@ import Tabs from '@theme/Tabs'; import TabItem from '@theme/TabItem'; # Caching -Cache LLM Responses :::note @@ -10,14 +9,19 @@ For OpenAI/Anthropic Prompt Caching, go [here](../completion/prompt_caching.md) ::: -LiteLLM supports: +Cache LLM Responses. LiteLLM's caching system stores and reuses LLM responses to save costs and reduce latency. When you make the same request twice, the cached response is returned instead of calling the LLM API again. 
+ + + +### Supported Caches + - In Memory Cache - Redis Cache - Qdrant Semantic Cache - Redis Semantic Cache - s3 Bucket Cache -## Quick Start - Redis, s3 Cache, Semantic Cache +## Quick Start @@ -369,9 +373,9 @@ $ litellm --config /path/to/config.yaml +## Usage - -## Using Caching - /chat/completions +### Basic @@ -416,6 +420,239 @@ curl --location 'http://0.0.0.0:4000/embeddings' \ +### Dynamic Cache Controls + +| Parameter | Type | Description | +|-----------|------|-------------| +| `ttl` | *Optional(int)* | Will cache the response for the user-defined amount of time (in seconds) | +| `s-maxage` | *Optional(int)* | Will only accept cached responses that are within user-defined range (in seconds) | +| `no-cache` | *Optional(bool)* | Will not store the response in cache. | +| `no-store` | *Optional(bool)* | Will not cache the response | +| `namespace` | *Optional(str)* | Will cache the response under a user-defined namespace | + +Each cache parameter can be controlled on a per-request basis. Here are examples for each parameter: + +### `ttl` + +Set how long (in seconds) to cache a response. + + + + +```python +from openai import OpenAI + +client = OpenAI( + api_key="your-api-key", + base_url="http://0.0.0.0:4000" +) + +chat_completion = client.chat.completions.create( + messages=[{"role": "user", "content": "Hello"}], + model="gpt-3.5-turbo", + extra_body={ + "cache": { + "ttl": 300 # Cache response for 5 minutes + } + } +) +``` + + + + +```shell +curl http://localhost:4000/v1/chat/completions \ + -H "Content-Type: application/json" \ + -H "Authorization: Bearer sk-1234" \ + -d '{ + "model": "gpt-3.5-turbo", + "cache": {"ttl": 300}, + "messages": [ + {"role": "user", "content": "Hello"} + ] + }' +``` + + + +### `s-maxage` + +Only accept cached responses that are within the specified age (in seconds). 
+ + + + +```python +from openai import OpenAI + +client = OpenAI( + api_key="your-api-key", + base_url="http://0.0.0.0:4000" +) + +chat_completion = client.chat.completions.create( + messages=[{"role": "user", "content": "Hello"}], + model="gpt-3.5-turbo", + extra_body={ + "cache": { + "s-maxage": 600 # Only use cache if less than 10 minutes old + } + } +) +``` + + + + +```shell +curl http://localhost:4000/v1/chat/completions \ + -H "Content-Type: application/json" \ + -H "Authorization: Bearer sk-1234" \ + -d '{ + "model": "gpt-3.5-turbo", + "cache": {"s-maxage": 600}, + "messages": [ + {"role": "user", "content": "Hello"} + ] + }' +``` + + + +### `no-cache` +Force a fresh response, bypassing the cache. + + + + +```python +from openai import OpenAI + +client = OpenAI( + api_key="your-api-key", + base_url="http://0.0.0.0:4000" +) + +chat_completion = client.chat.completions.create( + messages=[{"role": "user", "content": "Hello"}], + model="gpt-3.5-turbo", + extra_body={ + "cache": { + "no-cache": True # Skip cache check, get fresh response + } + } +) +``` + + + + +```shell +curl http://localhost:4000/v1/chat/completions \ + -H "Content-Type: application/json" \ + -H "Authorization: Bearer sk-1234" \ + -d '{ + "model": "gpt-3.5-turbo", + "cache": {"no-cache": true}, + "messages": [ + {"role": "user", "content": "Hello"} + ] + }' +``` + + + +### `no-store` + +Will not store the response in cache. 
+ + + + + +```python +from openai import OpenAI + +client = OpenAI( + api_key="your-api-key", + base_url="http://0.0.0.0:4000" +) + +chat_completion = client.chat.completions.create( + messages=[{"role": "user", "content": "Hello"}], + model="gpt-3.5-turbo", + extra_body={ + "cache": { + "no-store": True # Don't cache this response + } + } +) +``` + + + + +```shell +curl http://localhost:4000/v1/chat/completions \ + -H "Content-Type: application/json" \ + -H "Authorization: Bearer sk-1234" \ + -d '{ + "model": "gpt-3.5-turbo", + "cache": {"no-store": true}, + "messages": [ + {"role": "user", "content": "Hello"} + ] + }' +``` + + + +### `namespace` +Store the response under a specific cache namespace. + + + + +```python +from openai import OpenAI + +client = OpenAI( + api_key="your-api-key", + base_url="http://0.0.0.0:4000" +) + +chat_completion = client.chat.completions.create( + messages=[{"role": "user", "content": "Hello"}], + model="gpt-3.5-turbo", + extra_body={ + "cache": { + "namespace": "my-custom-namespace" # Store in custom namespace + } + } +) +``` + + + + +```shell +curl http://localhost:4000/v1/chat/completions \ + -H "Content-Type: application/json" \ + -H "Authorization: Bearer sk-1234" \ + -d '{ + "model": "gpt-3.5-turbo", + "cache": {"namespace": "my-custom-namespace"}, + "messages": [ + {"role": "user", "content": "Hello"} + ] + }' +``` + + + + + ## Set cache for proxy, but not on the actual llm api call Use this if you just want to enable features like rate limiting, and loadbalancing across multiple instances. @@ -501,253 +738,6 @@ litellm_settings: # /chat/completions, /completions, /embeddings, /audio/transcriptions ``` -### **Turn on / off caching per request. ** - -The proxy support 4 cache-controls: - -- `ttl`: *Optional(int)* - Will cache the response for the user-defined amount of time (in seconds). -- `s-maxage`: *Optional(int)* Will only accept cached responses that are within user-defined range (in seconds). 
-- `no-cache`: *Optional(bool)* Will not return a cached response, but instead call the actual endpoint. -- `no-store`: *Optional(bool)* Will not cache the response. - -[Let us know if you need more](https://github.com/BerriAI/litellm/issues/1218) - -**Turn off caching** - -Set `no-cache=True`, this will not return a cached response - - - - -```python -import os -from openai import OpenAI - -client = OpenAI( - # This is the default and can be omitted - api_key=os.environ.get("OPENAI_API_KEY"), - base_url="http://0.0.0.0:4000" -) - -chat_completion = client.chat.completions.create( - messages=[ - { - "role": "user", - "content": "Say this is a test", - } - ], - model="gpt-3.5-turbo", - extra_body = { # OpenAI python accepts extra args in extra_body - cache: { - "no-cache": True # will not return a cached response - } - } -) -``` - - - - -```shell -curl http://localhost:4000/v1/chat/completions \ - -H "Content-Type: application/json" \ - -H "Authorization: Bearer sk-1234" \ - -d '{ - "model": "gpt-3.5-turbo", - "cache": {"no-cache": True}, - "messages": [ - {"role": "user", "content": "Say this is a test"} - ] - }' -``` - - - - - -**Turn on caching** - -By default cache is always on - - - - -```python -import os -from openai import OpenAI - -client = OpenAI( - # This is the default and can be omitted - api_key=os.environ.get("OPENAI_API_KEY"), - base_url="http://0.0.0.0:4000" -) - -chat_completion = client.chat.completions.create( - messages=[ - { - "role": "user", - "content": "Say this is a test", - } - ], - model="gpt-3.5-turbo" -) -``` - - - - -```shell -curl http://localhost:4000/v1/chat/completions \ - -H "Content-Type: application/json" \ - -H "Authorization: Bearer sk-1234" \ - -d '{ - "model": "gpt-3.5-turbo", - "messages": [ - {"role": "user", "content": "Say this is a test"} - ] - }' -``` - - - - - -**Set `ttl`** - -Set `ttl=600`, this will caches response for 10 minutes (600 seconds) - - - - -```python -import os -from openai import OpenAI - -client = 
OpenAI( - # This is the default and can be omitted - api_key=os.environ.get("OPENAI_API_KEY"), - base_url="http://0.0.0.0:4000" -) - -chat_completion = client.chat.completions.create( - messages=[ - { - "role": "user", - "content": "Say this is a test", - } - ], - model="gpt-3.5-turbo", - extra_body = { # OpenAI python accepts extra args in extra_body - cache: { - "ttl": 600 # caches response for 10 minutes - } - } -) -``` - - - - -```shell -curl http://localhost:4000/v1/chat/completions \ - -H "Content-Type: application/json" \ - -H "Authorization: Bearer sk-1234" \ - -d '{ - "model": "gpt-3.5-turbo", - "cache": {"ttl": 600}, - "messages": [ - {"role": "user", "content": "Say this is a test"} - ] - }' -``` - - - - - - - -**Set `s-maxage`** - -Set `s-maxage`, this will only get responses cached within last 10 minutes - - - - -```python -import os -from openai import OpenAI - -client = OpenAI( - # This is the default and can be omitted - api_key=os.environ.get("OPENAI_API_KEY"), - base_url="http://0.0.0.0:4000" -) - -chat_completion = client.chat.completions.create( - messages=[ - { - "role": "user", - "content": "Say this is a test", - } - ], - model="gpt-3.5-turbo", - extra_body = { # OpenAI python accepts extra args in extra_body - cache: { - "s-maxage": 600 # only get responses cached within last 10 minutes - } - } -) -``` - - - - -```shell -curl http://localhost:4000/v1/chat/completions \ - -H "Content-Type: application/json" \ - -H "Authorization: Bearer sk-1234" \ - -d '{ - "model": "gpt-3.5-turbo", - "cache": {"s-maxage": 600}, - "messages": [ - {"role": "user", "content": "Say this is a test"} - ] - }' -``` - - - - - - -### Turn on / off caching per Key. - -1. 
Add cache params when creating a key [full list](#turn-on--off-caching-per-key) - -```bash -curl -X POST 'http://0.0.0.0:4000/key/generate' \ --H 'Authorization: Bearer sk-1234' \ --H 'Content-Type: application/json' \ --d '{ - "user_id": "222", - "metadata": { - "cache": { - "no-cache": true - } - } -}' -``` - -2. Test it! - -```bash -curl -X POST 'http://localhost:4000/chat/completions' \ --H 'Content-Type: application/json' \ --H 'Authorization: Bearer ' \ --d '{"model": "gpt-3.5-turbo", "messages": [{"role": "user", "content": "bom dia"}]}' -``` - ### Deleting Cache Keys - `/cache/delete` In order to delete a cache key, send a request to `/cache/delete` with the `keys` you want to delete diff --git a/docs/my-website/docs/proxy/call_hooks.md b/docs/my-website/docs/proxy/call_hooks.md index 6651393efe..8ea220cfa1 100644 --- a/docs/my-website/docs/proxy/call_hooks.md +++ b/docs/my-website/docs/proxy/call_hooks.md @@ -139,9 +139,6 @@ class MyCustomHandler(CustomLogger): # https://docs.litellm.ai/docs/observabilit #### ASYNC #### - async def async_log_stream_event(self, kwargs, response_obj, start_time, end_time): - pass - async def async_log_pre_api_call(self, model, messages, kwargs): pass diff --git a/docs/my-website/docs/proxy/config_settings.md b/docs/my-website/docs/proxy/config_settings.md index 6d5c80c691..9e24437449 100644 --- a/docs/my-website/docs/proxy/config_settings.md +++ b/docs/my-website/docs/proxy/config_settings.md @@ -139,6 +139,7 @@ general_settings: | disable_end_user_cost_tracking_prometheus_only | boolean | If true, turns off end user cost tracking on prometheus metrics only. | | key_generation_settings | object | Restricts who can generate keys. [Further docs](./virtual_keys.md#restricting-key-generation) | | disable_add_transform_inline_image_block | boolean | For Fireworks AI models - if true, turns off the auto-add of `#transform=inline` to the url of the image_url, if the model is not a vision model. 
| +| disable_hf_tokenizer_download | boolean | If true, it defaults to using the openai tokenizer for all models (including huggingface models). | ### general_settings - Reference @@ -177,6 +178,7 @@ general_settings: | service_account_settings | List[Dict[str, Any]] | Set `service_account_settings` if you want to create settings that only apply to service account keys (Doc on service accounts)[./service_accounts.md] | | image_generation_model | str | The default model to use for image generation - ignores model set in request | | store_model_in_db | boolean | If true, allows `/model/new` endpoint to store model information in db. Endpoint disabled by default. [Doc on `/model/new` endpoint](./model_management.md#create-a-new-model) | +| store_prompts_in_spend_logs | boolean | If true, allows prompts and responses to be stored in the spend logs table. | | max_request_size_mb | int | The maximum size for requests in MB. Requests above this size will be rejected. | | max_response_size_mb | int | The maximum size for responses in MB. LLM Responses above this size will not be sent. | | proxy_budget_rescheduler_min_time | int | The minimum time (in seconds) to wait before checking db for budget resets. **Default is 597 seconds** | @@ -366,6 +368,8 @@ router_settings: | GCS_PATH_SERVICE_ACCOUNT | Path to the Google Cloud service account JSON file | GCS_FLUSH_INTERVAL | Flush interval for GCS logging (in seconds). Specify how often you want a log to be sent to GCS. **Default is 20 seconds** | GCS_BATCH_SIZE | Batch size for GCS logging. Specify after how many logs you want to flush to GCS. If `BATCH_SIZE` is set to 10, logs are flushed every 10 logs. **Default is 2048** +| GCS_PUBSUB_TOPIC_ID | PubSub Topic ID to send LiteLLM SpendLogs to. +| GCS_PUBSUB_PROJECT_ID | PubSub Project ID to send LiteLLM SpendLogs to. 
| GENERIC_AUTHORIZATION_ENDPOINT | Authorization endpoint for generic OAuth providers | GENERIC_CLIENT_ID | Client ID for generic OAuth providers | GENERIC_CLIENT_SECRET | Client secret for generic OAuth providers @@ -462,6 +466,9 @@ router_settings: | OTEL_SERVICE_NAME | Service name identifier for OpenTelemetry | OTEL_TRACER_NAME | Tracer name for OpenTelemetry tracing | PAGERDUTY_API_KEY | API key for PagerDuty Alerting +| PHOENIX_API_KEY | API key for Arize Phoenix +| PHOENIX_COLLECTOR_ENDPOINT | API endpoint for Arize Phoenix +| PHOENIX_COLLECTOR_HTTP_ENDPOINT | API http endpoint for Arize Phoenix | POD_NAME | Pod name for the server, this will be [emitted to `datadog` logs](https://docs.litellm.ai/docs/proxy/logging#datadog) as `POD_NAME` | PREDIBASE_API_BASE | Base URL for Predibase API | PRESIDIO_ANALYZER_API_BASE | Base URL for Presidio Analyzer service @@ -484,12 +491,12 @@ router_settings: | SLACK_DAILY_REPORT_FREQUENCY | Frequency of daily Slack reports (e.g., daily, weekly) | SLACK_WEBHOOK_URL | Webhook URL for Slack integration | SMTP_HOST | Hostname for the SMTP server -| SMTP_PASSWORD | Password for SMTP authentication +| SMTP_PASSWORD | Password for SMTP authentication (do not set if SMTP does not require auth) | SMTP_PORT | Port number for SMTP server | SMTP_SENDER_EMAIL | Email address used as the sender in SMTP transactions | SMTP_SENDER_LOGO | Logo used in emails sent via SMTP | SMTP_TLS | Flag to enable or disable TLS for SMTP connections -| SMTP_USERNAME | Username for SMTP authentication +| SMTP_USERNAME | Username for SMTP authentication (do not set if SMTP does not require auth) | SPEND_LOGS_URL | URL for retrieving spend logs | SSL_CERTIFICATE | Path to the SSL certificate file | SSL_VERIFY | Flag to enable or disable SSL certificate verification diff --git a/docs/my-website/docs/proxy/custom_auth.md b/docs/my-website/docs/proxy/custom_auth.md new file mode 100644 index 0000000000..c98ad8e09d --- /dev/null +++ 
b/docs/my-website/docs/proxy/custom_auth.md @@ -0,0 +1,48 @@ +# Custom Auth + +You can now override the default api key auth. + +Here's how: + +#### 1. Create a custom auth file. + +Make sure the response type follows the `UserAPIKeyAuth` pydantic object. This is used for logging usage specific to that user key. + +```python +from litellm.proxy._types import UserAPIKeyAuth + +async def user_api_key_auth(request: Request, api_key: str) -> UserAPIKeyAuth: + try: + modified_master_key = "sk-my-master-key" + if api_key == modified_master_key: + return UserAPIKeyAuth(api_key=api_key) + raise Exception + except: + raise Exception +``` + +#### 2. Pass the filepath (relative to the config.yaml) + +Pass the filepath to the config.yaml + +e.g. if they're both in the same dir - `./config.yaml` and `./custom_auth.py`, this is what it looks like: +```yaml +model_list: + - model_name: "openai-model" + litellm_params: + model: "gpt-3.5-turbo" + +litellm_settings: + drop_params: True + set_verbose: True + +general_settings: + custom_auth: custom_auth.user_api_key_auth +``` + +[**Implementation Code**](https://github.com/BerriAI/litellm/blob/caf2a6b279ddbe89ebd1d8f4499f65715d684851/litellm/proxy/utils.py#L122) + +#### 3. Start the proxy +```shell +$ litellm --config /path/to/config.yaml +``` diff --git a/docs/my-website/docs/proxy/db_info.md b/docs/my-website/docs/proxy/db_info.md index 1b87aa1e54..946089bf14 100644 --- a/docs/my-website/docs/proxy/db_info.md +++ b/docs/my-website/docs/proxy/db_info.md @@ -46,18 +46,17 @@ You can see the full DB Schema [here](https://github.com/BerriAI/litellm/blob/ma | Table Name | Description | Row Insert Frequency | |------------|-------------|---------------------| -| LiteLLM_SpendLogs | Detailed logs of all API requests. Records token usage, spend, and timing information. Tracks which models and keys were used. | **High - every LLM API request** | -| LiteLLM_ErrorLogs | Captures failed requests and errors.
Stores exception details and request information. Helps with debugging and monitoring. | **Medium - on errors only** | +| LiteLLM_SpendLogs | Detailed logs of all API requests. Records token usage, spend, and timing information. Tracks which models and keys were used. | **High - every LLM API request - Success or Failure** | | LiteLLM_AuditLog | Tracks changes to system configuration. Records who made changes and what was modified. Maintains history of updates to teams, users, and models. | **Off by default**, **High - when enabled** | -## Disable `LiteLLM_SpendLogs` & `LiteLLM_ErrorLogs` +## Disable `LiteLLM_SpendLogs` You can disable spend_logs and error_logs by setting `disable_spend_logs` and `disable_error_logs` to `True` on the `general_settings` section of your proxy_config.yaml file. ```yaml general_settings: disable_spend_logs: True # Disable writing spend logs to DB - disable_error_logs: True # Disable writing error logs to DB + disable_error_logs: True # Only disable writing error logs to DB, regular spend logs will still be written unless `disable_spend_logs: True` ``` ### What is the impact of disabling these logs? 
diff --git a/docs/my-website/docs/proxy/enterprise.md b/docs/my-website/docs/proxy/enterprise.md index f2211aa035..fb0945d488 100644 --- a/docs/my-website/docs/proxy/enterprise.md +++ b/docs/my-website/docs/proxy/enterprise.md @@ -14,7 +14,7 @@ Features: - **Security** - ✅ [SSO for Admin UI](./ui.md#✨-enterprise-features) - ✅ [Audit Logs with retention policy](#audit-logs) - - ✅ [JWT-Auth](../docs/proxy/token_auth.md) + - ✅ [JWT-Auth](./token_auth.md) - ✅ [Control available public, private routes (Restrict certain endpoints on proxy)](#control-available-public-private-routes) - ✅ [Control available public, private routes](#control-available-public-private-routes) - ✅ [Secret Managers - AWS Key Manager, Google Secret Manager, Azure Key, Hashicorp Vault](../secret) @@ -24,6 +24,7 @@ Features: - ✅ [Use LiteLLM keys/authentication on Pass Through Endpoints](pass_through#✨-enterprise---use-litellm-keysauthentication-on-pass-through-endpoints) - ✅ [Set Max Request Size / File Size on Requests](#set-max-request--response-size-on-litellm-proxy) - ✅ [Enforce Required Params for LLM Requests (ex. 
Reject requests missing ["metadata"]["generation_name"])](#enforce-required-params-for-llm-requests) + - ✅ [Key Rotations](./virtual_keys.md#-key-rotations) - **Customize Logging, Guardrails, Caching per project** - ✅ [Team Based Logging](./team_logging.md) - Allow each team to use their own Langfuse Project / custom callbacks - ✅ [Disable Logging for a Team](./team_logging.md#disable-logging-for-a-team) - Switch off all logging for a team/project (GDPR Compliance) @@ -39,8 +40,8 @@ Features: - **Control Guardrails per API Key** - **Custom Branding** - ✅ [Custom Branding + Routes on Swagger Docs](#swagger-docs---custom-routes--branding) - - ✅ [Public Model Hub](../docs/proxy/enterprise.md#public-model-hub) - - ✅ [Custom Email Branding](../docs/proxy/email.md#customizing-email-branding) + - ✅ [Public Model Hub](#public-model-hub) + - ✅ [Custom Email Branding](./email.md#customizing-email-branding) ## Audit Logs diff --git a/docs/my-website/docs/proxy/guardrails/aim_security.md b/docs/my-website/docs/proxy/guardrails/aim_security.md index d588afa424..3de933c0b7 100644 --- a/docs/my-website/docs/proxy/guardrails/aim_security.md +++ b/docs/my-website/docs/proxy/guardrails/aim_security.md @@ -37,7 +37,7 @@ guardrails: - guardrail_name: aim-protected-app litellm_params: guardrail: aim - mode: pre_call + mode: pre_call # 'during_call' is also available api_key: os.environ/AIM_API_KEY api_base: os.environ/AIM_API_BASE # Optional, use only when using a self-hosted Aim Outpost ``` diff --git a/docs/my-website/docs/proxy/guardrails/quick_start.md b/docs/my-website/docs/proxy/guardrails/quick_start.md index 22b76a0dae..6744dc6578 100644 --- a/docs/my-website/docs/proxy/guardrails/quick_start.md +++ b/docs/my-website/docs/proxy/guardrails/quick_start.md @@ -2,7 +2,7 @@ import Image from '@theme/IdealImage'; import Tabs from '@theme/Tabs'; import TabItem from '@theme/TabItem'; -# Quick Start +# Guardrails - Quick Start Setup Prompt Injection Detection, PII Masking on LiteLLM 
Proxy (AI Gateway) @@ -121,6 +121,49 @@ curl -i http://localhost:4000/v1/chat/completions \ + +## **Default On Guardrails** + +Set `default_on: true` in your guardrail config to run the guardrail on every request. This is useful if you want to run a guardrail on every request without the user having to specify it. + +**Note:** These will run even if the user specifies a different guardrail or empty guardrails array. + +```yaml +guardrails: + - guardrail_name: "aporia-pre-guard" + litellm_params: + guardrail: aporia + mode: "pre_call" + default_on: true +``` + +**Test Request** + +In this request, the guardrail `aporia-pre-guard` will run on every request because `default_on: true` is set. + + +```shell +curl -i http://localhost:4000/v1/chat/completions \ + -H "Content-Type: application/json" \ + -H "Authorization: Bearer sk-npnwjPQciVRok5yNZgKmFQ" \ + -d '{ + "model": "gpt-3.5-turbo", + "messages": [ + {"role": "user", "content": "hi my email is ishaan@berri.ai"} + ] + }' +``` + +**Expected response** + +Your response headers will include `x-litellm-applied-guardrails` with the guardrail applied + +``` +x-litellm-applied-guardrails: aporia-pre-guard +``` + + + + ## **Using Guardrails Client Side** ### Test yourself **(OSS)** @@ -349,7 +392,7 @@ Monitor which guardrails were executed and whether they passed or failed. e.g. g -### ✨ Control Guardrails per Project (API Key) +### ✨ Control Guardrails per API Key :::info @@ -357,7 +400,7 @@ Monitor which guardrails were executed and whether they passed or failed. e.g. g ::: -Use this to control what guardrails run per project. In this tutorial we only want the following guardrails to run for 1 project (API Key) +Use this to control what guardrails run per API Key.
In this tutorial we only want the following guardrails to run for 1 API Key - `guardrails`: ["aporia-pre-guard", "aporia-post-guard"] **Step 1** Create Key with guardrail settings @@ -481,9 +524,10 @@ guardrails: - guardrail_name: string # Required: Name of the guardrail litellm_params: # Required: Configuration parameters guardrail: string # Required: One of "aporia", "bedrock", "guardrails_ai", "lakera", "presidio", "hide-secrets" - mode: string # Required: One of "pre_call", "post_call", "during_call", "logging_only" + mode: Union[string, List[string]] # Required: One or more of "pre_call", "post_call", "during_call", "logging_only" api_key: string # Required: API key for the guardrail service api_base: string # Optional: Base URL for the guardrail service + default_on: boolean # Optional: Default False. When set to True, will run on every request, does not need client to specify guardrail in request guardrail_info: # Optional[Dict]: Additional information about the guardrail ``` diff --git a/docs/my-website/docs/proxy/health.md b/docs/my-website/docs/proxy/health.md index c9f67394bd..52321a3845 100644 --- a/docs/my-website/docs/proxy/health.md +++ b/docs/my-website/docs/proxy/health.md @@ -314,6 +314,17 @@ Example Response: "I'm alive!" ``` +## `/health/services` + +Use this admin-only endpoint to check if a connected service (datadog/slack/langfuse/etc.) is healthy. 
+ +```bash +curl -L -X GET 'http://0.0.0.0:4000/health/services?service=datadog' -H 'Authorization: Bearer sk-1234' +``` + +[**API Reference**](https://litellm-api.up.railway.app/#/health/health_services_endpoint_health_services_get) + + ## Advanced - Call specific models To check health of specific models, here's how to call them: diff --git a/docs/my-website/docs/proxy/jwt_auth_arch.md b/docs/my-website/docs/proxy/jwt_auth_arch.md new file mode 100644 index 0000000000..6f591e5986 --- /dev/null +++ b/docs/my-website/docs/proxy/jwt_auth_arch.md @@ -0,0 +1,116 @@ +import Image from '@theme/IdealImage'; +import Tabs from '@theme/Tabs'; +import TabItem from '@theme/TabItem'; + +# Control Model Access with OIDC (Azure AD/Keycloak/etc.) + +:::info + +✨ JWT Auth is on LiteLLM Enterprise + +[Enterprise Pricing](https://www.litellm.ai/#pricing) + +[Get free 7-day trial key](https://www.litellm.ai/#trial) + +::: + + + +## Example Token + + + + +```bash +{ + "sub": "1234567890", + "name": "John Doe", + "email": "john.doe@example.com", + "roles": ["basic_user"] # 👈 ROLE +} +``` + + + +```bash +{ + "sub": "1234567890", + "name": "John Doe", + "email": "john.doe@example.com", + "resource_access": { + "litellm-test-client-id": { + "roles": ["basic_user"] # 👈 ROLE + } + } +} +``` + + + +## Proxy Configuration + + + + +```yaml +general_settings: + enable_jwt_auth: True + litellm_jwtauth: + user_roles_jwt_field: "roles" # the field in the JWT that contains the roles + user_allowed_roles: ["basic_user"] # roles that map to an 'internal_user' role on LiteLLM + enforce_rbac: true # if true, will check if the user has the correct role to access the model + + role_permissions: # control what models are allowed for each role + - role: internal_user + models: ["anthropic-claude"] + +model_list: + - model: anthropic-claude + litellm_params: + model: claude-3-5-haiku-20241022 + - model: openai-gpt-4o + litellm_params: + model: gpt-4o +``` + + + + +```yaml +general_settings: + 
enable_jwt_auth: True + litellm_jwtauth: + user_roles_jwt_field: "resource_access.litellm-test-client-id.roles" # the field in the JWT that contains the roles + user_allowed_roles: ["basic_user"] # roles that map to an 'internal_user' role on LiteLLM + enforce_rbac: true # if true, will check if the user has the correct role to access the model + + role_permissions: # control what models are allowed for each role + - role: internal_user + models: ["anthropic-claude"] + +model_list: + - model: anthropic-claude + litellm_params: + model: claude-3-5-haiku-20241022 + - model: openai-gpt-4o + litellm_params: + model: gpt-4o +``` + + + + + +## How it works + +1. Specify JWT_PUBLIC_KEY_URL - This is the public keys endpoint of your OpenID provider. For Azure AD it's `https://login.microsoftonline.com/{tenant_id}/discovery/v2.0/keys`. For Keycloak it's `{keycloak_base_url}/realms/{your-realm}/protocol/openid-connect/certs`. + +1. Map JWT roles to LiteLLM roles - Done via `user_roles_jwt_field` and `user_allowed_roles` + - Currently just `internal_user` is supported for role mapping. +2. Specify model access: + - `role_permissions`: control what models are allowed for each role. + - `role`: the LiteLLM role to control access for. Allowed roles = ["internal_user", "proxy_admin", "team"] + - `models`: list of models that the role is allowed to access. + - `model_list`: parent list of models on the proxy. [Learn more](./configs.md#llm-configs-model_list) + +3. Model Checks: The proxy will run validation checks on the received JWT. 
[Code](https://github.com/BerriAI/litellm/blob/3a4f5b23b5025b87b6d969f2485cc9bc741f9ba6/litellm/proxy/auth/user_api_key_auth.py#L284) \ No newline at end of file diff --git a/docs/my-website/docs/proxy/logging.md b/docs/my-website/docs/proxy/logging.md index 3629cdd629..e13a403634 100644 --- a/docs/my-website/docs/proxy/logging.md +++ b/docs/my-website/docs/proxy/logging.md @@ -1,3 +1,7 @@ +import Image from '@theme/IdealImage'; +import Tabs from '@theme/Tabs'; +import TabItem from '@theme/TabItem'; + # Logging Log Proxy input, output, and exceptions using: @@ -13,9 +17,7 @@ Log Proxy input, output, and exceptions using: - DynamoDB - etc. -import Image from '@theme/IdealImage'; -import Tabs from '@theme/Tabs'; -import TabItem from '@theme/TabItem'; + ## Getting the LiteLLM Call ID @@ -77,10 +79,13 @@ litellm_settings: ### Redact Messages, Response Content -Set `litellm.turn_off_message_logging=True` This will prevent the messages and responses from being logged to your logging provider, but request metadata will still be logged. +Set `litellm.turn_off_message_logging=True` This will prevent the messages and responses from being logged to your logging provider, but request metadata - e.g. spend, will still be tracked. + -Example config.yaml + + +**1. Setup config.yaml ** ```yaml model_list: - model_name: gpt-3.5-turbo @@ -91,9 +96,87 @@ litellm_settings: turn_off_message_logging: True # 👈 Key Change ``` -If you have this feature turned on, you can override it for specific requests by +**2. Send request** +```shell +curl --location 'http://0.0.0.0:4000/chat/completions' \ + --header 'Content-Type: application/json' \ + --data '{ + "model": "gpt-3.5-turbo", + "messages": [ + { + "role": "user", + "content": "what llm are you" + } + ] +}' +``` + + + + + + +:::info + +Dynamic request message redaction is in BETA. + +::: + +Pass in a request header to enable message redaction for a request. 
+ +``` +x-litellm-enable-message-redaction: true +``` + +Example config.yaml + +**1. Setup config.yaml** + +```yaml +model_list: + - model_name: gpt-3.5-turbo + litellm_params: + model: gpt-3.5-turbo +``` + +**2. Setup per request header** + +```shell +curl -L -X POST 'http://0.0.0.0:4000/v1/chat/completions' \ +-H 'Content-Type: application/json' \ +-H 'Authorization: Bearer sk-zV5HlSIm8ihj1F9C_ZbB1g' \ +-H 'x-litellm-enable-message-redaction: true' \ +-d '{ + "model": "gpt-3.5-turbo-testing", + "messages": [ + { + "role": "user", + "content": "Hey, how'\''s it going 1234?" + } + ] +}' +``` + + + + +**3. Check Logging Tool + Spend Logs** + +**Logging Tool** + + + +**Spend Logs** + + + + +### Disable Message Redaction + +If you have `litellm.turn_off_message_logging` turned on, you can override it for specific requests by setting a request header `LiteLLM-Disable-Message-Redaction: true`. + ```shell curl --location 'http://0.0.0.0:4000/chat/completions' \ --header 'Content-Type: application/json' \ @@ -109,13 +192,21 @@ curl --location 'http://0.0.0.0:4000/chat/completions' \ }' ``` -Removes any field with `user_api_key_*` from metadata. - ### Turn off all tracking/logging For some use cases, you may want to turn off all tracking/logging. You can do this by passing `no-log=True` in the request body. + +:::info + +Disable this by setting `global_disable_no_log_param:true` in your config.yaml file. + +```yaml +litellm_settings: + global_disable_no_log_param: True +``` +::: + @@ -1025,6 +1116,74 @@ curl --location 'http://0.0.0.0:4000/chat/completions' \ 6.
Save the JSON file and add the path to `GCS_PATH_SERVICE_ACCOUNT` + +## Google Cloud Storage - PubSub Topic + +Log LLM Logs/SpendLogs to [Google Cloud Storage PubSub Topic](https://cloud.google.com/pubsub/docs/reference/rest) + +:::info + +✨ This is an Enterprise only feature [Get Started with Enterprise here](https://calendly.com/d/4mp-gd3-k5k/litellm-1-1-onboarding-chat) + +::: + + +| Property | Details | +|----------|---------| +| Description | Log LiteLLM `SpendLogs Table` to Google Cloud Storage PubSub Topic | + +When to use `gcs_pubsub`? + +- If your LiteLLM Database has crossed 1M+ spend logs and you want to send `SpendLogs` to a PubSub Topic that can be consumed by GCS BigQuery + + +#### Usage + +1. Add `gcs_pubsub` to LiteLLM Config.yaml +```yaml +model_list: +- litellm_params: + api_base: https://exampleopenaiendpoint-production.up.railway.app/ + api_key: my-fake-key + model: openai/my-fake-model + model_name: fake-openai-endpoint + +litellm_settings: + callbacks: ["gcs_pubsub"] # 👈 KEY CHANGE # 👈 KEY CHANGE +``` + +2. Set required env variables + +```shell +GCS_PUBSUB_TOPIC_ID="litellmDB" +GCS_PUBSUB_PROJECT_ID="reliableKeys" +``` + +3. Start Proxy + +``` +litellm --config /path/to/config.yaml +``` + +4. Test it! 
+ +```bash +curl --location 'http://0.0.0.0:4000/chat/completions' \ +--header 'Content-Type: application/json' \ +--data ' { + "model": "fake-openai-endpoint", + "messages": [ + { + "role": "user", + "content": "what llm are you" + } + ], + } +' +``` + + + ## s3 Buckets We will use the `--config` to set @@ -1301,7 +1460,7 @@ LiteLLM supports customizing the following Datadog environment variables ## Lunary -### Step1: Install dependencies and set your environment variables +#### Step1: Install dependencies and set your environment variables Install the dependencies ```shell pip install litellm lunary @@ -1312,7 +1471,7 @@ Get you Lunary public key from from https://app.lunary.ai/settings export LUNARY_PUBLIC_KEY="" ``` -### Step 2: Create a `config.yaml` and set `lunary` callbacks +#### Step 2: Create a `config.yaml` and set `lunary` callbacks ```yaml model_list: @@ -1324,12 +1483,12 @@ litellm_settings: failure_callback: ["lunary"] ``` -### Step 3: Start the LiteLLM proxy +#### Step 3: Start the LiteLLM proxy ```shell litellm --config config.yaml ``` -### Step 4: Make a request +#### Step 4: Make a request ```shell curl -X POST 'http://0.0.0.0:4000/chat/completions' \ @@ -1352,14 +1511,14 @@ curl -X POST 'http://0.0.0.0:4000/chat/completions' \ ## MLflow -### Step1: Install dependencies +#### Step1: Install dependencies Install the dependencies. 
```shell pip install litellm mlflow ``` -### Step 2: Create a `config.yaml` with `mlflow` callback +#### Step 2: Create a `config.yaml` with `mlflow` callback ```yaml model_list: @@ -1371,12 +1530,12 @@ litellm_settings: failure_callback: ["mlflow"] ``` -### Step 3: Start the LiteLLM proxy +#### Step 3: Start the LiteLLM proxy ```shell litellm --config config.yaml ``` -### Step 4: Make a request +#### Step 4: Make a request ```shell curl -X POST 'http://0.0.0.0:4000/chat/completions' \ @@ -1392,7 +1551,7 @@ curl -X POST 'http://0.0.0.0:4000/chat/completions' \ }' ``` -### Step 5: Review traces +#### Step 5: Review traces Run the following command to start MLflow UI and review recorded traces. @@ -1426,9 +1585,6 @@ class MyCustomHandler(CustomLogger): def log_post_api_call(self, kwargs, response_obj, start_time, end_time): print(f"Post-API Call") - - def log_stream_event(self, kwargs, response_obj, start_time, end_time): - print(f"On Stream") def log_success_event(self, kwargs, response_obj, start_time, end_time): print("On Success") diff --git a/docs/my-website/docs/proxy/logging_spec.md b/docs/my-website/docs/proxy/logging_spec.md index 86ba907373..7da937e565 100644 --- a/docs/my-website/docs/proxy/logging_spec.md +++ b/docs/my-website/docs/proxy/logging_spec.md @@ -78,6 +78,7 @@ Inherits from `StandardLoggingUserAPIKeyMetadata` and adds: | `api_base` | `Optional[str]` | Optional API base URL | | `response_cost` | `Optional[str]` | Optional response cost | | `additional_headers` | `Optional[StandardLoggingAdditionalHeaders]` | Additional headers | +| `batch_models` | `Optional[List[str]]` | Only set for Batches API. 
Lists the models used for cost calculation | ## StandardLoggingModelInformation diff --git a/docs/my-website/docs/proxy/master_key_rotations.md b/docs/my-website/docs/proxy/master_key_rotations.md new file mode 100644 index 0000000000..1713679863 --- /dev/null +++ b/docs/my-website/docs/proxy/master_key_rotations.md @@ -0,0 +1,53 @@ +# Rotating Master Key + +Here are our recommended steps for rotating your master key. + + +**1. Backup your DB** +In case of any errors during the encryption/de-encryption process, this will allow you to revert back to current state without issues. + +**2. Call `/key/regenerate` with the new master key** + +```bash +curl -L -X POST 'http://localhost:4000/key/regenerate' \ +-H 'Authorization: Bearer sk-1234' \ +-H 'Content-Type: application/json' \ +-d '{ + "key": "sk-1234", + "new_master_key": "sk-PIp1h0RekR" +}' +``` + +This will re-encrypt any models in your Proxy_ModelTable with the new master key. + +Expect to start seeing decryption errors in logs, as your old master key is no longer able to decrypt the new values. + +```bash + raise Exception("Unable to decrypt value={}".format(v)) +Exception: Unable to decrypt value= +``` + +**3. Update LITELLM_MASTER_KEY** + +In your environment variables update the value of LITELLM_MASTER_KEY to the new_master_key from Step 2. + +This ensures the key used for decryption from db is the new key. + +**4. 
Test it** + +Make a test request to a model stored on proxy with a litellm key (new master key or virtual key) and see if it works + +```bash + curl -L -X POST 'http://0.0.0.0:4000/v1/chat/completions' \ +-H 'Content-Type: application/json' \ +-H 'Authorization: Bearer sk-1234' \ +-d '{ + "model": "gpt-4o-mini", # 👈 REPLACE with 'public model name' for any db-model + "messages": [ + { + "content": "Hey, how's it going", + "role": "user" + } + ], +}' +``` \ No newline at end of file diff --git a/docs/my-website/docs/proxy/model_access.md b/docs/my-website/docs/proxy/model_access.md index 545d74865b..854baa2edb 100644 --- a/docs/my-website/docs/proxy/model_access.md +++ b/docs/my-website/docs/proxy/model_access.md @@ -344,3 +344,6 @@ curl -i http://localhost:4000/v1/chat/completions \ + + +## [Role Based Access Control (RBAC)](./jwt_auth_arch) \ No newline at end of file diff --git a/docs/my-website/docs/proxy/prod.md b/docs/my-website/docs/proxy/prod.md index d0b8c48174..d3ba2d6224 100644 --- a/docs/my-website/docs/proxy/prod.md +++ b/docs/my-website/docs/proxy/prod.md @@ -107,9 +107,9 @@ general_settings: By default, LiteLLM writes several types of logs to the database: - Every LLM API request to the `LiteLLM_SpendLogs` table -- LLM Exceptions to the `LiteLLM_LogsErrors` table +- LLM Exceptions to the `LiteLLM_SpendLogs` table -If you're not viewing these logs on the LiteLLM UI (most users use Prometheus for monitoring), you can disable them by setting the following flags to `True`: +If you're not viewing these logs on the LiteLLM UI, you can disable them by setting the following flags to `True`: ```yaml general_settings: diff --git a/docs/my-website/docs/proxy/prometheus.md b/docs/my-website/docs/proxy/prometheus.md index a0e19a006d..8dff527ae5 100644 --- a/docs/my-website/docs/proxy/prometheus.md +++ b/docs/my-website/docs/proxy/prometheus.md @@ -57,7 +57,7 @@ http://localhost:4000/metrics # /metrics ``` -## Virtual Keys, Teams, Internal Users Metrics +## Virtual 
Keys, Teams, Internal Users Use this for for tracking per [user, key, team, etc.](virtual_keys) @@ -68,6 +68,42 @@ Use this for for tracking per [user, key, team, etc.](virtual_keys) | `litellm_input_tokens` | input tokens per `"end_user", "hashed_api_key", "api_key_alias", "requested_model", "team", "team_alias", "user", "model"` | | `litellm_output_tokens` | output tokens per `"end_user", "hashed_api_key", "api_key_alias", "requested_model", "team", "team_alias", "user", "model"` | +### Team - Budget + + +| Metric Name | Description | +|----------------------|--------------------------------------| +| `litellm_team_max_budget_metric` | Max Budget for Team Labels: `"team_id", "team_alias"`| +| `litellm_remaining_team_budget_metric` | Remaining Budget for Team (A team created on LiteLLM) Labels: `"team_id", "team_alias"`| +| `litellm_team_budget_remaining_hours_metric` | Hours before the team budget is reset Labels: `"team_id", "team_alias"`| + +### Virtual Key - Budget + +| Metric Name | Description | +|----------------------|--------------------------------------| +| `litellm_api_key_max_budget_metric` | Max Budget for API Key Labels: `"hashed_api_key", "api_key_alias"`| +| `litellm_remaining_api_key_budget_metric` | Remaining Budget for API Key (A key Created on LiteLLM) Labels: `"hashed_api_key", "api_key_alias"`| +| `litellm_api_key_budget_remaining_hours_metric` | Hours before the API Key budget is reset Labels: `"hashed_api_key", "api_key_alias"`| + +### Virtual Key - Rate Limit + +| Metric Name | Description | +|----------------------|--------------------------------------| +| `litellm_remaining_api_key_requests_for_model` | Remaining Requests for a LiteLLM virtual API key, only if a model-specific rate limit (rpm) has been set for that virtual key. 
Labels: `"hashed_api_key", "api_key_alias", "model"`| +| `litellm_remaining_api_key_tokens_for_model` | Remaining Tokens for a LiteLLM virtual API key, only if a model-specific token limit (tpm) has been set for that virtual key. Labels: `"hashed_api_key", "api_key_alias", "model"`| + + +### Initialize Budget Metrics on Startup + +If you want to initialize the key/team budget metrics on startup, you can set the `prometheus_initialize_budget_metrics` to `true` in the `config.yaml` + +```yaml +litellm_settings: + callbacks: ["prometheus"] + prometheus_initialize_budget_metrics: true +``` + + ## Proxy Level Tracking Metrics Use this to track overall LiteLLM Proxy usage. @@ -79,12 +115,11 @@ Use this to track overall LiteLLM Proxy usage. | `litellm_proxy_failed_requests_metric` | Total number of failed responses from proxy - the client did not get a success response from litellm proxy. Labels: `"end_user", "hashed_api_key", "api_key_alias", "requested_model", "team", "team_alias", "user", "exception_status", "exception_class"` | | `litellm_proxy_total_requests_metric` | Total number of requests made to the proxy server - track number of client side requests. 
Labels: `"end_user", "hashed_api_key", "api_key_alias", "requested_model", "team", "team_alias", "user", "status_code"` | -## LLM API / Provider Metrics +## LLM Provider Metrics Use this for LLM API Error monitoring and tracking remaining rate limits and token limits -### Labels Tracked for LLM API Metrics - +### Labels Tracked | Label | Description | |-------|-------------| @@ -100,7 +135,7 @@ Use this for LLM API Error monitoring and tracking remaining rate limits and tok | exception_status | The status of the exception, if any | | exception_class | The class of the exception, if any | -### Success and Failure Metrics for LLM API +### Success and Failure | Metric Name | Description | |----------------------|--------------------------------------| @@ -108,15 +143,14 @@ Use this for LLM API Error monitoring and tracking remaining rate limits and tok | `litellm_deployment_failure_responses` | Total number of failed LLM API calls for a specific LLM deployment. Labels: `"requested_model", "litellm_model_name", "model_id", "api_base", "api_provider", "hashed_api_key", "api_key_alias", "team", "team_alias", "exception_status", "exception_class"` | | `litellm_deployment_total_requests` | Total number of LLM API calls for deployment - success + failure. Labels: `"requested_model", "litellm_model_name", "model_id", "api_base", "api_provider", "hashed_api_key", "api_key_alias", "team", "team_alias"` | -### Remaining Requests and Tokens Metrics +### Remaining Requests and Tokens | Metric Name | Description | |----------------------|--------------------------------------| | `litellm_remaining_requests_metric` | Track `x-ratelimit-remaining-requests` returned from LLM API Deployment. Labels: `"model_group", "api_provider", "api_base", "litellm_model_name", "hashed_api_key", "api_key_alias"` | | `litellm_remaining_tokens` | Track `x-ratelimit-remaining-tokens` return from LLM API Deployment. 
Labels: `"model_group", "api_provider", "api_base", "litellm_model_name", "hashed_api_key", "api_key_alias"` | -### Deployment State Metrics - +### Deployment State | Metric Name | Description | |----------------------|--------------------------------------| | `litellm_deployment_state` | The state of the deployment: 0 = healthy, 1 = partial outage, 2 = complete outage. Labels: `"litellm_model_name", "model_id", "api_base", "api_provider"` | @@ -139,17 +173,6 @@ Use this for LLM API Error monitoring and tracking remaining rate limits and tok | `litellm_llm_api_latency_metric` | Latency (seconds) for just the LLM API call - tracked for labels "model", "hashed_api_key", "api_key_alias", "team", "team_alias", "requested_model", "end_user", "user" | | `litellm_llm_api_time_to_first_token_metric` | Time to first token for LLM API call - tracked for labels `model`, `hashed_api_key`, `api_key_alias`, `team`, `team_alias` [Note: only emitted for streaming requests] | -## Virtual Key - Budget, Rate Limit Metrics - -Metrics used to track LiteLLM Proxy Budgeting and Rate limiting logic - -| Metric Name | Description | -|----------------------|--------------------------------------| -| `litellm_remaining_team_budget_metric` | Remaining Budget for Team (A team created on LiteLLM) Labels: `"team_id", "team_alias"`| -| `litellm_remaining_api_key_budget_metric` | Remaining Budget for API Key (A key Created on LiteLLM) Labels: `"hashed_api_key", "api_key_alias"`| -| `litellm_remaining_api_key_requests_for_model` | Remaining Requests for a LiteLLM virtual API key, only if a model-specific rate limit (rpm) has been set for that virtual key. Labels: `"hashed_api_key", "api_key_alias", "model"`| -| `litellm_remaining_api_key_tokens_for_model` | Remaining Tokens for a LiteLLM virtual API key, only if a model-specific token limit (tpm) has been set for that virtual key. 
Labels: `"hashed_api_key", "api_key_alias", "model"`| - ## [BETA] Custom Metrics Track custom metrics on prometheus on all events mentioned above. @@ -200,7 +223,6 @@ curl -L -X POST 'http://0.0.0.0:4000/v1/chat/completions' \ ... "metadata_foo": "hello world" ... ``` - ## Monitor System Health To monitor the health of litellm adjacent services (redis / postgres), do: diff --git a/docs/my-website/docs/proxy/public_teams.md b/docs/my-website/docs/proxy/public_teams.md new file mode 100644 index 0000000000..6ff2258308 --- /dev/null +++ b/docs/my-website/docs/proxy/public_teams.md @@ -0,0 +1,40 @@ +# [BETA] Public Teams + +Expose available teams to your users to join on signup. + + + + +## Quick Start + +1. Create a team on LiteLLM + +```bash +curl -X POST '/team/new' \ +-H 'Content-Type: application/json' \ +-H 'Authorization: Bearer ' \ +-d '{"name": "My Team", "team_id": "team_id_1"}' +``` + +2. Expose the team to your users + +```yaml +litellm_settings: + default_internal_user_params: + available_teams: ["team_id_1"] # 👈 Make team available to new SSO users +``` + +3. Test it! + +```bash +curl -L -X POST 'http://0.0.0.0:4000/team/member_add' \ +-H 'Authorization: Bearer sk-' \ +-H 'Content-Type: application/json' \ +--data-raw '{ + "team_id": "team_id_1", + "member": [{"role": "user", "user_id": "my-test-user"}] +}' +``` + + + diff --git a/docs/my-website/docs/proxy/release_cycle.md b/docs/my-website/docs/proxy/release_cycle.md new file mode 100644 index 0000000000..947a4ae6b3 --- /dev/null +++ b/docs/my-website/docs/proxy/release_cycle.md @@ -0,0 +1,12 @@ +# Release Cycle + +Litellm Proxy has the following release cycle: + +- `v1.x.x-nightly`: These are releases which pass ci/cd. +- `v1.x.x.rc`: These are releases which pass ci/cd + [manual review](https://github.com/BerriAI/litellm/discussions/8495#discussioncomment-12180711). +- `v1.x.x` OR `v1.x.x-stable`: These are releases which pass ci/cd + manual review + 3 days of production testing. 
+ +In production, we recommend using the latest `v1.x.x` release. + + +Follow our release notes [here](https://github.com/BerriAI/litellm/releases). \ No newline at end of file diff --git a/docs/my-website/docs/proxy/reliability.md b/docs/my-website/docs/proxy/reliability.md index 489f4e2ef1..654c2618c2 100644 --- a/docs/my-website/docs/proxy/reliability.md +++ b/docs/my-website/docs/proxy/reliability.md @@ -1007,7 +1007,34 @@ curl -L -X POST 'http://0.0.0.0:4000/v1/chat/completions' \ }' ``` -### Disable Fallbacks per key +### Disable Fallbacks (Per Request/Key) + + + + + + +You can disable fallbacks per key by setting `disable_fallbacks: true` in your request body. + +```bash +curl -L -X POST 'http://0.0.0.0:4000/v1/chat/completions' \ +-H 'Content-Type: application/json' \ +-H 'Authorization: Bearer sk-1234' \ +-d '{ + "messages": [ + { + "role": "user", + "content": "List 5 important events in the XIX century" + } + ], + "model": "gpt-3.5-turbo", + "disable_fallbacks": true # 👈 DISABLE FALLBACKS +}' +``` + + + + You can disable fallbacks per key by setting `disable_fallbacks: true` in your key metadata. @@ -1020,4 +1047,7 @@ curl -L -X POST 'http://0.0.0.0:4000/key/generate' \ "disable_fallbacks": true } }' -``` \ No newline at end of file +``` + + + \ No newline at end of file diff --git a/docs/my-website/docs/proxy/request_headers.md b/docs/my-website/docs/proxy/request_headers.md new file mode 100644 index 0000000000..79bcea2c86 --- /dev/null +++ b/docs/my-website/docs/proxy/request_headers.md @@ -0,0 +1,23 @@ +# Request Headers + +Special headers that are supported by LiteLLM. + +## LiteLLM Headers + +`x-litellm-timeout` Optional[float]: The timeout for the request in seconds. + +`x-litellm-enable-message-redaction`: Optional[bool]: Don't log the message content to logging integrations. Just track spend. [Learn More](./logging#redact-messages-response-content) + +`x-litellm-tags`: Optional[str]: A comma separated list (e.g. 
`tag1,tag2,tag3`) of tags to use for [tag-based routing](./tag_routing) **OR** [spend-tracking](./enterprise.md#tracking-spend-for-custom-tags). + +## Anthropic Headers + +`anthropic-version` Optional[str]: The version of the Anthropic API to use. +`anthropic-beta` Optional[str]: The beta version of the Anthropic API to use. + +## OpenAI Headers + +`openai-organization` Optional[str]: The organization to use for the OpenAI API. (currently needs to be enabled via `general_settings::forward_openai_org_id: true`) + + + diff --git a/docs/my-website/docs/proxy/response_headers.md b/docs/my-website/docs/proxy/response_headers.md index c066df1e02..b07f82d780 100644 --- a/docs/my-website/docs/proxy/response_headers.md +++ b/docs/my-website/docs/proxy/response_headers.md @@ -1,17 +1,20 @@ -# Rate Limit Headers +# Response Headers -When you make a request to the proxy, the proxy will return the following [OpenAI-compatible headers](https://platform.openai.com/docs/guides/rate-limits/rate-limits-in-headers): +When you make a request to the proxy, the proxy will return the following headers: -- `x-ratelimit-remaining-requests` - Optional[int]: The remaining number of requests that are permitted before exhausting the rate limit. -- `x-ratelimit-remaining-tokens` - Optional[int]: The remaining number of tokens that are permitted before exhausting the rate limit. -- `x-ratelimit-limit-requests` - Optional[int]: The maximum number of requests that are permitted before exhausting the rate limit. -- `x-ratelimit-limit-tokens` - Optional[int]: The maximum number of tokens that are permitted before exhausting the rate limit. -- `x-ratelimit-reset-requests` - Optional[int]: The time at which the rate limit will reset. -- `x-ratelimit-reset-tokens` - Optional[int]: The time at which the rate limit will reset. 
+## Rate Limit Headers +[OpenAI-compatible headers](https://platform.openai.com/docs/guides/rate-limits/rate-limits-in-headers): -These headers are useful for clients to understand the current rate limit status and adjust their request rate accordingly. +| Header | Type | Description | +|--------|------|-------------| +| `x-ratelimit-remaining-requests` | Optional[int] | The remaining number of requests that are permitted before exhausting the rate limit | +| `x-ratelimit-remaining-tokens` | Optional[int] | The remaining number of tokens that are permitted before exhausting the rate limit | +| `x-ratelimit-limit-requests` | Optional[int] | The maximum number of requests that are permitted before exhausting the rate limit | +| `x-ratelimit-limit-tokens` | Optional[int] | The maximum number of tokens that are permitted before exhausting the rate limit | +| `x-ratelimit-reset-requests` | Optional[int] | The time at which the rate limit will reset | +| `x-ratelimit-reset-tokens` | Optional[int] | The time at which the rate limit will reset | -## How are these headers calculated? +### How Rate Limit Headers work **If key has rate limits set** @@ -19,6 +22,50 @@ The proxy will return the [remaining rate limits for that key](https://github.co **If key does not have rate limits set** -The proxy returns the remaining requests/tokens returned by the backend provider. +The proxy returns the remaining requests/tokens returned by the backend provider. (LiteLLM will standardize the backend provider's response headers to match the OpenAI format) If the backend provider does not return these headers, the value will be `None`. + +These headers are useful for clients to understand the current rate limit status and adjust their request rate accordingly. 
+ + +## Latency Headers +| Header | Type | Description | +|--------|------|-------------| +| `x-litellm-response-duration-ms` | float | Total duration of the API response in milliseconds | +| `x-litellm-overhead-duration-ms` | float | LiteLLM processing overhead in milliseconds | + +## Retry, Fallback Headers +| Header | Type | Description | +|--------|------|-------------| +| `x-litellm-attempted-retries` | int | Number of retry attempts made | +| `x-litellm-attempted-fallbacks` | int | Number of fallback attempts made | +| `x-litellm-max-fallbacks` | int | Maximum number of fallback attempts allowed | + +## Cost Tracking Headers +| Header | Type | Description | +|--------|------|-------------| +| `x-litellm-response-cost` | float | Cost of the API call | +| `x-litellm-key-spend` | float | Total spend for the API key | + +## LiteLLM Specific Headers +| Header | Type | Description | +|--------|------|-------------| +| `x-litellm-call-id` | string | Unique identifier for the API call | +| `x-litellm-model-id` | string | Unique identifier for the model used | +| `x-litellm-model-api-base` | string | Base URL of the API endpoint | +| `x-litellm-version` | string | Version of LiteLLM being used | +| `x-litellm-model-group` | string | Model group identifier | + +## Response headers from LLM providers + +LiteLLM also returns the original response headers from the LLM provider. These headers are prefixed with `llm_provider-` to distinguish them from LiteLLM's headers. 
+ +Example response headers: +``` +llm_provider-openai-processing-ms: 256 +llm_provider-openai-version: 2020-10-01 +llm_provider-x-ratelimit-limit-requests: 30000 +llm_provider-x-ratelimit-limit-tokens: 150000000 +``` + diff --git a/docs/my-website/docs/proxy/tag_routing.md b/docs/my-website/docs/proxy/tag_routing.md index 4b2621fa8c..23715e77f8 100644 --- a/docs/my-website/docs/proxy/tag_routing.md +++ b/docs/my-website/docs/proxy/tag_routing.md @@ -143,6 +143,26 @@ Response } ``` +## Calling via Request Header + +You can also call via request header `x-litellm-tags` + +```shell +curl -L -X POST 'http://0.0.0.0:4000/v1/chat/completions' \ +-H 'Content-Type: application/json' \ +-H 'Authorization: Bearer sk-1234' \ +-H 'x-litellm-tags: free,my-custom-tag' \ +-d '{ + "model": "gpt-4", + "messages": [ + { + "role": "user", + "content": "Hey, how'\''s it going 123456?" + } + ] +}' +``` + ## Setting Default Tags Use this if you want all untagged requests to be routed to specific deployments diff --git a/docs/my-website/docs/proxy/timeout.md b/docs/my-website/docs/proxy/timeout.md index 2bf93298fe..85428ae53e 100644 --- a/docs/my-website/docs/proxy/timeout.md +++ b/docs/my-website/docs/proxy/timeout.md @@ -166,7 +166,7 @@ response = client.chat.completions.create( {"role": "user", "content": "what color is red"} ], logit_bias={12481: 100}, - timeout=1 + extra_body={"timeout": 1} # 👈 KEY CHANGE ) print(response) diff --git a/docs/my-website/docs/proxy/token_auth.md b/docs/my-website/docs/proxy/token_auth.md index ffff2694fe..c6d280cb82 100644 --- a/docs/my-website/docs/proxy/token_auth.md +++ b/docs/my-website/docs/proxy/token_auth.md @@ -1,9 +1,9 @@ import Tabs from '@theme/Tabs'; import TabItem from '@theme/TabItem'; -# JWT-based Auth +# OIDC - JWT-based Auth -Use JWT's to auth admins / projects into the proxy. +Use JWT's to auth admins / users / projects into the proxy. :::info @@ -156,33 +156,115 @@ scope: ["litellm-proxy-admin",...] scope: "litellm-proxy-admin ..." 
 ```
 
-## Enforce Role-Based Access Control (RBAC)
+## Control model access with Teams
 
-Reject a JWT token if it's valid but doesn't have the required scopes / fields. 
-Only tokens which with valid Admin (`admin_jwt_scope`), User (`user_id_jwt_field`), Team (`team_id_jwt_field`) are allowed. 
+
+1. Specify the JWT field that contains the team ids that the user belongs to.
+
+```yaml
+general_settings:
+  enable_jwt_auth: True
+  litellm_jwtauth:
+    user_id_jwt_field: "sub"
+    team_ids_jwt_field: "groups"
+    user_id_upsert: true # add user_id to the db if they don't exist
+    enforce_team_based_model_access: true # don't allow users to access models unless the team has access
+```
+
+This is assuming your token looks like this:
+```
+{
+  ...,
+  "sub": "my-unique-user",
+  "groups": ["team_id_1", "team_id_2"]
+}
+```
+
+2. Create the teams on LiteLLM
+
+```bash
+curl -X POST '/team/new' \
+-H 'Authorization: Bearer ' \
+-H 'Content-Type: application/json' \
+-d '{
+    "team_alias": "team_1",
+    "team_id": "team_id_1" # 👈 MUST BE THE SAME AS THE SSO GROUP ID
+}'
+```
+
+3. Test the flow
+
+SSO for UI: [**See Walkthrough**](https://www.loom.com/share/8959be458edf41fd85937452c29a33f3?sid=7ebd6d37-569a-4023-866e-e0cde67cb23e)
+
+OIDC Auth for API: [**See Walkthrough**](https://www.loom.com/share/00fe2deab59a426183a46b1e2b522200?sid=4ed6d497-ead6-47f9-80c0-ca1c4b6b4814)
+
+
+### Flow
+
+- Validate if user id is in the DB (LiteLLM_UserTable)
+- Validate if any of the groups are in the DB (LiteLLM_TeamTable)
+- Validate if any group has model access
+- If all checks pass, allow the request
+
+
+## Advanced - Custom Validate
+
+Validate a JWT Token using custom logic, if you need an extra way to verify if tokens are valid for LiteLLM Proxy.
+
+### 1.
Setup custom validate function + +```python +from typing import Literal + +def my_custom_validate(token: str) -> Literal[True]: + """ + Only allow tokens with tenant-id == "my-unique-tenant", and claims == ["proxy-admin"] + """ + allowed_tenants = ["my-unique-tenant"] + allowed_claims = ["proxy-admin"] + + if token["tenant_id"] not in allowed_tenants: + raise Exception("Invalid JWT token") + if token["claims"] not in allowed_claims: + raise Exception("Invalid JWT token") + return True +``` + +### 2. Setup config.yaml ```yaml general_settings: master_key: sk-1234 enable_jwt_auth: True litellm_jwtauth: - admin_jwt_scope: "litellm_proxy_endpoints_access" - admin_allowed_routes: - - openai_routes - - info_routes - public_key_ttl: 600 - enforce_rbac: true # 👈 Enforce RBAC + user_id_jwt_field: "sub" + team_id_jwt_field: "tenant_id" + user_id_upsert: True + custom_validate: custom_validate.my_custom_validate # 👈 custom validate function ``` -Expected Scope in JWT: +### 3. Test the flow + +**Expected JWT** ``` { - "scope": "litellm_proxy_endpoints_access" + "sub": "my-unique-user", + "tenant_id": "INVALID_TENANT", + "claims": ["proxy-admin"] } ``` +**Expected Response** + +``` +{ + "error": "Invalid JWT token" +} +``` + + + ## Advanced - Allowed Routes Configure which routes a JWT can access via the config. @@ -287,4 +369,129 @@ general_settings: user_email_jwt_field: "email" # 👈 checks 'email' field in jwt payload user_allowed_email_domain: "my-co.com" # allows user@my-co.com to call proxy user_id_upsert: true # 👈 upserts the user to db, if valid email but not in db +``` + +## [BETA] Control Access with OIDC Roles + +Allow JWT tokens with supported roles to access the proxy. + +Let users and teams access the proxy, without needing to add them to the DB. + + +Very important, set `enforce_rbac: true` to ensure that the RBAC system is enabled. + +**Note:** This is in beta and might change unexpectedly. 
+ +```yaml +general_settings: + enable_jwt_auth: True + litellm_jwtauth: + object_id_jwt_field: "oid" # can be either user / team, inferred from the role mapping + roles_jwt_field: "roles" + role_mappings: + - role: litellm.api.consumer + internal_role: "team" + enforce_rbac: true # 👈 VERY IMPORTANT + + role_permissions: # default model + endpoint permissions for a role. + - role: team + models: ["anthropic-claude"] + routes: ["/v1/chat/completions"] + +environment_variables: + JWT_AUDIENCE: "api://LiteLLM_Proxy" # ensures audience is validated +``` + +- `object_id_jwt_field`: The field in the JWT token that contains the object id. This id can be either a user id or a team id. Use this instead of `user_id_jwt_field` and `team_id_jwt_field`. If the same field could be both. + +- `roles_jwt_field`: The field in the JWT token that contains the roles. This field is a list of roles that the user has. To index into a nested field, use dot notation - eg. `resource_access.litellm-test-client-id.roles`. + +- `role_mappings`: A list of role mappings. Map the received role in the JWT token to an internal role on LiteLLM. + +- `JWT_AUDIENCE`: The audience of the JWT token. This is used to validate the audience of the JWT token. Set via an environment variable. + +### Example Token + +```bash +{ + "aud": "api://LiteLLM_Proxy", + "oid": "eec236bd-0135-4b28-9354-8fc4032d543e", + "roles": ["litellm.api.consumer"] +} +``` + +### Role Mapping Spec + +- `role`: The expected role in the JWT token. +- `internal_role`: The internal role on LiteLLM that will be used to control access. + +Supported internal roles: +- `team`: Team object will be used for RBAC spend tracking. Use this for tracking spend for a 'use case'. +- `internal_user`: User object will be used for RBAC spend tracking. Use this for tracking spend for an 'individual user'. +- `proxy_admin`: Proxy admin will be used for RBAC spend tracking. Use this for granting admin access to a token. 
+ +### [Architecture Diagram (Control Model Access)](./jwt_auth_arch) + +## [BETA] Control Model Access with Scopes + +Control which models a JWT can access. Set `enforce_scope_based_access: true` to enforce scope-based access control. + +### 1. Setup config.yaml with scope mappings. + + +```yaml +model_list: + - model_name: anthropic-claude + litellm_params: + model: anthropic/claude-3-5-sonnet + api_key: os.environ/ANTHROPIC_API_KEY + - model_name: gpt-3.5-turbo-testing + litellm_params: + model: gpt-3.5-turbo + api_key: os.environ/OPENAI_API_KEY + +general_settings: + enable_jwt_auth: True + litellm_jwtauth: + team_id_jwt_field: "client_id" # 👈 set the field in the JWT token that contains the team id + team_id_upsert: true # 👈 upsert the team to db, if team id is not found in db + scope_mappings: + - scope: litellm.api.consumer + models: ["anthropic-claude"] + - scope: litellm.api.gpt_3_5_turbo + models: ["gpt-3.5-turbo-testing"] + enforce_scope_based_access: true # 👈 enforce scope-based access control + enforce_rbac: true # 👈 enforces only a Team/User/ProxyAdmin can access the proxy. +``` + +#### Scope Mapping Spec + +- `scope`: The scope to be used for the JWT token. +- `models`: The models that the JWT token can access. Value is the `model_name` in `model_list`. Note: Wildcard routes are not currently supported. + +### 2. Create a JWT with the correct scopes. + +Expected Token: + +```bash +{ + "scope": ["litellm.api.consumer", "litellm.api.gpt_3_5_turbo"] # can be a list or a space-separated string +} +``` + +### 3. Test the flow. + +```bash +curl -L -X POST 'http://0.0.0.0:4000/v1/chat/completions' \ +-H 'Content-Type: application/json' \ +-H 'Authorization: Bearer eyJhbGci...' \ +-d '{ + "model": "gpt-3.5-turbo-testing", + "messages": [ + { + "role": "user", + "content": "Hey, how'\''s it going 1234?" 
+ } + ] +}' ``` \ No newline at end of file diff --git a/docs/my-website/docs/proxy/ui.md b/docs/my-website/docs/proxy/ui.md index f32f8ffa2d..a093b226a2 100644 --- a/docs/my-website/docs/proxy/ui.md +++ b/docs/my-website/docs/proxy/ui.md @@ -6,11 +6,6 @@ import TabItem from '@theme/TabItem'; Create keys, track spend, add models without worrying about the config / CRUD endpoints. -:::info - -This is in beta, so things may change. If you have feedback, [let us know](https://discord.com/invite/wuPM9dRgDw) - -::: diff --git a/docs/my-website/docs/proxy/user_management_heirarchy.md b/docs/my-website/docs/proxy/user_management_heirarchy.md index 5f3e83ae35..3565c9d257 100644 --- a/docs/my-website/docs/proxy/user_management_heirarchy.md +++ b/docs/my-website/docs/proxy/user_management_heirarchy.md @@ -1,11 +1,11 @@ import Image from '@theme/IdealImage'; -# User Management Heirarchy +# User Management Hierarchy -LiteLLM supports a heirarchy of users, teams, organizations, and budgets. +LiteLLM supports a hierarchy of users, teams, organizations, and budgets. - Organizations can have multiple teams. [API Reference](https://litellm-api.up.railway.app/#/organization%20management) - Teams can have multiple users. [API Reference](https://litellm-api.up.railway.app/#/team%20management) diff --git a/docs/my-website/docs/proxy/virtual_keys.md b/docs/my-website/docs/proxy/virtual_keys.md index 254b50bca3..04be4ade48 100644 --- a/docs/my-website/docs/proxy/virtual_keys.md +++ b/docs/my-website/docs/proxy/virtual_keys.md @@ -393,55 +393,6 @@ curl -L -X POST 'http://0.0.0.0:4000/key/unblock' \ ``` -### Custom Auth - -You can now override the default api key auth. - -Here's how: - -#### 1. Create a custom auth file. - -Make sure the response type follows the `UserAPIKeyAuth` pydantic object. This is used by for logging usage specific to that user key. 
- -```python -from litellm.proxy._types import UserAPIKeyAuth - -async def user_api_key_auth(request: Request, api_key: str) -> UserAPIKeyAuth: - try: - modified_master_key = "sk-my-master-key" - if api_key == modified_master_key: - return UserAPIKeyAuth(api_key=api_key) - raise Exception - except: - raise Exception -``` - -#### 2. Pass the filepath (relative to the config.yaml) - -Pass the filepath to the config.yaml - -e.g. if they're both in the same dir - `./config.yaml` and `./custom_auth.py`, this is what it looks like: -```yaml -model_list: - - model_name: "openai-model" - litellm_params: - model: "gpt-3.5-turbo" - -litellm_settings: - drop_params: True - set_verbose: True - -general_settings: - custom_auth: custom_auth.user_api_key_auth -``` - -[**Implementation Code**](https://github.com/BerriAI/litellm/blob/caf2a6b279ddbe89ebd1d8f4499f65715d684851/litellm/proxy/utils.py#L122) - -#### 3. Start the proxy -```shell -$ litellm --config /path/to/config.yaml -``` - ### Custom /key/generate If you need to add custom logic before generating a Proxy API Key (Example Validating `team_id`) @@ -568,6 +519,61 @@ litellm_settings: team_id: "core-infra" ``` +### ✨ Key Rotations + +:::info + +This is an Enterprise feature. + +[Enterprise Pricing](https://www.litellm.ai/#pricing) + +[Get free 7-day trial key](https://www.litellm.ai/#trial) + + +::: + +Rotate an existing API Key, while optionally updating its parameters. 
+ +```bash + +curl 'http://localhost:4000/key/sk-1234/regenerate' \ + -X POST \ + -H 'Authorization: Bearer sk-1234' \ + -H 'Content-Type: application/json' \ + -d '{ + "max_budget": 100, + "metadata": { + "team": "core-infra" + }, + "models": [ + "gpt-4", + "gpt-3.5-turbo" + ] + }' + +``` + +**Read More** + +- [Write rotated keys to secrets manager](https://docs.litellm.ai/docs/secret#aws-secret-manager) + +[**👉 API REFERENCE DOCS**](https://litellm-api.up.railway.app/#/key%20management/regenerate_key_fn_key__key__regenerate_post) + + +### Temporary Budget Increase + +Use the `/key/update` endpoint to increase the budget of an existing key. + +```bash +curl -L -X POST 'http://localhost:4000/key/update' \ +-H 'Authorization: Bearer sk-1234' \ +-H 'Content-Type: application/json' \ +-d '{"key": "sk-b3Z3Lqdb_detHXSUp4ol4Q", "temp_budget_increase": 100, "temp_budget_expiry": "10d"}' +``` + +[API Reference](https://litellm-api.up.railway.app/#/key%20management/update_key_fn_key_update_post) + + ### Restricting Key Generation Use this to control who can generate keys. Useful when letting others create keys on the UI. diff --git a/docs/my-website/docs/reasoning_content.md b/docs/my-website/docs/reasoning_content.md new file mode 100644 index 0000000000..5cf287e737 --- /dev/null +++ b/docs/my-website/docs/reasoning_content.md @@ -0,0 +1,357 @@ +import Tabs from '@theme/Tabs'; +import TabItem from '@theme/TabItem'; + +# 'Thinking' / 'Reasoning Content' + +Supported Providers: +- Deepseek (`deepseek/`) +- Anthropic API (`anthropic/`) +- Bedrock (Anthropic + Deepseek) (`bedrock/`) +- Vertex AI (Anthropic) (`vertexai/`) + +```python +"message": { + ... + "reasoning_content": "The capital of France is Paris.", + "thinking_blocks": [ + { + "type": "thinking", + "thinking": "The capital of France is Paris.", + "signature": "EqoBCkgIARABGAIiQL2UoU0b1OHYi+..." 
+ } + ] +} +``` + +## Quick Start + + + + +```python +from litellm import completion +import os + +os.environ["ANTHROPIC_API_KEY"] = "" + +response = completion( + model="anthropic/claude-3-7-sonnet-20250219", + messages=[ + {"role": "user", "content": "What is the capital of France?"}, + ], + thinking={"type": "enabled", "budget_tokens": 1024} # 👈 REQUIRED FOR ANTHROPIC models (on `anthropic/`, `bedrock/`, `vertexai/`) +) +print(response.choices[0].message.content) +``` + + + + +```bash +curl http://0.0.0.0:4000/v1/chat/completions \ + -H "Content-Type: application/json" \ + -H "Authorization: Bearer $LITELLM_KEY" \ + -d '{ + "model": "anthropic/claude-3-7-sonnet-20250219", + "messages": [ + { + "role": "user", + "content": "What is the capital of France?" + } + ], + "thinking": {"type": "enabled", "budget_tokens": 1024} +}' +``` + + + +**Expected Response** + +```bash +{ + "id": "3b66124d79a708e10c603496b363574c", + "choices": [ + { + "finish_reason": "stop", + "index": 0, + "message": { + "content": " won the FIFA World Cup in 2022.", + "role": "assistant", + "tool_calls": null, + "function_call": null + } + } + ], + "created": 1723323084, + "model": "deepseek/deepseek-chat", + "object": "chat.completion", + "system_fingerprint": "fp_7e0991cad4", + "usage": { + "completion_tokens": 12, + "prompt_tokens": 16, + "total_tokens": 28, + }, + "service_tier": null +} +``` + +## Tool Calling with `thinking` + +Here's how to use `thinking` blocks by Anthropic with tool calling. + + + + +```python +litellm._turn_on_debug() +litellm.modify_params = True +model = "anthropic/claude-3-7-sonnet-20250219" # works across Anthropic, Bedrock, Vertex AI +# Step 1: send the conversation and available functions to the model +messages = [ + { + "role": "user", + "content": "What's the weather like in San Francisco, Tokyo, and Paris? 
- give me 3 responses",
+    }
+]
+tools = [
+    {
+        "type": "function",
+        "function": {
+            "name": "get_current_weather",
+            "description": "Get the current weather in a given location",
+            "parameters": {
+                "type": "object",
+                "properties": {
+                    "location": {
+                        "type": "string",
+                        "description": "The city and state",
+                    },
+                    "unit": {
+                        "type": "string",
+                        "enum": ["celsius", "fahrenheit"],
+                    },
+                },
+                "required": ["location"],
+            },
+        },
+    }
+]
+response = litellm.completion(
+    model=model,
+    messages=messages,
+    tools=tools,
+    tool_choice="auto",  # auto is default, but we'll be explicit
+    thinking={"type": "enabled", "budget_tokens": 1024},
+)
+print("Response\n", response)
+response_message = response.choices[0].message
+tool_calls = response_message.tool_calls
+
+print("Expecting there to be 3 tool calls")
+assert (
+    len(tool_calls) > 0
+)  # this has to call the function for SF, Tokyo and Paris
+
+# Step 2: check if the model wanted to call a function
+print(f"tool_calls: {tool_calls}")
+if tool_calls:
+    # Step 3: call the function
+    # Note: the JSON response may not always be valid; be sure to handle errors
+    available_functions = {
+        "get_current_weather": get_current_weather,
+    }  # only one function in this example, but you can have multiple
+    messages.append(
+        response_message
+    )  # extend conversation with assistant's reply
+    print("Response message\n", response_message)
+    # Step 4: send the info for each function call and function response to the model
+    for tool_call in tool_calls:
+        function_name = tool_call.function.name
+        if function_name not in available_functions:
+            # the model called a function that does not exist in available_functions - don't try calling anything
+            continue
+        function_to_call = available_functions[function_name]
+        function_args = json.loads(tool_call.function.arguments)
+        function_response = function_to_call(
+            location=function_args.get("location"),
+            unit=function_args.get("unit"),
+        )
+        messages.append(
+            {
+                "tool_call_id": 
tool_call.id, + "role": "tool", + "name": function_name, + "content": function_response, + } + ) # extend conversation with function response + print(f"messages: {messages}") + second_response = litellm.completion( + model=model, + messages=messages, + seed=22, + # tools=tools, + drop_params=True, + thinking={"type": "enabled", "budget_tokens": 1024}, + ) # get a new response from the model where it can see the function response + print("second response\n", second_response) +``` + + + + +1. Setup config.yaml + +```yaml +model_list: + - model_name: claude-3-7-sonnet-thinking + litellm_params: + model: anthropic/claude-3-7-sonnet-20250219 + api_key: os.environ/ANTHROPIC_API_KEY + thinking: { + "type": "enabled", + "budget_tokens": 1024 + } +``` + +2. Run proxy + +```bash +litellm --config config.yaml + +# RUNNING on http://0.0.0.0:4000 +``` + +3. Make 1st call + +```bash +curl http://0.0.0.0:4000/v1/chat/completions \ + -H "Content-Type: application/json" \ + -H "Authorization: Bearer $LITELLM_KEY" \ + -d '{ + "model": "claude-3-7-sonnet-thinking", + "messages": [ + {"role": "user", "content": "What's the weather like in San Francisco, Tokyo, and Paris? - give me 3 responses"}, + ], + "tools": [ + { + "type": "function", + "function": { + "name": "get_current_weather", + "description": "Get the current weather in a given location", + "parameters": { + "type": "object", + "properties": { + "location": { + "type": "string", + "description": "The city and state", + }, + "unit": { + "type": "string", + "enum": ["celsius", "fahrenheit"], + }, + }, + "required": ["location"], + }, + }, + } + ], + "tool_choice": "auto" + }' +``` + +4. Make 2nd call with tool call results + +```bash +curl http://0.0.0.0:4000/v1/chat/completions \ + -H "Content-Type: application/json" \ + -H "Authorization: Bearer $LITELLM_KEY" \ + -d '{ + "model": "claude-3-7-sonnet-thinking", + "messages": [ + { + "role": "user", + "content": "What\'s the weather like in San Francisco, Tokyo, and Paris? 
- give me 3 responses" + }, + { + "role": "assistant", + "content": "I\'ll check the current weather for these three cities for you:", + "tool_calls": [ + { + "index": 2, + "function": { + "arguments": "{\"location\": \"San Francisco\"}", + "name": "get_current_weather" + }, + "id": "tooluse_mnqzmtWYRjCxUInuAdK7-w", + "type": "function" + } + ], + "function_call": null, + "reasoning_content": "The user is asking for the current weather in three different locations: San Francisco, Tokyo, and Paris. I have access to the `get_current_weather` function that can provide this information.\n\nThe function requires a `location` parameter, and has an optional `unit` parameter. The user hasn't specified which unit they prefer (celsius or fahrenheit), so I'll use the default provided by the function.\n\nI need to make three separate function calls, one for each location:\n1. San Francisco\n2. Tokyo\n3. Paris\n\nThen I'll compile the results into a response with three distinct weather reports as requested by the user.", + "thinking_blocks": [ + { + "type": "thinking", + "thinking": "The user is asking for the current weather in three different locations: San Francisco, Tokyo, and Paris. I have access to the `get_current_weather` function that can provide this information.\n\nThe function requires a `location` parameter, and has an optional `unit` parameter. The user hasn't specified which unit they prefer (celsius or fahrenheit), so I'll use the default provided by the function.\n\nI need to make three separate function calls, one for each location:\n1. San Francisco\n2. Tokyo\n3. 
Paris\n\nThen I'll compile the results into a response with three distinct weather reports as requested by the user.", + "signature": "EqoBCkgIARABGAIiQCkBXENoyB+HstUOs/iGjG+bvDbIQRrxPsPpOSt5yDxX6iulZ/4K/w9Rt4J5Nb2+3XUYsyOH+CpZMfADYvItFR4SDPb7CmzoGKoolCMAJRoM62p1ZRASZhrD3swqIjAVY7vOAFWKZyPEJglfX/60+bJphN9W1wXR6rWrqn3MwUbQ5Mb/pnpeb10HMploRgUqEGKOd6fRKTkUoNDuAnPb55c=" + } + ], + "provider_specific_fields": { + "reasoningContentBlocks": [ + { + "reasoningText": { + "signature": "EqoBCkgIARABGAIiQCkBXENoyB+HstUOs/iGjG+bvDbIQRrxPsPpOSt5yDxX6iulZ/4K/w9Rt4J5Nb2+3XUYsyOH+CpZMfADYvItFR4SDPb7CmzoGKoolCMAJRoM62p1ZRASZhrD3swqIjAVY7vOAFWKZyPEJglfX/60+bJphN9W1wXR6rWrqn3MwUbQ5Mb/pnpeb10HMploRgUqEGKOd6fRKTkUoNDuAnPb55c=", + "text": "The user is asking for the current weather in three different locations: San Francisco, Tokyo, and Paris. I have access to the `get_current_weather` function that can provide this information.\n\nThe function requires a `location` parameter, and has an optional `unit` parameter. The user hasn't specified which unit they prefer (celsius or fahrenheit), so I'll use the default provided by the function.\n\nI need to make three separate function calls, one for each location:\n1. San Francisco\n2. Tokyo\n3. Paris\n\nThen I'll compile the results into a response with three distinct weather reports as requested by the user." + } + } + ] + } + }, + { + "tool_call_id": "tooluse_mnqzmtWYRjCxUInuAdK7-w", + "role": "tool", + "name": "get_current_weather", + "content": "{\"location\": \"San Francisco\", \"temperature\": \"72\", \"unit\": \"fahrenheit\"}" + } + ] + }' +``` + + + + +## Switching between Anthropic + Deepseek models + +Set `drop_params=True` to drop the 'thinking' blocks when swapping from Anthropic to Deepseek models. Suggest improvements to this approach [here](https://github.com/BerriAI/litellm/discussions/8927). 
+ +```python +litellm.drop_params = True # 👈 EITHER GLOBALLY or per request + +# or per request +## Anthropic +response = litellm.completion( + model="anthropic/claude-3-7-sonnet-20250219", + messages=[{"role": "user", "content": "What is the capital of France?"}], + thinking={"type": "enabled", "budget_tokens": 1024}, + drop_params=True, +) + +## Deepseek +response = litellm.completion( + model="deepseek/deepseek-chat", + messages=[{"role": "user", "content": "What is the capital of France?"}], + thinking={"type": "enabled", "budget_tokens": 1024}, + drop_params=True, +) +``` + +## Spec + + +These fields can be accessed via `response.choices[0].message.reasoning_content` and `response.choices[0].message.thinking_blocks`. + +- `reasoning_content` - str: The reasoning content from the model. Returned across all providers. +- `thinking_blocks` - Optional[List[Dict[str, str]]]: A list of thinking blocks from the model. Only returned for Anthropic models. + - `type` - str: The type of thinking block. + - `thinking` - str: The thinking from the model. + - `signature` - str: The signature delta from the model. 
+ diff --git a/docs/my-website/docs/rerank.md b/docs/my-website/docs/rerank.md index 598c672942..cc58c374c7 100644 --- a/docs/my-website/docs/rerank.md +++ b/docs/my-website/docs/rerank.md @@ -111,7 +111,7 @@ curl http://0.0.0.0:4000/rerank \ | Provider | Link to Usage | |-------------|--------------------| -| Cohere | [Usage](#quick-start) | +| Cohere (v1 + v2 clients) | [Usage](#quick-start) | | Together AI| [Usage](../docs/providers/togetherai) | | Azure AI| [Usage](../docs/providers/azure_ai) | | Jina AI| [Usage](../docs/providers/jina_ai) | diff --git a/docs/my-website/docs/routing.md b/docs/my-website/docs/routing.md index 308b850e45..0ad28b24f4 100644 --- a/docs/my-website/docs/routing.md +++ b/docs/my-website/docs/routing.md @@ -826,6 +826,65 @@ asyncio.run(router_acompletion()) ## Basic Reliability +### Weighted Deployments + +Set `weight` on a deployment to pick one deployment more often than others. + +This works across **ALL** routing strategies. + + + + +```python +from litellm import Router + +model_list = [ + { + "model_name": "o1", + "litellm_params": { + "model": "o1-preview", + "api_key": os.getenv("OPENAI_API_KEY"), + "weight": 1 + }, + }, + { + "model_name": "o1", + "litellm_params": { + "model": "o1-preview", + "api_key": os.getenv("OPENAI_API_KEY"), + "weight": 2 # 👈 PICK THIS DEPLOYMENT 2x MORE OFTEN THAN o1-preview + }, + }, +] + +router = Router(model_list=model_list, routing_strategy="cost-based-routing") + +response = await router.acompletion( + model="gpt-3.5-turbo", + messages=[{"role": "user", "content": "Hey, how's it going?"}] +) +print(response) +``` + + + +```yaml +model_list: + - model_name: o1 + litellm_params: + model: o1 + api_key: os.environ/OPENAI_API_KEY + weight: 1 + - model_name: o1 + litellm_params: + model: o1-preview + api_key: os.environ/OPENAI_API_KEY + weight: 2 # 👈 PICK THIS DEPLOYMENT 2x MORE OFTEN THAN o1-preview +``` + + + + ### Max Parallel Requests (ASYNC) Used in semaphore for async requests on router. 
Limit the max concurrent calls made to a deployment. Useful in high-traffic scenarios. @@ -893,8 +952,8 @@ router_settings: ``` Defaults: -- allowed_fails: 0 -- cooldown_time: 60s +- allowed_fails: 3 +- cooldown_time: 5s (`DEFAULT_COOLDOWN_TIME_SECONDS` in constants.py) **Set Per Model** diff --git a/docs/my-website/docs/secret.md b/docs/my-website/docs/secret.md index a65c696f36..7676164259 100644 --- a/docs/my-website/docs/secret.md +++ b/docs/my-website/docs/secret.md @@ -96,6 +96,33 @@ litellm --config /path/to/config.yaml ``` +### Using K/V pairs in 1 AWS Secret + +You can read multiple keys from a single AWS Secret using the `primary_secret_name` parameter: + +```yaml +general_settings: + key_management_system: "aws_secret_manager" + key_management_settings: + hosted_keys: [ + "OPENAI_API_KEY_MODEL_1", + "OPENAI_API_KEY_MODEL_2", + ] + primary_secret_name: "litellm_secrets" # 👈 Read multiple keys from one JSON secret +``` + +The `primary_secret_name` allows you to read multiple keys from a single AWS Secret as a JSON object. For example, the "litellm_secrets" would contain: + +```json +{ + "OPENAI_API_KEY_MODEL_1": "sk-key1...", + "OPENAI_API_KEY_MODEL_2": "sk-key2..." +} +``` + +This reduces the number of AWS Secrets you need to manage. + + ## Hashicorp Vault @@ -353,4 +380,7 @@ general_settings: # Hosted Keys Settings hosted_keys: ["litellm_master_key"] # OPTIONAL. Specify which env keys you stored on AWS + + # K/V pairs in 1 AWS Secret Settings + primary_secret_name: "litellm_secrets" # OPTIONAL. 
Read multiple keys from one JSON secret on AWS Secret Manager ``` \ No newline at end of file diff --git a/docs/my-website/docs/set_keys.md b/docs/my-website/docs/set_keys.md index 7e63b5a888..3a5ff08d63 100644 --- a/docs/my-website/docs/set_keys.md +++ b/docs/my-website/docs/set_keys.md @@ -30,6 +30,7 @@ import os # Set OpenAI API key os.environ["OPENAI_API_KEY"] = "Your API Key" os.environ["ANTHROPIC_API_KEY"] = "Your API Key" +os.environ["XAI_API_KEY"] = "Your API Key" os.environ["REPLICATE_API_KEY"] = "Your API Key" os.environ["TOGETHERAI_API_KEY"] = "Your API Key" ``` diff --git a/docs/my-website/docs/tutorials/litellm_proxy_aporia.md b/docs/my-website/docs/tutorials/litellm_proxy_aporia.md index 3b5bada2bc..143512f99c 100644 --- a/docs/my-website/docs/tutorials/litellm_proxy_aporia.md +++ b/docs/my-website/docs/tutorials/litellm_proxy_aporia.md @@ -2,9 +2,9 @@ import Image from '@theme/IdealImage'; import Tabs from '@theme/Tabs'; import TabItem from '@theme/TabItem'; -# Use LiteLLM AI Gateway with Aporia Guardrails +# Aporia Guardrails with LiteLLM Gateway -In this tutorial we will use LiteLLM Proxy with Aporia to detect PII in requests and profanity in responses +In this tutorial we will use LiteLLM AI Gateway with Aporia to detect PII in requests and profanity in responses ## 1. Setup guardrails on Aporia diff --git a/docs/my-website/docs/tutorials/openweb_ui.md b/docs/my-website/docs/tutorials/openweb_ui.md new file mode 100644 index 0000000000..ab1e2e121e --- /dev/null +++ b/docs/my-website/docs/tutorials/openweb_ui.md @@ -0,0 +1,103 @@ +import Image from '@theme/IdealImage'; +import Tabs from '@theme/Tabs'; +import TabItem from '@theme/TabItem'; + +# OpenWeb UI with LiteLLM + +This guide walks you through connecting OpenWeb UI to LiteLLM. Using LiteLLM with OpenWeb UI allows teams to +- Access 100+ LLMs on OpenWeb UI +- Track Spend / Usage, Set Budget Limits +- Send Request/Response Logs to logging destinations like langfuse, s3, gcs buckets, etc. 
+- Set access controls eg. Control what models OpenWebUI can access. + +## Quickstart + +- Make sure to setup LiteLLM with the [LiteLLM Getting Started Guide](https://docs.litellm.ai/docs/proxy/docker_quick_start) + + +## 1. Start LiteLLM & OpenWebUI + +- OpenWebUI starts running on [http://localhost:3000](http://localhost:3000) +- LiteLLM starts running on [http://localhost:4000](http://localhost:4000) + + +## 2. Create a Virtual Key on LiteLLM + +Virtual Keys are API Keys that allow you to authenticate to LiteLLM Proxy. We will create a Virtual Key that will allow OpenWebUI to access LiteLLM. + +### 2.1 LiteLLM User Management Hierarchy + +On LiteLLM, you can create Organizations, Teams, Users and Virtual Keys. For this tutorial, we will create a Team and a Virtual Key. + +- `Organization` - An Organization is a group of Teams. (US Engineering, EU Developer Tools) +- `Team` - A Team is a group of Users. (OpenWeb UI Team, Data Science Team, etc.) +- `User` - A User is an individual user (employee, developer, eg. `krrish@litellm.ai`) +- `Virtual Key` - A Virtual Key is an API Key that allows you to authenticate to LiteLLM Proxy. A Virtual Key is associated with a User or Team. + +Once the Team is created, you can invite Users to the Team. You can read more about LiteLLM's User Management [here](https://docs.litellm.ai/docs/proxy/user_management_heirarchy). + +### 2.2 Create a Team on LiteLLM + +Navigate to [http://localhost:4000/ui](http://localhost:4000/ui) and create a new team. + + + +### 2.2 Create a Virtual Key on LiteLLM + +Navigate to [http://localhost:4000/ui](http://localhost:4000/ui) and create a new virtual Key. + +LiteLLM allows you to specify what models are available on OpenWeb UI (by specifying the models the key will have access to). + + + +## 3. 
Connect OpenWeb UI to LiteLLM + +On OpenWeb UI, navigate to Settings -> Connections and create a new connection to LiteLLM + +Enter the following details: +- URL: `http://localhost:4000` (your litellm proxy base url) +- Key: `your-virtual-key` (the key you created in the previous step) + + + +### 3.1 Test Request + +On the top left corner, select models you should only see the models you gave the key access to in Step 2. + +Once you selected a model, enter your message content and click on `Submit` + + + +### 3.2 Tracking Spend / Usage + +After your request is made, navigate to `Logs` on the LiteLLM UI, you can see Team, Key, Model, Usage and Cost. + + + + + +## Render `thinking` content on OpenWeb UI + +OpenWebUI requires reasoning/thinking content to be rendered with `` tags. In order to render this for specific models, you can use the `merge_reasoning_content_in_choices` litellm parameter. + +Example litellm config.yaml: + +```yaml +model_list: + - model_name: thinking-anthropic-claude-3-7-sonnet + litellm_params: + model: bedrock/us.anthropic.claude-3-7-sonnet-20250219-v1:0 + thinking: {"type": "enabled", "budget_tokens": 1024} + max_tokens: 1080 + merge_reasoning_content_in_choices: true +``` + +### Test it on OpenWeb UI + +On the models dropdown select `thinking-anthropic-claude-3-7-sonnet` + + + + + + diff --git a/docs/my-website/docusaurus.config.js b/docs/my-website/docusaurus.config.js index cf20dfcd70..8d480131ff 100644 --- a/docs/my-website/docusaurus.config.js +++ b/docs/my-website/docusaurus.config.js @@ -44,7 +44,7 @@ const config = { path: './release_notes', routeBasePath: 'release_notes', blogTitle: 'Release Notes', - blogSidebarTitle: 'All Releases', + blogSidebarTitle: 'Releases', blogSidebarCount: 'ALL', postsPerPage: 'ALL', showReadingTime: false, diff --git a/docs/my-website/img/basic_litellm.gif b/docs/my-website/img/basic_litellm.gif new file mode 100644 index 0000000000..d4cf9fd52a Binary files /dev/null and 
b/docs/my-website/img/basic_litellm.gif differ diff --git a/docs/my-website/img/control_model_access_jwt.png b/docs/my-website/img/control_model_access_jwt.png new file mode 100644 index 0000000000..ab6cda5396 Binary files /dev/null and b/docs/my-website/img/control_model_access_jwt.png differ diff --git a/docs/my-website/img/create_key_in_team_oweb.gif b/docs/my-website/img/create_key_in_team_oweb.gif new file mode 100644 index 0000000000..d24849b259 Binary files /dev/null and b/docs/my-website/img/create_key_in_team_oweb.gif differ diff --git a/docs/my-website/img/litellm_create_team.gif b/docs/my-website/img/litellm_create_team.gif new file mode 100644 index 0000000000..e2f12613ec Binary files /dev/null and b/docs/my-website/img/litellm_create_team.gif differ diff --git a/docs/my-website/img/litellm_setup_openweb.gif b/docs/my-website/img/litellm_setup_openweb.gif new file mode 100644 index 0000000000..5618660d6c Binary files /dev/null and b/docs/my-website/img/litellm_setup_openweb.gif differ diff --git a/docs/my-website/img/litellm_thinking_openweb.gif b/docs/my-website/img/litellm_thinking_openweb.gif new file mode 100644 index 0000000000..385db583a4 Binary files /dev/null and b/docs/my-website/img/litellm_thinking_openweb.gif differ diff --git a/docs/my-website/img/litellm_user_heirarchy.png b/docs/my-website/img/litellm_user_heirarchy.png index 63dba72c21..591b36add7 100644 Binary files a/docs/my-website/img/litellm_user_heirarchy.png and b/docs/my-website/img/litellm_user_heirarchy.png differ diff --git a/docs/my-website/img/message_redaction_logging.png b/docs/my-website/img/message_redaction_logging.png new file mode 100644 index 0000000000..6e210ad182 Binary files /dev/null and b/docs/my-website/img/message_redaction_logging.png differ diff --git a/docs/my-website/img/message_redaction_spend_logs.png b/docs/my-website/img/message_redaction_spend_logs.png new file mode 100644 index 0000000000..eacfac2ece Binary files /dev/null and 
b/docs/my-website/img/message_redaction_spend_logs.png differ diff --git a/docs/my-website/img/release_notes/anthropic_thinking.jpg b/docs/my-website/img/release_notes/anthropic_thinking.jpg new file mode 100644 index 0000000000..f10de06dec Binary files /dev/null and b/docs/my-website/img/release_notes/anthropic_thinking.jpg differ diff --git a/docs/my-website/img/release_notes/error_logs.jpg b/docs/my-website/img/release_notes/error_logs.jpg new file mode 100644 index 0000000000..6f2767e1fb Binary files /dev/null and b/docs/my-website/img/release_notes/error_logs.jpg differ diff --git a/docs/my-website/img/release_notes/v1632_release.jpg b/docs/my-website/img/release_notes/v1632_release.jpg new file mode 100644 index 0000000000..1770460b2a Binary files /dev/null and b/docs/my-website/img/release_notes/v1632_release.jpg differ diff --git a/docs/my-website/release_notes/v1.55.10/index.md b/docs/my-website/release_notes/v1.55.10/index.md index 2d509a5d53..7f9839c2b5 100644 --- a/docs/my-website/release_notes/v1.55.10/index.md +++ b/docs/my-website/release_notes/v1.55.10/index.md @@ -6,7 +6,7 @@ authors: - name: Krrish Dholakia title: CEO, LiteLLM url: https://www.linkedin.com/in/krish-d/ - image_url: https://media.licdn.com/dms/image/v2/C5103AQHYMXJfHTf4Ng/profile-displayphoto-shrink_800_800/profile-displayphoto-shrink_800_800/0/1517455593871?e=1741824000&v=beta&t=udmat6jS-s3EQZp1DTykf7NZmf-3sefD_I9B1aMjE5Y + image_url: https://media.licdn.com/dms/image/v2/D4D03AQGrlsJ3aqpHmQ/profile-displayphoto-shrink_400_400/B4DZSAzgP7HYAg-/0/1737327772964?e=1743638400&v=beta&t=39KOXMUFedvukiWWVPHf3qI45fuQD7lNglICwN31DrI - name: Ishaan Jaffer title: CTO, LiteLLM url: https://www.linkedin.com/in/reffajnaahsi/ diff --git a/docs/my-website/release_notes/v1.55.8-stable/index.md b/docs/my-website/release_notes/v1.55.8-stable/index.md index 7887192eca..7e82e94747 100644 --- a/docs/my-website/release_notes/v1.55.8-stable/index.md +++ 
b/docs/my-website/release_notes/v1.55.8-stable/index.md @@ -6,7 +6,7 @@ authors: - name: Krrish Dholakia title: CEO, LiteLLM url: https://www.linkedin.com/in/krish-d/ - image_url: https://media.licdn.com/dms/image/v2/C5103AQHYMXJfHTf4Ng/profile-displayphoto-shrink_800_800/profile-displayphoto-shrink_800_800/0/1517455593871?e=1741824000&v=beta&t=udmat6jS-s3EQZp1DTykf7NZmf-3sefD_I9B1aMjE5Y + image_url: https://media.licdn.com/dms/image/v2/D4D03AQGrlsJ3aqpHmQ/profile-displayphoto-shrink_400_400/B4DZSAzgP7HYAg-/0/1737327772964?e=1743638400&v=beta&t=39KOXMUFedvukiWWVPHf3qI45fuQD7lNglICwN31DrI - name: Ishaan Jaffer title: CTO, LiteLLM url: https://www.linkedin.com/in/reffajnaahsi/ diff --git a/docs/my-website/release_notes/v1.56.1/index.md b/docs/my-website/release_notes/v1.56.1/index.md index 1a1b1aaa1a..7c4ccc74ea 100644 --- a/docs/my-website/release_notes/v1.56.1/index.md +++ b/docs/my-website/release_notes/v1.56.1/index.md @@ -6,7 +6,7 @@ authors: - name: Krrish Dholakia title: CEO, LiteLLM url: https://www.linkedin.com/in/krish-d/ - image_url: https://media.licdn.com/dms/image/v2/C5103AQHYMXJfHTf4Ng/profile-displayphoto-shrink_800_800/profile-displayphoto-shrink_800_800/0/1517455593871?e=1741824000&v=beta&t=udmat6jS-s3EQZp1DTykf7NZmf-3sefD_I9B1aMjE5Y + image_url: https://media.licdn.com/dms/image/v2/D4D03AQGrlsJ3aqpHmQ/profile-displayphoto-shrink_400_400/B4DZSAzgP7HYAg-/0/1737327772964?e=1743638400&v=beta&t=39KOXMUFedvukiWWVPHf3qI45fuQD7lNglICwN31DrI - name: Ishaan Jaffer title: CTO, LiteLLM url: https://www.linkedin.com/in/reffajnaahsi/ diff --git a/docs/my-website/release_notes/v1.56.3/index.md b/docs/my-website/release_notes/v1.56.3/index.md index 6ec6a6e2ec..95205633ea 100644 --- a/docs/my-website/release_notes/v1.56.3/index.md +++ b/docs/my-website/release_notes/v1.56.3/index.md @@ -6,7 +6,7 @@ authors: - name: Krrish Dholakia title: CEO, LiteLLM url: https://www.linkedin.com/in/krish-d/ - image_url: 
https://media.licdn.com/dms/image/v2/C5103AQHYMXJfHTf4Ng/profile-displayphoto-shrink_800_800/profile-displayphoto-shrink_800_800/0/1517455593871?e=1741824000&v=beta&t=udmat6jS-s3EQZp1DTykf7NZmf-3sefD_I9B1aMjE5Y + image_url: https://media.licdn.com/dms/image/v2/D4D03AQGrlsJ3aqpHmQ/profile-displayphoto-shrink_400_400/B4DZSAzgP7HYAg-/0/1737327772964?e=1743638400&v=beta&t=39KOXMUFedvukiWWVPHf3qI45fuQD7lNglICwN31DrI - name: Ishaan Jaffer title: CTO, LiteLLM url: https://www.linkedin.com/in/reffajnaahsi/ diff --git a/docs/my-website/release_notes/v1.56.4/index.md b/docs/my-website/release_notes/v1.56.4/index.md index ea0cf0e73c..93f8725632 100644 --- a/docs/my-website/release_notes/v1.56.4/index.md +++ b/docs/my-website/release_notes/v1.56.4/index.md @@ -6,7 +6,7 @@ authors: - name: Krrish Dholakia title: CEO, LiteLLM url: https://www.linkedin.com/in/krish-d/ - image_url: https://media.licdn.com/dms/image/v2/C5103AQHYMXJfHTf4Ng/profile-displayphoto-shrink_800_800/profile-displayphoto-shrink_800_800/0/1517455593871?e=1741824000&v=beta&t=udmat6jS-s3EQZp1DTykf7NZmf-3sefD_I9B1aMjE5Y + image_url: https://media.licdn.com/dms/image/v2/D4D03AQGrlsJ3aqpHmQ/profile-displayphoto-shrink_400_400/B4DZSAzgP7HYAg-/0/1737327772964?e=1743638400&v=beta&t=39KOXMUFedvukiWWVPHf3qI45fuQD7lNglICwN31DrI - name: Ishaan Jaffer title: CTO, LiteLLM url: https://www.linkedin.com/in/reffajnaahsi/ diff --git a/docs/my-website/release_notes/v1.57.3/index.md b/docs/my-website/release_notes/v1.57.3/index.md index 0a5fba3e52..3bee71a8e1 100644 --- a/docs/my-website/release_notes/v1.57.3/index.md +++ b/docs/my-website/release_notes/v1.57.3/index.md @@ -6,7 +6,7 @@ authors: - name: Krrish Dholakia title: CEO, LiteLLM url: https://www.linkedin.com/in/krish-d/ - image_url: https://media.licdn.com/dms/image/v2/C5103AQHYMXJfHTf4Ng/profile-displayphoto-shrink_800_800/profile-displayphoto-shrink_800_800/0/1517455593871?e=1741824000&v=beta&t=udmat6jS-s3EQZp1DTykf7NZmf-3sefD_I9B1aMjE5Y + image_url: 
https://media.licdn.com/dms/image/v2/D4D03AQGrlsJ3aqpHmQ/profile-displayphoto-shrink_400_400/B4DZSAzgP7HYAg-/0/1737327772964?e=1743638400&v=beta&t=39KOXMUFedvukiWWVPHf3qI45fuQD7lNglICwN31DrI - name: Ishaan Jaffer title: CTO, LiteLLM url: https://www.linkedin.com/in/reffajnaahsi/ diff --git a/docs/my-website/release_notes/v1.57.7/index.md b/docs/my-website/release_notes/v1.57.7/index.md index 747aaba4bd..ce987baf77 100644 --- a/docs/my-website/release_notes/v1.57.7/index.md +++ b/docs/my-website/release_notes/v1.57.7/index.md @@ -6,7 +6,7 @@ authors: - name: Krrish Dholakia title: CEO, LiteLLM url: https://www.linkedin.com/in/krish-d/ - image_url: https://media.licdn.com/dms/image/v2/C5103AQHYMXJfHTf4Ng/profile-displayphoto-shrink_800_800/profile-displayphoto-shrink_800_800/0/1517455593871?e=1741824000&v=beta&t=udmat6jS-s3EQZp1DTykf7NZmf-3sefD_I9B1aMjE5Y + image_url: https://media.licdn.com/dms/image/v2/D4D03AQGrlsJ3aqpHmQ/profile-displayphoto-shrink_400_400/B4DZSAzgP7HYAg-/0/1737327772964?e=1743638400&v=beta&t=39KOXMUFedvukiWWVPHf3qI45fuQD7lNglICwN31DrI - name: Ishaan Jaffer title: CTO, LiteLLM url: https://www.linkedin.com/in/reffajnaahsi/ diff --git a/docs/my-website/release_notes/v1.57.8-stable/index.md b/docs/my-website/release_notes/v1.57.8-stable/index.md index 4c54e35cba..d37a7b9ff8 100644 --- a/docs/my-website/release_notes/v1.57.8-stable/index.md +++ b/docs/my-website/release_notes/v1.57.8-stable/index.md @@ -6,7 +6,7 @@ authors: - name: Krrish Dholakia title: CEO, LiteLLM url: https://www.linkedin.com/in/krish-d/ - image_url: https://media.licdn.com/dms/image/v2/C5103AQHYMXJfHTf4Ng/profile-displayphoto-shrink_800_800/profile-displayphoto-shrink_800_800/0/1517455593871?e=1741824000&v=beta&t=udmat6jS-s3EQZp1DTykf7NZmf-3sefD_I9B1aMjE5Y + image_url: https://media.licdn.com/dms/image/v2/D4D03AQGrlsJ3aqpHmQ/profile-displayphoto-shrink_400_400/B4DZSAzgP7HYAg-/0/1737327772964?e=1743638400&v=beta&t=39KOXMUFedvukiWWVPHf3qI45fuQD7lNglICwN31DrI - name: Ishaan Jaffer 
title: CTO, LiteLLM url: https://www.linkedin.com/in/reffajnaahsi/ @@ -18,13 +18,6 @@ hide_table_of_contents: false `alerting`, `prometheus`, `secret management`, `management endpoints`, `ui`, `prompt management`, `finetuning`, `batch` -:::note - -v1.57.8-stable, is currently being tested. It will be released on 2025-01-12. - -::: - - ## New / Updated Models 1. Mistral large pricing - https://github.com/BerriAI/litellm/pull/7452 diff --git a/docs/my-website/release_notes/v1.59.0/index.md b/docs/my-website/release_notes/v1.59.0/index.md index 515ff464e2..5343ba49ad 100644 --- a/docs/my-website/release_notes/v1.59.0/index.md +++ b/docs/my-website/release_notes/v1.59.0/index.md @@ -6,7 +6,7 @@ authors: - name: Krrish Dholakia title: CEO, LiteLLM url: https://www.linkedin.com/in/krish-d/ - image_url: https://media.licdn.com/dms/image/v2/C5103AQHYMXJfHTf4Ng/profile-displayphoto-shrink_800_800/profile-displayphoto-shrink_800_800/0/1517455593871?e=1741824000&v=beta&t=udmat6jS-s3EQZp1DTykf7NZmf-3sefD_I9B1aMjE5Y + image_url: https://media.licdn.com/dms/image/v2/D4D03AQGrlsJ3aqpHmQ/profile-displayphoto-shrink_400_400/B4DZSAzgP7HYAg-/0/1737327772964?e=1743638400&v=beta&t=39KOXMUFedvukiWWVPHf3qI45fuQD7lNglICwN31DrI - name: Ishaan Jaffer title: CTO, LiteLLM url: https://www.linkedin.com/in/reffajnaahsi/ diff --git a/docs/my-website/release_notes/v1.59.8-stable/index.md b/docs/my-website/release_notes/v1.59.8-stable/index.md new file mode 100644 index 0000000000..fa9825fb66 --- /dev/null +++ b/docs/my-website/release_notes/v1.59.8-stable/index.md @@ -0,0 +1,161 @@ +--- +title: v1.59.8-stable +slug: v1.59.8-stable +date: 2025-01-31T10:00:00 +authors: + - name: Krrish Dholakia + title: CEO, LiteLLM + url: https://www.linkedin.com/in/krish-d/ + image_url: https://media.licdn.com/dms/image/v2/D4D03AQGrlsJ3aqpHmQ/profile-displayphoto-shrink_400_400/B4DZSAzgP7HYAg-/0/1737327772964?e=1743638400&v=beta&t=39KOXMUFedvukiWWVPHf3qI45fuQD7lNglICwN31DrI + - name: Ishaan Jaffer + title: CTO, 
LiteLLM + url: https://www.linkedin.com/in/reffajnaahsi/ + image_url: https://media.licdn.com/dms/image/v2/D4D03AQGiM7ZrUwqu_Q/profile-displayphoto-shrink_800_800/profile-displayphoto-shrink_800_800/0/1675971026692?e=1741824000&v=beta&t=eQnRdXPJo4eiINWTZARoYTfqh064pgZ-E21pQTSy8jc +tags: [admin ui, logging, db schema] +hide_table_of_contents: false +--- + +import Image from '@theme/IdealImage'; + +# v1.59.8-stable + + + +:::info + +Get a 7 day free trial for LiteLLM Enterprise [here](https://litellm.ai/#trial). + +**no call needed** + +::: + + +## New Models / Updated Models + +1. New OpenAI `/image/variations` endpoint BETA support [Docs](../../docs/image_variations) +2. Topaz API support on OpenAI `/image/variations` BETA endpoint [Docs](../../docs/providers/topaz) +3. Deepseek - r1 support w/ reasoning_content ([Deepseek API](../../docs/providers/deepseek#reasoning-models), [Vertex AI](../../docs/providers/vertex#model-garden), [Bedrock](../../docs/providers/bedrock#deepseek)) +4. Azure - Add azure o1 pricing [See Here](https://github.com/BerriAI/litellm/blob/b8b927f23bc336862dacb89f59c784a8d62aaa15/model_prices_and_context_window.json#L952) +5. Anthropic - handle `-latest` tag in model for cost calculation +6. Gemini-2.0-flash-thinking - add model pricing (it’s 0.0) [See Here](https://github.com/BerriAI/litellm/blob/b8b927f23bc336862dacb89f59c784a8d62aaa15/model_prices_and_context_window.json#L3393) +7. Bedrock - add stability sd3 model pricing [See Here](https://github.com/BerriAI/litellm/blob/b8b927f23bc336862dacb89f59c784a8d62aaa15/model_prices_and_context_window.json#L6814) (s/o [Marty Sullivan](https://github.com/marty-sullivan)) +8. Bedrock - add us.amazon.nova-lite-v1:0 to model cost map [See Here](https://github.com/BerriAI/litellm/blob/b8b927f23bc336862dacb89f59c784a8d62aaa15/model_prices_and_context_window.json#L5619) +9. 
TogetherAI - add new together_ai llama3.3 models [See Here](https://github.com/BerriAI/litellm/blob/b8b927f23bc336862dacb89f59c784a8d62aaa15/model_prices_and_context_window.json#L6985) + +## LLM Translation + +1. LM Studio -> fix async embedding call +2. Gpt 4o models - fix response_format translation +3. Bedrock nova - expand supported document types to include .md, .csv, etc. [Start Here](../../docs/providers/bedrock#usage---pdf--document-understanding) +4. Bedrock - docs on IAM role based access for bedrock - [Start Here](https://docs.litellm.ai/docs/providers/bedrock#sts-role-based-auth) +5. Bedrock - cache IAM role credentials when used +6. Google AI Studio (`gemini/`) - support gemini 'frequency_penalty' and 'presence_penalty' +7. Azure O1 - fix model name check +8. WatsonX - ZenAPIKey support for WatsonX [Docs](../../docs/providers/watsonx) +9. Ollama Chat - support json schema response format [Start Here](../../docs/providers/ollama#json-schema-support) +10. Bedrock - return correct bedrock status code and error message if error during streaming +11. Anthropic - Supported nested json schema on anthropic calls +12. OpenAI - `metadata` param preview support + 1. SDK - enable via `litellm.enable_preview_features = True` + 2. PROXY - enable via `litellm_settings::enable_preview_features: true` +13. Replicate - retry completion response on status=processing + +## Spend Tracking Improvements + +1. Bedrock - QA asserts all bedrock regional models have same `supported_` as base model +2. Bedrock - fix bedrock converse cost tracking w/ region name specified +3. Spend Logs reliability fix - when `user` passed in request body is int instead of string +4. Ensure ‘base_model’ cost tracking works across all endpoints +5. Fixes for Image generation cost tracking +6. Anthropic - fix anthropic end user cost tracking +7. JWT / OIDC Auth - add end user id tracking from jwt auth + +## Management Endpoints / UI + +1. 
allows team member to become admin post-add (ui + endpoints) +2. New edit/delete button for updating team membership on UI +3. If team admin - show all team keys +4. Model Hub - clarify cost of models is per 1m tokens +5. Invitation Links - fix invalid url generated +6. New - SpendLogs Table Viewer - allows proxy admin to view spend logs on UI + 1. New spend logs - allow proxy admin to ‘opt in’ to logging request/response in spend logs table - enables easier abuse detection + 2. Show country of origin in spend logs + 3. Add pagination + filtering by key name/team name +7. `/key/delete` - allow team admin to delete team keys +8. Internal User ‘view’ - fix spend calculation when team selected +9. Model Analytics is now on Free +10. Usage page - shows days when spend = 0, and round spend on charts to 2 sig figs +11. Public Teams - allow admins to expose teams for new users to ‘join’ on UI - [Start Here](https://docs.litellm.ai/docs/proxy/public_teams) +12. Guardrails + 1. set/edit guardrails on a virtual key + 2. Allow setting guardrails on a team + 3. Set guardrails on team create + edit page +13. Support temporary budget increases on `/key/update` - new `temp_budget_increase` and `temp_budget_expiry` fields - [Start Here](../../docs/proxy/virtual_keys#temporary-budget-increase) +14. Support writing new key alias to AWS Secret Manager - on key rotation [Start Here](../../docs/secret#aws-secret-manager) + +## Helm + +1. add securityContext and pull policy values to migration job (s/o https://github.com/Hexoplon) +2. allow specifying envVars on values.yaml +3. new helm lint test + +## Logging / Guardrail Integrations + +1. Log the used prompt when prompt management used. [Start Here](../../docs/proxy/prompt_management) +2. Support s3 logging with team alias prefixes - [Start Here](https://docs.litellm.ai/docs/proxy/logging#team-alias-prefix-in-object-key) +3. Prometheus [Start Here](../../docs/proxy/prometheus) + 1. 
fix litellm_llm_api_time_to_first_token_metric not populating for bedrock models + 2. emit remaining team budget metric on regular basis (even when call isn’t made) - allows for more stable metrics on Grafana/etc. + 3. add key and team level budget metrics + 4. emit `litellm_overhead_latency_metric` + 5. Emit `litellm_team_budget_reset_at_metric` and `litellm_api_key_budget_remaining_hours_metric` +4. Datadog - support logging spend tags to Datadog. [Start Here](../../docs/proxy/enterprise#tracking-spend-for-custom-tags) +5. Langfuse - fix logging request tags, read from standard logging payload +6. GCS - don’t truncate payload on logging +7. New GCS Pub/Sub logging support [Start Here](https://docs.litellm.ai/docs/proxy/logging#google-cloud-storage---pubsub-topic) +8. Add AIM Guardrails support [Start Here](../../docs/proxy/guardrails/aim_security) + +## Security + +1. New Enterprise SLA for patching security vulnerabilities. [See Here](../../docs/enterprise#slas--professional-support) +2. Hashicorp - support using vault namespace for TLS auth. [Start Here](../../docs/secret#hashicorp-vault) +3. Azure - DefaultAzureCredential support + +## Health Checks + +1. Cleanup pricing-only model names from wildcard route list - prevent bad health checks +2. Allow specifying a health check model for wildcard routes - https://docs.litellm.ai/docs/proxy/health#wildcard-routes +3. New ‘health_check_timeout’ param with default 1min upperbound to prevent a bad model’s health check from hanging and causing pod restarts. [Start Here](../../docs/proxy/health#health-check-timeout) +4. Datadog - add data dog service health check + expose new `/health/services` endpoint. [Start Here](../../docs/proxy/health#healthservices) + +## Performance / Reliability improvements + +1. 3x increase in RPS - moving to orjson for reading request body +2. LLM Routing speedup - using cached get model group info +3. SDK speedup - using cached get model info helper - reduces CPU work to get model info +4. 
Proxy speedup - only read request body 1 time per request +5. Infinite loop detection scripts added to codebase +6. Bedrock - pure async image transformation requests +7. Cooldowns - single deployment model group if 100% calls fail in high traffic - prevents an o1 outage from impacting other calls +8. Response Headers - return + 1. `x-litellm-timeout` + 2. `x-litellm-attempted-retries` + 3. `x-litellm-overhead-duration-ms` + 4. `x-litellm-response-duration-ms` +9. ensure duplicate callbacks are not added to proxy +10. Requirements.txt - bump certifi version + +## General Proxy Improvements + +1. JWT / OIDC Auth - new `enforce_rbac` param, allows proxy admin to prevent any unmapped yet authenticated jwt tokens from calling proxy. [Start Here](../../docs/proxy/token_auth#enforce-role-based-access-control-rbac) +2. fix custom OpenAPI schema generation for customized Swagger docs +3. Request Headers - support reading `x-litellm-timeout` param from request headers. Enables model timeout control when using Vercel’s AI SDK + LiteLLM Proxy. [Start Here](../../docs/proxy/request_headers#litellm-headers) +4. JWT / OIDC Auth - new `role` based permissions for model authentication. [See Here](https://docs.litellm.ai/docs/proxy/jwt_auth_arch) + +## Complete Git Diff + +This is the diff between v1.57.8-stable and v1.59.8-stable. + +Use this to see the changes in the codebase. 
+ +[**Git Diff**](https://github.com/BerriAI/litellm/compare/v1.57.8-stable...v1.59.8-stable) diff --git a/docs/my-website/release_notes/v1.61.20-stable/index.md b/docs/my-website/release_notes/v1.61.20-stable/index.md new file mode 100644 index 0000000000..132c1aa318 --- /dev/null +++ b/docs/my-website/release_notes/v1.61.20-stable/index.md @@ -0,0 +1,103 @@ +--- +title: v1.61.20-stable +slug: v1.61.20-stable +date: 2025-03-01T10:00:00 +authors: + - name: Krrish Dholakia + title: CEO, LiteLLM + url: https://www.linkedin.com/in/krish-d/ + image_url: https://media.licdn.com/dms/image/v2/D4D03AQGrlsJ3aqpHmQ/profile-displayphoto-shrink_400_400/B4DZSAzgP7HYAg-/0/1737327772964?e=1743638400&v=beta&t=39KOXMUFedvukiWWVPHf3qI45fuQD7lNglICwN31DrI + - name: Ishaan Jaffer + title: CTO, LiteLLM + url: https://www.linkedin.com/in/reffajnaahsi/ + image_url: https://media.licdn.com/dms/image/v2/D4D03AQGiM7ZrUwqu_Q/profile-displayphoto-shrink_800_800/profile-displayphoto-shrink_800_800/0/1675971026692?e=1741824000&v=beta&t=eQnRdXPJo4eiINWTZARoYTfqh064pgZ-E21pQTSy8jc +tags: [llm translation, rerank, ui, thinking, reasoning_content, claude-3-7-sonnet] +hide_table_of_contents: false +--- + +import Image from '@theme/IdealImage'; + +# v1.61.20-stable + + +These are the changes since `v1.61.13-stable`. + +This release is primarily focused on: +- LLM Translation improvements (claude-3-7-sonnet + 'thinking'/'reasoning_content' support) +- UI improvements (add model flow, user management, etc) + +## Demo Instance + +Here's a Demo Instance to test changes: +- Instance: https://demo.litellm.ai/ +- Login Credentials: + - Username: admin + - Password: sk-1234 + +## New Models / Updated Models + +1. Anthropic 3-7 sonnet support + cost tracking (Anthropic API + Bedrock + Vertex AI + OpenRouter) + 1. Anthropic API [Start here](https://docs.litellm.ai/docs/providers/anthropic#usage---thinking--reasoning_content) + 2. 
Bedrock API [Start here](https://docs.litellm.ai/docs/providers/bedrock#usage---thinking--reasoning-content) + 3. Vertex AI API [See here](../../docs/providers/vertex#usage---thinking--reasoning_content) + 4. OpenRouter [See here](https://github.com/BerriAI/litellm/blob/ba5bdce50a0b9bc822de58c03940354f19a733ed/model_prices_and_context_window.json#L5626) +2. Gpt-4.5-preview support + cost tracking [See here](https://github.com/BerriAI/litellm/blob/ba5bdce50a0b9bc822de58c03940354f19a733ed/model_prices_and_context_window.json#L79) +3. Azure AI - Phi-4 cost tracking [See here](https://github.com/BerriAI/litellm/blob/ba5bdce50a0b9bc822de58c03940354f19a733ed/model_prices_and_context_window.json#L1773) +4. Claude-3.5-sonnet - vision support updated on Anthropic API [See here](https://github.com/BerriAI/litellm/blob/ba5bdce50a0b9bc822de58c03940354f19a733ed/model_prices_and_context_window.json#L2888) +5. Bedrock llama vision support [See here](https://github.com/BerriAI/litellm/blob/ba5bdce50a0b9bc822de58c03940354f19a733ed/model_prices_and_context_window.json#L7714) +6. Cerebras llama3.3-70b pricing [See here](https://github.com/BerriAI/litellm/blob/ba5bdce50a0b9bc822de58c03940354f19a733ed/model_prices_and_context_window.json#L2697) + +## LLM Translation + +1. Infinity Rerank - support returning documents when return_documents=True [Start here](../../docs/providers/infinity#usage---returning-documents) +2. Amazon Deepseek - `<think>` param extraction into ‘reasoning_content’ [Start here](https://docs.litellm.ai/docs/providers/bedrock#bedrock-imported-models-deepseek-deepseek-r1) +3. Amazon Titan Embeddings - filter out ‘aws_’ params from request body [Start here](https://docs.litellm.ai/docs/providers/bedrock#bedrock-embedding) +4. Anthropic ‘thinking’ + ‘reasoning_content’ translation support (Anthropic API, Bedrock, Vertex AI) [Start here](https://docs.litellm.ai/docs/reasoning_content) +5. 
VLLM - support ‘video_url’ [Start here](../../docs/providers/vllm#send-video-url-to-vllm) +6. Call proxy via litellm SDK: Support `litellm_proxy/` for embedding, image_generation, transcription, speech, rerank [Start here](https://docs.litellm.ai/docs/providers/litellm_proxy) +7. OpenAI Pass-through - allow using Assistants GET, DELETE on /openai pass through routes [Start here](https://docs.litellm.ai/docs/pass_through/openai_passthrough) +8. Message Translation - fix openai message for assistant msg if role is missing - openai allows this +9. O1/O3 - support ‘drop_params’ for o3-mini and o1 parallel_tool_calls param (not supported currently) [See here](https://docs.litellm.ai/docs/completion/drop_params) + +## Spend Tracking Improvements + +1. Cost tracking for rerank via Bedrock [See PR](https://github.com/BerriAI/litellm/commit/b682dc4ec8fd07acf2f4c981d2721e36ae2a49c5) +2. Anthropic pass-through - fix race condition causing cost to not be tracked [See PR](https://github.com/BerriAI/litellm/pull/8874) +3. Anthropic pass-through: Ensure accurate token counting [See PR](https://github.com/BerriAI/litellm/pull/8880) + +## Management Endpoints / UI + +1. Models Page - Allow sorting models by ‘created at’ +2. Models Page - Edit Model Flow Improvements +3. Models Page - Fix Adding Azure, Azure AI Studio models on UI +4. Internal Users Page - Allow Bulk Adding Internal Users on UI +5. Internal Users Page - Allow sorting users by ‘created at’ +6. Virtual Keys Page - Allow searching for UserIDs on the dropdown when assigning a user to a team [See PR](https://github.com/BerriAI/litellm/pull/8844) +7. Virtual Keys Page - allow creating a user when assigning keys to users [See PR](https://github.com/BerriAI/litellm/pull/8844) +8. Model Hub Page - fix text overflow issue [See PR](https://github.com/BerriAI/litellm/pull/8749) +9. Admin Settings Page - Allow adding MSFT SSO on UI +10. Backend - don't allow creating duplicate internal users in DB + +## Helm + +1. 
support ttlSecondsAfterFinished on the migration job - [See PR](https://github.com/BerriAI/litellm/pull/8593) +2. enhance migrations job with additional configurable properties - [See PR](https://github.com/BerriAI/litellm/pull/8636) + +## Logging / Guardrail Integrations + +1. Arize Phoenix support +2. ‘No-log’ - fix ‘no-log’ param support on embedding calls + +## Performance / Loadbalancing / Reliability improvements + +1. Single Deployment Cooldown logic - Use allowed_fails or allowed_fail_policy if set [Start here](https://docs.litellm.ai/docs/routing#advanced-custom-retries-cooldowns-based-on-error-type) + +## General Proxy Improvements + +1. Hypercorn - fix reading / parsing request body +2. Windows - fix running proxy in windows +3. DD-Trace - fix dd-trace enablement on proxy + +## Complete Git Diff + +View the complete git diff [here](https://github.com/BerriAI/litellm/compare/v1.61.13-stable...v1.61.20-stable). \ No newline at end of file diff --git a/docs/my-website/release_notes/v1.63.0/index.md b/docs/my-website/release_notes/v1.63.0/index.md new file mode 100644 index 0000000000..e74a2f9b86 --- /dev/null +++ b/docs/my-website/release_notes/v1.63.0/index.md @@ -0,0 +1,40 @@ +--- +title: v1.63.0 - Anthropic 'thinking' response update +slug: v1.63.0 +date: 2025-03-05T10:00:00 +authors: + - name: Krrish Dholakia + title: CEO, LiteLLM + url: https://www.linkedin.com/in/krish-d/ + image_url: https://media.licdn.com/dms/image/v2/D4D03AQGrlsJ3aqpHmQ/profile-displayphoto-shrink_400_400/B4DZSAzgP7HYAg-/0/1737327772964?e=1743638400&v=beta&t=39KOXMUFedvukiWWVPHf3qI45fuQD7lNglICwN31DrI + - name: Ishaan Jaffer + title: CTO, LiteLLM + url: https://www.linkedin.com/in/reffajnaahsi/ + image_url: https://media.licdn.com/dms/image/v2/D4D03AQGiM7ZrUwqu_Q/profile-displayphoto-shrink_800_800/profile-displayphoto-shrink_800_800/0/1675971026692?e=1741824000&v=beta&t=eQnRdXPJo4eiINWTZARoYTfqh064pgZ-E21pQTSy8jc +tags: [llm translation, thinking, reasoning_content, 
claude-3-7-sonnet] +hide_table_of_contents: false +--- + +v1.63.0 fixes Anthropic 'thinking' response on streaming to return the `signature` block. [Github Issue](https://github.com/BerriAI/litellm/issues/8964) + + + +It also moves the response structure from `signature_delta` to `signature` to be the same as Anthropic. [Anthropic Docs](https://docs.anthropic.com/en/docs/build-with-claude/extended-thinking#implementing-extended-thinking) + + +## Diff + +```bash +"message": { + ... + "reasoning_content": "The capital of France is Paris.", + "thinking_blocks": [ + { + "type": "thinking", + "thinking": "The capital of France is Paris.", +- "signature_delta": "EqoBCkgIARABGAIiQL2UoU0b1OHYi+..." # 👈 OLD FORMAT ++ "signature": "EqoBCkgIARABGAIiQL2UoU0b1OHYi+..." # 👈 KEY CHANGE + } + ] +} +``` diff --git a/docs/my-website/release_notes/v1.63.2-stable/index.md b/docs/my-website/release_notes/v1.63.2-stable/index.md new file mode 100644 index 0000000000..0c359452dc --- /dev/null +++ b/docs/my-website/release_notes/v1.63.2-stable/index.md @@ -0,0 +1,112 @@ +--- +title: v1.63.2-stable +slug: v1.63.2-stable +date: 2025-03-08T10:00:00 +authors: + - name: Krrish Dholakia + title: CEO, LiteLLM + url: https://www.linkedin.com/in/krish-d/ + image_url: https://media.licdn.com/dms/image/v2/D4D03AQGrlsJ3aqpHmQ/profile-displayphoto-shrink_400_400/B4DZSAzgP7HYAg-/0/1737327772964?e=1743638400&v=beta&t=39KOXMUFedvukiWWVPHf3qI45fuQD7lNglICwN31DrI + - name: Ishaan Jaffer + title: CTO, LiteLLM + url: https://www.linkedin.com/in/reffajnaahsi/ + image_url: https://media.licdn.com/dms/image/v2/D4D03AQGiM7ZrUwqu_Q/profile-displayphoto-shrink_800_800/profile-displayphoto-shrink_800_800/0/1675971026692?e=1741824000&v=beta&t=eQnRdXPJo4eiINWTZARoYTfqh064pgZ-E21pQTSy8jc +tags: [llm translation, thinking, reasoning_content, claude-3-7-sonnet] +hide_table_of_contents: false +--- + +import Image from '@theme/IdealImage'; + + +These are the changes since `v1.61.20-stable`. 
+ +This release is primarily focused on: +- LLM Translation improvements (more `thinking` content improvements) +- UI improvements (Error logs now shown on UI) + + +:::info + +This release will be live on 03/09/2025 + +::: + + + + +## Demo Instance + +Here's a Demo Instance to test changes: +- Instance: https://demo.litellm.ai/ +- Login Credentials: + - Username: admin + - Password: sk-1234 + + +## New Models / Updated Models + +1. Add `supports_pdf_input` for specific Bedrock Claude models [PR](https://github.com/BerriAI/litellm/commit/f63cf0030679fe1a43d03fb196e815a0f28dae92) +2. Add pricing for amazon `eu` models [PR](https://github.com/BerriAI/litellm/commits/main/model_prices_and_context_window.json) +3. Fix Azure O1 mini pricing [PR](https://github.com/BerriAI/litellm/commit/52de1949ef2f76b8572df751f9c868a016d4832c) + +## LLM Translation + + + +1. Support `/openai/` passthrough for Assistant endpoints. [Get Started](https://docs.litellm.ai/docs/pass_through/openai_passthrough) +2. Bedrock Claude - fix tool calling transformation on invoke route. [Get Started](../../docs/providers/bedrock#usage---function-calling--tool-calling) +3. Bedrock Claude - response_format support for claude on invoke route. [Get Started](../../docs/providers/bedrock#usage---structured-output--json-mode) +4. Bedrock - pass `description` if set in response_format. [Get Started](../../docs/providers/bedrock#usage---structured-output--json-mode) +5. Bedrock - Fix passing response_format: {"type": "text"}. [PR](https://github.com/BerriAI/litellm/commit/c84b489d5897755139aa7d4e9e54727ebe0fa540) +6. OpenAI - Handle sending image_url as str to openai. [Get Started](https://docs.litellm.ai/docs/completion/vision) +7. Deepseek - return 'reasoning_content' missing on streaming. [Get Started](https://docs.litellm.ai/docs/reasoning_content) +8. Caching - Support caching on reasoning content. [Get Started](https://docs.litellm.ai/docs/proxy/caching) +9. 
Bedrock - handle thinking blocks in assistant message. [Get Started](https://docs.litellm.ai/docs/providers/bedrock#usage---thinking--reasoning-content) +10. Anthropic - Return `signature` on streaming. [Get Started](https://docs.litellm.ai/docs/providers/bedrock#usage---thinking--reasoning-content) +- Note: We've also migrated from `signature_delta` to `signature`. [Read more](https://docs.litellm.ai/release_notes/v1.63.0) +11. Support format param for specifying image type. [Get Started](../../docs/completion/vision.md#explicitly-specify-image-type) +12. Anthropic - `/v1/messages` endpoint - `thinking` param support. [Get Started](../../docs/anthropic_unified.md) +- Note: this refactors the [BETA] unified `/v1/messages` endpoint, to just work for the Anthropic API. +13. Vertex AI - handle $id in response schema when calling vertex ai. [Get Started](https://docs.litellm.ai/docs/providers/vertex#json-schema) + +## Spend Tracking Improvements + +1. Batches API - Fix cost calculation to run on retrieve_batch. [Get Started](https://docs.litellm.ai/docs/batches) +2. Batches API - Log batch models in spend logs / standard logging payload. [Get Started](../../docs/proxy/logging_spec.md#standardlogginghiddenparams) + +## Management Endpoints / UI + + + +1. Virtual Keys Page + - Allow team/org filters to be searchable on the Create Key Page + - Add created_by and updated_by fields to Keys table + - Show 'user_email' on key table + - Show 100 Keys Per Page, Use full height, increase width of key alias +2. Logs Page + - Show Error Logs on LiteLLM UI + - Allow Internal Users to View their own logs +3. Internal Users Page + - Allow admin to control default model access for internal users +7. Fix session handling with cookies + +## Logging / Guardrail Integrations + +1. Fix prometheus metrics w/ custom metrics, when keys containing team_id make requests. [PR](https://github.com/BerriAI/litellm/pull/8935) + +## Performance / Loadbalancing / Reliability improvements + +1. 
Cooldowns - Support cooldowns on models called with client side credentials. [Get Started](https://docs.litellm.ai/docs/proxy/clientside_auth#pass-user-llm-api-keys--api-base) +2. Tag-based Routing - ensures tag-based routing across all endpoints (`/embeddings`, `/image_generation`, etc.). [Get Started](https://docs.litellm.ai/docs/proxy/tag_routing) + +## General Proxy Improvements + +1. Raise BadRequestError when unknown model passed in request +2. Enforce model access restrictions on Azure OpenAI proxy route +3. Reliability fix - Handle emoji’s in text - fix orjson error +4. Model Access Patch - don't overwrite litellm.anthropic_models when running auth checks +5. Enable setting timezone information in docker image + +## Complete Git Diff + +[Here's the complete git diff](https://github.com/BerriAI/litellm/compare/v1.61.20-stable...v1.63.2-stable) \ No newline at end of file diff --git a/docs/my-website/sidebars.js b/docs/my-website/sidebars.js index d9289864e6..cf4f14b202 100644 --- a/docs/my-website/sidebars.js +++ b/docs/my-website/sidebars.js @@ -41,17 +41,19 @@ const sidebars = { "proxy/deploy", "proxy/prod", "proxy/cli", + "proxy/release_cycle", "proxy/model_management", "proxy/health", "proxy/debugging", "proxy/spending_monitoring", - ], + "proxy/master_key_rotations", + ], }, "proxy/demo", { type: "category", label: "Architecture", - items: ["proxy/architecture", "proxy/db_info", "router_architecture", "proxy/user_management_heirarchy"], + items: ["proxy/architecture", "proxy/db_info", "router_architecture", "proxy/user_management_heirarchy", "proxy/jwt_auth_arch"], }, { type: "link", @@ -65,6 +67,7 @@ const sidebars = { items: [ "proxy/user_keys", "proxy/clientside_auth", + "proxy/request_headers", "proxy/response_headers", ], }, @@ -76,6 +79,7 @@ const sidebars = { "proxy/token_auth", "proxy/service_accounts", "proxy/access_control", + "proxy/custom_auth", "proxy/ip_address", "proxy/email", "proxy/multiple_admins", @@ -96,6 +100,7 @@ const sidebars = { 
"proxy/ui", "proxy/admin_ui_sso", "proxy/self_serve", + "proxy/public_teams", "proxy/custom_sso" ], }, @@ -139,7 +144,7 @@ const sidebars = { "proxy/guardrails/secret_detection", "proxy/guardrails/custom_guardrail", "prompt_injection" - ], + ], }, { type: "category", @@ -159,7 +164,6 @@ const sidebars = { ] }, "proxy/caching", - ] }, { @@ -178,6 +182,7 @@ const sidebars = { "providers/openai_compatible", "providers/azure", "providers/azure_ai", + "providers/aiml", "providers/vertex", "providers/gemini", "providers/anthropic", @@ -203,6 +208,7 @@ const sidebars = { "providers/perplexity", "providers/friendliai", "providers/galadriel", + "providers/topaz", "providers/groq", "providers/github", "providers/deepseek", @@ -238,6 +244,7 @@ const sidebars = { "completion/document_understanding", "completion/vision", "completion/json_mode", + "reasoning_content", "completion/prompt_caching", "completion/predict_outputs", "completion/prefix", @@ -250,13 +257,19 @@ const sidebars = { "completion/batching", "completion/mock_requests", "completion/reliable_completions", - 'tutorials/litellm_proxy_aporia', ] }, { type: "category", label: "Supported Endpoints", + link: { + type: "generated-index", + title: "Supported Endpoints", + description: + "Learn how to deploy + call models from different providers on LiteLLM", + slug: "/supported_endpoints", + }, items: [ { type: "category", @@ -275,7 +288,15 @@ const sidebars = { }, "text_completion", "embedding/supported_embedding", - "image_generation", + "anthropic_unified", + { + type: "category", + label: "Image", + items: [ + "image_generation", + "image_variations", + ] + }, { type: "category", label: "Audio", @@ -292,8 +313,10 @@ const sidebars = { "pass_through/vertex_ai", "pass_through/google_ai_studio", "pass_through/cohere", + "pass_through/openai_passthrough", "pass_through/anthropic_completion", "pass_through/bedrock", + "pass_through/assembly_ai", "pass_through/langfuse", "proxy/pass_through", ], @@ -316,7 +339,7 @@ const 
sidebars = { description: "Learn how to load balance, route, and set fallbacks for your LLM requests", slug: "/routing-load-balancing", }, - items: ["routing", "scheduler", "proxy/load_balancing", "proxy/reliability", "proxy/timeout", "proxy/tag_routing", "proxy/provider_budget_routing", "wildcard_routing"], + items: ["routing", "scheduler", "proxy/load_balancing", "proxy/reliability", "proxy/timeout", "proxy/tag_routing", "proxy/provider_budget_routing", "wildcard_routing"], }, { type: "category", @@ -335,23 +358,6 @@ const sidebars = { label: "LangChain, LlamaIndex, Instructor Integration", items: ["langchain/langchain", "tutorials/instructor"], }, - { - type: "category", - label: "Tutorials", - items: [ - - 'tutorials/azure_openai', - 'tutorials/instructor', - "tutorials/gradio_integration", - "tutorials/huggingface_codellama", - "tutorials/huggingface_tutorial", - "tutorials/TogetherAI_liteLLM", - "tutorials/finetuned_chat_gpt", - "tutorials/text_completion", - "tutorials/first_playground", - "tutorials/model_fallbacks", - ], - }, ], }, { @@ -368,13 +374,6 @@ const sidebars = { "load_test_rpm", ] }, - { - type: "category", - label: "Adding Providers", - items: [ - "adding_provider/directory_structure", - "adding_provider/new_rerank_provider"], - }, { type: "category", label: "Logging & Observability", @@ -409,22 +408,54 @@ const sidebars = { "observability/opik_integration", ], }, + { + type: "category", + label: "Tutorials", + items: [ + "tutorials/openweb_ui", + 'tutorials/litellm_proxy_aporia', + { + type: "category", + label: "LiteLLM Python SDK Tutorials", + items: [ + 'tutorials/azure_openai', + 'tutorials/instructor', + "tutorials/gradio_integration", + "tutorials/huggingface_codellama", + "tutorials/huggingface_tutorial", + "tutorials/TogetherAI_liteLLM", + "tutorials/finetuned_chat_gpt", + "tutorials/text_completion", + "tutorials/first_playground", + "tutorials/model_fallbacks", + ], + }, + ] + }, + { + type: "category", + label: "Contributing", + 
items: [ + "extras/contributing_code", + { + type: "category", + label: "Adding Providers", + items: [ + "adding_provider/directory_structure", + "adding_provider/new_rerank_provider"], + }, + "extras/contributing", + "contributing", + ] + }, { type: "category", label: "Extras", items: [ - "extras/contributing", "data_security", "data_retention", "migration_policy", - "contributing", - "proxy/pii_masking", - "extras/code_quality", - "rules", - "proxy/team_based_routing", - "proxy/customer_routing", - "proxy_server", { type: "category", label: "❤️ 🚅 Projects built on LiteLLM", @@ -436,8 +467,11 @@ const sidebars = { slug: "/project", }, items: [ + "projects/smolagents", "projects/Docq.AI", + "projects/PDL", "projects/OpenInterpreter", + "projects/Elroy", "projects/dbally", "projects/FastREPL", "projects/PROMPTMETHEUS", @@ -451,8 +485,15 @@ const sidebars = { "projects/YiVal", "projects/LiteLLM Proxy", "projects/llm_cord", + "projects/pgai", ], }, + "proxy/pii_masking", + "extras/code_quality", + "rules", + "proxy/team_based_routing", + "proxy/customer_routing", + "proxy_server", ], }, "troubleshoot", diff --git a/docs/my-website/src/pages/index.md b/docs/my-website/src/pages/index.md index 1b06dc592c..4a2e5203e3 100644 --- a/docs/my-website/src/pages/index.md +++ b/docs/my-website/src/pages/index.md @@ -108,6 +108,24 @@ response = completion( + + +```python +from litellm import completion +import os + +## set ENV variables +os.environ["NVIDIA_NIM_API_KEY"] = "nvidia_api_key" +os.environ["NVIDIA_NIM_API_BASE"] = "nvidia_nim_endpoint_url" + +response = completion( + model="nvidia_nim/", + messages=[{ "content": "Hello, how are you?","role": "user"}] +) +``` + + + ```python @@ -238,6 +256,24 @@ response = completion( + + +```python +from litellm import completion +import os + +## set ENV variables +os.environ["NVIDIA_NIM_API_KEY"] = "nvidia_api_key" +os.environ["NVIDIA_NIM_API_BASE"] = "nvidia_nim_endpoint_url" + +response = completion( + model="nvidia_nim/", + 
messages=[{ "content": "Hello, how are you?","role": "user"}] + stream=True, +) +``` + + ```python diff --git a/litellm-js/proxy/tsconfig.json b/litellm-js/proxy/tsconfig.json index 33a96fd088..28fcfb5824 100644 --- a/litellm-js/proxy/tsconfig.json +++ b/litellm-js/proxy/tsconfig.json @@ -11,6 +11,7 @@ "@cloudflare/workers-types" ], "jsx": "react-jsx", - "jsxImportSource": "hono/jsx" + "jsxImportSource": "hono/jsx", + "skipLibCheck": true }, } \ No newline at end of file diff --git a/litellm/__init__.py b/litellm/__init__.py index fcef6bc56f..d66707f8b3 100644 --- a/litellm/__init__.py +++ b/litellm/__init__.py @@ -2,14 +2,19 @@ import warnings warnings.filterwarnings("ignore", message=".*conflict with protected namespace.*") -### INIT VARIABLES ###### +### INIT VARIABLES ######### import threading import os from typing import Callable, List, Optional, Dict, Union, Any, Literal, get_args from litellm.llms.custom_httpx.http_handler import AsyncHTTPHandler, HTTPHandler from litellm.caching.caching import Cache, DualCache, RedisCache, InMemoryCache from litellm.types.llms.bedrock import COHERE_EMBEDDING_INPUT_TYPES -from litellm.types.utils import ImageObject, BudgetConfig +from litellm.types.utils import ( + ImageObject, + BudgetConfig, + all_litellm_params, + all_litellm_params as _litellm_completion_params, +) # maintain backwards compatibility for root param from litellm._logging import ( set_verbose, _turn_on_debug, @@ -29,6 +34,26 @@ from litellm.constants import ( LITELLM_CHAT_PROVIDERS, HUMANLOOP_PROMPT_CACHE_TTL_SECONDS, OPENAI_CHAT_COMPLETION_PARAMS, + OPENAI_CHAT_COMPLETION_PARAMS as _openai_completion_params, # backwards compatibility + OPENAI_FINISH_REASONS, + OPENAI_FINISH_REASONS as _openai_finish_reasons, # backwards compatibility + openai_compatible_endpoints, + openai_compatible_providers, + openai_text_completion_compatible_providers, + _openai_like_providers, + replicate_models, + clarifai_models, + huggingface_models, + empower_models, + 
together_ai_models, + baseten_models, + REPEATED_STREAMING_CHUNK_LIMIT, + request_timeout, + open_ai_embedding_models, + cohere_embedding_models, + bedrock_embedding_models, + known_tokenizer_config, + BEDROCK_INVOKE_PROVIDERS_LITERAL, ) from litellm.types.guardrails import GuardrailItem from litellm.proxy._types import ( @@ -38,6 +63,7 @@ from litellm.proxy._types import ( ) from litellm.types.utils import StandardKeyGenerationConfig, LlmProviders from litellm.integrations.custom_logger import CustomLogger +from litellm.litellm_core_utils.logging_callback_manager import LoggingCallbackManager import httpx import dotenv from enum import Enum @@ -45,15 +71,17 @@ from enum import Enum litellm_mode = os.getenv("LITELLM_MODE", "DEV") # "PRODUCTION", "DEV" if litellm_mode == "DEV": dotenv.load_dotenv() -############################################### +################################################ if set_verbose == True: _turn_on_debug() -############################################### +################################################ ### Callbacks /Logging / Success / Failure Handlers ##### -input_callback: List[Union[str, Callable, CustomLogger]] = [] -success_callback: List[Union[str, Callable, CustomLogger]] = [] -failure_callback: List[Union[str, Callable, CustomLogger]] = [] -service_callback: List[Union[str, Callable, CustomLogger]] = [] +CALLBACK_TYPES = Union[str, Callable, CustomLogger] +input_callback: List[CALLBACK_TYPES] = [] +success_callback: List[CALLBACK_TYPES] = [] +failure_callback: List[CALLBACK_TYPES] = [] +service_callback: List[CALLBACK_TYPES] = [] +logging_callback_manager = LoggingCallbackManager() _custom_logger_compatible_callbacks_literal = Literal[ "lago", "openmeter", @@ -68,6 +96,7 @@ _custom_logger_compatible_callbacks_literal = Literal[ "galileo", "braintrust", "arize", + "arize_phoenix", "langtrace", "gcs_bucket", "azure_storage", @@ -77,6 +106,7 @@ _custom_logger_compatible_callbacks_literal = Literal[ "langfuse", "pagerduty", 
"humanloop", + "gcs_pubsub", ] logged_real_time_event_types: Optional[Union[List[str], Literal["*"]]] = None _known_custom_logger_compatible_callbacks: List = list( @@ -87,6 +117,7 @@ callbacks: List[ ] = [] langfuse_default_tags: Optional[List[str]] = None langsmith_batch_size: Optional[int] = None +prometheus_initialize_budget_metrics: Optional[bool] = False argilla_batch_size: Optional[int] = None datadog_use_v1: Optional[bool] = False # if you want to use v1 datadog logged payload argilla_transformation_object: Optional[Dict[str, Any]] = None @@ -212,75 +243,8 @@ default_soft_budget: float = ( 50.0 # by default all litellm proxy keys have a soft budget of 50.0 ) forward_traceparent_to_llm_provider: bool = False -_openai_finish_reasons = ["stop", "length", "function_call", "content_filter", "null"] -_openai_completion_params = [ - "functions", - "function_call", - "temperature", - "temperature", - "top_p", - "n", - "stream", - "stop", - "max_tokens", - "presence_penalty", - "frequency_penalty", - "logit_bias", - "user", - "request_timeout", - "api_base", - "api_version", - "api_key", - "deployment_id", - "organization", - "base_url", - "default_headers", - "timeout", - "response_format", - "seed", - "tools", - "tool_choice", - "max_retries", -] -_litellm_completion_params = [ - "metadata", - "acompletion", - "caching", - "mock_response", - "api_key", - "api_version", - "api_base", - "force_timeout", - "logger_fn", - "verbose", - "custom_llm_provider", - "litellm_logging_obj", - "litellm_call_id", - "use_client", - "id", - "fallbacks", - "azure", - "headers", - "model_list", - "num_retries", - "context_window_fallback_dict", - "roles", - "final_prompt_value", - "bos_token", - "eos_token", - "request_timeout", - "complete_response", - "self", - "client", - "rpm", - "tpm", - "input_cost_per_token", - "output_cost_per_token", - "hf_model_name", - "model_info", - "proxy_server_request", - "preset_cache_key", -] + + _current_cost = 0.0 # private variable, used if max 
budget is set error_logs: Dict = {} add_function_to_prompt: bool = ( @@ -313,11 +277,6 @@ disable_end_user_cost_tracking_prometheus_only: Optional[bool] = None custom_prometheus_metadata_labels: List[str] = [] #### REQUEST PRIORITIZATION #### priority_reservation: Optional[Dict[str, float]] = None -#### RELIABILITY #### -REPEATED_STREAMING_CHUNK_LIMIT = 100 # catch if model starts looping the same chunk while streaming. Uses high default to prevent false positives. - -#### Networking settings #### -request_timeout: float = 6000 # time in seconds force_ipv4: bool = ( False # when True, litellm will force ipv4 for all LLM requests. Some users have seen httpx ConnectionError when using ipv6. ) @@ -347,39 +306,7 @@ _key_management_settings: KeyManagementSettings = KeyManagementSettings() #### PII MASKING #### output_parse_pii: bool = False ############################################# - - -def get_model_cost_map(url: str): - if ( - os.getenv("LITELLM_LOCAL_MODEL_COST_MAP", False) == True - or os.getenv("LITELLM_LOCAL_MODEL_COST_MAP", False) == "True" - ): - import importlib.resources - import json - - with importlib.resources.open_text( - "litellm", "model_prices_and_context_window_backup.json" - ) as f: - content = json.load(f) - return content - - try: - response = httpx.get( - url, timeout=5 - ) # set a 5 second timeout for the get request - response.raise_for_status() # Raise an exception if the request is unsuccessful - content = response.json() - return content - except Exception: - import importlib.resources - import json - - with importlib.resources.open_text( - "litellm", "model_prices_and_context_window_backup.json" - ) as f: - content = json.load(f) - return content - +from litellm.litellm_core_utils.get_model_cost_map import get_model_cost_map model_cost = get_model_cost_map(url=model_cost_map_url) custom_prompt_dict: Dict[str, dict] = {} @@ -401,7 +328,7 @@ def identify(event_details): ####### ADDITIONAL PARAMS ################### configurable params if 
you use proxy models like Helicone, map spend to org id, etc. -api_base = None +api_base: Optional[str] = None headers = None api_version = None organization = None @@ -432,8 +359,8 @@ BEDROCK_CONVERSE_MODELS = [ "meta.llama3-2-3b-instruct-v1:0", "meta.llama3-2-11b-instruct-v1:0", "meta.llama3-2-90b-instruct-v1:0", - "meta.llama3-2-405b-instruct-v1:0", ] + ####### COMPLETION MODELS ################### open_ai_chat_completion_models: List = [] open_ai_text_completion_models: List = [] @@ -442,7 +369,6 @@ cohere_chat_models: List = [] mistral_chat_models: List = [] text_completion_codestral_models: List = [] anthropic_models: List = [] -empower_models: List = [] openrouter_models: List = [] vertex_language_models: List = [] vertex_vision_models: List = [] @@ -471,6 +397,7 @@ gemini_models: List = [] xai_models: List = [] deepseek_models: List = [] azure_ai_models: List = [] +jina_ai_models: List = [] voyage_models: List = [] databricks_models: List = [] cloudflare_models: List = [] @@ -484,6 +411,7 @@ anyscale_models: List = [] cerebras_models: List = [] galadriel_models: List = [] sambanova_models: List = [] +assemblyai_models: List = [] def is_bedrock_pricing_only_model(key: str) -> bool: @@ -633,206 +561,16 @@ def add_known_models(): galadriel_models.append(key) elif value.get("litellm_provider") == "sambanova_models": sambanova_models.append(key) + elif value.get("litellm_provider") == "assemblyai": + assemblyai_models.append(key) + elif value.get("litellm_provider") == "jina_ai": + jina_ai_models.append(key) add_known_models() # known openai compatible endpoints - we'll eventually move this list to the model_prices_and_context_window.json dictionary -openai_compatible_endpoints: List = [ - "api.perplexity.ai", - "api.endpoints.anyscale.com/v1", - "api.deepinfra.com/v1/openai", - "api.mistral.ai/v1", - "codestral.mistral.ai/v1/chat/completions", - "codestral.mistral.ai/v1/fim/completions", - "api.groq.com/openai/v1", - "https://integrate.api.nvidia.com/v1", - 
"api.deepseek.com/v1", - "api.together.xyz/v1", - "app.empower.dev/api/v1", - "https://api.friendli.ai/serverless/v1", - "api.sambanova.ai/v1", - "api.x.ai/v1", - "api.galadriel.ai/v1", -] # this is maintained for Exception Mapping -openai_compatible_providers: List = [ - "anyscale", - "mistral", - "groq", - "nvidia_nim", - "cerebras", - "sambanova", - "ai21_chat", - "ai21", - "volcengine", - "codestral", - "deepseek", - "deepinfra", - "perplexity", - "xinference", - "xai", - "together_ai", - "fireworks_ai", - "empower", - "friendliai", - "azure_ai", - "github", - "litellm_proxy", - "hosted_vllm", - "lm_studio", - "galadriel", -] -openai_text_completion_compatible_providers: List = ( - [ # providers that support `/v1/completions` - "together_ai", - "fireworks_ai", - "hosted_vllm", - ] -) -_openai_like_providers: List = [ - "predibase", - "databricks", - "watsonx", -] # private helper. similar to openai but require some custom auth / endpoint handling, so can't use the openai sdk -# well supported replicate llms -replicate_models: List = [ - # llama replicate supported LLMs - "replicate/llama-2-70b-chat:2796ee9483c3fd7aa2e171d38f4ca12251a30609463dcfd4cd76703f22e96cdf", - "a16z-infra/llama-2-13b-chat:2a7f981751ec7fdf87b5b91ad4db53683a98082e9ff7bfd12c8cd5ea85980a52", - "meta/codellama-13b:1c914d844307b0588599b8393480a3ba917b660c7e9dfae681542b5325f228db", - # Vicuna - "replicate/vicuna-13b:6282abe6a492de4145d7bb601023762212f9ddbbe78278bd6771c8b3b2f2a13b", - "joehoover/instructblip-vicuna13b:c4c54e3c8c97cd50c2d2fec9be3b6065563ccf7d43787fb99f84151b867178fe", - # Flan T-5 - "daanelson/flan-t5-large:ce962b3f6792a57074a601d3979db5839697add2e4e02696b3ced4c022d4767f", - # Others - "replicate/dolly-v2-12b:ef0e1aefc61f8e096ebe4db6b2bacc297daf2ef6899f0f7e001ec445893500e5", - "replit/replit-code-v1-3b:b84f4c074b807211cd75e3e8b1589b6399052125b4c27106e43d47189e8415ad", -] - -clarifai_models: List = [ - "clarifai/meta.Llama-3.Llama-3-8B-Instruct", - 
"clarifai/gcp.generate.gemma-1_1-7b-it", - "clarifai/mistralai.completion.mixtral-8x22B", - "clarifai/cohere.generate.command-r-plus", - "clarifai/databricks.drbx.dbrx-instruct", - "clarifai/mistralai.completion.mistral-large", - "clarifai/mistralai.completion.mistral-medium", - "clarifai/mistralai.completion.mistral-small", - "clarifai/mistralai.completion.mixtral-8x7B-Instruct-v0_1", - "clarifai/gcp.generate.gemma-2b-it", - "clarifai/gcp.generate.gemma-7b-it", - "clarifai/deci.decilm.deciLM-7B-instruct", - "clarifai/mistralai.completion.mistral-7B-Instruct", - "clarifai/gcp.generate.gemini-pro", - "clarifai/anthropic.completion.claude-v1", - "clarifai/anthropic.completion.claude-instant-1_2", - "clarifai/anthropic.completion.claude-instant", - "clarifai/anthropic.completion.claude-v2", - "clarifai/anthropic.completion.claude-2_1", - "clarifai/meta.Llama-2.codeLlama-70b-Python", - "clarifai/meta.Llama-2.codeLlama-70b-Instruct", - "clarifai/openai.completion.gpt-3_5-turbo-instruct", - "clarifai/meta.Llama-2.llama2-7b-chat", - "clarifai/meta.Llama-2.llama2-13b-chat", - "clarifai/meta.Llama-2.llama2-70b-chat", - "clarifai/openai.chat-completion.gpt-4-turbo", - "clarifai/microsoft.text-generation.phi-2", - "clarifai/meta.Llama-2.llama2-7b-chat-vllm", - "clarifai/upstage.solar.solar-10_7b-instruct", - "clarifai/openchat.openchat.openchat-3_5-1210", - "clarifai/togethercomputer.stripedHyena.stripedHyena-Nous-7B", - "clarifai/gcp.generate.text-bison", - "clarifai/meta.Llama-2.llamaGuard-7b", - "clarifai/fblgit.una-cybertron.una-cybertron-7b-v2", - "clarifai/openai.chat-completion.GPT-4", - "clarifai/openai.chat-completion.GPT-3_5-turbo", - "clarifai/ai21.complete.Jurassic2-Grande", - "clarifai/ai21.complete.Jurassic2-Grande-Instruct", - "clarifai/ai21.complete.Jurassic2-Jumbo-Instruct", - "clarifai/ai21.complete.Jurassic2-Jumbo", - "clarifai/ai21.complete.Jurassic2-Large", - "clarifai/cohere.generate.cohere-generate-command", - 
"clarifai/wizardlm.generate.wizardCoder-Python-34B", - "clarifai/wizardlm.generate.wizardLM-70B", - "clarifai/tiiuae.falcon.falcon-40b-instruct", - "clarifai/togethercomputer.RedPajama.RedPajama-INCITE-7B-Chat", - "clarifai/gcp.generate.code-gecko", - "clarifai/gcp.generate.code-bison", - "clarifai/mistralai.completion.mistral-7B-OpenOrca", - "clarifai/mistralai.completion.openHermes-2-mistral-7B", - "clarifai/wizardlm.generate.wizardLM-13B", - "clarifai/huggingface-research.zephyr.zephyr-7B-alpha", - "clarifai/wizardlm.generate.wizardCoder-15B", - "clarifai/microsoft.text-generation.phi-1_5", - "clarifai/databricks.Dolly-v2.dolly-v2-12b", - "clarifai/bigcode.code.StarCoder", - "clarifai/salesforce.xgen.xgen-7b-8k-instruct", - "clarifai/mosaicml.mpt.mpt-7b-instruct", - "clarifai/anthropic.completion.claude-3-opus", - "clarifai/anthropic.completion.claude-3-sonnet", - "clarifai/gcp.generate.gemini-1_5-pro", - "clarifai/gcp.generate.imagen-2", - "clarifai/salesforce.blip.general-english-image-caption-blip-2", -] - - -huggingface_models: List = [ - "meta-llama/Llama-2-7b-hf", - "meta-llama/Llama-2-7b-chat-hf", - "meta-llama/Llama-2-13b-hf", - "meta-llama/Llama-2-13b-chat-hf", - "meta-llama/Llama-2-70b-hf", - "meta-llama/Llama-2-70b-chat-hf", - "meta-llama/Llama-2-7b", - "meta-llama/Llama-2-7b-chat", - "meta-llama/Llama-2-13b", - "meta-llama/Llama-2-13b-chat", - "meta-llama/Llama-2-70b", - "meta-llama/Llama-2-70b-chat", -] # these have been tested on extensively. But by default all text2text-generation and text-generation models are supported by liteLLM. 
- https://docs.litellm.ai/docs/providers -empower_models = [ - "empower/empower-functions", - "empower/empower-functions-small", -] - -together_ai_models: List = [ - # llama llms - chat - "togethercomputer/llama-2-70b-chat", - # llama llms - language / instruct - "togethercomputer/llama-2-70b", - "togethercomputer/LLaMA-2-7B-32K", - "togethercomputer/Llama-2-7B-32K-Instruct", - "togethercomputer/llama-2-7b", - # falcon llms - "togethercomputer/falcon-40b-instruct", - "togethercomputer/falcon-7b-instruct", - # alpaca - "togethercomputer/alpaca-7b", - # chat llms - "HuggingFaceH4/starchat-alpha", - # code llms - "togethercomputer/CodeLlama-34b", - "togethercomputer/CodeLlama-34b-Instruct", - "togethercomputer/CodeLlama-34b-Python", - "defog/sqlcoder", - "NumbersStation/nsql-llama-2-7B", - "WizardLM/WizardCoder-15B-V1.0", - "WizardLM/WizardCoder-Python-34B-V1.0", - # language llms - "NousResearch/Nous-Hermes-Llama2-13b", - "Austism/chronos-hermes-13b", - "upstage/SOLAR-0-70b-16bit", - "WizardLM/WizardLM-70B-V1.0", -] # supports all together ai models, just pass in the model id e.g. completion(model="together_computer/replit_code_3b",...) 
- - -baseten_models: List = [ - "qvv0xeq", - "q841o8w", - "31dxrj3", -] # FALCON 7B # WizardLM # Mosaic ML # used for Cost Tracking & Token counting @@ -898,6 +636,8 @@ model_list = ( + galadriel_models + sambanova_models + azure_text_models + + assemblyai_models + + jina_ai_models ) model_list_set = set(model_list) @@ -951,6 +691,8 @@ models_by_provider: dict = { "cerebras": cerebras_models, "galadriel": galadriel_models, "sambanova": sambanova_models, + "assemblyai": assemblyai_models, + "jina_ai": jina_ai_models, } # mapping for those models which have larger equivalents @@ -976,20 +718,6 @@ longer_context_model_fallback_dict: dict = { } ####### EMBEDDING MODELS ################### -open_ai_embedding_models: List = ["text-embedding-ada-002"] -cohere_embedding_models: List = [ - "embed-english-v3.0", - "embed-english-light-v3.0", - "embed-multilingual-v3.0", - "embed-english-v2.0", - "embed-english-light-v2.0", - "embed-multilingual-v2.0", -] -bedrock_embedding_models: List = [ - "amazon.titan-embed-text-v1", - "cohere.embed-english-v3", - "cohere.embed-multilingual-v3", -] all_embedding_models = ( open_ai_embedding_models @@ -1070,9 +798,6 @@ from .llms.oobabooga.chat.transformation import OobaboogaConfig from .llms.maritalk import MaritalkConfig from .llms.openrouter.chat.transformation import OpenrouterConfig from .llms.anthropic.chat.transformation import AnthropicConfig -from .llms.anthropic.experimental_pass_through.transformation import ( - AnthropicExperimentalPassThroughConfig, -) from .llms.groq.stt.transformation import GroqSTTConfig from .llms.anthropic.completion.transformation import AnthropicTextConfig from .llms.triton.completion.transformation import TritonConfig @@ -1085,10 +810,15 @@ from .llms.predibase.chat.transformation import PredibaseConfig from .llms.replicate.chat.transformation import ReplicateConfig from .llms.cohere.completion.transformation import CohereTextConfig as CohereConfig from .llms.cohere.rerank.transformation import 
CohereRerankConfig +from .llms.cohere.rerank_v2.transformation import CohereRerankV2Config from .llms.azure_ai.rerank.transformation import AzureAIRerankConfig from .llms.infinity.rerank.transformation import InfinityRerankConfig +from .llms.jina_ai.rerank.transformation import JinaAIRerankConfig from .llms.clarifai.chat.transformation import ClarifaiConfig from .llms.ai21.chat.transformation import AI21ChatConfig, AI21ChatConfig as AI21Config +from .llms.anthropic.experimental_pass_through.messages.transformation import ( + AnthropicMessagesConfig, +) from .llms.together_ai.chat import TogetherAIConfig from .llms.together_ai.completion.transformation import TogetherAITextCompletionConfig from .llms.cloudflare.chat.transformation import CloudflareChatConfig @@ -1134,15 +864,39 @@ from .llms.bedrock.chat.invoke_handler import ( ) from .llms.bedrock.common_utils import ( - AmazonTitanConfig, - AmazonAI21Config, - AmazonAnthropicConfig, - AmazonAnthropicClaude3Config, - AmazonCohereConfig, - AmazonLlamaConfig, - AmazonMistralConfig, AmazonBedrockGlobalConfig, ) +from .llms.bedrock.chat.invoke_transformations.amazon_ai21_transformation import ( + AmazonAI21Config, +) +from .llms.bedrock.chat.invoke_transformations.amazon_nova_transformation import ( + AmazonInvokeNovaConfig, +) +from .llms.bedrock.chat.invoke_transformations.anthropic_claude2_transformation import ( + AmazonAnthropicConfig, +) +from .llms.bedrock.chat.invoke_transformations.anthropic_claude3_transformation import ( + AmazonAnthropicClaude3Config, +) +from .llms.bedrock.chat.invoke_transformations.amazon_cohere_transformation import ( + AmazonCohereConfig, +) +from .llms.bedrock.chat.invoke_transformations.amazon_llama_transformation import ( + AmazonLlamaConfig, +) +from .llms.bedrock.chat.invoke_transformations.amazon_deepseek_transformation import ( + AmazonDeepSeekR1Config, +) +from .llms.bedrock.chat.invoke_transformations.amazon_mistral_transformation import ( + AmazonMistralConfig, +) +from 
.llms.bedrock.chat.invoke_transformations.amazon_titan_transformation import ( + AmazonTitanConfig, +) +from .llms.bedrock.chat.invoke_transformations.base_invoke_transformation import ( + AmazonInvokeConfig, +) + from .llms.bedrock.image.amazon_stability1_transformation import AmazonStabilityConfig from .llms.bedrock.image.amazon_stability3_transformation import AmazonStability3Config from .llms.bedrock.embed.amazon_titan_g1_transformation import AmazonTitanG1Config @@ -1167,11 +921,12 @@ from .llms.groq.chat.transformation import GroqChatConfig from .llms.voyage.embedding.transformation import VoyageEmbeddingConfig from .llms.azure_ai.chat.transformation import AzureAIStudioConfig from .llms.mistral.mistral_chat_transformation import MistralConfig -from .llms.openai.chat.o1_transformation import ( - OpenAIO1Config, +from .llms.openai.chat.o_series_transformation import ( + OpenAIOSeriesConfig as OpenAIO1Config, # maintain backwards compatibility + OpenAIOSeriesConfig, ) -openAIO1Config = OpenAIO1Config() +openaiOSeriesConfig = OpenAIOSeriesConfig() from .llms.openai.chat.gpt_transformation import ( OpenAIGPTConfig, ) @@ -1219,7 +974,7 @@ from .llms.deepseek.chat.transformation import DeepSeekChatConfig from .llms.lm_studio.chat.transformation import LMStudioChatConfig from .llms.lm_studio.embed.transformation import LmStudioEmbeddingConfig from .llms.perplexity.chat.transformation import PerplexityChatConfig -from .llms.azure.chat.o1_transformation import AzureOpenAIO1Config +from .llms.azure.chat.o_series_transformation import AzureOpenAIO1Config from .llms.watsonx.completion.transformation import IBMWatsonXAIConfig from .llms.watsonx.chat.transformation import IBMWatsonXChatConfig from .llms.watsonx.embed.transformation import IBMWatsonXEmbeddingConfig @@ -1252,8 +1007,9 @@ from .proxy.proxy_cli import run_server from .router import Router from .assistants.main import * from .batches.main import * -from .batch_completion.main import * +from 
.batch_completion.main import * # type: ignore from .rerank_api.main import * +from .llms.anthropic.experimental_pass_through.messages.handler import * from .realtime_api.main import _arealtime from .fine_tuning.main import * from .files.main import * @@ -1273,3 +1029,7 @@ custom_provider_map: List[CustomLLMItem] = [] _custom_providers: List[str] = ( [] ) # internal helper util, used to track names of custom providers +disable_hf_tokenizer_download: Optional[bool] = ( + None # disable huggingface tokenizer download. Defaults to openai clk100 +) +global_disable_no_log_param: bool = False diff --git a/litellm/_redis.py b/litellm/_redis.py index 70c38cf7f5..1e03993c20 100644 --- a/litellm/_redis.py +++ b/litellm/_redis.py @@ -183,7 +183,7 @@ def init_redis_cluster(redis_kwargs) -> redis.RedisCluster: ) verbose_logger.debug( - "init_redis_cluster: startup nodes: ", redis_kwargs["startup_nodes"] + "init_redis_cluster: startup nodes are being initialized." ) from redis.cluster import ClusterNode @@ -266,7 +266,9 @@ def get_redis_client(**env_overrides): return redis.Redis(**redis_kwargs) -def get_redis_async_client(**env_overrides) -> async_redis.Redis: +def get_redis_async_client( + **env_overrides, +) -> async_redis.Redis: redis_kwargs = _get_redis_client_logic(**env_overrides) if "url" in redis_kwargs and redis_kwargs["url"] is not None: args = _get_redis_url_kwargs(client=async_redis.Redis.from_url) diff --git a/litellm/adapters/anthropic_adapter.py b/litellm/adapters/anthropic_adapter.py deleted file mode 100644 index 961bc77527..0000000000 --- a/litellm/adapters/anthropic_adapter.py +++ /dev/null @@ -1,186 +0,0 @@ -# What is this? 
-## Translates OpenAI call to Anthropic `/v1/messages` format -import traceback -from typing import Any, Optional - -import litellm -from litellm import ChatCompletionRequest, verbose_logger -from litellm.integrations.custom_logger import CustomLogger -from litellm.types.llms.anthropic import AnthropicMessagesRequest, AnthropicResponse -from litellm.types.utils import AdapterCompletionStreamWrapper, ModelResponse - - -class AnthropicAdapter(CustomLogger): - def __init__(self) -> None: - super().__init__() - - def translate_completion_input_params( - self, kwargs - ) -> Optional[ChatCompletionRequest]: - """ - - translate params, where needed - - pass rest, as is - """ - request_body = AnthropicMessagesRequest(**kwargs) # type: ignore - - translated_body = litellm.AnthropicExperimentalPassThroughConfig().translate_anthropic_to_openai( - anthropic_message_request=request_body - ) - - return translated_body - - def translate_completion_output_params( - self, response: ModelResponse - ) -> Optional[AnthropicResponse]: - - return litellm.AnthropicExperimentalPassThroughConfig().translate_openai_response_to_anthropic( - response=response - ) - - def translate_completion_output_params_streaming( - self, completion_stream: Any - ) -> AdapterCompletionStreamWrapper | None: - return AnthropicStreamWrapper(completion_stream=completion_stream) - - -anthropic_adapter = AnthropicAdapter() - - -class AnthropicStreamWrapper(AdapterCompletionStreamWrapper): - """ - - first chunk return 'message_start' - - content block must be started and stopped - - finish_reason must map exactly to anthropic reason, else anthropic client won't be able to parse it. 
- """ - - sent_first_chunk: bool = False - sent_content_block_start: bool = False - sent_content_block_finish: bool = False - sent_last_message: bool = False - holding_chunk: Optional[Any] = None - - def __next__(self): - try: - if self.sent_first_chunk is False: - self.sent_first_chunk = True - return { - "type": "message_start", - "message": { - "id": "msg_1nZdL29xx5MUA1yADyHTEsnR8uuvGzszyY", - "type": "message", - "role": "assistant", - "content": [], - "model": "claude-3-5-sonnet-20240620", - "stop_reason": None, - "stop_sequence": None, - "usage": {"input_tokens": 25, "output_tokens": 1}, - }, - } - if self.sent_content_block_start is False: - self.sent_content_block_start = True - return { - "type": "content_block_start", - "index": 0, - "content_block": {"type": "text", "text": ""}, - } - - for chunk in self.completion_stream: - if chunk == "None" or chunk is None: - raise Exception - - processed_chunk = litellm.AnthropicExperimentalPassThroughConfig().translate_streaming_openai_response_to_anthropic( - response=chunk - ) - if ( - processed_chunk["type"] == "message_delta" - and self.sent_content_block_finish is False - ): - self.holding_chunk = processed_chunk - self.sent_content_block_finish = True - return { - "type": "content_block_stop", - "index": 0, - } - elif self.holding_chunk is not None: - return_chunk = self.holding_chunk - self.holding_chunk = processed_chunk - return return_chunk - else: - return processed_chunk - if self.holding_chunk is not None: - return_chunk = self.holding_chunk - self.holding_chunk = None - return return_chunk - if self.sent_last_message is False: - self.sent_last_message = True - return {"type": "message_stop"} - raise StopIteration - except StopIteration: - if self.sent_last_message is False: - self.sent_last_message = True - return {"type": "message_stop"} - raise StopIteration - except Exception as e: - verbose_logger.error( - "Anthropic Adapter - {}\n{}".format(e, traceback.format_exc()) - ) - - async def 
__anext__(self): - try: - if self.sent_first_chunk is False: - self.sent_first_chunk = True - return { - "type": "message_start", - "message": { - "id": "msg_1nZdL29xx5MUA1yADyHTEsnR8uuvGzszyY", - "type": "message", - "role": "assistant", - "content": [], - "model": "claude-3-5-sonnet-20240620", - "stop_reason": None, - "stop_sequence": None, - "usage": {"input_tokens": 25, "output_tokens": 1}, - }, - } - if self.sent_content_block_start is False: - self.sent_content_block_start = True - return { - "type": "content_block_start", - "index": 0, - "content_block": {"type": "text", "text": ""}, - } - async for chunk in self.completion_stream: - if chunk == "None" or chunk is None: - raise Exception - processed_chunk = litellm.AnthropicExperimentalPassThroughConfig().translate_streaming_openai_response_to_anthropic( - response=chunk - ) - if ( - processed_chunk["type"] == "message_delta" - and self.sent_content_block_finish is False - ): - self.holding_chunk = processed_chunk - self.sent_content_block_finish = True - return { - "type": "content_block_stop", - "index": 0, - } - elif self.holding_chunk is not None: - return_chunk = self.holding_chunk - self.holding_chunk = processed_chunk - return return_chunk - else: - return processed_chunk - if self.holding_chunk is not None: - return_chunk = self.holding_chunk - self.holding_chunk = None - return return_chunk - if self.sent_last_message is False: - self.sent_last_message = True - return {"type": "message_stop"} - raise StopIteration - except StopIteration: - if self.sent_last_message is False: - self.sent_last_message = True - return {"type": "message_stop"} - raise StopAsyncIteration diff --git a/litellm/batches/batch_utils.py b/litellm/batches/batch_utils.py index f24eda0432..af53304e5a 100644 --- a/litellm/batches/batch_utils.py +++ b/litellm/batches/batch_utils.py @@ -1,76 +1,16 @@ -import asyncio -import datetime import json -import threading -from typing import Any, List, Literal, Optional +from typing import 
Any, List, Literal, Tuple import litellm from litellm._logging import verbose_logger -from litellm.constants import ( - BATCH_STATUS_POLL_INTERVAL_SECONDS, - BATCH_STATUS_POLL_MAX_ATTEMPTS, -) -from litellm.files.main import afile_content -from litellm.litellm_core_utils.litellm_logging import Logging as LiteLLMLoggingObj from litellm.types.llms.openai import Batch -from litellm.types.utils import StandardLoggingPayload, Usage - - -async def batches_async_logging( - batch_id: str, - custom_llm_provider: Literal["openai", "azure", "vertex_ai"] = "openai", - logging_obj: Optional[LiteLLMLoggingObj] = None, - **kwargs, -): - """ - Async Job waits for the batch to complete and then logs the completed batch usage - cost, total tokens, prompt tokens, completion tokens - - - Polls retrieve_batch until it returns a batch with status "completed" or "failed" - """ - from .main import aretrieve_batch - - verbose_logger.debug( - ".....in _batches_async_logging... polling retrieve to get batch status" - ) - if logging_obj is None: - raise ValueError( - "logging_obj is None cannot calculate cost / log batch creation event" - ) - for _ in range(BATCH_STATUS_POLL_MAX_ATTEMPTS): - try: - start_time = datetime.datetime.now() - batch: Batch = await aretrieve_batch(batch_id, custom_llm_provider) - verbose_logger.debug( - "in _batches_async_logging... 
batch status= %s", batch.status - ) - - if batch.status == "completed": - end_time = datetime.datetime.now() - await _handle_completed_batch( - batch=batch, - custom_llm_provider=custom_llm_provider, - logging_obj=logging_obj, - start_time=start_time, - end_time=end_time, - **kwargs, - ) - break - elif batch.status == "failed": - pass - except Exception as e: - verbose_logger.error("error in batches_async_logging", e) - await asyncio.sleep(BATCH_STATUS_POLL_INTERVAL_SECONDS) +from litellm.types.utils import CallTypes, Usage async def _handle_completed_batch( batch: Batch, custom_llm_provider: Literal["openai", "azure", "vertex_ai"], - logging_obj: LiteLLMLoggingObj, - start_time: datetime.datetime, - end_time: datetime.datetime, - **kwargs, -) -> None: +) -> Tuple[float, Usage, List[str]]: """Helper function to process a completed batch and handle logging""" # Get batch results file_content_dictionary = await _get_batch_output_file_content_as_dictionary( @@ -87,52 +27,25 @@ async def _handle_completed_batch( custom_llm_provider=custom_llm_provider, ) - # Handle logging - await _log_completed_batch( - logging_obj=logging_obj, - batch_usage=batch_usage, - batch_cost=batch_cost, - start_time=start_time, - end_time=end_time, - **kwargs, - ) + batch_models = _get_batch_models_from_file_content(file_content_dictionary) + + return batch_cost, batch_usage, batch_models -async def _log_completed_batch( - logging_obj: LiteLLMLoggingObj, - batch_usage: Usage, - batch_cost: float, - start_time: datetime.datetime, - end_time: datetime.datetime, - **kwargs, -) -> None: - """Helper function to handle all logging operations for a completed batch""" - logging_obj.call_type = "batch_success" - - standard_logging_object = _create_standard_logging_object_for_completed_batch( - kwargs=kwargs, - start_time=start_time, - end_time=end_time, - logging_obj=logging_obj, - batch_usage_object=batch_usage, - response_cost=batch_cost, - ) - - 
logging_obj.model_call_details["standard_logging_object"] = standard_logging_object - - # Launch async and sync logging handlers - asyncio.create_task( - logging_obj.async_success_handler( - result=None, - start_time=start_time, - end_time=end_time, - cache_hit=None, - ) - ) - threading.Thread( - target=logging_obj.success_handler, - args=(None, start_time, end_time), - ).start() +def _get_batch_models_from_file_content( + file_content_dictionary: List[dict], +) -> List[str]: + """ + Get the models from the file content + """ + batch_models = [] + for _item in file_content_dictionary: + if _batch_response_was_successful(_item): + _response_body = _get_response_from_batch_job_output_file(_item) + _model = _response_body.get("model") + if _model: + batch_models.append(_model) + return batch_models async def _batch_cost_calculator( @@ -159,6 +72,8 @@ async def _get_batch_output_file_content_as_dictionary( """ Get the batch output file content as a list of dictionaries """ + from litellm.files.main import afile_content + if custom_llm_provider == "vertex_ai": raise ValueError("Vertex AI does not support file content retrieval") @@ -208,6 +123,7 @@ def _get_batch_job_cost_from_file_content( total_cost += litellm.completion_cost( completion_response=_response_body, custom_llm_provider=custom_llm_provider, + call_type=CallTypes.aretrieve_batch.value, ) verbose_logger.debug("total_cost=%s", total_cost) return total_cost @@ -264,30 +180,3 @@ def _batch_response_was_successful(batch_job_output_file: dict) -> bool: """ _response: dict = batch_job_output_file.get("response", None) or {} return _response.get("status_code", None) == 200 - - -def _create_standard_logging_object_for_completed_batch( - kwargs: dict, - start_time: datetime.datetime, - end_time: datetime.datetime, - logging_obj: LiteLLMLoggingObj, - batch_usage_object: Usage, - response_cost: float, -) -> StandardLoggingPayload: - """ - Create a standard logging object for a completed batch - """ - 
standard_logging_object = logging_obj.model_call_details.get( - "standard_logging_object", None - ) - - if standard_logging_object is None: - raise ValueError("unable to create standard logging object for completed batch") - - # Add Completed Batch Job Usage and Response Cost - standard_logging_object["call_type"] = "batch_success" - standard_logging_object["response_cost"] = response_cost - standard_logging_object["total_tokens"] = batch_usage_object.total_tokens - standard_logging_object["prompt_tokens"] = batch_usage_object.prompt_tokens - standard_logging_object["completion_tokens"] = batch_usage_object.completion_tokens - return standard_logging_object diff --git a/litellm/batches/main.py b/litellm/batches/main.py index 32428c9c18..2f4800043c 100644 --- a/litellm/batches/main.py +++ b/litellm/batches/main.py @@ -31,10 +31,9 @@ from litellm.types.llms.openai import ( RetrieveBatchRequest, ) from litellm.types.router import GenericLiteLLMParams +from litellm.types.utils import LiteLLMBatch from litellm.utils import client, get_litellm_params, supports_httpx_timeout -from .batch_utils import batches_async_logging - ####### ENVIRONMENT VARIABLES ################### openai_batches_instance = OpenAIBatchesAPI() azure_batches_instance = AzureBatchesAPI() @@ -85,17 +84,6 @@ async def acreate_batch( else: response = init_response - # Start async logging job - if response is not None: - asyncio.create_task( - batches_async_logging( - logging_obj=kwargs.get("litellm_logging_obj", None), - batch_id=response.id, - custom_llm_provider=custom_llm_provider, - **kwargs, - ) - ) - return response except Exception as e: raise e @@ -111,7 +99,7 @@ def create_batch( extra_headers: Optional[Dict[str, str]] = None, extra_body: Optional[Dict[str, str]] = None, **kwargs, -) -> Union[Batch, Coroutine[Any, Any, Batch]]: +) -> Union[LiteLLMBatch, Coroutine[Any, Any, LiteLLMBatch]]: """ Creates and executes a batch from an uploaded file of request @@ -119,21 +107,26 @@ def create_batch( 
""" try: optional_params = GenericLiteLLMParams(**kwargs) + litellm_call_id = kwargs.get("litellm_call_id", None) + proxy_server_request = kwargs.get("proxy_server_request", None) + model_info = kwargs.get("model_info", None) _is_async = kwargs.pop("acreate_batch", False) is True litellm_logging_obj: LiteLLMLoggingObj = kwargs.get("litellm_logging_obj", None) ### TIMEOUT LOGIC ### timeout = optional_params.timeout or kwargs.get("request_timeout", 600) or 600 - litellm_params = get_litellm_params( - custom_llm_provider=custom_llm_provider, - litellm_call_id=kwargs.get("litellm_call_id", None), - litellm_trace_id=kwargs.get("litellm_trace_id"), - litellm_metadata=kwargs.get("litellm_metadata"), - ) litellm_logging_obj.update_environment_variables( model=None, user=None, optional_params=optional_params.model_dump(), - litellm_params=litellm_params, + litellm_params={ + "litellm_call_id": litellm_call_id, + "proxy_server_request": proxy_server_request, + "model_info": model_info, + "metadata": metadata, + "preset_cache_key": None, + "stream_response": {}, + **optional_params.model_dump(exclude_unset=True), + }, custom_llm_provider=custom_llm_provider, ) @@ -261,7 +254,7 @@ def create_batch( response=httpx.Response( status_code=400, content="Unsupported provider", - request=httpx.Request(method="create_thread", url="https://github.com/BerriAI/litellm"), # type: ignore + request=httpx.Request(method="create_batch", url="https://github.com/BerriAI/litellm"), # type: ignore ), ) return response @@ -269,6 +262,7 @@ def create_batch( raise e +@client async def aretrieve_batch( batch_id: str, custom_llm_provider: Literal["openai", "azure", "vertex_ai"] = "openai", @@ -276,7 +270,7 @@ async def aretrieve_batch( extra_headers: Optional[Dict[str, str]] = None, extra_body: Optional[Dict[str, str]] = None, **kwargs, -) -> Batch: +) -> LiteLLMBatch: """ Async: Retrieves a batch. 
@@ -310,6 +304,7 @@ async def aretrieve_batch( raise e +@client def retrieve_batch( batch_id: str, custom_llm_provider: Literal["openai", "azure", "vertex_ai"] = "openai", @@ -317,7 +312,7 @@ def retrieve_batch( extra_headers: Optional[Dict[str, str]] = None, extra_body: Optional[Dict[str, str]] = None, **kwargs, -) -> Union[Batch, Coroutine[Any, Any, Batch]]: +) -> Union[LiteLLMBatch, Coroutine[Any, Any, LiteLLMBatch]]: """ Retrieves a batch. @@ -325,9 +320,23 @@ def retrieve_batch( """ try: optional_params = GenericLiteLLMParams(**kwargs) + + litellm_logging_obj: LiteLLMLoggingObj = kwargs.get("litellm_logging_obj", None) ### TIMEOUT LOGIC ### timeout = optional_params.timeout or kwargs.get("request_timeout", 600) or 600 - # set timeout for 10 minutes by default + litellm_params = get_litellm_params( + custom_llm_provider=custom_llm_provider, + litellm_call_id=kwargs.get("litellm_call_id", None), + litellm_trace_id=kwargs.get("litellm_trace_id"), + litellm_metadata=kwargs.get("litellm_metadata"), + ) + litellm_logging_obj.update_environment_variables( + model=None, + user=None, + optional_params=optional_params.model_dump(), + litellm_params=litellm_params, + custom_llm_provider=custom_llm_provider, + ) if ( timeout is not None diff --git a/litellm/caching/__init__.py b/litellm/caching/__init__.py index f10675f5e0..e10d01ff02 100644 --- a/litellm/caching/__init__.py +++ b/litellm/caching/__init__.py @@ -4,5 +4,6 @@ from .dual_cache import DualCache from .in_memory_cache import InMemoryCache from .qdrant_semantic_cache import QdrantSemanticCache from .redis_cache import RedisCache +from .redis_cluster_cache import RedisClusterCache from .redis_semantic_cache import RedisSemanticCache from .s3_cache import S3Cache diff --git a/litellm/caching/caching.py b/litellm/caching/caching.py index e50e8b76d6..415c49edff 100644 --- a/litellm/caching/caching.py +++ b/litellm/caching/caching.py @@ -13,26 +13,14 @@ import json import time import traceback from enum import Enum 
-from typing import Any, Dict, List, Optional, Set, Union +from typing import Any, Dict, List, Optional, Union -from openai.types.audio.transcription_create_params import TranscriptionCreateParams -from openai.types.chat.completion_create_params import ( - CompletionCreateParamsNonStreaming, - CompletionCreateParamsStreaming, -) -from openai.types.completion_create_params import ( - CompletionCreateParamsNonStreaming as TextCompletionCreateParamsNonStreaming, -) -from openai.types.completion_create_params import ( - CompletionCreateParamsStreaming as TextCompletionCreateParamsStreaming, -) -from openai.types.embedding_create_params import EmbeddingCreateParams from pydantic import BaseModel import litellm from litellm._logging import verbose_logger +from litellm.litellm_core_utils.model_param_helper import ModelParamHelper from litellm.types.caching import * -from litellm.types.rerank import RerankRequest from litellm.types.utils import all_litellm_params from .base_cache import BaseCache @@ -41,6 +29,7 @@ from .dual_cache import DualCache # noqa from .in_memory_cache import InMemoryCache from .qdrant_semantic_cache import QdrantSemanticCache from .redis_cache import RedisCache +from .redis_cluster_cache import RedisClusterCache from .redis_semantic_cache import RedisSemanticCache from .s3_cache import S3Cache @@ -158,14 +147,23 @@ class Cache: None. 
Cache is set as a litellm param """ if type == LiteLLMCacheType.REDIS: - self.cache: BaseCache = RedisCache( - host=host, - port=port, - password=password, - redis_flush_size=redis_flush_size, - startup_nodes=redis_startup_nodes, - **kwargs, - ) + if redis_startup_nodes: + self.cache: BaseCache = RedisClusterCache( + host=host, + port=port, + password=password, + redis_flush_size=redis_flush_size, + startup_nodes=redis_startup_nodes, + **kwargs, + ) + else: + self.cache = RedisCache( + host=host, + port=port, + password=password, + redis_flush_size=redis_flush_size, + **kwargs, + ) elif type == LiteLLMCacheType.REDIS_SEMANTIC: self.cache = RedisSemanticCache( host=host, @@ -207,9 +205,9 @@ class Cache: if "cache" not in litellm.input_callback: litellm.input_callback.append("cache") if "cache" not in litellm.success_callback: - litellm.success_callback.append("cache") + litellm.logging_callback_manager.add_litellm_success_callback("cache") if "cache" not in litellm._async_success_callback: - litellm._async_success_callback.append("cache") + litellm.logging_callback_manager.add_litellm_async_success_callback("cache") self.supported_call_types = supported_call_types # default to ["completion", "acompletion", "embedding", "aembedding"] self.type = type self.namespace = namespace @@ -247,7 +245,7 @@ class Cache: verbose_logger.debug("\nReturning preset cache key: %s", preset_cache_key) return preset_cache_key - combined_kwargs = self._get_relevant_args_to_use_for_cache_key() + combined_kwargs = ModelParamHelper._get_all_llm_api_params() litellm_param_kwargs = all_litellm_params for param in kwargs: if param in combined_kwargs: @@ -267,9 +265,7 @@ class Cache: verbose_logger.debug("\nCreated cache key: %s", cache_key) hashed_cache_key = Cache._get_hashed_cache_key(cache_key) - hashed_cache_key = self._add_redis_namespace_to_cache_key( - hashed_cache_key, **kwargs - ) + hashed_cache_key = self._add_namespace_to_cache_key(hashed_cache_key, **kwargs) 
self._set_preset_cache_key_in_kwargs( preset_cache_key=hashed_cache_key, **kwargs ) @@ -356,76 +352,6 @@ class Cache: if "litellm_params" in kwargs: kwargs["litellm_params"]["preset_cache_key"] = preset_cache_key - def _get_relevant_args_to_use_for_cache_key(self) -> Set[str]: - """ - Gets the supported kwargs for each call type and combines them - """ - chat_completion_kwargs = self._get_litellm_supported_chat_completion_kwargs() - text_completion_kwargs = self._get_litellm_supported_text_completion_kwargs() - embedding_kwargs = self._get_litellm_supported_embedding_kwargs() - transcription_kwargs = self._get_litellm_supported_transcription_kwargs() - rerank_kwargs = self._get_litellm_supported_rerank_kwargs() - exclude_kwargs = self._get_kwargs_to_exclude_from_cache_key() - - combined_kwargs = chat_completion_kwargs.union( - text_completion_kwargs, - embedding_kwargs, - transcription_kwargs, - rerank_kwargs, - ) - combined_kwargs = combined_kwargs.difference(exclude_kwargs) - return combined_kwargs - - def _get_litellm_supported_chat_completion_kwargs(self) -> Set[str]: - """ - Get the litellm supported chat completion kwargs - - This follows the OpenAI API Spec - """ - all_chat_completion_kwargs = set( - CompletionCreateParamsNonStreaming.__annotations__.keys() - ).union(set(CompletionCreateParamsStreaming.__annotations__.keys())) - return all_chat_completion_kwargs - - def _get_litellm_supported_text_completion_kwargs(self) -> Set[str]: - """ - Get the litellm supported text completion kwargs - - This follows the OpenAI API Spec - """ - all_text_completion_kwargs = set( - TextCompletionCreateParamsNonStreaming.__annotations__.keys() - ).union(set(TextCompletionCreateParamsStreaming.__annotations__.keys())) - return all_text_completion_kwargs - - def _get_litellm_supported_rerank_kwargs(self) -> Set[str]: - """ - Get the litellm supported rerank kwargs - """ - return set(RerankRequest.model_fields.keys()) - - def _get_litellm_supported_embedding_kwargs(self) -> 
Set[str]: - """ - Get the litellm supported embedding kwargs - - This follows the OpenAI API Spec - """ - return set(EmbeddingCreateParams.__annotations__.keys()) - - def _get_litellm_supported_transcription_kwargs(self) -> Set[str]: - """ - Get the litellm supported transcription kwargs - - This follows the OpenAI API Spec - """ - return set(TranscriptionCreateParams.__annotations__.keys()) - - def _get_kwargs_to_exclude_from_cache_key(self) -> Set[str]: - """ - Get the kwargs to exclude from the cache key - """ - return set(["metadata"]) - @staticmethod def _get_hashed_cache_key(cache_key: str) -> str: """ @@ -445,7 +371,7 @@ class Cache: verbose_logger.debug("Hashed cache key (SHA-256): %s", hash_hex) return hash_hex - def _add_redis_namespace_to_cache_key(self, hash_hex: str, **kwargs) -> str: + def _add_namespace_to_cache_key(self, hash_hex: str, **kwargs) -> str: """ If a redis namespace is provided, add it to the cache key @@ -456,7 +382,12 @@ class Cache: Returns: str: The final hashed cache key with the redis namespace. 
""" - namespace = kwargs.get("metadata", {}).get("redis_namespace") or self.namespace + dynamic_cache_control: DynamicCacheControl = kwargs.get("cache", {}) + namespace = ( + dynamic_cache_control.get("namespace") + or kwargs.get("metadata", {}).get("redis_namespace") + or self.namespace + ) if namespace: hash_hex = f"{namespace}:{hash_hex}" verbose_logger.debug("Final hashed key: %s", hash_hex) @@ -536,11 +467,14 @@ class Cache: else: cache_key = self.get_cache_key(**kwargs) if cache_key is not None: - cache_control_args = kwargs.get("cache", {}) - max_age = cache_control_args.get( - "s-max-age", cache_control_args.get("s-maxage", float("inf")) + cache_control_args: DynamicCacheControl = kwargs.get("cache", {}) + max_age = ( + cache_control_args.get("s-maxage") + or cache_control_args.get("s-max-age") + or float("inf") ) cached_result = self.cache.get_cache(cache_key, messages=messages) + cached_result = self.cache.get_cache(cache_key, messages=messages) return self._get_cache_logic( cached_result=cached_result, max_age=max_age ) @@ -774,9 +708,9 @@ def enable_cache( if "cache" not in litellm.input_callback: litellm.input_callback.append("cache") if "cache" not in litellm.success_callback: - litellm.success_callback.append("cache") + litellm.logging_callback_manager.add_litellm_success_callback("cache") if "cache" not in litellm._async_success_callback: - litellm._async_success_callback.append("cache") + litellm.logging_callback_manager.add_litellm_async_success_callback("cache") if litellm.cache is None: litellm.cache = Cache( diff --git a/litellm/caching/caching_handler.py b/litellm/caching/caching_handler.py index 40c1001732..2a958c9eee 100644 --- a/litellm/caching/caching_handler.py +++ b/litellm/caching/caching_handler.py @@ -247,7 +247,6 @@ class LLMCachingHandler: pass else: call_type = original_function.__name__ - cached_result = self._convert_cached_result_to_model_response( cached_result=cached_result, call_type=call_type, @@ -725,6 +724,7 @@ class 
LLMCachingHandler: """ Sync internal method to add the result to the cache """ + new_kwargs = kwargs.copy() new_kwargs.update( convert_args_to_kwargs( @@ -738,6 +738,7 @@ class LLMCachingHandler: if self._should_store_result_in_cache( original_function=self.original_function, kwargs=new_kwargs ): + litellm.cache.add_cache(result, **new_kwargs) return diff --git a/litellm/caching/redis_cache.py b/litellm/caching/redis_cache.py index 21455fa7f2..66245e7476 100644 --- a/litellm/caching/redis_cache.py +++ b/litellm/caching/redis_cache.py @@ -14,7 +14,7 @@ import inspect import json import time from datetime import timedelta -from typing import TYPE_CHECKING, Any, List, Optional, Tuple +from typing import TYPE_CHECKING, Any, List, Optional, Tuple, Union import litellm from litellm._logging import print_verbose, verbose_logger @@ -26,15 +26,20 @@ from .base_cache import BaseCache if TYPE_CHECKING: from opentelemetry.trace import Span as _Span - from redis.asyncio import Redis + from redis.asyncio import Redis, RedisCluster from redis.asyncio.client import Pipeline + from redis.asyncio.cluster import ClusterPipeline pipeline = Pipeline + cluster_pipeline = ClusterPipeline async_redis_client = Redis + async_redis_cluster_client = RedisCluster Span = _Span else: pipeline = Any + cluster_pipeline = Any async_redis_client = Any + async_redis_cluster_client = Any Span = Any @@ -75,6 +80,7 @@ class RedisCache(BaseCache): redis_kwargs.update(kwargs) self.redis_client = get_redis_client(**redis_kwargs) + self.redis_async_client: Optional[async_redis_client] = None self.redis_kwargs = redis_kwargs self.async_redis_conn_pool = get_redis_connection_pool(**redis_kwargs) @@ -122,12 +128,16 @@ class RedisCache(BaseCache): else: super().__init__() # defaults to 60s - def init_async_client(self): + def init_async_client( + self, + ) -> Union[async_redis_client, async_redis_cluster_client]: from .._redis import get_redis_async_client - return get_redis_async_client( - 
connection_pool=self.async_redis_conn_pool, **self.redis_kwargs - ) + if self.redis_async_client is None: + self.redis_async_client = get_redis_async_client( + connection_pool=self.async_redis_conn_pool, **self.redis_kwargs + ) + return self.redis_async_client def check_and_fix_namespace(self, key: str) -> str: """ @@ -227,26 +237,23 @@ class RedisCache(BaseCache): keys = [] _redis_client: Redis = self.init_async_client() # type: ignore - async with _redis_client as redis_client: - async for key in redis_client.scan_iter( - match=pattern + "*", count=count - ): - keys.append(key) - if len(keys) >= count: - break + async for key in _redis_client.scan_iter(match=pattern + "*", count=count): + keys.append(key) + if len(keys) >= count: + break - ## LOGGING ## - end_time = time.time() - _duration = end_time - start_time - asyncio.create_task( - self.service_logger_obj.async_service_success_hook( - service=ServiceTypes.REDIS, - duration=_duration, - call_type="async_scan_iter", - start_time=start_time, - end_time=end_time, - ) - ) # DO NOT SLOW DOWN CALL B/C OF THIS + ## LOGGING ## + end_time = time.time() + _duration = end_time - start_time + asyncio.create_task( + self.service_logger_obj.async_service_success_hook( + service=ServiceTypes.REDIS, + duration=_duration, + call_type="async_scan_iter", + start_time=start_time, + end_time=end_time, + ) + ) # DO NOT SLOW DOWN CALL B/C OF THIS return keys except Exception as e: # NON blocking - notify users Redis is throwing an exception @@ -285,7 +292,6 @@ class RedisCache(BaseCache): call_type="async_set_cache", ) ) - # NON blocking - notify users Redis is throwing an exception verbose_logger.error( "LiteLLM Redis Caching: async set() - Got exception from REDIS %s, Writing value=%s", str(e), @@ -294,59 +300,59 @@ class RedisCache(BaseCache): raise e key = self.check_and_fix_namespace(key=key) - async with _redis_client as redis_client: - ttl = self.get_ttl(**kwargs) + ttl = self.get_ttl(**kwargs) + print_verbose(f"Set ASYNC 
Redis Cache: key: {key}\nValue {value}\nttl={ttl}") + + try: + if not hasattr(_redis_client, "set"): + raise Exception("Redis client cannot set cache. Attribute not found.") + await _redis_client.set(name=key, value=json.dumps(value), ex=ttl) print_verbose( - f"Set ASYNC Redis Cache: key: {key}\nValue {value}\nttl={ttl}" + f"Successfully Set ASYNC Redis Cache: key: {key}\nValue {value}\nttl={ttl}" + ) + end_time = time.time() + _duration = end_time - start_time + asyncio.create_task( + self.service_logger_obj.async_service_success_hook( + service=ServiceTypes.REDIS, + duration=_duration, + call_type="async_set_cache", + start_time=start_time, + end_time=end_time, + parent_otel_span=_get_parent_otel_span_from_kwargs(kwargs), + event_metadata={"key": key}, + ) + ) + except Exception as e: + end_time = time.time() + _duration = end_time - start_time + asyncio.create_task( + self.service_logger_obj.async_service_failure_hook( + service=ServiceTypes.REDIS, + duration=_duration, + error=e, + call_type="async_set_cache", + start_time=start_time, + end_time=end_time, + parent_otel_span=_get_parent_otel_span_from_kwargs(kwargs), + event_metadata={"key": key}, + ) + ) + verbose_logger.error( + "LiteLLM Redis Caching: async set() - Got exception from REDIS %s, Writing value=%s", + str(e), + value, ) - try: - if not hasattr(redis_client, "set"): - raise Exception( - "Redis client cannot set cache. Attribute not found." 
- ) - await redis_client.set(name=key, value=json.dumps(value), ex=ttl) - print_verbose( - f"Successfully Set ASYNC Redis Cache: key: {key}\nValue {value}\nttl={ttl}" - ) - end_time = time.time() - _duration = end_time - start_time - asyncio.create_task( - self.service_logger_obj.async_service_success_hook( - service=ServiceTypes.REDIS, - duration=_duration, - call_type="async_set_cache", - start_time=start_time, - end_time=end_time, - parent_otel_span=_get_parent_otel_span_from_kwargs(kwargs), - event_metadata={"key": key}, - ) - ) - except Exception as e: - end_time = time.time() - _duration = end_time - start_time - asyncio.create_task( - self.service_logger_obj.async_service_failure_hook( - service=ServiceTypes.REDIS, - duration=_duration, - error=e, - call_type="async_set_cache", - start_time=start_time, - end_time=end_time, - parent_otel_span=_get_parent_otel_span_from_kwargs(kwargs), - event_metadata={"key": key}, - ) - ) - # NON blocking - notify users Redis is throwing an exception - verbose_logger.error( - "LiteLLM Redis Caching: async set() - Got exception from REDIS %s, Writing value=%s", - str(e), - value, - ) - async def _pipeline_helper( - self, pipe: pipeline, cache_list: List[Tuple[Any, Any]], ttl: Optional[float] + self, + pipe: Union[pipeline, cluster_pipeline], + cache_list: List[Tuple[Any, Any]], + ttl: Optional[float], ) -> List: + """ + Helper function for executing a pipeline of set operations on Redis + """ ttl = self.get_ttl(ttl=ttl) # Iterate through each key-value pair in the cache_list and set them in the pipeline. for cache_key, cache_value in cache_list: @@ -359,7 +365,11 @@ class RedisCache(BaseCache): _td: Optional[timedelta] = None if ttl is not None: _td = timedelta(seconds=ttl) - pipe.set(cache_key, json_cache_value, ex=_td) + pipe.set( # type: ignore + name=cache_key, + value=json_cache_value, + ex=_td, + ) # Execute the pipeline and return the results. 
results = await pipe.execute() return results @@ -373,9 +383,8 @@ class RedisCache(BaseCache): # don't waste a network request if there's nothing to set if len(cache_list) == 0: return - from redis.asyncio import Redis - _redis_client: Redis = self.init_async_client() # type: ignore + _redis_client = self.init_async_client() start_time = time.time() print_verbose( @@ -383,9 +392,8 @@ class RedisCache(BaseCache): ) cache_value: Any = None try: - async with _redis_client as redis_client: - async with redis_client.pipeline(transaction=True) as pipe: - results = await self._pipeline_helper(pipe, cache_list, ttl) + async with _redis_client.pipeline(transaction=False) as pipe: + results = await self._pipeline_helper(pipe, cache_list, ttl) print_verbose(f"pipeline results: {results}") # Optionally, you could process 'results' to make sure that all set operations were successful. @@ -473,49 +481,46 @@ class RedisCache(BaseCache): raise e key = self.check_and_fix_namespace(key=key) - async with _redis_client as redis_client: - print_verbose( - f"Set ASYNC Redis Cache: key: {key}\nValue {value}\nttl={ttl}" + print_verbose(f"Set ASYNC Redis Cache: key: {key}\nValue {value}\nttl={ttl}") + try: + await self._set_cache_sadd_helper( + redis_client=_redis_client, key=key, value=value, ttl=ttl ) - try: - await self._set_cache_sadd_helper( - redis_client=redis_client, key=key, value=value, ttl=ttl + print_verbose( + f"Successfully Set ASYNC Redis Cache SADD: key: {key}\nValue {value}\nttl={ttl}" + ) + end_time = time.time() + _duration = end_time - start_time + asyncio.create_task( + self.service_logger_obj.async_service_success_hook( + service=ServiceTypes.REDIS, + duration=_duration, + call_type="async_set_cache_sadd", + start_time=start_time, + end_time=end_time, + parent_otel_span=_get_parent_otel_span_from_kwargs(kwargs), ) - print_verbose( - f"Successfully Set ASYNC Redis Cache SADD: key: {key}\nValue {value}\nttl={ttl}" - ) - end_time = time.time() - _duration = end_time - 
start_time - asyncio.create_task( - self.service_logger_obj.async_service_success_hook( - service=ServiceTypes.REDIS, - duration=_duration, - call_type="async_set_cache_sadd", - start_time=start_time, - end_time=end_time, - parent_otel_span=_get_parent_otel_span_from_kwargs(kwargs), - ) - ) - except Exception as e: - end_time = time.time() - _duration = end_time - start_time - asyncio.create_task( - self.service_logger_obj.async_service_failure_hook( - service=ServiceTypes.REDIS, - duration=_duration, - error=e, - call_type="async_set_cache_sadd", - start_time=start_time, - end_time=end_time, - parent_otel_span=_get_parent_otel_span_from_kwargs(kwargs), - ) - ) - # NON blocking - notify users Redis is throwing an exception - verbose_logger.error( - "LiteLLM Redis Caching: async set_cache_sadd() - Got exception from REDIS %s, Writing value=%s", - str(e), - value, + ) + except Exception as e: + end_time = time.time() + _duration = end_time - start_time + asyncio.create_task( + self.service_logger_obj.async_service_failure_hook( + service=ServiceTypes.REDIS, + duration=_duration, + error=e, + call_type="async_set_cache_sadd", + start_time=start_time, + end_time=end_time, + parent_otel_span=_get_parent_otel_span_from_kwargs(kwargs), ) + ) + # NON blocking - notify users Redis is throwing an exception + verbose_logger.error( + "LiteLLM Redis Caching: async set_cache_sadd() - Got exception from REDIS %s, Writing value=%s", + str(e), + value, + ) async def batch_cache_write(self, key, value, **kwargs): print_verbose( @@ -538,31 +543,30 @@ class RedisCache(BaseCache): _redis_client: Redis = self.init_async_client() # type: ignore start_time = time.time() _used_ttl = self.get_ttl(ttl=ttl) + key = self.check_and_fix_namespace(key=key) try: - async with _redis_client as redis_client: - result = await redis_client.incrbyfloat(name=key, amount=value) + result = await _redis_client.incrbyfloat(name=key, amount=value) + if _used_ttl is not None: + # check if key already has ttl, 
if not -> set ttl + current_ttl = await _redis_client.ttl(key) + if current_ttl == -1: + # Key has no expiration + await _redis_client.expire(key, _used_ttl) - if _used_ttl is not None: - # check if key already has ttl, if not -> set ttl - current_ttl = await redis_client.ttl(key) - if current_ttl == -1: - # Key has no expiration - await redis_client.expire(key, _used_ttl) - - ## LOGGING ## - end_time = time.time() - _duration = end_time - start_time - asyncio.create_task( - self.service_logger_obj.async_service_success_hook( - service=ServiceTypes.REDIS, - duration=_duration, - call_type="async_increment", - start_time=start_time, - end_time=end_time, - parent_otel_span=parent_otel_span, - ) + ## LOGGING ## + end_time = time.time() + _duration = end_time - start_time + asyncio.create_task( + self.service_logger_obj.async_service_success_hook( + service=ServiceTypes.REDIS, + duration=_duration, + call_type="async_increment", + start_time=start_time, + end_time=end_time, + parent_otel_span=parent_otel_span, ) - return result + ) + return result except Exception as e: ## LOGGING ## end_time = time.time() @@ -634,19 +638,48 @@ class RedisCache(BaseCache): "litellm.caching.caching: get() - Got exception from REDIS: ", e ) - def batch_get_cache(self, key_list, parent_otel_span: Optional[Span]) -> dict: + def _run_redis_mget_operation(self, keys: List[str]) -> List[Any]: + """ + Wrapper to call `mget` on the redis client + + We use a wrapper so RedisCluster can override this method + """ + return self.redis_client.mget(keys=keys) # type: ignore + + async def _async_run_redis_mget_operation(self, keys: List[str]) -> List[Any]: + """ + Wrapper to call `mget` on the redis client + + We use a wrapper so RedisCluster can override this method + """ + async_redis_client = self.init_async_client() + return await async_redis_client.mget(keys=keys) # type: ignore + + def batch_get_cache( + self, + key_list: Union[List[str], List[Optional[str]]], + parent_otel_span: Optional[Span] 
= None, + ) -> dict: """ Use Redis for bulk read operations + + Args: + key_list: List of keys to get from Redis + parent_otel_span: Optional parent OpenTelemetry span + + Returns: + dict: A dictionary mapping keys to their cached values """ key_value_dict = {} + _key_list = [key for key in key_list if key is not None] try: _keys = [] - for cache_key in key_list: - cache_key = self.check_and_fix_namespace(key=cache_key) + for cache_key in _key_list: + cache_key = self.check_and_fix_namespace(key=cache_key or "") _keys.append(cache_key) start_time = time.time() - results: List = self.redis_client.mget(keys=_keys) # type: ignore + results: List = self._run_redis_mget_operation(keys=_keys) end_time = time.time() _duration = end_time - start_time self.service_logger_obj.service_success_hook( @@ -659,17 +692,19 @@ class RedisCache(BaseCache): ) # Associate the results back with their keys. - # 'results' is a list of values corresponding to the order of keys in 'key_list'. - key_value_dict = dict(zip(key_list, results)) + # 'results' is a list of values corresponding to the order of keys in '_key_list'. 
+ key_value_dict = dict(zip(_key_list, results)) - decoded_results = { - k.decode("utf-8"): self._get_cache_logic(v) - for k, v in key_value_dict.items() - } + decoded_results = {} + for k, v in key_value_dict.items(): + if isinstance(k, bytes): + k = k.decode("utf-8") + v = self._get_cache_logic(v) + decoded_results[k] = v return decoded_results except Exception as e: - print_verbose(f"Error occurred in pipeline read - {str(e)}") + verbose_logger.error(f"Error occurred in batch get cache - {str(e)}") return key_value_dict async def async_get_cache( @@ -680,67 +715,75 @@ class RedisCache(BaseCache): _redis_client: Redis = self.init_async_client() # type: ignore key = self.check_and_fix_namespace(key=key) start_time = time.time() - async with _redis_client as redis_client: - try: - print_verbose(f"Get Async Redis Cache: key: {key}") - cached_response = await redis_client.get(key) - print_verbose( - f"Got Async Redis Cache: key: {key}, cached_response {cached_response}" + + try: + print_verbose(f"Get Async Redis Cache: key: {key}") + cached_response = await _redis_client.get(key) + print_verbose( + f"Got Async Redis Cache: key: {key}, cached_response {cached_response}" + ) + response = self._get_cache_logic(cached_response=cached_response) + + end_time = time.time() + _duration = end_time - start_time + asyncio.create_task( + self.service_logger_obj.async_service_success_hook( + service=ServiceTypes.REDIS, + duration=_duration, + call_type="async_get_cache", + start_time=start_time, + end_time=end_time, + parent_otel_span=parent_otel_span, + event_metadata={"key": key}, ) - response = self._get_cache_logic(cached_response=cached_response) - ## LOGGING ## - end_time = time.time() - _duration = end_time - start_time - asyncio.create_task( - self.service_logger_obj.async_service_success_hook( - service=ServiceTypes.REDIS, - duration=_duration, - call_type="async_get_cache", - start_time=start_time, - end_time=end_time, - parent_otel_span=parent_otel_span, - 
event_metadata={"key": key}, - ) - ) - return response - except Exception as e: - ## LOGGING ## - end_time = time.time() - _duration = end_time - start_time - asyncio.create_task( - self.service_logger_obj.async_service_failure_hook( - service=ServiceTypes.REDIS, - duration=_duration, - error=e, - call_type="async_get_cache", - start_time=start_time, - end_time=end_time, - parent_otel_span=parent_otel_span, - event_metadata={"key": key}, - ) - ) - # NON blocking - notify users Redis is throwing an exception - print_verbose( - f"litellm.caching.caching: async get() - Got exception from REDIS: {str(e)}" + ) + return response + except Exception as e: + end_time = time.time() + _duration = end_time - start_time + asyncio.create_task( + self.service_logger_obj.async_service_failure_hook( + service=ServiceTypes.REDIS, + duration=_duration, + error=e, + call_type="async_get_cache", + start_time=start_time, + end_time=end_time, + parent_otel_span=parent_otel_span, + event_metadata={"key": key}, ) + ) + print_verbose( + f"litellm.caching.caching: async get() - Got exception from REDIS: {str(e)}" + ) async def async_batch_get_cache( - self, key_list: List[str], parent_otel_span: Optional[Span] = None + self, + key_list: Union[List[str], List[Optional[str]]], + parent_otel_span: Optional[Span] = None, ) -> dict: """ Use Redis for bulk read operations + + Args: + key_list: List of keys to get from Redis + parent_otel_span: Optional parent OpenTelemetry span + + Returns: + dict: A dictionary mapping keys to their cached values + + `.mget` does not support None keys. This will filter out None keys. 
""" - _redis_client = await self.init_async_client() + # typed as Any, redis python lib has incomplete type stubs for RedisCluster and does not include `mget` key_value_dict = {} start_time = time.time() + _key_list = [key for key in key_list if key is not None] try: - async with _redis_client as redis_client: - _keys = [] - for cache_key in key_list: - cache_key = self.check_and_fix_namespace(key=cache_key) - _keys.append(cache_key) - results = await redis_client.mget(keys=_keys) - + _keys = [] + for cache_key in _key_list: + cache_key = self.check_and_fix_namespace(key=cache_key) + _keys.append(cache_key) + results = await self._async_run_redis_mget_operation(keys=_keys) ## LOGGING ## end_time = time.time() _duration = end_time - start_time @@ -757,7 +800,7 @@ class RedisCache(BaseCache): # Associate the results back with their keys. # 'results' is a list of values corresponding to the order of keys in 'key_list'. - key_value_dict = dict(zip(key_list, results)) + key_value_dict = dict(zip(_key_list, results)) decoded_results = {} for k, v in key_value_dict.items(): @@ -782,7 +825,7 @@ class RedisCache(BaseCache): parent_otel_span=parent_otel_span, ) ) - print_verbose(f"Error occurred in pipeline read - {str(e)}") + verbose_logger.error(f"Error occurred in async batch get cache - {str(e)}") return key_value_dict def sync_ping(self) -> bool: @@ -822,46 +865,46 @@ class RedisCache(BaseCache): raise e async def ping(self) -> bool: - _redis_client = self.init_async_client() + # typed as Any, redis python lib has incomplete type stubs for RedisCluster and does not include `ping` + _redis_client: Any = self.init_async_client() start_time = time.time() - async with _redis_client as redis_client: - print_verbose("Pinging Async Redis Cache") - try: - response = await redis_client.ping() - ## LOGGING ## - end_time = time.time() - _duration = end_time - start_time - asyncio.create_task( - self.service_logger_obj.async_service_success_hook( - service=ServiceTypes.REDIS, - 
duration=_duration, - call_type="async_ping", - ) + print_verbose("Pinging Async Redis Cache") + try: + response = await _redis_client.ping() + ## LOGGING ## + end_time = time.time() + _duration = end_time - start_time + asyncio.create_task( + self.service_logger_obj.async_service_success_hook( + service=ServiceTypes.REDIS, + duration=_duration, + call_type="async_ping", ) - return response - except Exception as e: - # NON blocking - notify users Redis is throwing an exception - ## LOGGING ## - end_time = time.time() - _duration = end_time - start_time - asyncio.create_task( - self.service_logger_obj.async_service_failure_hook( - service=ServiceTypes.REDIS, - duration=_duration, - error=e, - call_type="async_ping", - ) + ) + return response + except Exception as e: + # NON blocking - notify users Redis is throwing an exception + ## LOGGING ## + end_time = time.time() + _duration = end_time - start_time + asyncio.create_task( + self.service_logger_obj.async_service_failure_hook( + service=ServiceTypes.REDIS, + duration=_duration, + error=e, + call_type="async_ping", ) - verbose_logger.error( - f"LiteLLM Redis Cache PING: - Got exception from REDIS : {str(e)}" - ) - raise e + ) + verbose_logger.error( + f"LiteLLM Redis Cache PING: - Got exception from REDIS : {str(e)}" + ) + raise e async def delete_cache_keys(self, keys): - _redis_client = self.init_async_client() + # typed as Any, redis python lib has incomplete type stubs for RedisCluster and does not include `delete` + _redis_client: Any = self.init_async_client() # keys is a list, unpack it so it gets passed as individual elements to delete - async with _redis_client as redis_client: - await redis_client.delete(*keys) + await _redis_client.delete(*keys) def client_list(self) -> List: client_list: List = self.redis_client.client_list() # type: ignore @@ -881,10 +924,10 @@ class RedisCache(BaseCache): await self.async_redis_conn_pool.disconnect(inuse_connections=True) async def async_delete_cache(self, key: str): 
- _redis_client = self.init_async_client() + # typed as Any, redis python lib has incomplete type stubs for RedisCluster and does not include `delete` + _redis_client: Any = self.init_async_client() # keys is str - async with _redis_client as redis_client: - await redis_client.delete(key) + await _redis_client.delete(key) def delete_cache(self, key): self.redis_client.delete(key) @@ -935,11 +978,8 @@ class RedisCache(BaseCache): ) try: - async with _redis_client as redis_client: - async with redis_client.pipeline(transaction=True) as pipe: - results = await self._pipeline_increment_helper( - pipe, increment_list - ) + async with _redis_client.pipeline(transaction=False) as pipe: + results = await self._pipeline_increment_helper(pipe, increment_list) print_verbose(f"pipeline increment results: {results}") @@ -991,12 +1031,12 @@ class RedisCache(BaseCache): Redis ref: https://redis.io/docs/latest/commands/ttl/ """ try: - _redis_client = await self.init_async_client() - async with _redis_client as redis_client: - ttl = await redis_client.ttl(key) - if ttl <= -1: # -1 means the key does not exist, -2 key does not exist - return None - return ttl + # typed as Any, redis python lib has incomplete type stubs for RedisCluster and does not include `ttl` + _redis_client: Any = self.init_async_client() + ttl = await _redis_client.ttl(key) + if ttl <= -1: # -1 means the key does not exist, -2 key does not exist + return None + return ttl except Exception as e: verbose_logger.debug(f"Redis TTL Error: {e}") return None diff --git a/litellm/caching/redis_cluster_cache.py b/litellm/caching/redis_cluster_cache.py new file mode 100644 index 0000000000..2e7d1de17f --- /dev/null +++ b/litellm/caching/redis_cluster_cache.py @@ -0,0 +1,59 @@ +""" +Redis Cluster Cache implementation + +Key differences: +- RedisClient NEEDs to be re-used across requests, adds 3000ms latency if it's re-created +""" + +from typing import TYPE_CHECKING, Any, List, Optional + +from litellm.caching.redis_cache 
import RedisCache + +if TYPE_CHECKING: + from opentelemetry.trace import Span as _Span + from redis.asyncio import Redis, RedisCluster + from redis.asyncio.client import Pipeline + + pipeline = Pipeline + async_redis_client = Redis + Span = _Span +else: + pipeline = Any + async_redis_client = Any + Span = Any + + +class RedisClusterCache(RedisCache): + def __init__(self, *args, **kwargs): + super().__init__(*args, **kwargs) + self.redis_async_redis_cluster_client: Optional[RedisCluster] = None + self.redis_sync_redis_cluster_client: Optional[RedisCluster] = None + + def init_async_client(self): + from redis.asyncio import RedisCluster + + from .._redis import get_redis_async_client + + if self.redis_async_redis_cluster_client: + return self.redis_async_redis_cluster_client + + _redis_client = get_redis_async_client( + connection_pool=self.async_redis_conn_pool, **self.redis_kwargs + ) + if isinstance(_redis_client, RedisCluster): + self.redis_async_redis_cluster_client = _redis_client + + return _redis_client + + def _run_redis_mget_operation(self, keys: List[str]) -> List[Any]: + """ + Overrides `_run_redis_mget_operation` in redis_cache.py + """ + return self.redis_client.mget_nonatomic(keys=keys) # type: ignore + + async def _async_run_redis_mget_operation(self, keys: List[str]) -> List[Any]: + """ + Overrides `_async_run_redis_mget_operation` in redis_cache.py + """ + async_redis_cluster_client = self.init_async_client() + return await async_redis_cluster_client.mget_nonatomic(keys=keys) # type: ignore diff --git a/litellm/constants.py b/litellm/constants.py index dff574f0f6..0288c45e40 100644 --- a/litellm/constants.py +++ b/litellm/constants.py @@ -1,3 +1,5 @@ +from typing import List, Literal + ROUTER_MAX_FALLBACKS = 5 DEFAULT_BATCH_SIZE = 512 DEFAULT_FLUSH_INTERVAL_SECONDS = 5 @@ -12,6 +14,11 @@ DEFAULT_IMAGE_TOKEN_COUNT = 250 DEFAULT_IMAGE_WIDTH = 300 DEFAULT_IMAGE_HEIGHT = 300 SINGLE_DEPLOYMENT_TRAFFIC_FAILURE_THRESHOLD = 1000 # Minimum number of requests 
to consider "reasonable traffic". Used for single-deployment cooldown logic. +#### RELIABILITY #### +REPEATED_STREAMING_CHUNK_LIMIT = 100 # catch if model starts looping the same chunk while streaming. Uses high default to prevent false positives. +#### Networking settings #### +request_timeout: float = 6000 # time in seconds + LITELLM_CHAT_PROVIDERS = [ "openai", "openai_like", @@ -111,8 +118,294 @@ OPENAI_CHAT_COMPLETION_PARAMS = [ "parallel_tool_calls", "logprobs", "top_logprobs", + "reasoning_effort", "extra_headers", + "thinking", ] + +openai_compatible_endpoints: List = [ + "api.perplexity.ai", + "api.endpoints.anyscale.com/v1", + "api.deepinfra.com/v1/openai", + "api.mistral.ai/v1", + "codestral.mistral.ai/v1/chat/completions", + "codestral.mistral.ai/v1/fim/completions", + "api.groq.com/openai/v1", + "https://integrate.api.nvidia.com/v1", + "api.deepseek.com/v1", + "api.together.xyz/v1", + "app.empower.dev/api/v1", + "https://api.friendli.ai/serverless/v1", + "api.sambanova.ai/v1", + "api.x.ai/v1", + "api.galadriel.ai/v1", +] + + +openai_compatible_providers: List = [ + "anyscale", + "mistral", + "groq", + "nvidia_nim", + "cerebras", + "sambanova", + "ai21_chat", + "ai21", + "volcengine", + "codestral", + "deepseek", + "deepinfra", + "perplexity", + "xinference", + "xai", + "together_ai", + "fireworks_ai", + "empower", + "friendliai", + "azure_ai", + "github", + "litellm_proxy", + "hosted_vllm", + "lm_studio", + "galadriel", +] +openai_text_completion_compatible_providers: List = ( + [ # providers that support `/v1/completions` + "together_ai", + "fireworks_ai", + "hosted_vllm", + ] +) +_openai_like_providers: List = [ + "predibase", + "databricks", + "watsonx", +] # private helper. 
similar to openai but require some custom auth / endpoint handling, so can't use the openai sdk +# well supported replicate llms +replicate_models: List = [ + # llama replicate supported LLMs + "replicate/llama-2-70b-chat:2796ee9483c3fd7aa2e171d38f4ca12251a30609463dcfd4cd76703f22e96cdf", + "a16z-infra/llama-2-13b-chat:2a7f981751ec7fdf87b5b91ad4db53683a98082e9ff7bfd12c8cd5ea85980a52", + "meta/codellama-13b:1c914d844307b0588599b8393480a3ba917b660c7e9dfae681542b5325f228db", + # Vicuna + "replicate/vicuna-13b:6282abe6a492de4145d7bb601023762212f9ddbbe78278bd6771c8b3b2f2a13b", + "joehoover/instructblip-vicuna13b:c4c54e3c8c97cd50c2d2fec9be3b6065563ccf7d43787fb99f84151b867178fe", + # Flan T-5 + "daanelson/flan-t5-large:ce962b3f6792a57074a601d3979db5839697add2e4e02696b3ced4c022d4767f", + # Others + "replicate/dolly-v2-12b:ef0e1aefc61f8e096ebe4db6b2bacc297daf2ef6899f0f7e001ec445893500e5", + "replit/replit-code-v1-3b:b84f4c074b807211cd75e3e8b1589b6399052125b4c27106e43d47189e8415ad", +] + +clarifai_models: List = [ + "clarifai/meta.Llama-3.Llama-3-8B-Instruct", + "clarifai/gcp.generate.gemma-1_1-7b-it", + "clarifai/mistralai.completion.mixtral-8x22B", + "clarifai/cohere.generate.command-r-plus", + "clarifai/databricks.drbx.dbrx-instruct", + "clarifai/mistralai.completion.mistral-large", + "clarifai/mistralai.completion.mistral-medium", + "clarifai/mistralai.completion.mistral-small", + "clarifai/mistralai.completion.mixtral-8x7B-Instruct-v0_1", + "clarifai/gcp.generate.gemma-2b-it", + "clarifai/gcp.generate.gemma-7b-it", + "clarifai/deci.decilm.deciLM-7B-instruct", + "clarifai/mistralai.completion.mistral-7B-Instruct", + "clarifai/gcp.generate.gemini-pro", + "clarifai/anthropic.completion.claude-v1", + "clarifai/anthropic.completion.claude-instant-1_2", + "clarifai/anthropic.completion.claude-instant", + "clarifai/anthropic.completion.claude-v2", + "clarifai/anthropic.completion.claude-2_1", + "clarifai/meta.Llama-2.codeLlama-70b-Python", + 
"clarifai/meta.Llama-2.codeLlama-70b-Instruct", + "clarifai/openai.completion.gpt-3_5-turbo-instruct", + "clarifai/meta.Llama-2.llama2-7b-chat", + "clarifai/meta.Llama-2.llama2-13b-chat", + "clarifai/meta.Llama-2.llama2-70b-chat", + "clarifai/openai.chat-completion.gpt-4-turbo", + "clarifai/microsoft.text-generation.phi-2", + "clarifai/meta.Llama-2.llama2-7b-chat-vllm", + "clarifai/upstage.solar.solar-10_7b-instruct", + "clarifai/openchat.openchat.openchat-3_5-1210", + "clarifai/togethercomputer.stripedHyena.stripedHyena-Nous-7B", + "clarifai/gcp.generate.text-bison", + "clarifai/meta.Llama-2.llamaGuard-7b", + "clarifai/fblgit.una-cybertron.una-cybertron-7b-v2", + "clarifai/openai.chat-completion.GPT-4", + "clarifai/openai.chat-completion.GPT-3_5-turbo", + "clarifai/ai21.complete.Jurassic2-Grande", + "clarifai/ai21.complete.Jurassic2-Grande-Instruct", + "clarifai/ai21.complete.Jurassic2-Jumbo-Instruct", + "clarifai/ai21.complete.Jurassic2-Jumbo", + "clarifai/ai21.complete.Jurassic2-Large", + "clarifai/cohere.generate.cohere-generate-command", + "clarifai/wizardlm.generate.wizardCoder-Python-34B", + "clarifai/wizardlm.generate.wizardLM-70B", + "clarifai/tiiuae.falcon.falcon-40b-instruct", + "clarifai/togethercomputer.RedPajama.RedPajama-INCITE-7B-Chat", + "clarifai/gcp.generate.code-gecko", + "clarifai/gcp.generate.code-bison", + "clarifai/mistralai.completion.mistral-7B-OpenOrca", + "clarifai/mistralai.completion.openHermes-2-mistral-7B", + "clarifai/wizardlm.generate.wizardLM-13B", + "clarifai/huggingface-research.zephyr.zephyr-7B-alpha", + "clarifai/wizardlm.generate.wizardCoder-15B", + "clarifai/microsoft.text-generation.phi-1_5", + "clarifai/databricks.Dolly-v2.dolly-v2-12b", + "clarifai/bigcode.code.StarCoder", + "clarifai/salesforce.xgen.xgen-7b-8k-instruct", + "clarifai/mosaicml.mpt.mpt-7b-instruct", + "clarifai/anthropic.completion.claude-3-opus", + "clarifai/anthropic.completion.claude-3-sonnet", + "clarifai/gcp.generate.gemini-1_5-pro", + 
"clarifai/gcp.generate.imagen-2", + "clarifai/salesforce.blip.general-english-image-caption-blip-2", +] + + +huggingface_models: List = [ + "meta-llama/Llama-2-7b-hf", + "meta-llama/Llama-2-7b-chat-hf", + "meta-llama/Llama-2-13b-hf", + "meta-llama/Llama-2-13b-chat-hf", + "meta-llama/Llama-2-70b-hf", + "meta-llama/Llama-2-70b-chat-hf", + "meta-llama/Llama-2-7b", + "meta-llama/Llama-2-7b-chat", + "meta-llama/Llama-2-13b", + "meta-llama/Llama-2-13b-chat", + "meta-llama/Llama-2-70b", + "meta-llama/Llama-2-70b-chat", +] # these have been tested on extensively. But by default all text2text-generation and text-generation models are supported by liteLLM. - https://docs.litellm.ai/docs/providers +empower_models = [ + "empower/empower-functions", + "empower/empower-functions-small", +] + +together_ai_models: List = [ + # llama llms - chat + "togethercomputer/llama-2-70b-chat", + # llama llms - language / instruct + "togethercomputer/llama-2-70b", + "togethercomputer/LLaMA-2-7B-32K", + "togethercomputer/Llama-2-7B-32K-Instruct", + "togethercomputer/llama-2-7b", + # falcon llms + "togethercomputer/falcon-40b-instruct", + "togethercomputer/falcon-7b-instruct", + # alpaca + "togethercomputer/alpaca-7b", + # chat llms + "HuggingFaceH4/starchat-alpha", + # code llms + "togethercomputer/CodeLlama-34b", + "togethercomputer/CodeLlama-34b-Instruct", + "togethercomputer/CodeLlama-34b-Python", + "defog/sqlcoder", + "NumbersStation/nsql-llama-2-7B", + "WizardLM/WizardCoder-15B-V1.0", + "WizardLM/WizardCoder-Python-34B-V1.0", + # language llms + "NousResearch/Nous-Hermes-Llama2-13b", + "Austism/chronos-hermes-13b", + "upstage/SOLAR-0-70b-16bit", + "WizardLM/WizardLM-70B-V1.0", +] # supports all together ai models, just pass in the model id e.g. completion(model="together_computer/replit_code_3b",...) 
+ + +baseten_models: List = [ + "qvv0xeq", + "q841o8w", + "31dxrj3", +] # FALCON 7B # WizardLM # Mosaic ML + +BEDROCK_INVOKE_PROVIDERS_LITERAL = Literal[ + "cohere", + "anthropic", + "mistral", + "amazon", + "meta", + "llama", + "ai21", + "nova", + "deepseek_r1", +] + +open_ai_embedding_models: List = ["text-embedding-ada-002"] +cohere_embedding_models: List = [ + "embed-english-v3.0", + "embed-english-light-v3.0", + "embed-multilingual-v3.0", + "embed-english-v2.0", + "embed-english-light-v2.0", + "embed-multilingual-v2.0", +] +bedrock_embedding_models: List = [ + "amazon.titan-embed-text-v1", + "cohere.embed-english-v3", + "cohere.embed-multilingual-v3", +] + +known_tokenizer_config = { + "mistralai/Mistral-7B-Instruct-v0.1": { + "tokenizer": { + "chat_template": "{{ bos_token }}{% for message in messages %}{% if (message['role'] == 'user') != (loop.index0 % 2 == 0) %}{{ raise_exception('Conversation roles must alternate user/assistant/user/assistant/...') }}{% endif %}{% if message['role'] == 'user' %}{{ '[INST] ' + message['content'] + ' [/INST]' }}{% elif message['role'] == 'assistant' %}{{ message['content'] + eos_token + ' ' }}{% else %}{{ raise_exception('Only user and assistant roles are supported!') }}{% endif %}{% endfor %}", + "bos_token": "", + "eos_token": "", + }, + "status": "success", + }, + "meta-llama/Meta-Llama-3-8B-Instruct": { + "tokenizer": { + "chat_template": "{% set loop_messages = messages %}{% for message in loop_messages %}{% set content = '<|start_header_id|>' + message['role'] + '<|end_header_id|>\n\n'+ message['content'] | trim + '<|eot_id|>' %}{% if loop.index0 == 0 %}{% set content = bos_token + content %}{% endif %}{{ content }}{% endfor %}{{ '<|start_header_id|>assistant<|end_header_id|>\n\n' }}", + "bos_token": "<|begin_of_text|>", + "eos_token": "", + }, + "status": "success", + }, + "deepseek-r1/deepseek-r1-7b-instruct": { + "tokenizer": { + "add_bos_token": True, + "add_eos_token": False, + "bos_token": { + "__type": 
"AddedToken", + "content": "<|begin▁of▁sentence|>", + "lstrip": False, + "normalized": True, + "rstrip": False, + "single_word": False, + }, + "clean_up_tokenization_spaces": False, + "eos_token": { + "__type": "AddedToken", + "content": "<|end▁of▁sentence|>", + "lstrip": False, + "normalized": True, + "rstrip": False, + "single_word": False, + }, + "legacy": True, + "model_max_length": 16384, + "pad_token": { + "__type": "AddedToken", + "content": "<|end▁of▁sentence|>", + "lstrip": False, + "normalized": True, + "rstrip": False, + "single_word": False, + }, + "sp_model_kwargs": {}, + "unk_token": None, + "tokenizer_class": "LlamaTokenizerFast", + "chat_template": "{% if not add_generation_prompt is defined %}{% set add_generation_prompt = false %}{% endif %}{% set ns = namespace(is_first=false, is_tool=false, is_output_first=true, system_prompt='') %}{%- for message in messages %}{%- if message['role'] == 'system' %}{% set ns.system_prompt = message['content'] %}{%- endif %}{%- endfor %}{{bos_token}}{{ns.system_prompt}}{%- for message in messages %}{%- if message['role'] == 'user' %}{%- set ns.is_tool = false -%}{{'<|User|>' + message['content']}}{%- endif %}{%- if message['role'] == 'assistant' and message['content'] is none %}{%- set ns.is_tool = false -%}{%- for tool in message['tool_calls']%}{%- if not ns.is_first %}{{'<|Assistant|><|tool▁calls▁begin|><|tool▁call▁begin|>' + tool['type'] + '<|tool▁sep|>' + tool['function']['name'] + '\\n' + '```json' + '\\n' + tool['function']['arguments'] + '\\n' + '```' + '<|tool▁call▁end|>'}}{%- set ns.is_first = true -%}{%- else %}{{'\\n' + '<|tool▁call▁begin|>' + tool['type'] + '<|tool▁sep|>' + tool['function']['name'] + '\\n' + '```json' + '\\n' + tool['function']['arguments'] + '\\n' + '```' + '<|tool▁call▁end|>'}}{{'<|tool▁calls▁end|><|end▁of▁sentence|>'}}{%- endif %}{%- endfor %}{%- endif %}{%- if message['role'] == 'assistant' and message['content'] is not none %}{%- if ns.is_tool %}{{'<|tool▁outputs▁end|>' + 
message['content'] + '<|end▁of▁sentence|>'}}{%- set ns.is_tool = false -%}{%- else %}{% set content = message['content'] %}{% if '' in content %}{% set content = content.split('')[-1] %}{% endif %}{{'<|Assistant|>' + content + '<|end▁of▁sentence|>'}}{%- endif %}{%- endif %}{%- if message['role'] == 'tool' %}{%- set ns.is_tool = true -%}{%- if ns.is_output_first %}{{'<|tool▁outputs▁begin|><|tool▁output▁begin|>' + message['content'] + '<|tool▁output▁end|>'}}{%- set ns.is_output_first = false %}{%- else %}{{'\\n<|tool▁output▁begin|>' + message['content'] + '<|tool▁output▁end|>'}}{%- endif %}{%- endif %}{%- endfor -%}{% if ns.is_tool %}{{'<|tool▁outputs▁end|>'}}{% endif %}{% if add_generation_prompt and not ns.is_tool %}{{'<|Assistant|>\\n'}}{% endif %}", + }, + "status": "success", + }, +} + + +OPENAI_FINISH_REASONS = ["stop", "length", "function_call", "content_filter", "null"] HUMANLOOP_PROMPT_CACHE_TTL_SECONDS = 60 # 1 minute RESPONSE_FORMAT_TOOL_NAME = "json_tool_call" # default tool name used when converting response format to tool call @@ -142,3 +435,6 @@ BATCH_STATUS_POLL_INTERVAL_SECONDS = 3600 # 1 hour BATCH_STATUS_POLL_MAX_ATTEMPTS = 24 # for 24 hours HEALTH_CHECK_TIMEOUT_SECONDS = 60 # 60 seconds + +UI_SESSION_TOKEN_TEAM_ID = "litellm-dashboard" +LITELLM_PROXY_ADMIN_NAME = "default_user_id" diff --git a/litellm/cost_calculator.py b/litellm/cost_calculator.py index 6a885858bc..1d10fa1f9e 100644 --- a/litellm/cost_calculator.py +++ b/litellm/cost_calculator.py @@ -16,15 +16,9 @@ from litellm.llms.anthropic.cost_calculation import ( from litellm.llms.azure.cost_calculation import ( cost_per_token as azure_openai_cost_per_token, ) -from litellm.llms.azure_ai.cost_calculator import ( - cost_per_query as azure_ai_rerank_cost_per_query, -) from litellm.llms.bedrock.image.cost_calculator import ( cost_calculator as bedrock_image_cost_calculator, ) -from litellm.llms.cohere.cost_calculator import ( - cost_per_query as cohere_rerank_cost_per_query, -) from 
litellm.llms.databricks.cost_calculator import ( cost_per_token as databricks_cost_per_token, ) @@ -51,10 +45,12 @@ from litellm.llms.vertex_ai.image_generation.cost_calculator import ( cost_calculator as vertex_ai_image_cost_calculator, ) from litellm.types.llms.openai import HttpxBinaryResponseContent -from litellm.types.rerank import RerankResponse +from litellm.types.rerank import RerankBilledUnits, RerankResponse from litellm.types.utils import ( CallTypesLiteral, + LlmProviders, LlmProvidersSet, + ModelInfo, PassthroughCallTypes, Usage, ) @@ -64,6 +60,7 @@ from litellm.utils import ( EmbeddingResponse, ImageResponse, ModelResponse, + ProviderConfigManager, TextCompletionResponse, TranscriptionResponse, _cached_get_model_info_helper, @@ -114,6 +111,8 @@ def cost_per_token( # noqa: PLR0915 number_of_queries: Optional[int] = None, ### USAGE OBJECT ### usage_object: Optional[Usage] = None, # just read the usage object if provided + ### BILLED UNITS ### + rerank_billed_units: Optional[RerankBilledUnits] = None, ### CALL TYPE ### call_type: CallTypesLiteral = "completion", audio_transcription_file_duration: float = 0.0, # for audio transcription calls - the file time in seconds @@ -238,6 +237,16 @@ def cost_per_token( # noqa: PLR0915 return rerank_cost( model=model, custom_llm_provider=custom_llm_provider, + billed_units=rerank_billed_units, + ) + elif ( + call_type == "aretrieve_batch" + or call_type == "retrieve_batch" + or call_type == CallTypes.aretrieve_batch + or call_type == CallTypes.retrieve_batch + ): + return batch_cost_calculator( + usage=usage_block, model=model, custom_llm_provider=custom_llm_provider ) elif call_type == "atranscription" or call_type == "transcription": return openai_cost_per_second( @@ -399,9 +408,12 @@ def _select_model_name_for_cost_calc( if base_model is not None: return_model = base_model - completion_response_model: Optional[str] = getattr( - completion_response, "model", None - ) + completion_response_model: Optional[str] = 
None + if completion_response is not None: + if isinstance(completion_response, BaseModel): + completion_response_model = getattr(completion_response, "model", None) + elif isinstance(completion_response, dict): + completion_response_model = completion_response.get("model", None) hidden_params: Optional[dict] = getattr(completion_response, "_hidden_params", None) if completion_response_model is None and hidden_params is not None: if ( @@ -530,6 +542,7 @@ def completion_cost( # noqa: PLR0915 - For un-mapped Replicate models, the cost is calculated based on the total time used for the request. """ try: + call_type = _infer_call_type(call_type, completion_response) or "completion" if ( @@ -551,6 +564,7 @@ def completion_cost( # noqa: PLR0915 cost_per_token_usage_object: Optional[Usage] = _get_usage_object( completion_response=completion_response ) + rerank_billed_units: Optional[RerankBilledUnits] = None model = _select_model_name_for_cost_calc( model=model, completion_response=completion_response, @@ -559,6 +573,10 @@ def completion_cost( # noqa: PLR0915 base_model=base_model, ) + verbose_logger.debug( + f"completion_response _select_model_name_for_cost_calc: {model}" + ) + if completion_response is not None and ( isinstance(completion_response, BaseModel) or isinstance(completion_response, dict) @@ -597,9 +615,6 @@ def completion_cost( # noqa: PLR0915 cache_read_input_tokens = prompt_tokens_details.get("cached_tokens", 0) total_time = getattr(completion_response, "_response_ms", 0) - verbose_logger.debug( - f"completion_response response ms: {getattr(completion_response, '_response_ms', None)} " - ) hidden_params = getattr(completion_response, "_hidden_params", None) if hidden_params is not None: @@ -696,6 +711,11 @@ def completion_cost( # noqa: PLR0915 else: billed_units = {} + rerank_billed_units = RerankBilledUnits( + search_units=billed_units.get("search_units"), + total_tokens=billed_units.get("total_tokens"), + ) + search_units = ( 
billed_units.get("search_units") or 1 ) # cohere charges per request by default. @@ -761,6 +781,7 @@ def completion_cost( # noqa: PLR0915 usage_object=cost_per_token_usage_object, call_type=call_type, audio_transcription_file_duration=audio_transcription_file_duration, + rerank_billed_units=rerank_billed_units, ) _final_cost = prompt_tokens_cost_usd_dollar + completion_tokens_cost_usd_dollar @@ -834,27 +855,36 @@ def response_cost_calculator( def rerank_cost( model: str, custom_llm_provider: Optional[str], + billed_units: Optional[RerankBilledUnits] = None, ) -> Tuple[float, float]: """ Returns - float or None: cost of response OR none if error. """ - default_num_queries = 1 _, custom_llm_provider, _, _ = litellm.get_llm_provider( model=model, custom_llm_provider=custom_llm_provider ) try: - if custom_llm_provider == "cohere": - return cohere_rerank_cost_per_query( - model=model, num_queries=default_num_queries + config = ProviderConfigManager.get_provider_rerank_config( + model=model, + api_base=None, + present_version_params=[], + provider=LlmProviders(custom_llm_provider), + ) + + try: + model_info: Optional[ModelInfo] = litellm.get_model_info( + model=model, custom_llm_provider=custom_llm_provider ) - elif custom_llm_provider == "azure_ai": - return azure_ai_rerank_cost_per_query( - model=model, num_queries=default_num_queries - ) - raise ValueError( - f"invalid custom_llm_provider for rerank model: {model}, custom_llm_provider: {custom_llm_provider}" + except Exception: + model_info = None + + return config.calculate_rerank_cost( + model=model, + custom_llm_provider=custom_llm_provider, + billed_units=billed_units, + model_info=model_info, ) except Exception as e: raise e @@ -939,3 +969,54 @@ def default_image_cost_calculator( ) return cost_info["input_cost_per_pixel"] * height * width * n + + +def batch_cost_calculator( + usage: Usage, + model: str, + custom_llm_provider: Optional[str] = None, +) -> Tuple[float, float]: + """ + Calculate the cost of a batch 
job + """ + + _, custom_llm_provider, _, _ = litellm.get_llm_provider( + model=model, custom_llm_provider=custom_llm_provider + ) + + verbose_logger.info( + "Calculating batch cost per token. model=%s, custom_llm_provider=%s", + model, + custom_llm_provider, + ) + + try: + model_info: Optional[ModelInfo] = litellm.get_model_info( + model=model, custom_llm_provider=custom_llm_provider + ) + except Exception: + model_info = None + + if not model_info: + return 0.0, 0.0 + + input_cost_per_token_batches = model_info.get("input_cost_per_token_batches") + input_cost_per_token = model_info.get("input_cost_per_token") + output_cost_per_token_batches = model_info.get("output_cost_per_token_batches") + output_cost_per_token = model_info.get("output_cost_per_token") + total_prompt_cost = 0.0 + total_completion_cost = 0.0 + if input_cost_per_token_batches: + total_prompt_cost = usage.prompt_tokens * input_cost_per_token_batches + elif input_cost_per_token: + total_prompt_cost = ( + usage.prompt_tokens * (input_cost_per_token) / 2 + ) # batch cost is usually half of the regular token cost + if output_cost_per_token_batches: + total_completion_cost = usage.completion_tokens * output_cost_per_token_batches + elif output_cost_per_token: + total_completion_cost = ( + usage.completion_tokens * (output_cost_per_token) / 2 + ) # batch cost is usually half of the regular token cost + + return total_prompt_cost, total_completion_cost diff --git a/litellm/exceptions.py b/litellm/exceptions.py index c26928a656..6a927f0712 100644 --- a/litellm/exceptions.py +++ b/litellm/exceptions.py @@ -14,6 +14,8 @@ from typing import Optional import httpx import openai +from litellm.types.utils import LiteLLMCommonStrings + class AuthenticationError(openai.AuthenticationError): # type: ignore def __init__( @@ -116,6 +118,7 @@ class BadRequestError(openai.BadRequestError): # type: ignore litellm_debug_info: Optional[str] = None, max_retries: Optional[int] = None, num_retries: Optional[int] = None, + 
body: Optional[dict] = None, ): self.status_code = 400 self.message = "litellm.BadRequestError: {}".format(message) @@ -131,7 +134,7 @@ class BadRequestError(openai.BadRequestError): # type: ignore self.max_retries = max_retries self.num_retries = num_retries super().__init__( - self.message, response=response, body=None + self.message, response=response, body=body ) # Call the base class constructor with the parameters it needs def __str__(self): @@ -790,3 +793,16 @@ class MockException(openai.APIError): if request is None: request = httpx.Request(method="POST", url="https://api.openai.com/v1") super().__init__(self.message, request=request, body=None) # type: ignore + + +class LiteLLMUnknownProvider(BadRequestError): + def __init__(self, model: str, custom_llm_provider: Optional[str] = None): + self.message = LiteLLMCommonStrings.llm_provider_not_provided.value.format( + model=model, custom_llm_provider=custom_llm_provider + ) + super().__init__( + self.message, model=model, llm_provider=custom_llm_provider, response=None + ) + + def __str__(self): + return self.message diff --git a/litellm/files/main.py b/litellm/files/main.py index 9f81b2e385..e49066e84b 100644 --- a/litellm/files/main.py +++ b/litellm/files/main.py @@ -816,7 +816,7 @@ def file_content( ) else: raise litellm.exceptions.BadRequestError( - message="LiteLLM doesn't support {} for 'file_content'. Only 'openai' and 'azure' are supported.".format( + message="LiteLLM doesn't support {} for 'custom_llm_provider'. 
Supported providers are 'openai', 'azure', 'vertex_ai'.".format( custom_llm_provider ), model="n/a", diff --git a/litellm/fine_tuning/main.py b/litellm/fine_tuning/main.py index 1eae51f390..b726a394c2 100644 --- a/litellm/fine_tuning/main.py +++ b/litellm/fine_tuning/main.py @@ -183,6 +183,9 @@ def create_fine_tuning_job( timeout=timeout, max_retries=optional_params.max_retries, _is_async=_is_async, + client=kwargs.get( + "client", None + ), # note, when we add this to `GenericLiteLLMParams` it impacts a lot of other tests + linting ) # Azure OpenAI elif custom_llm_provider == "azure": @@ -388,6 +391,7 @@ def cancel_fine_tuning_job( timeout=timeout, max_retries=optional_params.max_retries, _is_async=_is_async, + client=kwargs.get("client", None), ) # Azure OpenAI elif custom_llm_provider == "azure": @@ -550,6 +554,7 @@ def list_fine_tuning_jobs( timeout=timeout, max_retries=optional_params.max_retries, _is_async=_is_async, + client=kwargs.get("client", None), ) # Azure OpenAI elif custom_llm_provider == "azure": @@ -701,6 +706,7 @@ def retrieve_fine_tuning_job( timeout=timeout, max_retries=optional_params.max_retries, _is_async=_is_async, + client=kwargs.get("client", None), ) # Azure OpenAI elif custom_llm_provider == "azure": diff --git a/litellm/integrations/Readme.md b/litellm/integrations/Readme.md new file mode 100644 index 0000000000..2b0b530ab8 --- /dev/null +++ b/litellm/integrations/Readme.md @@ -0,0 +1,5 @@ +# Integrations + +This folder contains logging integrations for litellm + +eg. logging to Datadog, Langfuse, Prometheus, s3, GCS Bucket, etc. 
\ No newline at end of file diff --git a/litellm/integrations/_types/open_inference.py b/litellm/integrations/_types/open_inference.py index bcfabe9b7b..b5076c0e42 100644 --- a/litellm/integrations/_types/open_inference.py +++ b/litellm/integrations/_types/open_inference.py @@ -283,4 +283,4 @@ class OpenInferenceSpanKindValues(Enum): class OpenInferenceMimeTypeValues(Enum): TEXT = "text/plain" - JSON = "application/json" + JSON = "application/json" \ No newline at end of file diff --git a/litellm/integrations/additional_logging_utils.py b/litellm/integrations/additional_logging_utils.py new file mode 100644 index 0000000000..795afd81d4 --- /dev/null +++ b/litellm/integrations/additional_logging_utils.py @@ -0,0 +1,36 @@ +""" +Base class for Additional Logging Utils for CustomLoggers + +- Health Check for the logging util +- Get Request / Response Payload for the logging util +""" + +from abc import ABC, abstractmethod +from datetime import datetime +from typing import Optional + +from litellm.types.integrations.base_health_check import IntegrationHealthCheckStatus + + +class AdditionalLoggingUtils(ABC): + def __init__(self): + super().__init__() + + @abstractmethod + async def async_health_check(self) -> IntegrationHealthCheckStatus: + """ + Check if the service is healthy + """ + pass + + @abstractmethod + async def get_request_response_payload( + self, + request_id: str, + start_time_utc: Optional[datetime], + end_time_utc: Optional[datetime], + ) -> Optional[dict]: + """ + Get the request and response payload for a given `request_id` + """ + return None diff --git a/litellm/integrations/arize/_utils.py b/litellm/integrations/arize/_utils.py new file mode 100644 index 0000000000..9921d47aff --- /dev/null +++ b/litellm/integrations/arize/_utils.py @@ -0,0 +1,121 @@ +import json +from typing import TYPE_CHECKING, Any, Optional + +from litellm._logging import verbose_logger +from litellm.types.utils import StandardLoggingPayload + +if TYPE_CHECKING: + from 
opentelemetry.trace import Span as _Span + Span = _Span +else: + Span = Any + + +def set_attributes(span: Span, kwargs, response_obj): + from openinference.semconv.trace import ( + MessageAttributes, + OpenInferenceSpanKindValues, + SpanAttributes, + ) + + try: + litellm_params = kwargs.get("litellm_params", {}) or {} + + ############################################# + ############ LLM CALL METADATA ############## + ############################################# + metadata = litellm_params.get("metadata", {}) or {} + span.set_attribute(SpanAttributes.METADATA, str(metadata)) + + ############################################# + ########## LLM Request Attributes ########### + ############################################# + + # The name of the LLM a request is being made to + if kwargs.get("model"): + span.set_attribute(SpanAttributes.LLM_MODEL_NAME, kwargs.get("model")) + + span.set_attribute( + SpanAttributes.OPENINFERENCE_SPAN_KIND, + OpenInferenceSpanKindValues.LLM.value, + ) + messages = kwargs.get("messages") + + # for /chat/completions + # https://docs.arize.com/arize/large-language-models/tracing/semantic-conventions + if messages: + span.set_attribute( + SpanAttributes.INPUT_VALUE, + messages[-1].get("content", ""), # get the last message for input + ) + + # LLM_INPUT_MESSAGES shows up under `input_messages` tab on the span page + for idx, msg in enumerate(messages): + # Set the role per message + span.set_attribute( + f"{SpanAttributes.LLM_INPUT_MESSAGES}.{idx}.{MessageAttributes.MESSAGE_ROLE}", + msg["role"], + ) + # Set the content per message + span.set_attribute( + f"{SpanAttributes.LLM_INPUT_MESSAGES}.{idx}.{MessageAttributes.MESSAGE_CONTENT}", + msg.get("content", ""), + ) + + standard_logging_payload: Optional[StandardLoggingPayload] = kwargs.get( + "standard_logging_object" + ) + if standard_logging_payload and (model_params := standard_logging_payload["model_parameters"]): + # The Generative AI Provider: Azure, OpenAI, etc. 
+ span.set_attribute( + SpanAttributes.LLM_INVOCATION_PARAMETERS, json.dumps(model_params) + ) + + if model_params.get("user"): + user_id = model_params.get("user") + if user_id is not None: + span.set_attribute(SpanAttributes.USER_ID, user_id) + + ############################################# + ########## LLM Response Attributes ########## + # https://docs.arize.com/arize/large-language-models/tracing/semantic-conventions + ############################################# + if hasattr(response_obj, 'get'): + for choice in response_obj.get("choices", []): + response_message = choice.get("message", {}) + span.set_attribute( + SpanAttributes.OUTPUT_VALUE, response_message.get("content", "") + ) + + # This shows up under `output_messages` tab on the span page + # This code assumes a single response + span.set_attribute( + f"{SpanAttributes.LLM_OUTPUT_MESSAGES}.0.{MessageAttributes.MESSAGE_ROLE}", + response_message.get("role"), + ) + span.set_attribute( + f"{SpanAttributes.LLM_OUTPUT_MESSAGES}.0.{MessageAttributes.MESSAGE_CONTENT}", + response_message.get("content", ""), + ) + + usage = response_obj.get("usage") + if usage: + span.set_attribute( + SpanAttributes.LLM_TOKEN_COUNT_TOTAL, + usage.get("total_tokens"), + ) + + # The number of tokens used in the LLM response (completion). + span.set_attribute( + SpanAttributes.LLM_TOKEN_COUNT_COMPLETION, + usage.get("completion_tokens"), + ) + + # The number of tokens used in the LLM prompt. 
+ span.set_attribute( + SpanAttributes.LLM_TOKEN_COUNT_PROMPT, + usage.get("prompt_tokens"), + ) + pass + except Exception as e: + verbose_logger.error(f"Error setting arize attributes: {e}") diff --git a/litellm/integrations/arize/arize.py b/litellm/integrations/arize/arize.py new file mode 100644 index 0000000000..652957e1ee --- /dev/null +++ b/litellm/integrations/arize/arize.py @@ -0,0 +1,74 @@ +""" +arize AI is OTEL compatible + +this file has Arize ai specific helper functions +""" +import os + +from typing import TYPE_CHECKING, Any +from litellm.integrations.arize import _utils +from litellm.types.integrations.arize import ArizeConfig + +if TYPE_CHECKING: + from litellm.types.integrations.arize import Protocol as _Protocol + from opentelemetry.trace import Span as _Span + + Protocol = _Protocol + Span = _Span +else: + Protocol = Any + Span = Any + + + +class ArizeLogger: + + @staticmethod + def set_arize_attributes(span: Span, kwargs, response_obj): + _utils.set_attributes(span, kwargs, response_obj) + return + + + @staticmethod + def get_arize_config() -> ArizeConfig: + """ + Helper function to get Arize configuration. + + Returns: + ArizeConfig: A Pydantic model containing Arize configuration. + + Raises: + ValueError: If required environment variables are not set. 
+ """ + space_key = os.environ.get("ARIZE_SPACE_KEY") + api_key = os.environ.get("ARIZE_API_KEY") + + if not space_key: + raise ValueError("ARIZE_SPACE_KEY not found in environment variables") + if not api_key: + raise ValueError("ARIZE_API_KEY not found in environment variables") + + grpc_endpoint = os.environ.get("ARIZE_ENDPOINT") + http_endpoint = os.environ.get("ARIZE_HTTP_ENDPOINT") + + endpoint = None + protocol: Protocol = "otlp_grpc" + + if grpc_endpoint: + protocol="otlp_grpc" + endpoint=grpc_endpoint + elif http_endpoint: + protocol="otlp_http" + endpoint=http_endpoint + else: + protocol="otlp_grpc" + endpoint = "https://otlp.arize.com/v1" + + return ArizeConfig( + space_key=space_key, + api_key=api_key, + protocol=protocol, + endpoint=endpoint, + ) + + diff --git a/litellm/integrations/arize/arize_phoenix.py b/litellm/integrations/arize/arize_phoenix.py new file mode 100644 index 0000000000..d7b7d5812b --- /dev/null +++ b/litellm/integrations/arize/arize_phoenix.py @@ -0,0 +1,73 @@ +import os +from typing import TYPE_CHECKING, Any +from litellm.integrations.arize import _utils +from litellm._logging import verbose_logger +from litellm.types.integrations.arize_phoenix import ArizePhoenixConfig + +if TYPE_CHECKING: + from .opentelemetry import OpenTelemetryConfig as _OpenTelemetryConfig + from litellm.types.integrations.arize import Protocol as _Protocol + from opentelemetry.trace import Span as _Span + + Protocol = _Protocol + OpenTelemetryConfig = _OpenTelemetryConfig + Span = _Span +else: + Protocol = Any + OpenTelemetryConfig = Any + Span = Any + + +ARIZE_HOSTED_PHOENIX_ENDPOINT = "https://app.phoenix.arize.com/v1/traces" + +class ArizePhoenixLogger: + @staticmethod + def set_arize_phoenix_attributes(span: Span, kwargs, response_obj): + _utils.set_attributes(span, kwargs, response_obj) + return + + @staticmethod + def get_arize_phoenix_config() -> ArizePhoenixConfig: + """ + Retrieves the Arize Phoenix configuration based on environment variables. 
+ + Returns: + ArizePhoenixConfig: A Pydantic model containing Arize Phoenix configuration. + """ + api_key = os.environ.get("PHOENIX_API_KEY", None) + grpc_endpoint = os.environ.get("PHOENIX_COLLECTOR_ENDPOINT", None) + http_endpoint = os.environ.get("PHOENIX_COLLECTOR_HTTP_ENDPOINT", None) + + endpoint = None + protocol: Protocol = "otlp_http" + + if http_endpoint: + endpoint = http_endpoint + protocol = "otlp_http" + elif grpc_endpoint: + endpoint = grpc_endpoint + protocol = "otlp_grpc" + else: + endpoint = ARIZE_HOSTED_PHOENIX_ENDPOINT + protocol = "otlp_http" + verbose_logger.debug( + f"No PHOENIX_COLLECTOR_ENDPOINT or PHOENIX_COLLECTOR_HTTP_ENDPOINT found, using default endpoint with http: {ARIZE_HOSTED_PHOENIX_ENDPOINT}" + ) + + otlp_auth_headers = None + # If the endpoint is the Arize hosted Phoenix endpoint, use the api_key as the auth header as currently it is uses + # a slightly different auth header format than self hosted phoenix + if endpoint == ARIZE_HOSTED_PHOENIX_ENDPOINT: + if api_key is None: + raise ValueError("PHOENIX_API_KEY must be set when the Arize hosted Phoenix endpoint is used.") + otlp_auth_headers = f"api_key={api_key}" + elif api_key is not None: + # api_key/auth is optional for self hosted phoenix + otlp_auth_headers = f"Authorization=Bearer {api_key}" + + return ArizePhoenixConfig( + otlp_auth_headers=otlp_auth_headers, + protocol=protocol, + endpoint=endpoint + ) + diff --git a/litellm/integrations/arize_ai.py b/litellm/integrations/arize_ai.py deleted file mode 100644 index 10c6af69b1..0000000000 --- a/litellm/integrations/arize_ai.py +++ /dev/null @@ -1,213 +0,0 @@ -""" -arize AI is OTEL compatible - -this file has Arize ai specific helper functions -""" - -import json -from typing import TYPE_CHECKING, Any, Optional - -from litellm._logging import verbose_logger - -if TYPE_CHECKING: - from opentelemetry.trace import Span as _Span - - from .opentelemetry import OpenTelemetryConfig as _OpenTelemetryConfig - - Span = _Span - 
OpenTelemetryConfig = _OpenTelemetryConfig -else: - Span = Any - OpenTelemetryConfig = Any - -import os - -from litellm.types.integrations.arize import * - - -class ArizeLogger: - @staticmethod - def set_arize_ai_attributes(span: Span, kwargs, response_obj): - from litellm.integrations._types.open_inference import ( - MessageAttributes, - OpenInferenceSpanKindValues, - SpanAttributes, - ) - - try: - - optional_params = kwargs.get("optional_params", {}) - # litellm_params = kwargs.get("litellm_params", {}) or {} - - ############################################# - ############ LLM CALL METADATA ############## - ############################################# - # commented out for now - looks like Arize AI could not log this - # metadata = litellm_params.get("metadata", {}) or {} - # span.set_attribute(SpanAttributes.METADATA, str(metadata)) - - ############################################# - ########## LLM Request Attributes ########### - ############################################# - - # The name of the LLM a request is being made to - if kwargs.get("model"): - span.set_attribute(SpanAttributes.LLM_MODEL_NAME, kwargs.get("model")) - - span.set_attribute( - SpanAttributes.OPENINFERENCE_SPAN_KIND, - OpenInferenceSpanKindValues.LLM.value, - ) - messages = kwargs.get("messages") - - # for /chat/completions - # https://docs.arize.com/arize/large-language-models/tracing/semantic-conventions - if messages: - span.set_attribute( - SpanAttributes.INPUT_VALUE, - messages[-1].get("content", ""), # get the last message for input - ) - - # LLM_INPUT_MESSAGES shows up under `input_messages` tab on the span page - for idx, msg in enumerate(messages): - # Set the role per message - span.set_attribute( - f"{SpanAttributes.LLM_INPUT_MESSAGES}.{idx}.{MessageAttributes.MESSAGE_ROLE}", - msg["role"], - ) - # Set the content per message - span.set_attribute( - f"{SpanAttributes.LLM_INPUT_MESSAGES}.{idx}.{MessageAttributes.MESSAGE_CONTENT}", - msg.get("content", ""), - ) - - # The 
Generative AI Provider: Azure, OpenAI, etc. - _optional_params = ArizeLogger.make_json_serializable(optional_params) - _json_optional_params = json.dumps(_optional_params) - span.set_attribute( - SpanAttributes.LLM_INVOCATION_PARAMETERS, _json_optional_params - ) - - if optional_params.get("user"): - span.set_attribute(SpanAttributes.USER_ID, optional_params.get("user")) - - ############################################# - ########## LLM Response Attributes ########## - # https://docs.arize.com/arize/large-language-models/tracing/semantic-conventions - ############################################# - for choice in response_obj.get("choices"): - response_message = choice.get("message", {}) - span.set_attribute( - SpanAttributes.OUTPUT_VALUE, response_message.get("content", "") - ) - - # This shows up under `output_messages` tab on the span page - # This code assumes a single response - span.set_attribute( - f"{SpanAttributes.LLM_OUTPUT_MESSAGES}.0.{MessageAttributes.MESSAGE_ROLE}", - response_message["role"], - ) - span.set_attribute( - f"{SpanAttributes.LLM_OUTPUT_MESSAGES}.0.{MessageAttributes.MESSAGE_CONTENT}", - response_message.get("content", ""), - ) - - usage = response_obj.get("usage") - if usage: - span.set_attribute( - SpanAttributes.LLM_TOKEN_COUNT_TOTAL, - usage.get("total_tokens"), - ) - - # The number of tokens used in the LLM response (completion). - span.set_attribute( - SpanAttributes.LLM_TOKEN_COUNT_COMPLETION, - usage.get("completion_tokens"), - ) - - # The number of tokens used in the LLM prompt. - span.set_attribute( - SpanAttributes.LLM_TOKEN_COUNT_PROMPT, - usage.get("prompt_tokens"), - ) - pass - except Exception as e: - verbose_logger.error(f"Error setting arize attributes: {e}") - - ###################### Helper functions ###################### - - @staticmethod - def _get_arize_config() -> ArizeConfig: - """ - Helper function to get Arize configuration. - - Returns: - ArizeConfig: A Pydantic model containing Arize configuration. 
- - Raises: - ValueError: If required environment variables are not set. - """ - space_key = os.environ.get("ARIZE_SPACE_KEY") - api_key = os.environ.get("ARIZE_API_KEY") - - if not space_key: - raise ValueError("ARIZE_SPACE_KEY not found in environment variables") - if not api_key: - raise ValueError("ARIZE_API_KEY not found in environment variables") - - grpc_endpoint = os.environ.get("ARIZE_ENDPOINT") - http_endpoint = os.environ.get("ARIZE_HTTP_ENDPOINT") - if grpc_endpoint is None and http_endpoint is None: - # use default arize grpc endpoint - verbose_logger.debug( - "No ARIZE_ENDPOINT or ARIZE_HTTP_ENDPOINT found, using default endpoint: https://otlp.arize.com/v1" - ) - grpc_endpoint = "https://otlp.arize.com/v1" - - return ArizeConfig( - space_key=space_key, - api_key=api_key, - grpc_endpoint=grpc_endpoint, - http_endpoint=http_endpoint, - ) - - @staticmethod - def get_arize_opentelemetry_config() -> Optional[OpenTelemetryConfig]: - """ - Helper function to get OpenTelemetry configuration for Arize. - - Args: - arize_config (ArizeConfig): Arize configuration object. - - Returns: - OpenTelemetryConfig: Configuration for OpenTelemetry. 
- """ - from .opentelemetry import OpenTelemetryConfig - - arize_config = ArizeLogger._get_arize_config() - if arize_config.http_endpoint: - return OpenTelemetryConfig( - exporter="otlp_http", - endpoint=arize_config.http_endpoint, - ) - - # use default arize grpc endpoint - return OpenTelemetryConfig( - exporter="otlp_grpc", - endpoint=arize_config.grpc_endpoint, - ) - - @staticmethod - def make_json_serializable(payload: dict) -> dict: - for key, value in payload.items(): - try: - if isinstance(value, dict): - # recursively sanitize dicts - payload[key] = ArizeLogger.make_json_serializable(value.copy()) - elif not isinstance(value, (str, int, float, bool, type(None))): - # everything else becomes a string - payload[key] = str(value) - except Exception: - # non blocking if it can't cast to a str - pass - return payload diff --git a/litellm/integrations/athina.py b/litellm/integrations/athina.py index 250b384c75..705dc11f1d 100644 --- a/litellm/integrations/athina.py +++ b/litellm/integrations/athina.py @@ -23,6 +23,10 @@ class AthinaLogger: "context", "expected_response", "user_query", + "tags", + "user_feedback", + "model_options", + "custom_attributes", ] def log_event(self, kwargs, response_obj, start_time, end_time, print_verbose): @@ -80,7 +84,6 @@ class AthinaLogger: for key in self.additional_keys: if key in metadata: data[key] = metadata[key] - response = litellm.module_level_client.post( self.athina_logging_url, headers=self.headers, diff --git a/litellm/integrations/base_health_check.py b/litellm/integrations/base_health_check.py deleted file mode 100644 index 35b390692b..0000000000 --- a/litellm/integrations/base_health_check.py +++ /dev/null @@ -1,19 +0,0 @@ -""" -Base class for health check integrations -""" - -from abc import ABC, abstractmethod - -from litellm.types.integrations.base_health_check import IntegrationHealthCheckStatus - - -class HealthCheckIntegration(ABC): - def __init__(self): - super().__init__() - - @abstractmethod - async def 
async_health_check(self) -> IntegrationHealthCheckStatus: - """ - Check if the service is healthy - """ - pass diff --git a/litellm/integrations/custom_guardrail.py b/litellm/integrations/custom_guardrail.py index 2aac83327a..4421664bfc 100644 --- a/litellm/integrations/custom_guardrail.py +++ b/litellm/integrations/custom_guardrail.py @@ -12,20 +12,51 @@ class CustomGuardrail(CustomLogger): self, guardrail_name: Optional[str] = None, supported_event_hooks: Optional[List[GuardrailEventHooks]] = None, - event_hook: Optional[GuardrailEventHooks] = None, + event_hook: Optional[ + Union[GuardrailEventHooks, List[GuardrailEventHooks]] + ] = None, + default_on: bool = False, **kwargs, ): + """ + Initialize the CustomGuardrail class + + Args: + guardrail_name: The name of the guardrail. This is the name used in your requests. + supported_event_hooks: The event hooks that the guardrail supports + event_hook: The event hook to run the guardrail on + default_on: If True, the guardrail will be run by default on all requests + """ self.guardrail_name = guardrail_name self.supported_event_hooks = supported_event_hooks - self.event_hook: Optional[GuardrailEventHooks] = event_hook + self.event_hook: Optional[ + Union[GuardrailEventHooks, List[GuardrailEventHooks]] + ] = event_hook + self.default_on: bool = default_on if supported_event_hooks: ## validate event_hook is in supported_event_hooks - if event_hook and event_hook not in supported_event_hooks: + self._validate_event_hook(event_hook, supported_event_hooks) + super().__init__(**kwargs) + + def _validate_event_hook( + self, + event_hook: Optional[Union[GuardrailEventHooks, List[GuardrailEventHooks]]], + supported_event_hooks: List[GuardrailEventHooks], + ) -> None: + if event_hook is None: + return + if isinstance(event_hook, list): + for hook in event_hook: + if hook not in supported_event_hooks: + raise ValueError( + f"Event hook {hook} is not in the supported event hooks {supported_event_hooks}" + ) + elif 
isinstance(event_hook, GuardrailEventHooks): + if event_hook not in supported_event_hooks: raise ValueError( f"Event hook {event_hook} is not in the supported event hooks {supported_event_hooks}" ) - super().__init__(**kwargs) def get_guardrail_from_metadata( self, data: dict @@ -51,16 +82,25 @@ class CustomGuardrail(CustomLogger): return False def should_run_guardrail(self, data, event_type: GuardrailEventHooks) -> bool: + """ + Returns True if the guardrail should be run on the event_type + """ requested_guardrails = self.get_guardrail_from_metadata(data) verbose_logger.debug( - "inside should_run_guardrail for guardrail=%s event_type= %s guardrail_supported_event_hooks= %s requested_guardrails= %s", + "inside should_run_guardrail for guardrail=%s event_type= %s guardrail_supported_event_hooks= %s requested_guardrails= %s self.default_on= %s", self.guardrail_name, event_type, self.event_hook, requested_guardrails, + self.default_on, ) + if self.default_on is True: + if self._event_hook_is_event_type(event_type): + return True + return False + if ( self.event_hook and not self._guardrail_is_in_requested_guardrails(requested_guardrails) @@ -68,11 +108,25 @@ class CustomGuardrail(CustomLogger): ): return False - if self.event_hook and self.event_hook != event_type.value: + if not self._event_hook_is_event_type(event_type): return False return True + def _event_hook_is_event_type(self, event_type: GuardrailEventHooks) -> bool: + """ + Returns True if the event_hook is the same as the event_type + + eg. if `self.event_hook == "pre_call" and event_type == "pre_call"` -> then True + eg. 
if `self.event_hook == "pre_call" and event_type == "post_call"` -> then False + """ + + if self.event_hook is None: + return True + if isinstance(self.event_hook, list): + return event_type.value in self.event_hook + return self.event_hook == event_type.value + def get_guardrail_dynamic_request_body_params(self, request_data: dict) -> dict: """ Returns `extra_body` to be added to the request body for the Guardrail API call diff --git a/litellm/integrations/datadog/datadog.py b/litellm/integrations/datadog/datadog.py index 89928840e9..4f4b05c84e 100644 --- a/litellm/integrations/datadog/datadog.py +++ b/litellm/integrations/datadog/datadog.py @@ -35,17 +35,23 @@ from litellm.llms.custom_httpx.http_handler import ( ) from litellm.types.integrations.base_health_check import IntegrationHealthCheckStatus from litellm.types.integrations.datadog import * -from litellm.types.services import ServiceLoggerPayload +from litellm.types.services import ServiceLoggerPayload, ServiceTypes from litellm.types.utils import StandardLoggingPayload -from ..base_health_check import HealthCheckIntegration +from ..additional_logging_utils import AdditionalLoggingUtils -DD_MAX_BATCH_SIZE = 1000 # max number of logs DD API can accept +# max number of logs DD API can accept +DD_MAX_BATCH_SIZE = 1000 + +# specify what ServiceTypes are logged as success events to DD. 
(We don't want to spam DD traces with large number of service types) +DD_LOGGED_SUCCESS_SERVICE_TYPES = [ + ServiceTypes.RESET_BUDGET_JOB, +] class DataDogLogger( CustomBatchLogger, - HealthCheckIntegration, + AdditionalLoggingUtils, ): # Class variables or attributes def __init__( @@ -340,18 +346,16 @@ class DataDogLogger( - example - Redis is failing / erroring, will be logged on DataDog """ - try: - import json - _payload_dict = payload.model_dump() + _payload_dict.update(event_metadata or {}) _dd_message_str = json.dumps(_payload_dict, default=str) _dd_payload = DatadogPayload( - ddsource="litellm", - ddtags="", - hostname="", + ddsource=self._get_datadog_source(), + ddtags=self._get_datadog_tags(), + hostname=self._get_datadog_hostname(), message=_dd_message_str, - service="litellm-server", + service=self._get_datadog_service(), status=DataDogStatus.WARN, ) @@ -377,7 +381,30 @@ class DataDogLogger( No user has asked for this so far, this might be spammy on datatdog. If need arises we can implement this """ - return + try: + # intentionally done. 
Don't want to log all service types to DD + if payload.service not in DD_LOGGED_SUCCESS_SERVICE_TYPES: + return + + _payload_dict = payload.model_dump() + _payload_dict.update(event_metadata or {}) + + _dd_message_str = json.dumps(_payload_dict, default=str) + _dd_payload = DatadogPayload( + ddsource=self._get_datadog_source(), + ddtags=self._get_datadog_tags(), + hostname=self._get_datadog_hostname(), + message=_dd_message_str, + service=self._get_datadog_service(), + status=DataDogStatus.INFO, + ) + + self.log_queue.append(_dd_payload) + + except Exception as e: + verbose_logger.exception( + f"Datadog: Logger - Exception in async_service_failure_hook: {e}" + ) def _create_v0_logging_payload( self, @@ -543,3 +570,11 @@ class DataDogLogger( status="unhealthy", error_message=str(e), ) + + async def get_request_response_payload( + self, + request_id: str, + start_time_utc: Optional[datetimeObj], + end_time_utc: Optional[datetimeObj], + ) -> Optional[dict]: + pass diff --git a/litellm/integrations/gcs_bucket/gcs_bucket.py b/litellm/integrations/gcs_bucket/gcs_bucket.py index d6a9c316b3..187ab779c0 100644 --- a/litellm/integrations/gcs_bucket/gcs_bucket.py +++ b/litellm/integrations/gcs_bucket/gcs_bucket.py @@ -1,12 +1,16 @@ import asyncio +import json import os import uuid -from datetime import datetime +from datetime import datetime, timedelta, timezone from typing import TYPE_CHECKING, Any, Dict, List, Optional +from urllib.parse import quote from litellm._logging import verbose_logger +from litellm.integrations.additional_logging_utils import AdditionalLoggingUtils from litellm.integrations.gcs_bucket.gcs_bucket_base import GCSBucketBase from litellm.proxy._types import CommonProxyErrors +from litellm.types.integrations.base_health_check import IntegrationHealthCheckStatus from litellm.types.integrations.gcs_bucket import * from litellm.types.utils import StandardLoggingPayload @@ -20,7 +24,7 @@ GCS_DEFAULT_BATCH_SIZE = 2048 GCS_DEFAULT_FLUSH_INTERVAL_SECONDS = 20 
-class GCSBucketLogger(GCSBucketBase): +class GCSBucketLogger(GCSBucketBase, AdditionalLoggingUtils): def __init__(self, bucket_name: Optional[str] = None) -> None: from litellm.proxy.proxy_server import premium_user @@ -39,6 +43,7 @@ class GCSBucketLogger(GCSBucketBase): batch_size=self.batch_size, flush_interval=self.flush_interval, ) + AdditionalLoggingUtils.__init__(self) if premium_user is not True: raise ValueError( @@ -150,11 +155,16 @@ class GCSBucketLogger(GCSBucketBase): """ Get the object name to use for the current payload """ - current_date = datetime.now().strftime("%Y-%m-%d") + current_date = self._get_object_date_from_datetime(datetime.now(timezone.utc)) if logging_payload.get("error_str", None) is not None: - object_name = f"{current_date}/failure-{uuid.uuid4().hex}" + object_name = self._generate_failure_object_name( + request_date_str=current_date, + ) else: - object_name = f"{current_date}/{response_obj.get('id', '')}" + object_name = self._generate_success_object_name( + request_date_str=current_date, + response_id=response_obj.get("id", ""), + ) # used for testing _litellm_params = kwargs.get("litellm_params", None) or {} @@ -163,3 +173,65 @@ class GCSBucketLogger(GCSBucketBase): object_name = _metadata["gcs_log_id"] return object_name + + async def get_request_response_payload( + self, + request_id: str, + start_time_utc: Optional[datetime], + end_time_utc: Optional[datetime], + ) -> Optional[dict]: + """ + Get the request and response payload for a given `request_id` + Tries current day, next day, and previous day until it finds the payload + """ + if start_time_utc is None: + raise ValueError( + "start_time_utc is required for getting a payload from GCS Bucket" + ) + + # Try current day, next day, and previous day + dates_to_try = [ + start_time_utc, + start_time_utc + timedelta(days=1), + start_time_utc - timedelta(days=1), + ] + date_str = None + for date in dates_to_try: + try: + date_str = 
self._get_object_date_from_datetime(datetime_obj=date) + object_name = self._generate_success_object_name( + request_date_str=date_str, + response_id=request_id, + ) + encoded_object_name = quote(object_name, safe="") + response = await self.download_gcs_object(encoded_object_name) + + if response is not None: + loaded_response = json.loads(response) + return loaded_response + except Exception as e: + verbose_logger.debug( + f"Failed to fetch payload for date {date_str}: {str(e)}" + ) + continue + + return None + + def _generate_success_object_name( + self, + request_date_str: str, + response_id: str, + ) -> str: + return f"{request_date_str}/{response_id}" + + def _generate_failure_object_name( + self, + request_date_str: str, + ) -> str: + return f"{request_date_str}/failure-{uuid.uuid4().hex}" + + def _get_object_date_from_datetime(self, datetime_obj: datetime) -> str: + return datetime_obj.strftime("%Y-%m-%d") + + async def async_health_check(self) -> IntegrationHealthCheckStatus: + raise NotImplementedError("GCS Bucket does not support health check") diff --git a/litellm/integrations/gcs_pubsub/pub_sub.py b/litellm/integrations/gcs_pubsub/pub_sub.py new file mode 100644 index 0000000000..e94c853f3f --- /dev/null +++ b/litellm/integrations/gcs_pubsub/pub_sub.py @@ -0,0 +1,203 @@ +""" +BETA + +This is the PubSub logger for GCS PubSub, this sends LiteLLM SpendLogs Payloads to GCS PubSub. + +Users can use this instead of sending their SpendLogs to their Postgres database. 
+""" + +import asyncio +import json +import os +import traceback +from typing import TYPE_CHECKING, Any, Dict, List, Optional + +if TYPE_CHECKING: + from litellm.proxy._types import SpendLogsPayload +else: + SpendLogsPayload = Any + +from litellm._logging import verbose_logger +from litellm.integrations.custom_batch_logger import CustomBatchLogger +from litellm.llms.custom_httpx.http_handler import ( + get_async_httpx_client, + httpxSpecialProvider, +) + + +class GcsPubSubLogger(CustomBatchLogger): + def __init__( + self, + project_id: Optional[str] = None, + topic_id: Optional[str] = None, + credentials_path: Optional[str] = None, + **kwargs, + ): + """ + Initialize Google Cloud Pub/Sub publisher + + Args: + project_id (str): Google Cloud project ID + topic_id (str): Pub/Sub topic ID + credentials_path (str, optional): Path to Google Cloud credentials JSON file + """ + from litellm.proxy.utils import _premium_user_check + + _premium_user_check() + + self.async_httpx_client = get_async_httpx_client( + llm_provider=httpxSpecialProvider.LoggingCallback + ) + + self.project_id = project_id or os.getenv("GCS_PUBSUB_PROJECT_ID") + self.topic_id = topic_id or os.getenv("GCS_PUBSUB_TOPIC_ID") + self.path_service_account_json = credentials_path or os.getenv( + "GCS_PATH_SERVICE_ACCOUNT" + ) + + if not self.project_id or not self.topic_id: + raise ValueError("Both project_id and topic_id must be provided") + + self.flush_lock = asyncio.Lock() + super().__init__(**kwargs, flush_lock=self.flush_lock) + asyncio.create_task(self.periodic_flush()) + self.log_queue: List[SpendLogsPayload] = [] + + async def construct_request_headers(self) -> Dict[str, str]: + """Construct authorization headers using Vertex AI auth""" + from litellm import vertex_chat_completion + + _auth_header, vertex_project = ( + await vertex_chat_completion._ensure_access_token_async( + credentials=self.path_service_account_json, + project_id=None, + custom_llm_provider="vertex_ai", + ) + ) + + auth_header, _ 
= vertex_chat_completion._get_token_and_url( + model="pub-sub", + auth_header=_auth_header, + vertex_credentials=self.path_service_account_json, + vertex_project=vertex_project, + vertex_location=None, + gemini_api_key=None, + stream=None, + custom_llm_provider="vertex_ai", + api_base=None, + ) + + headers = { + "Authorization": f"Bearer {auth_header}", + "Content-Type": "application/json", + } + return headers + + async def async_log_success_event(self, kwargs, response_obj, start_time, end_time): + """ + Async Log success events to GCS PubSub Topic + + - Creates a SpendLogsPayload + - Adds to batch queue + - Flushes based on CustomBatchLogger settings + + Raises: + Raises a NON Blocking verbose_logger.exception if an error occurs + """ + from litellm.proxy.spend_tracking.spend_tracking_utils import ( + get_logging_payload, + ) + from litellm.proxy.utils import _premium_user_check + + _premium_user_check() + + try: + verbose_logger.debug( + "PubSub: Logging - Enters logging function for model %s", kwargs + ) + spend_logs_payload = get_logging_payload( + kwargs=kwargs, + response_obj=response_obj, + start_time=start_time, + end_time=end_time, + ) + self.log_queue.append(spend_logs_payload) + + if len(self.log_queue) >= self.batch_size: + await self.async_send_batch() + + except Exception as e: + verbose_logger.exception( + f"PubSub Layer Error - {str(e)}\n{traceback.format_exc()}" + ) + pass + + async def async_send_batch(self): + """ + Sends the batch of messages to Pub/Sub + """ + try: + if not self.log_queue: + return + + verbose_logger.debug( + f"PubSub - about to flush {len(self.log_queue)} events" + ) + + for message in self.log_queue: + await self.publish_message(message) + + except Exception as e: + verbose_logger.exception( + f"PubSub Error sending batch - {str(e)}\n{traceback.format_exc()}" + ) + finally: + self.log_queue.clear() + + async def publish_message( + self, message: SpendLogsPayload + ) -> Optional[Dict[str, Any]]: + """ + Publish message to 
Google Cloud Pub/Sub using REST API + + Args: + message: Message to publish (dict or string) + + Returns: + dict: Published message response + """ + try: + headers = await self.construct_request_headers() + + # Prepare message data + if isinstance(message, str): + message_data = message + else: + message_data = json.dumps(message, default=str) + + # Base64 encode the message + import base64 + + encoded_message = base64.b64encode(message_data.encode("utf-8")).decode( + "utf-8" + ) + + # Construct request body + request_body = {"messages": [{"data": encoded_message}]} + + url = f"https://pubsub.googleapis.com/v1/projects/{self.project_id}/topics/{self.topic_id}:publish" + + response = await self.async_httpx_client.post( + url=url, headers=headers, json=request_body + ) + + if response.status_code not in [200, 202]: + verbose_logger.error("Pub/Sub publish error: %s", str(response.text)) + raise Exception(f"Failed to publish message: {response.text}") + + verbose_logger.debug("Pub/Sub response: %s", response.text) + return response.json() + + except Exception as e: + verbose_logger.error("Pub/Sub publish error: %s", str(e)) + return None diff --git a/litellm/integrations/langfuse/langfuse.py b/litellm/integrations/langfuse/langfuse.py index 20d2befe65..f990a316c4 100644 --- a/litellm/integrations/langfuse/langfuse.py +++ b/litellm/integrations/langfuse/langfuse.py @@ -3,7 +3,8 @@ import copy import os import traceback -from typing import TYPE_CHECKING, Any, Dict, List, Optional, cast +from datetime import datetime +from typing import TYPE_CHECKING, Any, Dict, List, Optional, Tuple, Union, cast from packaging.version import Version @@ -13,9 +14,16 @@ from litellm.litellm_core_utils.redact_messages import redact_user_api_key_info from litellm.llms.custom_httpx.http_handler import _get_httpx_client from litellm.secret_managers.main import str_to_bool from litellm.types.integrations.langfuse import * +from litellm.types.llms.openai import HttpxBinaryResponseContent from 
litellm.types.utils import ( + EmbeddingResponse, + ImageResponse, + ModelResponse, + RerankResponse, StandardLoggingPayload, StandardLoggingPromptManagementMetadata, + TextCompletionResponse, + TranscriptionResponse, ) if TYPE_CHECKING: @@ -54,8 +62,8 @@ class LangFuseLogger: self.langfuse_host = "http://" + self.langfuse_host self.langfuse_release = os.getenv("LANGFUSE_RELEASE") self.langfuse_debug = os.getenv("LANGFUSE_DEBUG") - self.langfuse_flush_interval = ( - os.getenv("LANGFUSE_FLUSH_INTERVAL") or flush_interval + self.langfuse_flush_interval = LangFuseLogger._get_langfuse_flush_interval( + flush_interval ) http_client = _get_httpx_client() self.langfuse_client = http_client.client @@ -150,19 +158,29 @@ class LangFuseLogger: return metadata - def _old_log_event( # noqa: PLR0915 + def log_event_on_langfuse( self, - kwargs, - response_obj, - start_time, - end_time, - user_id, - print_verbose, - level="DEFAULT", - status_message=None, + kwargs: dict, + response_obj: Union[ + None, + dict, + EmbeddingResponse, + ModelResponse, + TextCompletionResponse, + ImageResponse, + TranscriptionResponse, + RerankResponse, + HttpxBinaryResponseContent, + ], + start_time: Optional[datetime] = None, + end_time: Optional[datetime] = None, + user_id: Optional[str] = None, + level: str = "DEFAULT", + status_message: Optional[str] = None, ) -> dict: - # Method definition - + """ + Logs a success or error event on Langfuse + """ try: verbose_logger.debug( f"Langfuse Logging - Enters logging function for model {kwargs}" @@ -198,66 +216,13 @@ class LangFuseLogger: # if casting value to str fails don't block logging pass - # end of processing langfuse ######################## - if ( - level == "ERROR" - and status_message is not None - and isinstance(status_message, str) - ): - input = prompt - output = status_message - elif response_obj is not None and ( - kwargs.get("call_type", None) == "embedding" - or isinstance(response_obj, litellm.EmbeddingResponse) - ): - input = prompt - 
output = None - elif response_obj is not None and isinstance( - response_obj, litellm.ModelResponse - ): - input = prompt - output = response_obj["choices"][0]["message"].json() - elif response_obj is not None and isinstance( - response_obj, litellm.HttpxBinaryResponseContent - ): - input = prompt - output = "speech-output" - elif response_obj is not None and isinstance( - response_obj, litellm.TextCompletionResponse - ): - input = prompt - output = response_obj.choices[0].text - elif response_obj is not None and isinstance( - response_obj, litellm.ImageResponse - ): - input = prompt - output = response_obj["data"] - elif response_obj is not None and isinstance( - response_obj, litellm.TranscriptionResponse - ): - input = prompt - output = response_obj["text"] - elif response_obj is not None and isinstance( - response_obj, litellm.RerankResponse - ): - input = prompt - output = response_obj.results - elif ( - kwargs.get("call_type") is not None - and kwargs.get("call_type") == "_arealtime" - and response_obj is not None - and isinstance(response_obj, list) - ): - input = kwargs.get("input") - output = response_obj - elif ( - kwargs.get("call_type") is not None - and kwargs.get("call_type") == "pass_through_endpoint" - and response_obj is not None - and isinstance(response_obj, dict) - ): - input = prompt - output = response_obj.get("response", "") + input, output = self._get_langfuse_input_output_content( + kwargs=kwargs, + response_obj=response_obj, + prompt=prompt, + level=level, + status_message=status_message, + ) verbose_logger.debug( f"OUTPUT IN LANGFUSE: {output}; original: {response_obj}" ) @@ -265,31 +230,30 @@ class LangFuseLogger: generation_id = None if self._is_langfuse_v2(): trace_id, generation_id = self._log_langfuse_v2( - user_id, - metadata, - litellm_params, - output, - start_time, - end_time, - kwargs, - optional_params, - input, - response_obj, - level, - print_verbose, - litellm_call_id, + user_id=user_id, + metadata=metadata, + 
litellm_params=litellm_params, + output=output, + start_time=start_time, + end_time=end_time, + kwargs=kwargs, + optional_params=optional_params, + input=input, + response_obj=response_obj, + level=level, + litellm_call_id=litellm_call_id, ) elif response_obj is not None: self._log_langfuse_v1( - user_id, - metadata, - output, - start_time, - end_time, - kwargs, - optional_params, - input, - response_obj, + user_id=user_id, + metadata=metadata, + output=output, + start_time=start_time, + end_time=end_time, + kwargs=kwargs, + optional_params=optional_params, + input=input, + response_obj=response_obj, ) verbose_logger.debug( f"Langfuse Layer Logging - final response object: {response_obj}" @@ -303,11 +267,108 @@ class LangFuseLogger: ) return {"trace_id": None, "generation_id": None} + def _get_langfuse_input_output_content( + self, + kwargs: dict, + response_obj: Union[ + None, + dict, + EmbeddingResponse, + ModelResponse, + TextCompletionResponse, + ImageResponse, + TranscriptionResponse, + RerankResponse, + HttpxBinaryResponseContent, + ], + prompt: dict, + level: str, + status_message: Optional[str], + ) -> Tuple[Optional[dict], Optional[Union[str, dict, list]]]: + """ + Get the input and output content for Langfuse logging + + Args: + kwargs: The keyword arguments passed to the function + response_obj: The response object returned by the function + prompt: The prompt used to generate the response + level: The level of the log message + status_message: The status message of the log message + + Returns: + input: The input content for Langfuse logging + output: The output content for Langfuse logging + """ + input = None + output: Optional[Union[str, dict, List[Any]]] = None + if ( + level == "ERROR" + and status_message is not None + and isinstance(status_message, str) + ): + input = prompt + output = status_message + elif response_obj is not None and ( + kwargs.get("call_type", None) == "embedding" + or isinstance(response_obj, litellm.EmbeddingResponse) + ): + 
input = prompt + output = None + elif response_obj is not None and isinstance( + response_obj, litellm.ModelResponse + ): + input = prompt + output = self._get_chat_content_for_langfuse(response_obj) + elif response_obj is not None and isinstance( + response_obj, litellm.HttpxBinaryResponseContent + ): + input = prompt + output = "speech-output" + elif response_obj is not None and isinstance( + response_obj, litellm.TextCompletionResponse + ): + input = prompt + output = self._get_text_completion_content_for_langfuse(response_obj) + elif response_obj is not None and isinstance( + response_obj, litellm.ImageResponse + ): + input = prompt + output = response_obj.get("data", None) + elif response_obj is not None and isinstance( + response_obj, litellm.TranscriptionResponse + ): + input = prompt + output = response_obj.get("text", None) + elif response_obj is not None and isinstance( + response_obj, litellm.RerankResponse + ): + input = prompt + output = response_obj.results + elif ( + kwargs.get("call_type") is not None + and kwargs.get("call_type") == "_arealtime" + and response_obj is not None + and isinstance(response_obj, list) + ): + input = kwargs.get("input") + output = response_obj + elif ( + kwargs.get("call_type") is not None + and kwargs.get("call_type") == "pass_through_endpoint" + and response_obj is not None + and isinstance(response_obj, dict) + ): + input = prompt + output = response_obj.get("response", "") + return input, output + async def _async_log_event( - self, kwargs, response_obj, start_time, end_time, user_id, print_verbose + self, kwargs, response_obj, start_time, end_time, user_id ): """ - TODO: support async calls when langfuse is truly async + Langfuse SDK uses a background thread to log events + + This approach does not impact latency and runs in the background """ def _is_langfuse_v2(self): @@ -361,19 +422,18 @@ class LangFuseLogger: def _log_langfuse_v2( # noqa: PLR0915 self, - user_id, - metadata, - litellm_params, - output, - 
start_time, - end_time, - kwargs, - optional_params, - input, + user_id: Optional[str], + metadata: dict, + litellm_params: dict, + output: Optional[Union[str, dict, list]], + start_time: Optional[datetime], + end_time: Optional[datetime], + kwargs: dict, + optional_params: dict, + input: Optional[dict], response_obj, - level, - print_verbose, - litellm_call_id, + level: str, + litellm_call_id: Optional[str], ) -> tuple: verbose_logger.debug("Langfuse Layer Logging - logging to langfuse v2") @@ -657,6 +717,31 @@ class LangFuseLogger: verbose_logger.error(f"Langfuse Layer Error - {traceback.format_exc()}") return None, None + @staticmethod + def _get_chat_content_for_langfuse( + response_obj: ModelResponse, + ): + """ + Get the chat content for Langfuse logging + """ + if response_obj.choices and len(response_obj.choices) > 0: + output = response_obj["choices"][0]["message"].json() + return output + else: + return None + + @staticmethod + def _get_text_completion_content_for_langfuse( + response_obj: TextCompletionResponse, + ): + """ + Get the text completion content for Langfuse logging + """ + if response_obj.choices and len(response_obj.choices) > 0: + return response_obj.choices[0].text + else: + return None + @staticmethod def _get_langfuse_tags( standard_logging_object: Optional[StandardLoggingPayload], @@ -708,6 +793,22 @@ class LangFuseLogger: """Check if current langfuse version supports completion start time""" return Version(self.langfuse_sdk_version) >= Version("2.7.3") + @staticmethod + def _get_langfuse_flush_interval(flush_interval: int) -> int: + """ + Get the langfuse flush interval to initialize the Langfuse client + + Reads `LANGFUSE_FLUSH_INTERVAL` from the environment variable. + If not set, uses the flush interval passed in as an argument. 
+ + Args: + flush_interval: The flush interval to use if LANGFUSE_FLUSH_INTERVAL is not set + + Returns: + [int] The flush interval to use to initialize the Langfuse client + """ + return int(os.getenv("LANGFUSE_FLUSH_INTERVAL") or flush_interval) + def _add_prompt_to_generation_params( generation_params: dict, diff --git a/litellm/integrations/langfuse/langfuse_prompt_management.py b/litellm/integrations/langfuse/langfuse_prompt_management.py index 1a14968240..1f4ca84db3 100644 --- a/litellm/integrations/langfuse/langfuse_prompt_management.py +++ b/litellm/integrations/langfuse/langfuse_prompt_management.py @@ -11,6 +11,7 @@ from typing_extensions import TypeAlias from litellm.integrations.custom_logger import CustomLogger from litellm.integrations.prompt_management_base import PromptManagementClient +from litellm.litellm_core_utils.asyncify import run_async_function from litellm.types.llms.openai import AllMessageValues, ChatCompletionSystemMessage from litellm.types.utils import StandardCallbackDynamicParams, StandardLoggingPayload @@ -39,6 +40,7 @@ in_memory_dynamic_logger_cache = DynamicLoggingCache() def langfuse_client_init( langfuse_public_key=None, langfuse_secret=None, + langfuse_secret_key=None, langfuse_host=None, flush_interval=1, ) -> LangfuseClass: @@ -66,7 +68,10 @@ def langfuse_client_init( ) # Instance variables - secret_key = langfuse_secret or os.getenv("LANGFUSE_SECRET_KEY") + + secret_key = ( + langfuse_secret or langfuse_secret_key or os.getenv("LANGFUSE_SECRET_KEY") + ) public_key = langfuse_public_key or os.getenv("LANGFUSE_PUBLIC_KEY") langfuse_host = langfuse_host or os.getenv( "LANGFUSE_HOST", "https://cloud.langfuse.com" @@ -80,7 +85,6 @@ def langfuse_client_init( langfuse_release = os.getenv("LANGFUSE_RELEASE") langfuse_debug = os.getenv("LANGFUSE_DEBUG") - langfuse_flush_interval = os.getenv("LANGFUSE_FLUSH_INTERVAL") or flush_interval parameters = { "public_key": public_key, @@ -88,7 +92,9 @@ def langfuse_client_init( "host": 
langfuse_host, "release": langfuse_release, "debug": langfuse_debug, - "flush_interval": langfuse_flush_interval, # flush interval in seconds + "flush_interval": LangFuseLogger._get_langfuse_flush_interval( + flush_interval + ), # flush interval in seconds } if Version(langfuse.version.__version__) >= Version("2.6.0"): @@ -188,6 +194,7 @@ class LangfusePromptManagement(LangFuseLogger, PromptManagementBase, CustomLogge langfuse_client = langfuse_client_init( langfuse_public_key=dynamic_callback_params.get("langfuse_public_key"), langfuse_secret=dynamic_callback_params.get("langfuse_secret"), + langfuse_secret_key=dynamic_callback_params.get("langfuse_secret_key"), langfuse_host=dynamic_callback_params.get("langfuse_host"), ) langfuse_prompt_client = self._get_prompt_from_id( @@ -204,6 +211,7 @@ class LangfusePromptManagement(LangFuseLogger, PromptManagementBase, CustomLogge langfuse_client = langfuse_client_init( langfuse_public_key=dynamic_callback_params.get("langfuse_public_key"), langfuse_secret=dynamic_callback_params.get("langfuse_secret"), + langfuse_secret_key=dynamic_callback_params.get("langfuse_secret_key"), langfuse_host=dynamic_callback_params.get("langfuse_host"), ) langfuse_prompt_client = self._get_prompt_from_id( @@ -231,6 +239,11 @@ class LangfusePromptManagement(LangFuseLogger, PromptManagementBase, CustomLogge completed_messages=None, ) + def log_success_event(self, kwargs, response_obj, start_time, end_time): + return run_async_function( + self.async_log_success_event, kwargs, response_obj, start_time, end_time + ) + async def async_log_success_event(self, kwargs, response_obj, start_time, end_time): standard_callback_dynamic_params = kwargs.get( "standard_callback_dynamic_params" @@ -240,13 +253,12 @@ class LangfusePromptManagement(LangFuseLogger, PromptManagementBase, CustomLogge standard_callback_dynamic_params=standard_callback_dynamic_params, in_memory_dynamic_logger_cache=in_memory_dynamic_logger_cache, ) - 
langfuse_logger_to_use._old_log_event( + langfuse_logger_to_use.log_event_on_langfuse( kwargs=kwargs, response_obj=response_obj, start_time=start_time, end_time=end_time, user_id=kwargs.get("user", None), - print_verbose=None, ) async def async_log_failure_event(self, kwargs, response_obj, start_time, end_time): @@ -264,12 +276,11 @@ class LangfusePromptManagement(LangFuseLogger, PromptManagementBase, CustomLogge ) if standard_logging_object is None: return - langfuse_logger_to_use._old_log_event( + langfuse_logger_to_use.log_event_on_langfuse( start_time=start_time, end_time=end_time, response_obj=None, user_id=kwargs.get("user", None), - print_verbose=None, status_message=standard_logging_object["error_str"], level="ERROR", kwargs=kwargs, diff --git a/litellm/integrations/langsmith.py b/litellm/integrations/langsmith.py index b727c69e03..1ef90c1822 100644 --- a/litellm/integrations/langsmith.py +++ b/litellm/integrations/langsmith.py @@ -351,6 +351,16 @@ class LangsmithLogger(CustomBatchLogger): queue_objects=batch_group.queue_objects, ) + def _add_endpoint_to_url( + self, url: str, endpoint: str, api_version: str = "/api/v1" + ) -> str: + if api_version not in url: + url = f"{url.rstrip('/')}{api_version}" + + if url.endswith("/"): + return f"{url}{endpoint}" + return f"{url}/{endpoint}" + async def _log_batch_on_langsmith( self, credentials: LangsmithCredentialsObject, @@ -370,7 +380,7 @@ class LangsmithLogger(CustomBatchLogger): """ langsmith_api_base = credentials["LANGSMITH_BASE_URL"] langsmith_api_key = credentials["LANGSMITH_API_KEY"] - url = f"{langsmith_api_base}/runs/batch" + url = self._add_endpoint_to_url(langsmith_api_base, "runs/batch") headers = {"x-api-key": langsmith_api_key} elements_to_log = [queue_object["data"] for queue_object in queue_objects] diff --git a/litellm/integrations/opentelemetry.py b/litellm/integrations/opentelemetry.py index 8ca3ff7432..0ec7358037 100644 --- a/litellm/integrations/opentelemetry.py +++ 
b/litellm/integrations/opentelemetry.py @@ -444,9 +444,13 @@ class OpenTelemetry(CustomLogger): ): try: if self.callback_name == "arize": - from litellm.integrations.arize_ai import ArizeLogger + from litellm.integrations.arize.arize import ArizeLogger + ArizeLogger.set_arize_attributes(span, kwargs, response_obj) + return + elif self.callback_name == "arize_phoenix": + from litellm.integrations.arize.arize_phoenix import ArizePhoenixLogger - ArizeLogger.set_arize_ai_attributes(span, kwargs, response_obj) + ArizePhoenixLogger.set_arize_phoenix_attributes(span, kwargs, response_obj) return elif self.callback_name == "langtrace": from litellm.integrations.langtrace import LangtraceAttributes diff --git a/litellm/integrations/opik/opik.py b/litellm/integrations/opik/opik.py index c78c4de4e6..1f7f18f336 100644 --- a/litellm/integrations/opik/opik.py +++ b/litellm/integrations/opik/opik.py @@ -147,13 +147,11 @@ class OpikLogger(CustomBatchLogger): f"OpikLogger - Error: {response.status_code} - {response.text}" ) else: - verbose_logger.debug( + verbose_logger.info( f"OpikLogger - {len(self.log_queue)} Opik events submitted" ) except Exception as e: - verbose_logger.exception( - f"OpikLogger failed to send batch - {str(e)}\n{traceback.format_exc()}" - ) + verbose_logger.exception(f"OpikLogger failed to send batch - {str(e)}") def _create_opik_headers(self): headers = {} @@ -165,7 +163,7 @@ class OpikLogger(CustomBatchLogger): return headers async def async_send_batch(self): - verbose_logger.exception("Calling async_send_batch") + verbose_logger.info("Calling async_send_batch") if not self.log_queue: return @@ -177,10 +175,12 @@ class OpikLogger(CustomBatchLogger): await self._submit_batch( url=self.trace_url, headers=self.headers, batch={"traces": traces} ) + verbose_logger.info(f"Sent {len(traces)} traces") if len(spans) > 0: await self._submit_batch( url=self.span_url, headers=self.headers, batch={"spans": spans} ) + verbose_logger.info(f"Sent {len(spans)} spans") def 
_create_opik_payload( # noqa: PLR0915 self, kwargs, response_obj, start_time, end_time diff --git a/litellm/integrations/pagerduty/pagerduty.py b/litellm/integrations/pagerduty/pagerduty.py index 2eeb318c9d..6085bc237a 100644 --- a/litellm/integrations/pagerduty/pagerduty.py +++ b/litellm/integrations/pagerduty/pagerduty.py @@ -118,6 +118,7 @@ class PagerDutyAlerting(SlackAlerting): user_api_key_user_id=_meta.get("user_api_key_user_id"), user_api_key_team_alias=_meta.get("user_api_key_team_alias"), user_api_key_end_user_id=_meta.get("user_api_key_end_user_id"), + user_api_key_user_email=_meta.get("user_api_key_user_email"), ) ) @@ -195,6 +196,7 @@ class PagerDutyAlerting(SlackAlerting): user_api_key_user_id=user_api_key_dict.user_id, user_api_key_team_alias=user_api_key_dict.team_alias, user_api_key_end_user_id=user_api_key_dict.end_user_id, + user_api_key_user_email=user_api_key_dict.user_email, ) ) diff --git a/litellm/integrations/prometheus.py b/litellm/integrations/prometheus.py index f496dc707c..d6e47b87ce 100644 --- a/litellm/integrations/prometheus.py +++ b/litellm/integrations/prometheus.py @@ -4,7 +4,7 @@ import asyncio import sys from datetime import datetime, timedelta -from typing import List, Optional, cast +from typing import Any, Awaitable, Callable, List, Literal, Optional, Tuple, cast import litellm from litellm._logging import print_verbose, verbose_logger @@ -423,6 +423,7 @@ class PrometheusLogger(CustomLogger): team=user_api_team, team_alias=user_api_team_alias, user=user_id, + user_email=standard_logging_payload["metadata"]["user_api_key_user_email"], status_code="200", model=model, litellm_model_name=model, @@ -690,14 +691,14 @@ class PrometheusLogger(CustomLogger): start_time: Optional[datetime] = kwargs.get("start_time") api_call_start_time = kwargs.get("api_call_start_time", None) completion_start_time = kwargs.get("completion_start_time", None) + time_to_first_token_seconds = self._safe_duration_seconds( + start_time=api_call_start_time, 
+ end_time=completion_start_time, + ) if ( - completion_start_time is not None - and isinstance(completion_start_time, datetime) + time_to_first_token_seconds is not None and kwargs.get("stream", False) is True # only emit for streaming requests ): - time_to_first_token_seconds = ( - completion_start_time - api_call_start_time - ).total_seconds() self.litellm_llm_api_time_to_first_token_metric.labels( model, user_api_key, @@ -709,11 +710,12 @@ class PrometheusLogger(CustomLogger): verbose_logger.debug( "Time to first token metric not emitted, stream option in model_parameters is not True" ) - if api_call_start_time is not None and isinstance( - api_call_start_time, datetime - ): - api_call_total_time: timedelta = end_time - api_call_start_time - api_call_total_time_seconds = api_call_total_time.total_seconds() + + api_call_total_time_seconds = self._safe_duration_seconds( + start_time=api_call_start_time, + end_time=end_time, + ) + if api_call_total_time_seconds is not None: _labels = prometheus_label_factory( supported_enum_labels=PrometheusMetricLabels.get_labels( label_name="litellm_llm_api_latency_metric" @@ -725,9 +727,11 @@ class PrometheusLogger(CustomLogger): ) # total request latency - if start_time is not None and isinstance(start_time, datetime): - total_time: timedelta = end_time - start_time - total_time_seconds = total_time.total_seconds() + total_time_seconds = self._safe_duration_seconds( + start_time=start_time, + end_time=end_time, + ) + if total_time_seconds is not None: _labels = prometheus_label_factory( supported_enum_labels=PrometheusMetricLabels.get_labels( label_name="litellm_request_total_latency_metric" @@ -806,6 +810,7 @@ class PrometheusLogger(CustomLogger): enum_values = UserAPIKeyLabelValues( end_user=user_api_key_dict.end_user_id, user=user_api_key_dict.user_id, + user_email=user_api_key_dict.user_email, hashed_api_key=user_api_key_dict.api_key, api_key_alias=user_api_key_dict.key_alias, team=user_api_key_dict.team_id, @@ -853,6 
+858,7 @@ class PrometheusLogger(CustomLogger): team=user_api_key_dict.team_id, team_alias=user_api_key_dict.team_alias, user=user_api_key_dict.user_id, + user_email=user_api_key_dict.user_email, status_code="200", ) _labels = prometheus_label_factory( @@ -1321,6 +1327,10 @@ class PrometheusLogger(CustomLogger): Helper to create tasks for initializing metrics that are required on startup - eg. remaining budget metrics """ + if litellm.prometheus_initialize_budget_metrics is not True: + verbose_logger.debug("Prometheus: skipping budget metrics initialization") + return + try: if asyncio.get_running_loop(): asyncio.create_task(self._initialize_remaining_budget_metrics()) @@ -1329,15 +1339,20 @@ class PrometheusLogger(CustomLogger): f"No running event loop - skipping budget metrics initialization: {str(e)}" ) - async def _initialize_remaining_budget_metrics(self): + async def _initialize_budget_metrics( + self, + data_fetch_function: Callable[..., Awaitable[Tuple[List[Any], Optional[int]]]], + set_metrics_function: Callable[[List[Any]], Awaitable[None]], + data_type: Literal["teams", "keys"], + ): """ - Initialize remaining budget metrics for all teams to avoid metric discrepancies. + Generic method to initialize budget metrics for teams or API keys. - Runs when prometheus logger starts up. + Args: + data_fetch_function: Function to fetch data with pagination. + set_metrics_function: Function to set metrics for the fetched data. + data_type: String representing the type of data ("teams" or "keys") for logging purposes. 
""" - from litellm.proxy.management_endpoints.team_endpoints import ( - get_paginated_teams, - ) from litellm.proxy.proxy_server import prisma_client if prisma_client is None: @@ -1346,28 +1361,121 @@ class PrometheusLogger(CustomLogger): try: page = 1 page_size = 50 - teams, total_count = await get_paginated_teams( - prisma_client=prisma_client, page_size=page_size, page=page + data, total_count = await data_fetch_function( + page_size=page_size, page=page ) + if total_count is None: + total_count = len(data) + # Calculate total pages needed total_pages = (total_count + page_size - 1) // page_size - # Set metrics for first page of teams - await self._set_team_list_budget_metrics(teams) + # Set metrics for first page of data + await set_metrics_function(data) # Get and set metrics for remaining pages for page in range(2, total_pages + 1): - teams, _ = await get_paginated_teams( - prisma_client=prisma_client, page_size=page_size, page=page - ) - await self._set_team_list_budget_metrics(teams) + data, _ = await data_fetch_function(page_size=page_size, page=page) + await set_metrics_function(data) except Exception as e: verbose_logger.exception( - f"Error initializing team budget metrics: {str(e)}" + f"Error initializing {data_type} budget metrics: {str(e)}" ) + async def _initialize_team_budget_metrics(self): + """ + Initialize team budget metrics by reusing the generic pagination logic. 
+ """ + from litellm.proxy.management_endpoints.team_endpoints import ( + get_paginated_teams, + ) + from litellm.proxy.proxy_server import prisma_client + + if prisma_client is None: + verbose_logger.debug( + "Prometheus: skipping team metrics initialization, DB not initialized" + ) + return + + async def fetch_teams( + page_size: int, page: int + ) -> Tuple[List[LiteLLM_TeamTable], Optional[int]]: + teams, total_count = await get_paginated_teams( + prisma_client=prisma_client, page_size=page_size, page=page + ) + if total_count is None: + total_count = len(teams) + return teams, total_count + + await self._initialize_budget_metrics( + data_fetch_function=fetch_teams, + set_metrics_function=self._set_team_list_budget_metrics, + data_type="teams", + ) + + async def _initialize_api_key_budget_metrics(self): + """ + Initialize API key budget metrics by reusing the generic pagination logic. + """ + from typing import Union + + from litellm.constants import UI_SESSION_TOKEN_TEAM_ID + from litellm.proxy.management_endpoints.key_management_endpoints import ( + _list_key_helper, + ) + from litellm.proxy.proxy_server import prisma_client + + if prisma_client is None: + verbose_logger.debug( + "Prometheus: skipping key metrics initialization, DB not initialized" + ) + return + + async def fetch_keys( + page_size: int, page: int + ) -> Tuple[List[Union[str, UserAPIKeyAuth]], Optional[int]]: + key_list_response = await _list_key_helper( + prisma_client=prisma_client, + page=page, + size=page_size, + user_id=None, + team_id=None, + key_alias=None, + exclude_team_id=UI_SESSION_TOKEN_TEAM_ID, + return_full_object=True, + organization_id=None, + ) + keys = key_list_response.get("keys", []) + total_count = key_list_response.get("total_count") + if total_count is None: + total_count = len(keys) + return keys, total_count + + await self._initialize_budget_metrics( + data_fetch_function=fetch_keys, + set_metrics_function=self._set_key_list_budget_metrics, + data_type="keys", + ) + + 
async def _initialize_remaining_budget_metrics(self): + """ + Initialize remaining budget metrics for all teams to avoid metric discrepancies. + + Runs when prometheus logger starts up. + """ + await self._initialize_team_budget_metrics() + await self._initialize_api_key_budget_metrics() + + async def _set_key_list_budget_metrics( + self, keys: List[Union[str, UserAPIKeyAuth]] + ): + """Helper function to set budget metrics for a list of keys""" + for key in keys: + if isinstance(key, UserAPIKeyAuth): + self._set_key_budget_metrics(key) + async def _set_team_list_budget_metrics(self, teams: List[LiteLLM_TeamTable]): """Helper function to set budget metrics for a list of teams""" for team in teams: @@ -1431,7 +1539,7 @@ class PrometheusLogger(CustomLogger): user_api_key_cache=user_api_key_cache, ) except Exception as e: - verbose_logger.exception( + verbose_logger.debug( f"[Non-Blocking] Prometheus: Error getting team info: {str(e)}" ) return team_object @@ -1452,10 +1560,18 @@ class PrometheusLogger(CustomLogger): - Max Budget - Budget Reset At """ - self.litellm_remaining_team_budget_metric.labels( - team.team_id, - team.team_alias or "", - ).set( + enum_values = UserAPIKeyLabelValues( + team=team.team_id, + team_alias=team.team_alias or "", + ) + + _labels = prometheus_label_factory( + supported_enum_labels=PrometheusMetricLabels.get_labels( + label_name="litellm_remaining_team_budget_metric" + ), + enum_values=enum_values, + ) + self.litellm_remaining_team_budget_metric.labels(**_labels).set( self._safe_get_remaining_budget( max_budget=team.max_budget, spend=team.spend, @@ -1463,16 +1579,22 @@ class PrometheusLogger(CustomLogger): ) if team.max_budget is not None: - self.litellm_team_max_budget_metric.labels( - team.team_id, - team.team_alias or "", - ).set(team.max_budget) + _labels = prometheus_label_factory( + supported_enum_labels=PrometheusMetricLabels.get_labels( + label_name="litellm_team_max_budget_metric" + ), + enum_values=enum_values, + ) + 
self.litellm_team_max_budget_metric.labels(**_labels).set(team.max_budget) if team.budget_reset_at is not None: - self.litellm_team_budget_remaining_hours_metric.labels( - team.team_id, - team.team_alias or "", - ).set( + _labels = prometheus_label_factory( + supported_enum_labels=PrometheusMetricLabels.get_labels( + label_name="litellm_team_budget_remaining_hours_metric" + ), + enum_values=enum_values, + ) + self.litellm_team_budget_remaining_hours_metric.labels(**_labels).set( self._get_remaining_hours_for_budget_reset( budget_reset_at=team.budget_reset_at ) @@ -1486,9 +1608,17 @@ class PrometheusLogger(CustomLogger): - Max Budget - Budget Reset At """ - self.litellm_remaining_api_key_budget_metric.labels( - user_api_key_dict.token, user_api_key_dict.key_alias - ).set( + enum_values = UserAPIKeyLabelValues( + hashed_api_key=user_api_key_dict.token, + api_key_alias=user_api_key_dict.key_alias or "", + ) + _labels = prometheus_label_factory( + supported_enum_labels=PrometheusMetricLabels.get_labels( + label_name="litellm_remaining_api_key_budget_metric" + ), + enum_values=enum_values, + ) + self.litellm_remaining_api_key_budget_metric.labels(**_labels).set( self._safe_get_remaining_budget( max_budget=user_api_key_dict.max_budget, spend=user_api_key_dict.spend, @@ -1496,14 +1626,18 @@ class PrometheusLogger(CustomLogger): ) if user_api_key_dict.max_budget is not None: - self.litellm_api_key_max_budget_metric.labels( - user_api_key_dict.token, user_api_key_dict.key_alias - ).set(user_api_key_dict.max_budget) + _labels = prometheus_label_factory( + supported_enum_labels=PrometheusMetricLabels.get_labels( + label_name="litellm_api_key_max_budget_metric" + ), + enum_values=enum_values, + ) + self.litellm_api_key_max_budget_metric.labels(**_labels).set( + user_api_key_dict.max_budget + ) if user_api_key_dict.budget_reset_at is not None: - self.litellm_api_key_budget_remaining_hours_metric.labels( - user_api_key_dict.token, user_api_key_dict.key_alias - ).set( + 
self.litellm_api_key_budget_remaining_hours_metric.labels(**_labels).set( self._get_remaining_hours_for_budget_reset( budget_reset_at=user_api_key_dict.budget_reset_at ) @@ -1558,7 +1692,7 @@ class PrometheusLogger(CustomLogger): if key_object: user_api_key_dict.budget_reset_at = key_object.budget_reset_at except Exception as e: - verbose_logger.exception( + verbose_logger.debug( f"[Non-Blocking] Prometheus: Error getting key info: {str(e)}" ) @@ -1572,6 +1706,21 @@ class PrometheusLogger(CustomLogger): budget_reset_at - datetime.now(budget_reset_at.tzinfo) ).total_seconds() / 3600 + def _safe_duration_seconds( + self, + start_time: Any, + end_time: Any, + ) -> Optional[float]: + """ + Compute the duration in seconds between two objects. + + Returns the duration as a float if both start and end are instances of datetime, + otherwise returns None. + """ + if isinstance(start_time, datetime) and isinstance(end_time, datetime): + return (end_time - start_time).total_seconds() + return None + def prometheus_label_factory( supported_enum_labels: List[str], diff --git a/litellm/litellm_core_utils/core_helpers.py b/litellm/litellm_core_utils/core_helpers.py index ceb150946c..2036b93692 100644 --- a/litellm/litellm_core_utils/core_helpers.py +++ b/litellm/litellm_core_utils/core_helpers.py @@ -73,8 +73,19 @@ def remove_index_from_tool_calls( def get_litellm_metadata_from_kwargs(kwargs: dict): """ Helper to get litellm metadata from all litellm request kwargs + + Return `litellm_metadata` if it exists, otherwise return `metadata` """ - return kwargs.get("litellm_params", {}).get("metadata", {}) + litellm_params = kwargs.get("litellm_params", {}) + if litellm_params: + metadata = litellm_params.get("metadata", {}) + litellm_metadata = litellm_params.get("litellm_metadata", {}) + if litellm_metadata: + return litellm_metadata + elif metadata: + return metadata + + return {} # Helper functions used for OTEL logging diff --git a/litellm/litellm_core_utils/dd_tracing.py 
b/litellm/litellm_core_utils/dd_tracing.py new file mode 100644 index 0000000000..1f866a998a --- /dev/null +++ b/litellm/litellm_core_utils/dd_tracing.py @@ -0,0 +1,73 @@ +""" +Handles Tracing on DataDog Traces. + +If the ddtrace package is not installed, the tracer will be a no-op. +""" + +from contextlib import contextmanager +from typing import TYPE_CHECKING, Any, Union + +from litellm.secret_managers.main import get_secret_bool + +if TYPE_CHECKING: + from ddtrace.tracer import Tracer as DD_TRACER +else: + DD_TRACER = Any + + +class NullSpan: + """A no-op span implementation.""" + + def __enter__(self): + return self + + def __exit__(self, *args): + pass + + def finish(self): + pass + + +@contextmanager +def null_tracer(name, **kwargs): + """Context manager that yields a no-op span.""" + yield NullSpan() + + +class NullTracer: + """A no-op tracer implementation.""" + + def trace(self, name, **kwargs): + return NullSpan() + + def wrap(self, name=None, **kwargs): + # If called with no arguments (as @tracer.wrap()) + if callable(name): + return name + + # If called with arguments (as @tracer.wrap(name="something")) + def decorator(f): + return f + + return decorator + + +def _should_use_dd_tracer(): + """Returns True if `USE_DDTRACE` is set to True in .env""" + return get_secret_bool("USE_DDTRACE", False) is True + + +# Initialize tracer +should_use_dd_tracer = _should_use_dd_tracer() +tracer: Union[NullTracer, DD_TRACER] = NullTracer() +# We need to ensure tracer is never None and always has the required methods +if should_use_dd_tracer: + try: + from ddtrace import tracer as dd_tracer + + # Define the type to match what's expected by the code using this module + tracer = dd_tracer + except ImportError: + tracer = NullTracer() +else: + tracer = NullTracer() diff --git a/litellm/litellm_core_utils/dot_notation_indexing.py b/litellm/litellm_core_utils/dot_notation_indexing.py new file mode 100644 index 0000000000..fda37f6500 --- /dev/null +++ 
b/litellm/litellm_core_utils/dot_notation_indexing.py @@ -0,0 +1,59 @@ +""" +This file contains the logic for dot notation indexing. + +Used by JWT Auth to get the user role from the token. +""" + +from typing import Any, Dict, Optional, TypeVar + +T = TypeVar("T") + + +def get_nested_value( + data: Dict[str, Any], key_path: str, default: Optional[T] = None +) -> Optional[T]: + """ + Retrieves a value from a nested dictionary using dot notation. + + Args: + data: The dictionary to search in + key_path: The path to the value using dot notation (e.g., "a.b.c") + default: The default value to return if the path is not found + + Returns: + The value at the specified path, or the default value if not found + + Example: + >>> data = {"a": {"b": {"c": "value"}}} + >>> get_nested_value(data, "a.b.c") + 'value' + >>> get_nested_value(data, "a.b.d", "default") + 'default' + """ + if not key_path: + return default + + # Remove metadata. prefix if it exists + key_path = ( + key_path.replace("metadata.", "", 1) + if key_path.startswith("metadata.") + else key_path + ) + + # Split the key path into parts + parts = key_path.split(".") + + # Traverse through the dictionary + current: Any = data + for part in parts: + try: + current = current[part] + except (KeyError, TypeError): + return default + + # If default is None, we can return any type + if default is None: + return current + + # Otherwise, ensure the type matches the default + return current if isinstance(current, type(default)) else default diff --git a/litellm/litellm_core_utils/duration_parser.py b/litellm/litellm_core_utils/duration_parser.py index c8c6bea83d..dbcd72eb1f 100644 --- a/litellm/litellm_core_utils/duration_parser.py +++ b/litellm/litellm_core_utils/duration_parser.py @@ -13,7 +13,7 @@ from typing import Tuple def _extract_from_regex(duration: str) -> Tuple[int, str]: - match = re.match(r"(\d+)(mo|[smhd]?)", duration) + match = re.match(r"(\d+)(mo|[smhdw]?)", duration) if not match: raise 
ValueError("Invalid duration format") @@ -42,6 +42,7 @@ def duration_in_seconds(duration: str) -> int: - "m" - minutes - "h" - hours - "d" - days + - "w" - weeks - "mo" - months Returns time in seconds till when budget needs to be reset @@ -56,6 +57,8 @@ def duration_in_seconds(duration: str) -> int: return value * 3600 elif unit == "d": return value * 86400 + elif unit == "w": + return value * 604800 elif unit == "mo": now = time.time() current_time = datetime.fromtimestamp(now) diff --git a/litellm/litellm_core_utils/exception_mapping_utils.py b/litellm/litellm_core_utils/exception_mapping_utils.py index edcf90fe41..7a0cffab7b 100644 --- a/litellm/litellm_core_utils/exception_mapping_utils.py +++ b/litellm/litellm_core_utils/exception_mapping_utils.py @@ -14,6 +14,7 @@ from ..exceptions import ( BadRequestError, ContentPolicyViolationError, ContextWindowExceededError, + InternalServerError, NotFoundError, PermissionDeniedError, RateLimitError, @@ -140,7 +141,7 @@ def exception_type( # type: ignore # noqa: PLR0915 "\033[1;31mGive Feedback / Get Help: https://github.com/BerriAI/litellm/issues/new\033[0m" # noqa ) # noqa print( # noqa - "LiteLLM.Info: If you need to debug this error, use `litellm.set_verbose=True'." # noqa + "LiteLLM.Info: If you need to debug this error, use `litellm._turn_on_debug()'." # noqa ) # noqa print() # noqa @@ -222,6 +223,7 @@ def exception_type( # type: ignore # noqa: PLR0915 "Request Timeout Error" in error_str or "Request timed out" in error_str or "Timed out generating response" in error_str + or "The read operation timed out" in error_str ): exception_mapping_worked = True @@ -276,6 +278,7 @@ def exception_type( # type: ignore # noqa: PLR0915 "This model's maximum context length is" in error_str or "string too long. 
Expected a string with maximum length" in error_str + or "model's maximum context limit" in error_str ): exception_mapping_worked = True raise ContextWindowExceededError( @@ -328,6 +331,7 @@ def exception_type( # type: ignore # noqa: PLR0915 model=model, response=getattr(original_exception, "response", None), litellm_debug_info=extra_information, + body=getattr(original_exception, "body", None), ) elif ( "Web server is returning an unknown error" in error_str @@ -418,6 +422,7 @@ def exception_type( # type: ignore # noqa: PLR0915 llm_provider=custom_llm_provider, response=getattr(original_exception, "response", None), litellm_debug_info=extra_information, + body=getattr(original_exception, "body", None), ) elif original_exception.status_code == 429: exception_mapping_worked = True @@ -467,7 +472,10 @@ def exception_type( # type: ignore # noqa: PLR0915 method="POST", url="https://api.openai.com/v1/" ), ) - elif custom_llm_provider == "anthropic": # one of the anthropics + elif ( + custom_llm_provider == "anthropic" + or custom_llm_provider == "anthropic_text" + ): # one of the anthropics if "prompt is too long" in error_str or "prompt: length" in error_str: exception_mapping_worked = True raise ContextWindowExceededError( @@ -475,6 +483,13 @@ def exception_type( # type: ignore # noqa: PLR0915 model=model, llm_provider="anthropic", ) + elif "overloaded_error" in error_str: + exception_mapping_worked = True + raise InternalServerError( + message="AnthropicError - {}".format(error_str), + model=model, + llm_provider="anthropic", + ) if "Invalid API Key" in error_str: exception_mapping_worked = True raise AuthenticationError( @@ -680,6 +695,13 @@ def exception_type( # type: ignore # noqa: PLR0915 response=getattr(original_exception, "response", None), litellm_debug_info=extra_information, ) + elif "model's maximum context limit" in error_str: + exception_mapping_worked = True + raise ContextWindowExceededError( + message=f"{custom_llm_provider}Exception: Context Window 
Error - {error_str}", + model=model, + llm_provider=custom_llm_provider, + ) elif "token_quota_reached" in error_str: exception_mapping_worked = True raise RateLimitError( @@ -1940,6 +1962,7 @@ def exception_type( # type: ignore # noqa: PLR0915 model=model, litellm_debug_info=extra_information, response=getattr(original_exception, "response", None), + body=getattr(original_exception, "body", None), ) elif ( "The api_key client option must be set either by passing api_key to the client or by setting" @@ -1971,6 +1994,7 @@ def exception_type( # type: ignore # noqa: PLR0915 model=model, litellm_debug_info=extra_information, response=getattr(original_exception, "response", None), + body=getattr(original_exception, "body", None), ) elif original_exception.status_code == 401: exception_mapping_worked = True diff --git a/litellm/litellm_core_utils/get_litellm_params.py b/litellm/litellm_core_utils/get_litellm_params.py new file mode 100644 index 0000000000..cf62375f33 --- /dev/null +++ b/litellm/litellm_core_utils/get_litellm_params.py @@ -0,0 +1,103 @@ +from typing import Optional + + +def _get_base_model_from_litellm_call_metadata( + metadata: Optional[dict], +) -> Optional[str]: + if metadata is None: + return None + + if metadata is not None: + model_info = metadata.get("model_info", {}) + + if model_info is not None: + base_model = model_info.get("base_model", None) + if base_model is not None: + return base_model + return None + + +def get_litellm_params( + api_key: Optional[str] = None, + force_timeout=600, + azure=False, + logger_fn=None, + verbose=False, + hugging_face=False, + replicate=False, + together_ai=False, + custom_llm_provider: Optional[str] = None, + api_base: Optional[str] = None, + litellm_call_id=None, + model_alias_map=None, + completion_call_id=None, + metadata: Optional[dict] = None, + model_info=None, + proxy_server_request=None, + acompletion=None, + aembedding=None, + preset_cache_key=None, + no_log=None, + input_cost_per_second=None, + 
input_cost_per_token=None, + output_cost_per_token=None, + output_cost_per_second=None, + cooldown_time=None, + text_completion=None, + azure_ad_token_provider=None, + user_continue_message=None, + base_model: Optional[str] = None, + litellm_trace_id: Optional[str] = None, + hf_model_name: Optional[str] = None, + custom_prompt_dict: Optional[dict] = None, + litellm_metadata: Optional[dict] = None, + disable_add_transform_inline_image_block: Optional[bool] = None, + drop_params: Optional[bool] = None, + prompt_id: Optional[str] = None, + prompt_variables: Optional[dict] = None, + async_call: Optional[bool] = None, + ssl_verify: Optional[bool] = None, + merge_reasoning_content_in_choices: Optional[bool] = None, + **kwargs, +) -> dict: + litellm_params = { + "acompletion": acompletion, + "api_key": api_key, + "force_timeout": force_timeout, + "logger_fn": logger_fn, + "verbose": verbose, + "custom_llm_provider": custom_llm_provider, + "api_base": api_base, + "litellm_call_id": litellm_call_id, + "model_alias_map": model_alias_map, + "completion_call_id": completion_call_id, + "aembedding": aembedding, + "metadata": metadata, + "model_info": model_info, + "proxy_server_request": proxy_server_request, + "preset_cache_key": preset_cache_key, + "no-log": no_log or kwargs.get("no-log"), + "stream_response": {}, # litellm_call_id: ModelResponse Dict + "input_cost_per_token": input_cost_per_token, + "input_cost_per_second": input_cost_per_second, + "output_cost_per_token": output_cost_per_token, + "output_cost_per_second": output_cost_per_second, + "cooldown_time": cooldown_time, + "text_completion": text_completion, + "azure_ad_token_provider": azure_ad_token_provider, + "user_continue_message": user_continue_message, + "base_model": base_model + or _get_base_model_from_litellm_call_metadata(metadata=metadata), + "litellm_trace_id": litellm_trace_id, + "hf_model_name": hf_model_name, + "custom_prompt_dict": custom_prompt_dict, + "litellm_metadata": litellm_metadata, + 
"disable_add_transform_inline_image_block": disable_add_transform_inline_image_block, + "drop_params": drop_params, + "prompt_id": prompt_id, + "prompt_variables": prompt_variables, + "async_call": async_call, + "ssl_verify": ssl_verify, + "merge_reasoning_content_in_choices": merge_reasoning_content_in_choices, + } + return litellm_params diff --git a/litellm/litellm_core_utils/get_llm_provider_logic.py b/litellm/litellm_core_utils/get_llm_provider_logic.py index 302865629a..a64e7dd700 100644 --- a/litellm/litellm_core_utils/get_llm_provider_logic.py +++ b/litellm/litellm_core_utils/get_llm_provider_logic.py @@ -490,6 +490,7 @@ def _get_openai_compatible_provider_info( # noqa: PLR0915 or get_secret("DEEPSEEK_API_BASE") or "https://api.deepseek.com/beta" ) # type: ignore + dynamic_api_key = api_key or get_secret_str("DEEPSEEK_API_KEY") elif custom_llm_provider == "fireworks_ai": # fireworks is openai compatible, we just need to set this to custom_openai and have the api_base be https://api.fireworks.ai/inference/v1 diff --git a/litellm/litellm_core_utils/get_model_cost_map.py b/litellm/litellm_core_utils/get_model_cost_map.py new file mode 100644 index 0000000000..b8bdaee19c --- /dev/null +++ b/litellm/litellm_core_utils/get_model_cost_map.py @@ -0,0 +1,45 @@ +""" +Pulls the cost + context window + provider route for known models from https://github.com/BerriAI/litellm/blob/main/model_prices_and_context_window.json + +This can be disabled by setting the LITELLM_LOCAL_MODEL_COST_MAP environment variable to True. 
+ +``` +export LITELLM_LOCAL_MODEL_COST_MAP=True +``` +""" + +import os + +import httpx + + +def get_model_cost_map(url: str): + if ( + os.getenv("LITELLM_LOCAL_MODEL_COST_MAP", False) + or os.getenv("LITELLM_LOCAL_MODEL_COST_MAP", False) == "True" + ): + import importlib.resources + import json + + with importlib.resources.open_text( + "litellm", "model_prices_and_context_window_backup.json" + ) as f: + content = json.load(f) + return content + + try: + response = httpx.get( + url, timeout=5 + ) # set a 5 second timeout for the get request + response.raise_for_status() # Raise an exception if the request is unsuccessful + content = response.json() + return content + except Exception: + import importlib.resources + import json + + with importlib.resources.open_text( + "litellm", "model_prices_and_context_window_backup.json" + ) as f: + content = json.load(f) + return content diff --git a/litellm/litellm_core_utils/get_supported_openai_params.py b/litellm/litellm_core_utils/get_supported_openai_params.py index e251784f4e..3d4f8cef6f 100644 --- a/litellm/litellm_core_utils/get_supported_openai_params.py +++ b/litellm/litellm_core_utils/get_supported_openai_params.py @@ -81,7 +81,7 @@ def get_supported_openai_params( # noqa: PLR0915 elif custom_llm_provider == "openai": return litellm.OpenAIConfig().get_supported_openai_params(model=model) elif custom_llm_provider == "azure": - if litellm.AzureOpenAIO1Config().is_o1_model(model=model): + if litellm.AzureOpenAIO1Config().is_o_series_model(model=model): return litellm.AzureOpenAIO1Config().get_supported_openai_params( model=model ) @@ -121,21 +121,26 @@ def get_supported_openai_params( # noqa: PLR0915 ) elif custom_llm_provider == "vertex_ai" or custom_llm_provider == "vertex_ai_beta": if request_type == "chat_completion": - if model.startswith("meta/"): - return litellm.VertexAILlama3Config().get_supported_openai_params() if model.startswith("mistral"): return 
litellm.MistralConfig().get_supported_openai_params(model=model) - if model.startswith("codestral"): + elif model.startswith("codestral"): return ( litellm.CodestralTextCompletionConfig().get_supported_openai_params( model=model ) ) - if model.startswith("claude"): + elif model.startswith("claude"): return litellm.VertexAIAnthropicConfig().get_supported_openai_params( model=model ) - return litellm.VertexGeminiConfig().get_supported_openai_params(model=model) + elif model.startswith("gemini"): + return litellm.VertexGeminiConfig().get_supported_openai_params( + model=model + ) + else: + return litellm.VertexAILlama3Config().get_supported_openai_params( + model=model + ) elif request_type == "embeddings": return litellm.VertexAITextEmbeddingConfig().get_supported_openai_params() elif custom_llm_provider == "sagemaker": diff --git a/litellm/litellm_core_utils/litellm_logging.py b/litellm/litellm_core_utils/litellm_logging.py index 97884e9d29..a3d9a57a49 100644 --- a/litellm/litellm_core_utils/litellm_logging.py +++ b/litellm/litellm_core_utils/litellm_logging.py @@ -25,6 +25,7 @@ from litellm import ( turn_off_message_logging, ) from litellm._logging import _is_debugging_on, verbose_logger +from litellm.batches.batch_utils import _handle_completed_batch from litellm.caching.caching import DualCache, InMemoryCache from litellm.caching.caching_handler import LLMCachingHandler from litellm.cost_calculator import _select_model_name_for_cost_calc @@ -32,6 +33,8 @@ from litellm.integrations.custom_guardrail import CustomGuardrail from litellm.integrations.custom_logger import CustomLogger from litellm.integrations.mlflow import MlflowLogger from litellm.integrations.pagerduty.pagerduty import PagerDutyAlerting +from litellm.litellm_core_utils.get_litellm_params import get_litellm_params +from litellm.litellm_core_utils.model_param_helper import ModelParamHelper from litellm.litellm_core_utils.redact_messages import ( redact_message_input_output_from_custom_logger, 
redact_message_input_output_from_logging, @@ -48,9 +51,11 @@ from litellm.types.utils import ( CallTypes, EmbeddingResponse, ImageResponse, + LiteLLMBatch, LiteLLMLoggingBaseClass, ModelResponse, ModelResponseStream, + RawRequestTypedDict, StandardCallbackDynamicParams, StandardLoggingAdditionalHeaders, StandardLoggingHiddenParams, @@ -68,7 +73,8 @@ from litellm.types.utils import ( from litellm.utils import _get_base_model_from_metadata, executor, print_verbose from ..integrations.argilla import ArgillaLogger -from ..integrations.arize_ai import ArizeLogger +from ..integrations.arize.arize import ArizeLogger +from ..integrations.arize.arize_phoenix import ArizePhoenixLogger from ..integrations.athina import AthinaLogger from ..integrations.azure_storage.azure_storage import AzureBlobStorageLogger from ..integrations.braintrust_logging import BraintrustLogger @@ -77,6 +83,7 @@ from ..integrations.datadog.datadog_llm_obs import DataDogLLMObsLogger from ..integrations.dynamodb import DyanmoDBLogger from ..integrations.galileo import GalileoObserve from ..integrations.gcs_bucket.gcs_bucket import GCSBucketLogger +from ..integrations.gcs_pubsub.pub_sub import GcsPubSubLogger from ..integrations.greenscale import GreenscaleLogger from ..integrations.helicone import HeliconeLogger from ..integrations.humanloop import HumanloopLogger @@ -197,7 +204,9 @@ class Logging(LiteLLMLoggingBaseClass): dynamic_async_failure_callbacks: Optional[ List[Union[str, Callable, CustomLogger]] ] = None, + applied_guardrails: Optional[List[str]] = None, kwargs: Optional[Dict] = None, + log_raw_request_response: bool = False, ): _input: Optional[str] = messages # save original value of messages if messages is not None: @@ -226,6 +235,7 @@ class Logging(LiteLLMLoggingBaseClass): self.sync_streaming_chunks: List[Any] = ( [] ) # for generating complete stream response + self.log_raw_request_response = log_raw_request_response # Initialize dynamic callbacks self.dynamic_input_callbacks: Optional[ 
@@ -256,10 +266,20 @@ class Logging(LiteLLMLoggingBaseClass): self.completion_start_time: Optional[datetime.datetime] = None self._llm_caching_handler: Optional[LLMCachingHandler] = None + # INITIAL LITELLM_PARAMS + litellm_params = {} + if kwargs is not None: + litellm_params = get_litellm_params(**kwargs) + litellm_params = scrub_sensitive_keys_in_metadata(litellm_params) + + self.litellm_params = litellm_params + self.model_call_details: Dict[str, Any] = { "litellm_trace_id": litellm_trace_id, "litellm_call_id": litellm_call_id, "input": _input, + "litellm_params": litellm_params, + "applied_guardrails": applied_guardrails, } def process_dynamic_callbacks(self): @@ -358,7 +378,10 @@ class Logging(LiteLLMLoggingBaseClass): if model is not None: self.model = model self.user = user - self.litellm_params = scrub_sensitive_keys_in_metadata(litellm_params) + self.litellm_params = { + **self.litellm_params, + **scrub_sensitive_keys_in_metadata(litellm_params), + } self.logger_fn = litellm_params.get("logger_fn", None) verbose_logger.debug(f"self.optional_params: {self.optional_params}") @@ -433,6 +456,18 @@ class Logging(LiteLLMLoggingBaseClass): return model, messages, non_default_params + def _get_raw_request_body(self, data: Optional[Union[dict, str]]) -> dict: + if data is None: + return {"error": "Received empty dictionary for raw request body"} + if isinstance(data, str): + try: + return json.loads(data) + except Exception: + return { + "error": "Unable to parse raw request body. 
Got - {}".format(data) + } + return data + def _pre_call(self, input, api_key, model=None, additional_args={}): """ Common helper function across the sync + async pre-call function @@ -448,6 +483,7 @@ class Logging(LiteLLMLoggingBaseClass): self.model_call_details["model"] = model def pre_call(self, input, api_key, model=None, additional_args={}): # noqa: PLR0915 + # Log the exact input to the LLM API litellm.error_logs["PRE_CALL"] = locals() try: @@ -465,28 +501,54 @@ class Logging(LiteLLMLoggingBaseClass): additional_args=additional_args, ) # log raw request to provider (like LangFuse) -- if opted in. - if log_raw_request_response is True: + if ( + self.log_raw_request_response is True + or log_raw_request_response is True + ): + _litellm_params = self.model_call_details.get("litellm_params", {}) _metadata = _litellm_params.get("metadata", {}) or {} try: # [Non-blocking Extra Debug Information in metadata] - if ( - turn_off_message_logging is not None - and turn_off_message_logging is True - ): + if turn_off_message_logging is True: + _metadata["raw_request"] = ( "redacted by litellm. 
\ 'litellm.turn_off_message_logging=True'" ) else: + curl_command = self._get_request_curl_command( api_base=additional_args.get("api_base", ""), headers=additional_args.get("headers", {}), additional_args=additional_args, data=additional_args.get("complete_input_dict", {}), ) + _metadata["raw_request"] = str(curl_command) + # split up, so it's easier to parse in the UI + self.model_call_details["raw_request_typed_dict"] = ( + RawRequestTypedDict( + raw_request_api_base=str( + additional_args.get("api_base") or "" + ), + raw_request_body=self._get_raw_request_body( + additional_args.get("complete_input_dict", {}) + ), + raw_request_headers=self._get_masked_headers( + additional_args.get("headers", {}) or {}, + ignore_sensitive_headers=True, + ), + error=None, + ) + ) except Exception as e: + self.model_call_details["raw_request_typed_dict"] = ( + RawRequestTypedDict( + error=str(e), + ) + ) + traceback.print_exc() _metadata["raw_request"] = ( "Unable to Log \ raw request: {}".format( @@ -611,10 +673,6 @@ class Logging(LiteLLMLoggingBaseClass): masked_api_base = api_base self.model_call_details["litellm_params"]["api_base"] = masked_api_base - verbose_logger.debug( - "PRE-API-CALL ADDITIONAL ARGS: %s", additional_args - ) - curl_command = self._get_request_curl_command( api_base=api_base, headers=headers, @@ -623,9 +681,14 @@ class Logging(LiteLLMLoggingBaseClass): ) verbose_logger.debug(f"\033[92m{curl_command}\033[0m\n") + def _get_request_body(self, data: dict) -> str: + return str(data) + def _get_request_curl_command( - self, api_base: str, headers: dict, additional_args: dict, data: dict + self, api_base: str, headers: Optional[dict], additional_args: dict, data: dict ) -> str: + if headers is None: + headers = {} curl_command = "\n\nPOST Request Sent from LiteLLM:\n" curl_command += "curl -X POST \\\n" curl_command += f"{api_base} \\\n" @@ -633,11 +696,10 @@ class Logging(LiteLLMLoggingBaseClass): formatted_headers = " ".join( [f"-H '{k}: {v}'" for k, v in 
masked_headers.items()] ) - curl_command += ( f"{formatted_headers} \\\n" if formatted_headers.strip() != "" else "" ) - curl_command += f"-d '{str(data)}'\n" + curl_command += f"-d '{self._get_request_body(data)}'\n" if additional_args.get("request_str", None) is not None: # print the sagemaker / bedrock client request curl_command = "\nRequest Sent from LiteLLM:\n" @@ -646,12 +708,20 @@ class Logging(LiteLLMLoggingBaseClass): curl_command = str(self.model_call_details) return curl_command - def _get_masked_headers(self, headers: dict): + def _get_masked_headers( + self, headers: dict, ignore_sensitive_headers: bool = False + ) -> dict: """ Internal debugging helper function Masks the headers of the request sent from LiteLLM """ + sensitive_keywords = [ + "authorization", + "token", + "key", + "secret", + ] return { k: ( (v[:-44] + "*" * 44) @@ -659,6 +729,11 @@ class Logging(LiteLLMLoggingBaseClass): else "*****" ) for k, v in headers.items() + if not ignore_sensitive_headers + or not any( + sensitive_keyword in k.lower() + for sensitive_keyword in sensitive_keywords + ) } def post_call( @@ -784,6 +859,7 @@ class Logging(LiteLLMLoggingBaseClass): used for consistent cost calculation across response headers + logging integrations. 
""" + ## RESPONSE COST ## custom_pricing = use_custom_pricing_for_model( litellm_params=( @@ -818,7 +894,7 @@ class Logging(LiteLLMLoggingBaseClass): except Exception as e: # error creating kwargs for cost calculation debug_info = StandardLoggingModelCostFailureDebugInformation( error_str=str(e), - traceback_str=traceback.format_exc(), + traceback_str=_get_traceback_str_for_error(str(e)), ) verbose_logger.debug( f"response_cost_failure_debug_information: {debug_info}" @@ -832,6 +908,7 @@ class Logging(LiteLLMLoggingBaseClass): response_cost = litellm.response_cost_calculator( **response_cost_calculator_kwargs ) + verbose_logger.debug(f"response_cost: {response_cost}") return response_cost except Exception as e: # error calculating cost debug_info = StandardLoggingModelCostFailureDebugInformation( @@ -855,6 +932,44 @@ class Logging(LiteLLMLoggingBaseClass): return None + async def _response_cost_calculator_async( + self, + result: Union[ + ModelResponse, + ModelResponseStream, + EmbeddingResponse, + ImageResponse, + TranscriptionResponse, + TextCompletionResponse, + HttpxBinaryResponseContent, + RerankResponse, + Batch, + FineTuningJob, + ], + cache_hit: Optional[bool] = None, + ) -> Optional[float]: + return self._response_cost_calculator(result=result, cache_hit=cache_hit) + + def should_run_callback( + self, callback: litellm.CALLBACK_TYPES, litellm_params: dict, event_hook: str + ) -> bool: + + if litellm.global_disable_no_log_param: + return True + + if litellm_params.get("no-log", False) is True: + # proxy cost tracking cal backs should run + + if not ( + isinstance(callback, CustomLogger) + and "_PROXY_" in callback.__class__.__name__ + ): + verbose_logger.debug( + f"no-log request, skipping logging for {event_hook} event" + ) + return False + return True + def _success_handler_helper_fn( self, result=None, @@ -876,6 +991,9 @@ class Logging(LiteLLMLoggingBaseClass): self.model_call_details["log_event_type"] = "successful_api_call" 
self.model_call_details["end_time"] = end_time self.model_call_details["cache_hit"] = cache_hit + + if self.call_type == CallTypes.anthropic_messages.value: + result = self._handle_anthropic_messages_response_logging(result=result) ## if model in model cost map - log the response cost ## else set cost to None if ( @@ -892,8 +1010,8 @@ class Logging(LiteLLMLoggingBaseClass): or isinstance(result, TextCompletionResponse) or isinstance(result, HttpxBinaryResponseContent) # tts or isinstance(result, RerankResponse) - or isinstance(result, Batch) or isinstance(result, FineTuningJob) + or isinstance(result, LiteLLMBatch) ): ## HIDDEN PARAMS ## hidden_params = getattr(result, "_hidden_params", {}) @@ -997,21 +1115,13 @@ class Logging(LiteLLMLoggingBaseClass): ] = None if "complete_streaming_response" in self.model_call_details: return # break out of this. - if self.stream and ( - isinstance(result, litellm.ModelResponse) - or isinstance(result, TextCompletionResponse) - or isinstance(result, ModelResponseStream) - ): - complete_streaming_response: Optional[ - Union[ModelResponse, TextCompletionResponse] - ] = _assemble_complete_response_from_streaming_chunks( - result=result, - start_time=start_time, - end_time=end_time, - request_kwargs=self.model_call_details, - streaming_chunks=self.sync_streaming_chunks, - is_async=False, - ) + complete_streaming_response = self._get_assembled_streaming_response( + result=result, + start_time=start_time, + end_time=end_time, + is_async=False, + streaming_chunks=self.sync_streaming_chunks, + ) if complete_streaming_response is not None: verbose_logger.debug( "Logging Details LiteLLM-Success Call streaming complete" @@ -1060,14 +1170,13 @@ class Logging(LiteLLMLoggingBaseClass): for callback in callbacks: try: litellm_params = self.model_call_details.get("litellm_params", {}) - if litellm_params.get("no-log", False) is True: - # proxy cost tracking cal backs should run - if not ( - isinstance(callback, CustomLogger) - and "_PROXY_" in 
callback.__class__.__name__ - ): - verbose_logger.info("no-log request, skipping logging") - continue + should_run = self.should_run_callback( + callback=callback, + litellm_params=litellm_params, + event_hook="success_handler", + ) + if not should_run: + continue if callback == "promptlayer" and promptLayerLogger is not None: print_verbose("reaches promptlayer for logging!") promptLayerLogger.log_event( @@ -1224,13 +1333,12 @@ class Logging(LiteLLMLoggingBaseClass): in_memory_dynamic_logger_cache=in_memory_dynamic_logger_cache, ) if langfuse_logger_to_use is not None: - _response = langfuse_logger_to_use._old_log_event( + _response = langfuse_logger_to_use.log_event_on_langfuse( kwargs=kwargs, response_obj=result, start_time=start_time, end_time=end_time, user_id=kwargs.get("user", None), - print_verbose=print_verbose, ) if _response is not None and isinstance(_response, dict): _trace_id = _response.get("trace_id", None) @@ -1499,6 +1607,20 @@ class Logging(LiteLLMLoggingBaseClass): print_verbose( "Logging Details LiteLLM-Async Success Call, cache_hit={}".format(cache_hit) ) + + ## CALCULATE COST FOR BATCH JOBS + if self.call_type == CallTypes.aretrieve_batch.value and isinstance( + result, LiteLLMBatch + ): + + response_cost, batch_usage, batch_models = await _handle_completed_batch( + batch=result, custom_llm_provider=self.custom_llm_provider + ) + + result._hidden_params["response_cost"] = response_cost + result._hidden_params["batch_models"] = batch_models + result.usage = batch_usage + start_time, end_time, result = self._success_handler_helper_fn( start_time=start_time, end_time=end_time, @@ -1506,27 +1628,19 @@ class Logging(LiteLLMLoggingBaseClass): cache_hit=cache_hit, standard_logging_object=kwargs.get("standard_logging_object", None), ) + ## BUILD COMPLETE STREAMED RESPONSE if "async_complete_streaming_response" in self.model_call_details: return # break out of this. 
complete_streaming_response: Optional[ Union[ModelResponse, TextCompletionResponse] - ] = None - if self.stream is True and ( - isinstance(result, litellm.ModelResponse) - or isinstance(result, litellm.ModelResponseStream) - or isinstance(result, TextCompletionResponse) - ): - complete_streaming_response: Optional[ - Union[ModelResponse, TextCompletionResponse] - ] = _assemble_complete_response_from_streaming_chunks( - result=result, - start_time=start_time, - end_time=end_time, - request_kwargs=self.model_call_details, - streaming_chunks=self.streaming_chunks, - is_async=True, - ) + ] = self._get_assembled_streaming_response( + result=result, + start_time=start_time, + end_time=end_time, + is_async=True, + streaming_chunks=self.streaming_chunks, + ) if complete_streaming_response is not None: print_verbose("Async success callbacks: Got a complete streaming response") @@ -1614,18 +1728,14 @@ class Logging(LiteLLMLoggingBaseClass): for callback in callbacks: # check if callback can run for this request litellm_params = self.model_call_details.get("litellm_params", {}) - if litellm_params.get("no-log", False) is True: - # proxy cost tracking cal backs should run - if not ( - isinstance(callback, CustomLogger) - and "_PROXY_" in callback.__class__.__name__ - ): - print_verbose("no-log request, skipping logging") - continue + should_run = self.should_run_callback( + callback=callback, + litellm_params=litellm_params, + event_hook="async_success_handler", + ) + if not should_run: + continue try: - if kwargs.get("no-log", False) is True: - print_verbose("no-log request, skipping logging") - continue if callback == "openmeter" and openMeterLogger is not None: if self.stream is True: if ( @@ -1947,12 +2057,11 @@ class Logging(LiteLLMLoggingBaseClass): standard_callback_dynamic_params=self.standard_callback_dynamic_params, in_memory_dynamic_logger_cache=in_memory_dynamic_logger_cache, ) - _response = langfuse_logger_to_use._old_log_event( + _response = 
langfuse_logger_to_use.log_event_on_langfuse( start_time=start_time, end_time=end_time, response_obj=None, user_id=kwargs.get("user", None), - print_verbose=print_verbose, status_message=str(exception), level="ERROR", kwargs=self.model_call_details, @@ -2232,6 +2341,63 @@ class Logging(LiteLLMLoggingBaseClass): _new_callbacks.append(_c) return _new_callbacks + def _get_assembled_streaming_response( + self, + result: Union[ModelResponse, TextCompletionResponse, ModelResponseStream, Any], + start_time: datetime.datetime, + end_time: datetime.datetime, + is_async: bool, + streaming_chunks: List[Any], + ) -> Optional[Union[ModelResponse, TextCompletionResponse]]: + if isinstance(result, ModelResponse): + return result + elif isinstance(result, TextCompletionResponse): + return result + elif isinstance(result, ModelResponseStream): + complete_streaming_response: Optional[ + Union[ModelResponse, TextCompletionResponse] + ] = _assemble_complete_response_from_streaming_chunks( + result=result, + start_time=start_time, + end_time=end_time, + request_kwargs=self.model_call_details, + streaming_chunks=streaming_chunks, + is_async=is_async, + ) + return complete_streaming_response + return None + + def _handle_anthropic_messages_response_logging(self, result: Any) -> ModelResponse: + """ + Handles logging for Anthropic messages responses. + + Args: + result: The response object from the model call + + Returns: + The the response object from the model call + + - For Non-streaming responses, we need to transform the response to a ModelResponse object. + - For streaming responses, anthropic_messages handler calls success_handler with a assembled ModelResponse. 
+ """ + if self.stream and isinstance(result, ModelResponse): + return result + + result = litellm.AnthropicConfig().transform_response( + raw_response=self.model_call_details["httpx_response"], + model_response=litellm.ModelResponse(), + model=self.model, + messages=[], + logging_obj=self, + optional_params={}, + api_key="", + request_data={}, + encoding=litellm.encoding, + json_mode=False, + litellm_params={}, + ) + return result + def set_callbacks(callback_list, function_id=None): # noqa: PLR0915 """ @@ -2440,13 +2606,18 @@ def _init_custom_logger_compatible_class( # noqa: PLR0915 OpenTelemetryConfig, ) - otel_config = ArizeLogger.get_arize_opentelemetry_config() - if otel_config is None: + arize_config = ArizeLogger.get_arize_config() + if arize_config.endpoint is None: raise ValueError( "No valid endpoint found for Arize, please set 'ARIZE_ENDPOINT' to your GRPC endpoint or 'ARIZE_HTTP_ENDPOINT' to your HTTP endpoint" ) + otel_config = OpenTelemetryConfig( + exporter=arize_config.protocol, + endpoint=arize_config.endpoint, + ) + os.environ["OTEL_EXPORTER_OTLP_TRACES_HEADERS"] = ( - f"space_key={os.getenv('ARIZE_SPACE_KEY')},api_key={os.getenv('ARIZE_API_KEY')}" + f"space_key={arize_config.space_key},api_key={arize_config.api_key}" ) for callback in _in_memory_loggers: if ( @@ -2457,6 +2628,35 @@ def _init_custom_logger_compatible_class( # noqa: PLR0915 _otel_logger = OpenTelemetry(config=otel_config, callback_name="arize") _in_memory_loggers.append(_otel_logger) return _otel_logger # type: ignore + elif logging_integration == "arize_phoenix": + from litellm.integrations.opentelemetry import ( + OpenTelemetry, + OpenTelemetryConfig, + ) + + arize_phoenix_config = ArizePhoenixLogger.get_arize_phoenix_config() + otel_config = OpenTelemetryConfig( + exporter=arize_phoenix_config.protocol, + endpoint=arize_phoenix_config.endpoint, + ) + + # auth can be disabled on local deployments of arize phoenix + if arize_phoenix_config.otlp_auth_headers is not None: + 
os.environ["OTEL_EXPORTER_OTLP_TRACES_HEADERS"] = ( + arize_phoenix_config.otlp_auth_headers + ) + + for callback in _in_memory_loggers: + if ( + isinstance(callback, OpenTelemetry) + and callback.callback_name == "arize_phoenix" + ): + return callback # type: ignore + _otel_logger = OpenTelemetry( + config=otel_config, callback_name="arize_phoenix" + ) + _in_memory_loggers.append(_otel_logger) + return _otel_logger # type: ignore elif logging_integration == "otel": from litellm.integrations.opentelemetry import OpenTelemetry @@ -2571,6 +2771,13 @@ def _init_custom_logger_compatible_class( # noqa: PLR0915 pagerduty_logger = PagerDutyAlerting(**custom_logger_init_args) _in_memory_loggers.append(pagerduty_logger) return pagerduty_logger # type: ignore + elif logging_integration == "gcs_pubsub": + for callback in _in_memory_loggers: + if isinstance(callback, GcsPubSubLogger): + return callback + _gcs_pubsub_logger = GcsPubSubLogger() + _in_memory_loggers.append(_gcs_pubsub_logger) + return _gcs_pubsub_logger # type: ignore elif logging_integration == "humanloop": for callback in _in_memory_loggers: if isinstance(callback, HumanloopLogger): @@ -2704,6 +2911,10 @@ def get_custom_logger_compatible_class( # noqa: PLR0915 for callback in _in_memory_loggers: if isinstance(callback, PagerDutyAlerting): return callback + elif logging_integration == "gcs_pubsub": + for callback in _in_memory_loggers: + if isinstance(callback, GcsPubSubLogger): + return callback return None except Exception as e: @@ -2807,6 +3018,7 @@ class StandardLoggingPayloadSetup: metadata: Optional[Dict[str, Any]], litellm_params: Optional[dict] = None, prompt_integration: Optional[str] = None, + applied_guardrails: Optional[List[str]] = None, ) -> StandardLoggingMetadata: """ Clean and filter the metadata dictionary to include only the specified keys in StandardLoggingMetadata. 
@@ -2821,6 +3033,7 @@ class StandardLoggingPayloadSetup: - If the input metadata is None or not a dictionary, an empty StandardLoggingMetadata object is returned. - If 'user_api_key' is present in metadata and is a valid SHA256 hash, it's stored as 'user_api_key_hash'. """ + prompt_management_metadata: Optional[ StandardLoggingPromptManagementMetadata ] = None @@ -2845,11 +3058,13 @@ class StandardLoggingPayloadSetup: user_api_key_org_id=None, user_api_key_user_id=None, user_api_key_team_alias=None, + user_api_key_user_email=None, spend_logs_metadata=None, requester_ip_address=None, requester_metadata=None, user_api_key_end_user_id=None, prompt_management_metadata=prompt_management_metadata, + applied_guardrails=applied_guardrails, ) if isinstance(metadata, dict): # Filter the metadata dictionary to include only the specified keys @@ -2999,6 +3214,7 @@ class StandardLoggingPayloadSetup: response_cost=None, additional_headers=None, litellm_overhead_time_ms=None, + batch_models=None, ) if hidden_params is not None: for key in StandardLoggingHiddenParams.__annotations__.keys(): @@ -3028,10 +3244,26 @@ class StandardLoggingPayloadSetup: str(original_exception.__class__.__name__) if original_exception else "" ) _llm_provider_in_exception = getattr(original_exception, "llm_provider", "") + + # Get traceback information (first 100 lines) + traceback_info = "" + if original_exception: + tb = getattr(original_exception, "__traceback__", None) + if tb: + import traceback + + tb_lines = traceback.format_tb(tb) + traceback_info = "".join(tb_lines[:100]) # Limit to first 100 lines + + # Get additional error details + error_message = str(original_exception) + return StandardLoggingPayloadErrorInformation( error_code=error_status, error_class=error_class, llm_provider=_llm_provider_in_exception, + traceback=traceback_info, + error_message=error_message if original_exception else "", ) @staticmethod @@ -3096,6 +3328,7 @@ def get_standard_logging_object_payload( api_base=None, 
response_cost=None, litellm_overhead_time_ms=None, + batch_models=None, ) ) @@ -3148,6 +3381,7 @@ def get_standard_logging_object_payload( metadata=metadata, litellm_params=litellm_params, prompt_integration=kwargs.get("prompt_integration", None), + applied_guardrails=kwargs.get("applied_guardrails", None), ) _request_body = proxy_server_request.get("body", {}) @@ -3227,7 +3461,9 @@ def get_standard_logging_object_payload( requester_ip_address=clean_metadata.get("requester_ip_address", None), messages=kwargs.get("messages"), response=final_response_obj, - model_parameters=kwargs.get("optional_params", None), + model_parameters=ModelParamHelper.get_standard_logging_model_parameters( + kwargs.get("optional_params", None) or {} + ), hidden_params=clean_hidden_params, model_map_information=model_cost_information, error_str=error_str, @@ -3277,12 +3513,14 @@ def get_standard_logging_metadata( user_api_key_team_id=None, user_api_key_org_id=None, user_api_key_user_id=None, + user_api_key_user_email=None, user_api_key_team_alias=None, spend_logs_metadata=None, requester_ip_address=None, requester_metadata=None, user_api_key_end_user_id=None, prompt_management_metadata=None, + applied_guardrails=None, ) if isinstance(metadata, dict): # Filter the metadata dictionary to include only the specified keys @@ -3375,6 +3613,7 @@ def create_dummy_standard_logging_payload() -> StandardLoggingPayload: response_cost=None, additional_headers=None, litellm_overhead_time_ms=None, + batch_models=None, ) # Convert numeric values to appropriate types diff --git a/litellm/litellm_core_utils/llm_response_utils/convert_dict_to_response.py b/litellm/litellm_core_utils/llm_response_utils/convert_dict_to_response.py index 28d546796d..ebb1032a19 100644 --- a/litellm/litellm_core_utils/llm_response_utils/convert_dict_to_response.py +++ b/litellm/litellm_core_utils/llm_response_utils/convert_dict_to_response.py @@ -1,12 +1,15 @@ import asyncio import json +import re import time import traceback 
import uuid -from typing import Dict, Iterable, List, Literal, Optional, Union +from typing import Dict, Iterable, List, Literal, Optional, Tuple, Union import litellm from litellm._logging import verbose_logger +from litellm.constants import RESPONSE_FORMAT_TOOL_NAME +from litellm.types.llms.openai import ChatCompletionThinkingBlock from litellm.types.utils import ( ChatCompletionDeltaToolCall, ChatCompletionMessageToolCall, @@ -126,12 +129,7 @@ def convert_to_streaming_response(response_object: Optional[dict] = None): model_response_object = ModelResponse(stream=True) choice_list = [] for idx, choice in enumerate(response_object["choices"]): - delta = Delta( - content=choice["message"].get("content", None), - role=choice["message"]["role"], - function_call=choice["message"].get("function_call", None), - tool_calls=choice["message"].get("tool_calls", None), - ) + delta = Delta(**choice["message"]) finish_reason = choice.get("finish_reason", None) if finish_reason is None: # gpt-4 vision can return 'finish_reason' or 'finish_details' @@ -220,6 +218,45 @@ def _handle_invalid_parallel_tool_calls( return tool_calls +def _parse_content_for_reasoning( + message_text: Optional[str], +) -> Tuple[Optional[str], Optional[str]]: + """ + Parse the content for reasoning + + Returns: + - reasoning_content: The content of the reasoning + - content: The content of the message + """ + if not message_text: + return None, message_text + + reasoning_match = re.match(r"(.*?)(.*)", message_text, re.DOTALL) + + if reasoning_match: + return reasoning_match.group(1), reasoning_match.group(2) + + return None, message_text + + +def _extract_reasoning_content(message: dict) -> Tuple[Optional[str], Optional[str]]: + """ + Extract reasoning content and main content from a message. 
+ + Args: + message (dict): The message dictionary that may contain reasoning_content + + Returns: + tuple[Optional[str], Optional[str]]: A tuple of (reasoning_content, content) + """ + if "reasoning_content" in message: + return message["reasoning_content"], message["content"] + elif "reasoning" in message: + return message["reasoning"], message["content"] + else: + return _parse_content_for_reasoning(message.get("content")) + + class LiteLLMResponseObjectHandler: @staticmethod @@ -313,6 +350,23 @@ class LiteLLMResponseObjectHandler: return transformed_logprobs +def _should_convert_tool_call_to_json_mode( + tool_calls: Optional[List[ChatCompletionMessageToolCall]] = None, + convert_tool_call_to_json_mode: Optional[bool] = None, +) -> bool: + """ + Determine if tool calls should be converted to JSON mode + """ + if ( + convert_tool_call_to_json_mode + and tool_calls is not None + and len(tool_calls) == 1 + and tool_calls[0]["function"]["name"] == RESPONSE_FORMAT_TOOL_NAME + ): + return True + return False + + def convert_to_model_response_object( # noqa: PLR0915 response_object: Optional[dict] = None, model_response_object: Optional[ @@ -397,10 +451,9 @@ def convert_to_model_response_object( # noqa: PLR0915 message: Optional[Message] = None finish_reason: Optional[str] = None - if ( - convert_tool_call_to_json_mode - and tool_calls is not None - and len(tool_calls) == 1 + if _should_convert_tool_call_to_json_mode( + tool_calls=tool_calls, + convert_tool_call_to_json_mode=convert_tool_call_to_json_mode, ): # to support 'json_schema' logic on older models json_mode_content_str: Optional[str] = tool_calls[0][ @@ -415,13 +468,32 @@ def convert_to_model_response_object( # noqa: PLR0915 for field in choice["message"].keys(): if field not in message_keys: provider_specific_fields[field] = choice["message"][field] + + # Handle reasoning models that display `reasoning_content` within `content` + reasoning_content, content = _extract_reasoning_content( + choice["message"] + 
) + + # Handle thinking models that display `thinking_blocks` within `content` + thinking_blocks: Optional[List[ChatCompletionThinkingBlock]] = None + if "thinking_blocks" in choice["message"]: + thinking_blocks = choice["message"]["thinking_blocks"] + provider_specific_fields["thinking_blocks"] = thinking_blocks + + if reasoning_content: + provider_specific_fields["reasoning_content"] = ( + reasoning_content + ) + message = Message( - content=choice["message"].get("content", None), + content=content, role=choice["message"]["role"] or "assistant", function_call=choice["message"].get("function_call", None), tool_calls=tool_calls, audio=choice["message"].get("audio", None), provider_specific_fields=provider_specific_fields, + reasoning_content=reasoning_content, + thinking_blocks=thinking_blocks, ) finish_reason = choice.get("finish_reason", None) if finish_reason is None: diff --git a/litellm/litellm_core_utils/logging_callback_manager.py b/litellm/litellm_core_utils/logging_callback_manager.py new file mode 100644 index 0000000000..a20e826c43 --- /dev/null +++ b/litellm/litellm_core_utils/logging_callback_manager.py @@ -0,0 +1,256 @@ +from typing import Callable, List, Set, Union + +import litellm +from litellm._logging import verbose_logger +from litellm.integrations.additional_logging_utils import AdditionalLoggingUtils +from litellm.integrations.custom_logger import CustomLogger + + +class LoggingCallbackManager: + """ + A centralized class that allows easy add / remove callbacks for litellm. 
+ + Goals of this class: + - Prevent adding duplicate callbacks / success_callback / failure_callback + - Keep a reasonable MAX_CALLBACKS limit (this ensures callbacks don't exponentially grow and consume CPU Resources) + """ + + # healthy maximum number of callbacks - unlikely someone needs more than 20 + MAX_CALLBACKS = 30 + + def add_litellm_input_callback(self, callback: Union[CustomLogger, str]): + """ + Add a input callback to litellm.input_callback + """ + self._safe_add_callback_to_list( + callback=callback, parent_list=litellm.input_callback + ) + + def add_litellm_service_callback( + self, callback: Union[CustomLogger, str, Callable] + ): + """ + Add a service callback to litellm.service_callback + """ + self._safe_add_callback_to_list( + callback=callback, parent_list=litellm.service_callback + ) + + def add_litellm_callback(self, callback: Union[CustomLogger, str, Callable]): + """ + Add a callback to litellm.callbacks + + Ensures no duplicates are added. + """ + self._safe_add_callback_to_list( + callback=callback, parent_list=litellm.callbacks # type: ignore + ) + + def add_litellm_success_callback( + self, callback: Union[CustomLogger, str, Callable] + ): + """ + Add a success callback to `litellm.success_callback` + """ + self._safe_add_callback_to_list( + callback=callback, parent_list=litellm.success_callback + ) + + def add_litellm_failure_callback( + self, callback: Union[CustomLogger, str, Callable] + ): + """ + Add a failure callback to `litellm.failure_callback` + """ + self._safe_add_callback_to_list( + callback=callback, parent_list=litellm.failure_callback + ) + + def add_litellm_async_success_callback( + self, callback: Union[CustomLogger, Callable, str] + ): + """ + Add a success callback to litellm._async_success_callback + """ + self._safe_add_callback_to_list( + callback=callback, parent_list=litellm._async_success_callback + ) + + def add_litellm_async_failure_callback( + self, callback: Union[CustomLogger, Callable, str] + ): + """ 
+ Add a failure callback to litellm._async_failure_callback + """ + self._safe_add_callback_to_list( + callback=callback, parent_list=litellm._async_failure_callback + ) + + def remove_callback_from_list_by_object( + self, callback_list, obj + ): + """ + Remove callbacks that are methods of a particular object (e.g., router cleanup) + """ + if not isinstance(callback_list, list): # Not list -> do nothing + return + + remove_list=[c for c in callback_list if hasattr(c, '__self__') and c.__self__ == obj] + + for c in remove_list: + callback_list.remove(c) + + + def _add_string_callback_to_list( + self, callback: str, parent_list: List[Union[CustomLogger, Callable, str]] + ): + """ + Add a string callback to a list, if the callback is already in the list, do not add it again. + """ + if callback not in parent_list: + parent_list.append(callback) + else: + verbose_logger.debug( + f"Callback {callback} already exists in {parent_list}, not adding again.." + ) + + def _check_callback_list_size( + self, parent_list: List[Union[CustomLogger, Callable, str]] + ) -> bool: + """ + Check if adding another callback would exceed MAX_CALLBACKS + Returns True if safe to add, False if would exceed limit + """ + if len(parent_list) >= self.MAX_CALLBACKS: + verbose_logger.warning( + f"Cannot add callback - would exceed MAX_CALLBACKS limit of {self.MAX_CALLBACKS}. Current callbacks: {len(parent_list)}" + ) + return False + return True + + def _safe_add_callback_to_list( + self, + callback: Union[CustomLogger, Callable, str], + parent_list: List[Union[CustomLogger, Callable, str]], + ): + """ + Safe add a callback to a list, if the callback is already in the list, do not add it again. + + Ensures no duplicates are added for `str`, `Callable`, and `CustomLogger` callbacks. 
+ """ + # Check max callbacks limit first + if not self._check_callback_list_size(parent_list): + return + + if isinstance(callback, str): + self._add_string_callback_to_list( + callback=callback, parent_list=parent_list + ) + elif isinstance(callback, CustomLogger): + self._add_custom_logger_to_list( + custom_logger=callback, + parent_list=parent_list, + ) + elif callable(callback): + self._add_callback_function_to_list( + callback=callback, parent_list=parent_list + ) + + def _add_callback_function_to_list( + self, callback: Callable, parent_list: List[Union[CustomLogger, Callable, str]] + ): + """ + Add a callback function to a list, if the callback is already in the list, do not add it again. + """ + # Check if the function already exists in the list by comparing function objects + if callback not in parent_list: + parent_list.append(callback) + else: + verbose_logger.debug( + f"Callback function {callback.__name__} already exists in {parent_list}, not adding again.." + ) + + def _add_custom_logger_to_list( + self, + custom_logger: CustomLogger, + parent_list: List[Union[CustomLogger, Callable, str]], + ): + """ + Add a custom logger to a list, if another instance of the same custom logger exists in the list, do not add it again. + """ + # Check if an instance of the same class already exists in the list + custom_logger_key = self._get_custom_logger_key(custom_logger) + custom_logger_type_name = type(custom_logger).__name__ + for existing_logger in parent_list: + if ( + isinstance(existing_logger, CustomLogger) + and self._get_custom_logger_key(existing_logger) == custom_logger_key + ): + verbose_logger.debug( + f"Custom logger of type {custom_logger_type_name}, key: {custom_logger_key} already exists in {parent_list}, not adding again.." 
+ ) + return + parent_list.append(custom_logger) + + def _get_custom_logger_key(self, custom_logger: CustomLogger): + """ + Get a unique key for a custom logger that considers only fundamental instance variables + + Returns: + str: A unique key combining the class name and fundamental instance variables (str, bool, int) + """ + key_parts = [type(custom_logger).__name__] + + # Add only fundamental type instance variables to the key + for attr_name, attr_value in vars(custom_logger).items(): + if not attr_name.startswith("_"): # Skip private attributes + if isinstance(attr_value, (str, bool, int)): + key_parts.append(f"{attr_name}={attr_value}") + + return "-".join(key_parts) + + def _reset_all_callbacks(self): + """ + Reset all callbacks to an empty list + + Note: this is an internal function and should be used sparingly. + """ + litellm.input_callback = [] + litellm.success_callback = [] + litellm.failure_callback = [] + litellm._async_success_callback = [] + litellm._async_failure_callback = [] + litellm.callbacks = [] + + def _get_all_callbacks(self) -> List[Union[CustomLogger, Callable, str]]: + """ + Get all callbacks from litellm.callbacks, litellm.success_callback, litellm.failure_callback, litellm._async_success_callback, litellm._async_failure_callback + """ + return ( + litellm.callbacks + + litellm.success_callback + + litellm.failure_callback + + litellm._async_success_callback + + litellm._async_failure_callback + ) + + def get_active_additional_logging_utils_from_custom_logger( + self, + ) -> Set[AdditionalLoggingUtils]: + """ + Get all custom loggers that are instances of the given class type + + Args: + class_type: The class type to match against (e.g., AdditionalLoggingUtils) + + Returns: + Set[CustomLogger]: Set of custom loggers that are instances of the given class type + """ + all_callbacks = self._get_all_callbacks() + matched_callbacks: Set[AdditionalLoggingUtils] = set() + for callback in all_callbacks: + if isinstance(callback, CustomLogger) 
and isinstance( + callback, AdditionalLoggingUtils + ): + matched_callbacks.add(callback) + return matched_callbacks diff --git a/litellm/litellm_core_utils/mock_functions.py b/litellm/litellm_core_utils/mock_functions.py index a6e560c751..9f62e0479b 100644 --- a/litellm/litellm_core_utils/mock_functions.py +++ b/litellm/litellm_core_utils/mock_functions.py @@ -1,6 +1,12 @@ from typing import List, Optional -from ..types.utils import Embedding, EmbeddingResponse, ImageObject, ImageResponse +from ..types.utils import ( + Embedding, + EmbeddingResponse, + ImageObject, + ImageResponse, + Usage, +) def mock_embedding(model: str, mock_response: Optional[List[float]]): @@ -9,6 +15,7 @@ def mock_embedding(model: str, mock_response: Optional[List[float]]): return EmbeddingResponse( model=model, data=[Embedding(embedding=mock_response, index=0, object="embedding")], + usage=Usage(prompt_tokens=10, completion_tokens=0), ) diff --git a/litellm/litellm_core_utils/model_param_helper.py b/litellm/litellm_core_utils/model_param_helper.py new file mode 100644 index 0000000000..09a2c15a77 --- /dev/null +++ b/litellm/litellm_core_utils/model_param_helper.py @@ -0,0 +1,133 @@ +from typing import Set + +from openai.types.audio.transcription_create_params import TranscriptionCreateParams +from openai.types.chat.completion_create_params import ( + CompletionCreateParamsNonStreaming, + CompletionCreateParamsStreaming, +) +from openai.types.completion_create_params import ( + CompletionCreateParamsNonStreaming as TextCompletionCreateParamsNonStreaming, +) +from openai.types.completion_create_params import ( + CompletionCreateParamsStreaming as TextCompletionCreateParamsStreaming, +) +from openai.types.embedding_create_params import EmbeddingCreateParams + +from litellm.types.rerank import RerankRequest + + +class ModelParamHelper: + + @staticmethod + def get_standard_logging_model_parameters( + model_parameters: dict, + ) -> dict: + """ """ + standard_logging_model_parameters: dict = {} + 
supported_model_parameters = ( + ModelParamHelper._get_relevant_args_to_use_for_logging() + ) + + for key, value in model_parameters.items(): + if key in supported_model_parameters: + standard_logging_model_parameters[key] = value + return standard_logging_model_parameters + + @staticmethod + def get_exclude_params_for_model_parameters() -> Set[str]: + return set(["messages", "prompt", "input"]) + + @staticmethod + def _get_relevant_args_to_use_for_logging() -> Set[str]: + """ + Gets all relevant llm api params besides the ones with prompt content + """ + all_openai_llm_api_params = ModelParamHelper._get_all_llm_api_params() + # Exclude parameters that contain prompt content + combined_kwargs = all_openai_llm_api_params.difference( + set(ModelParamHelper.get_exclude_params_for_model_parameters()) + ) + return combined_kwargs + + @staticmethod + def _get_all_llm_api_params() -> Set[str]: + """ + Gets the supported kwargs for each call type and combines them + """ + chat_completion_kwargs = ( + ModelParamHelper._get_litellm_supported_chat_completion_kwargs() + ) + text_completion_kwargs = ( + ModelParamHelper._get_litellm_supported_text_completion_kwargs() + ) + embedding_kwargs = ModelParamHelper._get_litellm_supported_embedding_kwargs() + transcription_kwargs = ( + ModelParamHelper._get_litellm_supported_transcription_kwargs() + ) + rerank_kwargs = ModelParamHelper._get_litellm_supported_rerank_kwargs() + exclude_kwargs = ModelParamHelper._get_exclude_kwargs() + + combined_kwargs = chat_completion_kwargs.union( + text_completion_kwargs, + embedding_kwargs, + transcription_kwargs, + rerank_kwargs, + ) + combined_kwargs = combined_kwargs.difference(exclude_kwargs) + return combined_kwargs + + @staticmethod + def _get_litellm_supported_chat_completion_kwargs() -> Set[str]: + """ + Get the litellm supported chat completion kwargs + + This follows the OpenAI API Spec + """ + all_chat_completion_kwargs = set( + CompletionCreateParamsNonStreaming.__annotations__.keys() + 
).union(set(CompletionCreateParamsStreaming.__annotations__.keys())) + return all_chat_completion_kwargs + + @staticmethod + def _get_litellm_supported_text_completion_kwargs() -> Set[str]: + """ + Get the litellm supported text completion kwargs + + This follows the OpenAI API Spec + """ + all_text_completion_kwargs = set( + TextCompletionCreateParamsNonStreaming.__annotations__.keys() + ).union(set(TextCompletionCreateParamsStreaming.__annotations__.keys())) + return all_text_completion_kwargs + + @staticmethod + def _get_litellm_supported_rerank_kwargs() -> Set[str]: + """ + Get the litellm supported rerank kwargs + """ + return set(RerankRequest.model_fields.keys()) + + @staticmethod + def _get_litellm_supported_embedding_kwargs() -> Set[str]: + """ + Get the litellm supported embedding kwargs + + This follows the OpenAI API Spec + """ + return set(EmbeddingCreateParams.__annotations__.keys()) + + @staticmethod + def _get_litellm_supported_transcription_kwargs() -> Set[str]: + """ + Get the litellm supported transcription kwargs + + This follows the OpenAI API Spec + """ + return set(TranscriptionCreateParams.__annotations__.keys()) + + @staticmethod + def _get_exclude_kwargs() -> Set[str]: + """ + Get the kwargs to exclude from the cache key + """ + return set(["metadata"]) diff --git a/litellm/litellm_core_utils/prompt_templates/factory.py b/litellm/litellm_core_utils/prompt_templates/factory.py index 772f80777a..df7aa2cbd0 100644 --- a/litellm/litellm_core_utils/prompt_templates/factory.py +++ b/litellm/litellm_core_utils/prompt_templates/factory.py @@ -13,9 +13,10 @@ import litellm import litellm.types import litellm.types.llms from litellm import verbose_logger -from litellm.llms.custom_httpx.http_handler import HTTPHandler +from litellm.llms.custom_httpx.http_handler import HTTPHandler, get_async_httpx_client from litellm.types.llms.anthropic import * from litellm.types.llms.bedrock import MessageBlock as BedrockMessageBlock +from 
litellm.types.llms.custom_http import httpxSpecialProvider from litellm.types.llms.ollama import OllamaVisionModelObject from litellm.types.llms.openai import ( AllMessageValues, @@ -186,53 +187,125 @@ def ollama_pt( final_prompt_value="### Response:", messages=messages, ) - elif "llava" in model: - prompt = "" - images = [] - for message in messages: - if isinstance(message["content"], str): - prompt += message["content"] - elif isinstance(message["content"], list): - # see https://docs.litellm.ai/docs/providers/openai#openai-vision-models - for element in message["content"]: - if isinstance(element, dict): - if element["type"] == "text": - prompt += element["text"] - elif element["type"] == "image_url": - base64_image = convert_to_ollama_image( - element["image_url"]["url"] - ) - images.append(base64_image) - return {"prompt": prompt, "images": images} else: + user_message_types = {"user", "tool", "function"} + msg_i = 0 + images = [] prompt = "" - for message in messages: - role = message["role"] - content = message.get("content", "") + while msg_i < len(messages): + init_msg_i = msg_i + user_content_str = "" + ## MERGE CONSECUTIVE USER CONTENT ## + while ( + msg_i < len(messages) and messages[msg_i]["role"] in user_message_types + ): + msg_content = messages[msg_i].get("content") + if msg_content: + if isinstance(msg_content, list): + for m in msg_content: + if m.get("type", "") == "image_url": + if isinstance(m["image_url"], str): + images.append(m["image_url"]) + elif isinstance(m["image_url"], dict): + images.append(m["image_url"]["url"]) + elif m.get("type", "") == "text": + user_content_str += m["text"] + else: + # Tool message content will always be a string + user_content_str += msg_content - if "tool_calls" in message: - tool_calls = [] + msg_i += 1 - for call in message["tool_calls"]: - call_id: str = call["id"] - function_name: str = call["function"]["name"] - arguments = json.loads(call["function"]["arguments"]) + if user_content_str: + prompt += 
f"### User:\n{user_content_str}\n\n" - tool_calls.append( - { - "id": call_id, - "type": "function", - "function": {"name": function_name, "arguments": arguments}, - } + assistant_content_str = "" + ## MERGE CONSECUTIVE ASSISTANT CONTENT ## + while msg_i < len(messages) and messages[msg_i]["role"] == "assistant": + msg_content = messages[msg_i].get("content") + if msg_content: + if isinstance(msg_content, list): + for m in msg_content: + if m.get("type", "") == "text": + assistant_content_str += m["text"] + elif isinstance(msg_content, str): + # Tool message content will always be a string + assistant_content_str += msg_content + + tool_calls = messages[msg_i].get("tool_calls") + ollama_tool_calls = [] + if tool_calls: + for call in tool_calls: + call_id: str = call["id"] + function_name: str = call["function"]["name"] + arguments = json.loads(call["function"]["arguments"]) + + ollama_tool_calls.append( + { + "id": call_id, + "type": "function", + "function": { + "name": function_name, + "arguments": arguments, + }, + } + ) + + if ollama_tool_calls: + assistant_content_str += ( + f"Tool Calls: {json.dumps(ollama_tool_calls, indent=2)}" ) - prompt += f"### Assistant:\nTool Calls: {json.dumps(tool_calls, indent=2)}\n\n" + msg_i += 1 - elif "tool_call_id" in message: - prompt += f"### User:\n{message['content']}\n\n" + if assistant_content_str: + prompt += f"### Assistant:\n{assistant_content_str}\n\n" - elif content: - prompt += f"### {role.capitalize()}:\n{content}\n\n" + if msg_i == init_msg_i: # prevent infinite loops + raise litellm.BadRequestError( + message=BAD_MESSAGE_ERROR_STR + f"passed in {messages[msg_i]}", + model=model, + llm_provider="ollama", + ) + # prompt = "" + # images = [] + # for message in messages: + # if isinstance(message["content"], str): + # prompt += message["content"] + # elif isinstance(message["content"], list): + # # see https://docs.litellm.ai/docs/providers/openai#openai-vision-models + # for element in message["content"]: + # if 
isinstance(element, dict): + # if element["type"] == "text": + # prompt += element["text"] + # elif element["type"] == "image_url": + # base64_image = convert_to_ollama_image( + # element["image_url"]["url"] + # ) + # images.append(base64_image) + + # if "tool_calls" in message: + # tool_calls = [] + + # for call in message["tool_calls"]: + # call_id: str = call["id"] + # function_name: str = call["function"]["name"] + # arguments = json.loads(call["function"]["arguments"]) + + # tool_calls.append( + # { + # "id": call_id, + # "type": "function", + # "function": {"name": function_name, "arguments": arguments}, + # } + # ) + + # prompt += f"### Assistant:\nTool Calls: {json.dumps(tool_calls, indent=2)}\n\n" + + # elif "tool_call_id" in message: + # prompt += f"### User:\n{message['content']}\n\n" + + return {"prompt": prompt, "images": images} return prompt @@ -324,26 +397,6 @@ def phind_codellama_pt(messages): return prompt -known_tokenizer_config = { - "mistralai/Mistral-7B-Instruct-v0.1": { - "tokenizer": { - "chat_template": "{{ bos_token }}{% for message in messages %}{% if (message['role'] == 'user') != (loop.index0 % 2 == 0) %}{{ raise_exception('Conversation roles must alternate user/assistant/user/assistant/...') }}{% endif %}{% if message['role'] == 'user' %}{{ '[INST] ' + message['content'] + ' [/INST]' }}{% elif message['role'] == 'assistant' %}{{ message['content'] + eos_token + ' ' }}{% else %}{{ raise_exception('Only user and assistant roles are supported!') }}{% endif %}{% endfor %}", - "bos_token": "", - "eos_token": "", - }, - "status": "success", - }, - "meta-llama/Meta-Llama-3-8B-Instruct": { - "tokenizer": { - "chat_template": "{% set loop_messages = messages %}{% for message in loop_messages %}{% set content = '<|start_header_id|>' + message['role'] + '<|end_header_id|>\n\n'+ message['content'] | trim + '<|eot_id|>' %}{% if loop.index0 == 0 %}{% set content = bos_token + content %}{% endif %}{{ content }}{% endfor %}{{ 
'<|start_header_id|>assistant<|end_header_id|>\n\n' }}", - "bos_token": "<|begin_of_text|>", - "eos_token": "", - }, - "status": "success", - }, -} - - def hf_chat_template( # noqa: PLR0915 model: str, messages: list, chat_template: Optional[Any] = None ): @@ -377,11 +430,11 @@ def hf_chat_template( # noqa: PLR0915 else: return {"status": "failure"} - if model in known_tokenizer_config: - tokenizer_config = known_tokenizer_config[model] + if model in litellm.known_tokenizer_config: + tokenizer_config = litellm.known_tokenizer_config[model] else: tokenizer_config = _get_tokenizer_config(model) - known_tokenizer_config.update({model: tokenizer_config}) + litellm.known_tokenizer_config.update({model: tokenizer_config}) if ( tokenizer_config["status"] == "failure" @@ -474,6 +527,12 @@ def hf_chat_template( # noqa: PLR0915 ) # don't use verbose_logger.exception, if exception is raised +def deepseek_r1_pt(messages): + return hf_chat_template( + model="deepseek-r1/deepseek-r1-7b-instruct", messages=messages + ) + + # Anthropic template def claude_2_1_pt( messages: list, @@ -693,12 +752,13 @@ def convert_generic_image_chunk_to_openai_image_obj( Return: "data:image/jpeg;base64,{base64_image}" """ - return "data:{};{},{}".format( - image_chunk["media_type"], image_chunk["type"], image_chunk["data"] - ) + media_type = image_chunk["media_type"] + return "data:{};{},{}".format(media_type, image_chunk["type"], image_chunk["data"]) -def convert_to_anthropic_image_obj(openai_image_url: str) -> GenericImageParsingChunk: +def convert_to_anthropic_image_obj( + openai_image_url: str, format: Optional[str] +) -> GenericImageParsingChunk: """ Input: "image_url": "data:image/jpeg;base64,{base64_image}", @@ -715,7 +775,11 @@ def convert_to_anthropic_image_obj(openai_image_url: str) -> GenericImageParsing openai_image_url = convert_url_to_base64(url=openai_image_url) # Extract the media type and base64 data media_type, base64_data = openai_image_url.split("data:")[1].split(";base64,") - 
media_type = media_type.replace("\\/", "/") + + if format: + media_type = format + else: + media_type = media_type.replace("\\/", "/") return GenericImageParsingChunk( type="base64", @@ -833,11 +897,12 @@ def anthropic_messages_pt_xml(messages: list): if isinstance(messages[msg_i]["content"], list): for m in messages[msg_i]["content"]: if m.get("type", "") == "image_url": + format = m["image_url"].get("format") user_content.append( { "type": "image", "source": convert_to_anthropic_image_obj( - m["image_url"]["url"] + m["image_url"]["url"], format=format ), } ) @@ -1169,10 +1234,13 @@ def convert_to_anthropic_tool_result( ) elif content["type"] == "image_url": if isinstance(content["image_url"], str): - image_chunk = convert_to_anthropic_image_obj(content["image_url"]) - else: image_chunk = convert_to_anthropic_image_obj( - content["image_url"]["url"] + content["image_url"], format=None + ) + else: + format = content["image_url"].get("format") + image_chunk = convert_to_anthropic_image_obj( + content["image_url"]["url"], format=format ) anthropic_content_list.append( AnthropicMessagesImageParam( @@ -1295,6 +1363,7 @@ def add_cache_control_to_content( AnthropicMessagesImageParam, AnthropicMessagesTextParam, AnthropicMessagesDocumentParam, + ChatCompletionThinkingBlock, ], orignal_content_element: Union[dict, AllMessageValues], ): @@ -1330,6 +1399,7 @@ def _anthropic_content_element_factory( data=image_chunk["data"], ), ) + return _anthropic_content_element @@ -1381,13 +1451,16 @@ def anthropic_messages_pt( # noqa: PLR0915 for m in user_message_types_block["content"]: if m.get("type", "") == "image_url": m = cast(ChatCompletionImageObject, m) + format: Optional[str] = None if isinstance(m["image_url"], str): image_chunk = convert_to_anthropic_image_obj( - openai_image_url=m["image_url"] + openai_image_url=m["image_url"], format=None ) else: + format = m["image_url"].get("format") image_chunk = convert_to_anthropic_image_obj( - openai_image_url=m["image_url"]["url"] + 
openai_image_url=m["image_url"]["url"], + format=format, ) _anthropic_content_element = ( @@ -1420,6 +1493,8 @@ def anthropic_messages_pt( # noqa: PLR0915 ) user_content.append(_content_element) + elif m.get("type", "") == "document": + user_content.append(cast(AnthropicMessagesDocumentParam, m)) elif isinstance(user_message_types_block["content"], str): _anthropic_content_text_element: AnthropicMessagesTextParam = { "type": "text", @@ -1455,16 +1530,33 @@ def anthropic_messages_pt( # noqa: PLR0915 ## MERGE CONSECUTIVE ASSISTANT CONTENT ## while msg_i < len(messages) and messages[msg_i]["role"] == "assistant": assistant_content_block: ChatCompletionAssistantMessage = messages[msg_i] # type: ignore + + thinking_blocks = assistant_content_block.get("thinking_blocks", None) + if ( + thinking_blocks is not None + ): # IMPORTANT: ADD THIS FIRST, ELSE ANTHROPIC WILL RAISE AN ERROR + assistant_content.extend(thinking_blocks) if "content" in assistant_content_block and isinstance( assistant_content_block["content"], list ): for m in assistant_content_block["content"]: - # handle text + # handle thinking blocks + thinking_block = cast(str, m.get("thinking", "")) + text_block = cast(str, m.get("text", "")) if ( - m.get("type", "") == "text" and len(m.get("text", "")) > 0 + m.get("type", "") == "thinking" and len(thinking_block) > 0 + ): # don't pass empty text blocks. anthropic api raises errors. + anthropic_message: Union[ + ChatCompletionThinkingBlock, + AnthropicMessagesTextParam, + ] = cast(ChatCompletionThinkingBlock, m) + assistant_content.append(anthropic_message) + # handle text + elif ( + m.get("type", "") == "text" and len(text_block) > 0 ): # don't pass empty text blocks. anthropic api raises errors. 
anthropic_message = AnthropicMessagesTextParam( - type="text", text=m.get("text") + type="text", text=text_block ) _cached_message = add_cache_control_to_content( anthropic_content_element=anthropic_message, @@ -1517,6 +1609,7 @@ def anthropic_messages_pt( # noqa: PLR0915 msg_i += 1 if assistant_content: + new_messages.append({"role": "assistant", "content": assistant_content}) if msg_i == init_msg_i: # prevent infinite loops @@ -1525,17 +1618,6 @@ def anthropic_messages_pt( # noqa: PLR0915 model=model, llm_provider=llm_provider, ) - if not new_messages or new_messages[0]["role"] != "user": - if litellm.modify_params: - new_messages.insert( - 0, {"role": "user", "content": [{"type": "text", "text": "."}]} - ) - else: - raise Exception( - "Invalid first message={}. Should always start with 'role'='user' for Anthropic. System prompt is sent separately for Anthropic. set 'litellm.modify_params = True' or 'litellm_settings:modify_params = True' on proxy, to insert a placeholder user message - '.' 
as the first message, ".format( - new_messages - ) - ) if new_messages[-1]["role"] == "assistant": if isinstance(new_messages[-1]["content"], str): @@ -2150,6 +2232,16 @@ def stringify_json_tool_call_content(messages: List) -> List: ###### AMAZON BEDROCK ####### +import base64 +import mimetypes +from email.message import Message + +import httpx + +from litellm.types.llms.bedrock import ( + BedrockConverseReasoningContentBlock, + BedrockConverseReasoningTextBlock, +) from litellm.types.llms.bedrock import ContentBlock as BedrockContentBlock from litellm.types.llms.bedrock import DocumentBlock as BedrockDocumentBlock from litellm.types.llms.bedrock import ImageBlock as BedrockImageBlock @@ -2166,42 +2258,65 @@ from litellm.types.llms.bedrock import ToolSpecBlock as BedrockToolSpecBlock from litellm.types.llms.bedrock import ToolUseBlock as BedrockToolUseBlock -def get_image_details(image_url) -> Tuple[str, str]: - try: - import base64 +def _parse_content_type(content_type: str) -> str: + m = Message() + m["content-type"] = content_type + return m.get_content_type() - client = HTTPHandler(concurrent_limit=1) - # Send a GET request to the image URL - response = client.get(image_url) - response.raise_for_status() # Raise an exception for HTTP errors +class BedrockImageProcessor: + """Handles both sync and async image processing for Bedrock conversations.""" + + @staticmethod + def _post_call_image_processing(response: httpx.Response) -> Tuple[str, str]: # Check the response's content type to ensure it is an image content_type = response.headers.get("content-type") - if not content_type or "image" not in content_type: + if not content_type: raise ValueError( - f"URL does not point to a valid image (content-type: {content_type})" + f"URL does not contain content-type (content-type: {content_type})" ) + content_type = _parse_content_type(content_type) # Convert the image content to base64 bytes base64_bytes = base64.b64encode(response.content).decode("utf-8") return 
base64_bytes, content_type - except Exception as e: - raise e + @staticmethod + async def get_image_details_async(image_url) -> Tuple[str, str]: + try: + client = get_async_httpx_client( + llm_provider=httpxSpecialProvider.PromptFactory, + params={"concurrent_limit": 1}, + ) + # Send a GET request to the image URL + response = await client.get(image_url, follow_redirects=True) + response.raise_for_status() # Raise an exception for HTTP errors -def _process_bedrock_converse_image_block( - image_url: str, -) -> BedrockContentBlock: - if "base64" in image_url: - # Case 1: Images with base64 encoding - import re + return BedrockImageProcessor._post_call_image_processing(response) - # base 64 is passed as data:image/jpeg;base64, + except Exception as e: + raise e + + @staticmethod + def get_image_details(image_url) -> Tuple[str, str]: + try: + client = HTTPHandler(concurrent_limit=1) + # Send a GET request to the image URL + response = client.get(image_url, follow_redirects=True) + response.raise_for_status() # Raise an exception for HTTP errors + + return BedrockImageProcessor._post_call_image_processing(response) + + except Exception as e: + raise e + + @staticmethod + def _parse_base64_image(image_url: str) -> Tuple[str, str, str]: + """Parse base64 encoded image data.""" image_metadata, img_without_base_64 = image_url.split(",") - # read mime_type from img_without_base_64=data:image/jpeg;base64 # Extract MIME type using regular expression mime_type_match = re.match(r"data:(.*?);base64", image_metadata) if mime_type_match: @@ -2210,37 +2325,115 @@ def _process_bedrock_converse_image_block( else: mime_type = "image/jpeg" image_format = "jpeg" - _blob = BedrockSourceBlock(bytes=img_without_base_64) - elif "https:/" in image_url: - # Case 2: Images with direct links - image_bytes, mime_type = get_image_details(image_url) - image_format = mime_type.split("/")[1] + return img_without_base_64, mime_type, image_format + + @staticmethod + def _validate_format(mime_type: str, 
image_format: str) -> str: + """Validate image format and mime type for both images and documents.""" + + supported_image_formats = ( + litellm.AmazonConverseConfig().get_supported_image_types() + ) + supported_doc_formats = ( + litellm.AmazonConverseConfig().get_supported_document_types() + ) + + document_types = ["application", "text"] + is_document = any(mime_type.startswith(doc_type) for doc_type in document_types) + + if is_document: + potential_extensions = mimetypes.guess_all_extensions(mime_type) + valid_extensions = [ + ext[1:] + for ext in potential_extensions + if ext[1:] in supported_doc_formats + ] + + if not valid_extensions: + raise ValueError( + f"No supported extensions for MIME type: {mime_type}. Supported formats: {supported_doc_formats}" + ) + + # Use first valid extension instead of provided image_format + return valid_extensions[0] + else: + if image_format not in supported_image_formats: + raise ValueError( + f"Unsupported image format: {image_format}. Supported formats: {supported_image_formats}" + ) + return image_format + + @staticmethod + def _create_bedrock_block( + image_bytes: str, mime_type: str, image_format: str + ) -> BedrockContentBlock: + """Create appropriate Bedrock content block based on mime type.""" _blob = BedrockSourceBlock(bytes=image_bytes) - else: - raise ValueError( - "Unsupported image type. Expected either image url or base64 encoded string - \ - e.g. 
'data:image/jpeg;base64,'" - ) - supported_image_formats = litellm.AmazonConverseConfig().get_supported_image_types() + document_types = ["application", "text"] + is_document = any(mime_type.startswith(doc_type) for doc_type in document_types) - document_types = ["application", "text"] - is_document = any( - mime_type.startswith(document_type) for document_type in document_types - ) - - if image_format in supported_image_formats: - return BedrockContentBlock(image=BedrockImageBlock(source=_blob, format=image_format)) # type: ignore - elif is_document: - return BedrockContentBlock(document=BedrockDocumentBlock(source=_blob, format=image_format, name="DocumentPDFmessages_{}".format(str(uuid.uuid4())))) # type: ignore - else: - # Handle the case when the image format is not supported - raise ValueError( - "Unsupported image format: {}. Supported formats: {}".format( - image_format, supported_image_formats + if is_document: + return BedrockContentBlock( + document=BedrockDocumentBlock( + source=_blob, + format=image_format, + name=f"DocumentPDFmessages_{str(uuid.uuid4())}", + ) ) - ) + else: + return BedrockContentBlock( + image=BedrockImageBlock(source=_blob, format=image_format) + ) + + @classmethod + def process_image_sync( + cls, image_url: str, format: Optional[str] = None + ) -> BedrockContentBlock: + """Synchronous image processing.""" + + if "base64" in image_url: + img_bytes, mime_type, image_format = cls._parse_base64_image(image_url) + elif "http://" in image_url or "https://" in image_url: + img_bytes, mime_type = BedrockImageProcessor.get_image_details(image_url) + image_format = mime_type.split("/")[1] + else: + raise ValueError( + "Unsupported image type. 
Expected either image url or base64 encoded string" + ) + + if format: + mime_type = format + image_format = mime_type.split("/")[1] + + image_format = cls._validate_format(mime_type, image_format) + return cls._create_bedrock_block(img_bytes, mime_type, image_format) + + @classmethod + async def process_image_async( + cls, image_url: str, format: Optional[str] + ) -> BedrockContentBlock: + """Asynchronous image processing.""" + + if "base64" in image_url: + img_bytes, mime_type, image_format = cls._parse_base64_image(image_url) + elif "http://" in image_url or "https://" in image_url: + img_bytes, mime_type = await BedrockImageProcessor.get_image_details_async( + image_url + ) + image_format = mime_type.split("/")[1] + else: + raise ValueError( + "Unsupported image type. Expected either image url or base64 encoded string" + ) + + if format: # override with user-defined params + mime_type = format + image_format = mime_type.split("/")[1] + + image_format = cls._validate_format(mime_type, image_format) + return cls._create_bedrock_block(img_bytes, mime_type, image_format) def _convert_to_bedrock_tool_call_invoke( @@ -2662,6 +2855,250 @@ def get_assistant_message_block_or_continue_message( raise ValueError(f"Unsupported content type: {type(content_block)}") +class BedrockConverseMessagesProcessor: + @staticmethod + def _initial_message_setup( + messages: List, + user_continue_message: Optional[ChatCompletionUserMessage] = None, + ) -> List: + if messages[0].get("role") is not None and messages[0]["role"] == "assistant": + if user_continue_message is not None: + messages.insert(0, user_continue_message) + elif litellm.modify_params: + messages.insert(0, DEFAULT_USER_CONTINUE_MESSAGE) + + # if final message is assistant message + if messages[-1].get("role") is not None and messages[-1]["role"] == "assistant": + if user_continue_message is not None: + messages.append(user_continue_message) + elif litellm.modify_params: + messages.append(DEFAULT_USER_CONTINUE_MESSAGE) + 
return messages + + @staticmethod + async def _bedrock_converse_messages_pt_async( # noqa: PLR0915 + messages: List, + model: str, + llm_provider: str, + user_continue_message: Optional[ChatCompletionUserMessage] = None, + assistant_continue_message: Optional[ + Union[str, ChatCompletionAssistantMessage] + ] = None, + ) -> List[BedrockMessageBlock]: + contents: List[BedrockMessageBlock] = [] + msg_i = 0 + + ## BASE CASE ## + if len(messages) == 0: + raise litellm.BadRequestError( + message=BAD_MESSAGE_ERROR_STR + + "bedrock requires at least one non-system message", + model=model, + llm_provider=llm_provider, + ) + + # if initial message is assistant message + messages = BedrockConverseMessagesProcessor._initial_message_setup( + messages, user_continue_message + ) + + while msg_i < len(messages): + user_content: List[BedrockContentBlock] = [] + init_msg_i = msg_i + ## MERGE CONSECUTIVE USER CONTENT ## + while msg_i < len(messages) and messages[msg_i]["role"] == "user": + message_block = get_user_message_block_or_continue_message( + message=messages[msg_i], + user_continue_message=user_continue_message, + ) + if isinstance(message_block["content"], list): + _parts: List[BedrockContentBlock] = [] + for element in message_block["content"]: + if isinstance(element, dict): + if element["type"] == "text": + _part = BedrockContentBlock(text=element["text"]) + _parts.append(_part) + elif element["type"] == "image_url": + format: Optional[str] = None + if isinstance(element["image_url"], dict): + image_url = element["image_url"]["url"] + format = element["image_url"].get("format") + else: + image_url = element["image_url"] + _part = await BedrockImageProcessor.process_image_async( # type: ignore + image_url=image_url, format=format + ) + _parts.append(_part) # type: ignore + _cache_point_block = ( + litellm.AmazonConverseConfig()._get_cache_point_block( + message_block=cast( + OpenAIMessageContentListBlock, element + ), + block_type="content_block", + ) + ) + if 
_cache_point_block is not None: + _parts.append(_cache_point_block) + user_content.extend(_parts) + elif message_block["content"] and isinstance( + message_block["content"], str + ): + _part = BedrockContentBlock(text=messages[msg_i]["content"]) + _cache_point_block = ( + litellm.AmazonConverseConfig()._get_cache_point_block( + message_block, block_type="content_block" + ) + ) + user_content.append(_part) + if _cache_point_block is not None: + user_content.append(_cache_point_block) + + msg_i += 1 + if user_content: + if len(contents) > 0 and contents[-1]["role"] == "user": + if ( + assistant_continue_message is not None + or litellm.modify_params is True + ): + # if last message was a 'user' message, then add a dummy assistant message (bedrock requires alternating roles) + contents = _insert_assistant_continue_message( + messages=contents, + assistant_continue_message=assistant_continue_message, + ) + contents.append( + BedrockMessageBlock(role="user", content=user_content) + ) + else: + verbose_logger.warning( + "Potential consecutive user/tool blocks. Trying to merge. If error occurs, please set a 'assistant_continue_message' or set 'modify_params=True' to insert a dummy assistant message for bedrock calls." 
+ ) + contents[-1]["content"].extend(user_content) + else: + contents.append( + BedrockMessageBlock(role="user", content=user_content) + ) + + ## MERGE CONSECUTIVE TOOL CALL MESSAGES ## + tool_content: List[BedrockContentBlock] = [] + while msg_i < len(messages) and messages[msg_i]["role"] == "tool": + tool_call_result = _convert_to_bedrock_tool_call_result(messages[msg_i]) + + tool_content.append(tool_call_result) + msg_i += 1 + if tool_content: + # if last message was a 'user' message, then add a blank assistant message (bedrock requires alternating roles) + if len(contents) > 0 and contents[-1]["role"] == "user": + if ( + assistant_continue_message is not None + or litellm.modify_params is True + ): + # if last message was a 'user' message, then add a dummy assistant message (bedrock requires alternating roles) + contents = _insert_assistant_continue_message( + messages=contents, + assistant_continue_message=assistant_continue_message, + ) + contents.append( + BedrockMessageBlock(role="user", content=tool_content) + ) + else: + verbose_logger.warning( + "Potential consecutive user/tool blocks. Trying to merge. If error occurs, please set a 'assistant_continue_message' or set 'modify_params=True' to insert a dummy assistant message for bedrock calls." 
+ ) + contents[-1]["content"].extend(tool_content) + else: + contents.append( + BedrockMessageBlock(role="user", content=tool_content) + ) + assistant_content: List[BedrockContentBlock] = [] + ## MERGE CONSECUTIVE ASSISTANT CONTENT ## + while msg_i < len(messages) and messages[msg_i]["role"] == "assistant": + assistant_message_block = ( + get_assistant_message_block_or_continue_message( + message=messages[msg_i], + assistant_continue_message=assistant_continue_message, + ) + ) + _assistant_content = assistant_message_block.get("content", None) + + if _assistant_content is not None and isinstance( + _assistant_content, list + ): + assistants_parts: List[BedrockContentBlock] = [] + for element in _assistant_content: + if isinstance(element, dict): + if element["type"] == "thinking": + thinking_block = BedrockConverseMessagesProcessor.translate_thinking_blocks_to_reasoning_content_blocks( + thinking_blocks=[ + cast(ChatCompletionThinkingBlock, element) + ] + ) + assistants_parts.extend(thinking_block) + elif element["type"] == "text": + assistants_part = BedrockContentBlock( + text=element["text"] + ) + assistants_parts.append(assistants_part) + elif element["type"] == "image_url": + if isinstance(element["image_url"], dict): + image_url = element["image_url"]["url"] + else: + image_url = element["image_url"] + assistants_part = await BedrockImageProcessor.process_image_async( # type: ignore + image_url=image_url + ) + assistants_parts.append(assistants_part) + assistant_content.extend(assistants_parts) + elif _assistant_content is not None and isinstance( + _assistant_content, str + ): + assistant_content.append( + BedrockContentBlock(text=_assistant_content) + ) + _tool_calls = assistant_message_block.get("tool_calls", []) + if _tool_calls: + assistant_content.extend( + _convert_to_bedrock_tool_call_invoke(_tool_calls) + ) + + msg_i += 1 + + if assistant_content: + contents.append( + BedrockMessageBlock(role="assistant", content=assistant_content) + ) + + if msg_i 
== init_msg_i: # prevent infinite loops + raise litellm.BadRequestError( + message=BAD_MESSAGE_ERROR_STR + f"passed in {messages[msg_i]}", + model=model, + llm_provider=llm_provider, + ) + + return contents + + @staticmethod + def translate_thinking_blocks_to_reasoning_content_blocks( + thinking_blocks: List[ChatCompletionThinkingBlock], + ) -> List[BedrockContentBlock]: + reasoning_content_blocks: List[BedrockContentBlock] = [] + for thinking_block in thinking_blocks: + reasoning_text = thinking_block.get("thinking") + reasoning_signature = thinking_block.get("signature") + text_block = BedrockConverseReasoningTextBlock( + text=reasoning_text or "", + ) + if reasoning_signature is not None: + text_block["signature"] = reasoning_signature + reasoning_content_block = BedrockConverseReasoningContentBlock( + reasoningText=text_block, + ) + bedrock_content_block = BedrockContentBlock( + reasoningContent=reasoning_content_block + ) + reasoning_content_blocks.append(bedrock_content_block) + return reasoning_content_blocks + + def _bedrock_converse_messages_pt( # noqa: PLR0915 messages: List, model: str, @@ -2722,12 +3159,15 @@ def _bedrock_converse_messages_pt( # noqa: PLR0915 _part = BedrockContentBlock(text=element["text"]) _parts.append(_part) elif element["type"] == "image_url": + format: Optional[str] = None if isinstance(element["image_url"], dict): image_url = element["image_url"]["url"] + format = element["image_url"].get("format") else: image_url = element["image_url"] - _part = _process_bedrock_converse_image_block( # type: ignore - image_url=image_url + _part = BedrockImageProcessor.process_image_sync( # type: ignore + image_url=image_url, + format=format, ) _parts.append(_part) # type: ignore _cache_point_block = ( @@ -2807,17 +3247,36 @@ def _bedrock_converse_messages_pt( # noqa: PLR0915 assistant_content: List[BedrockContentBlock] = [] ## MERGE CONSECUTIVE ASSISTANT CONTENT ## while msg_i < len(messages) and messages[msg_i]["role"] == "assistant": + 
assistant_message_block = get_assistant_message_block_or_continue_message( message=messages[msg_i], assistant_continue_message=assistant_continue_message, ) _assistant_content = assistant_message_block.get("content", None) + thinking_blocks = cast( + Optional[List[ChatCompletionThinkingBlock]], + assistant_message_block.get("thinking_blocks"), + ) + + if thinking_blocks is not None: + assistant_content.extend( + BedrockConverseMessagesProcessor.translate_thinking_blocks_to_reasoning_content_blocks( + thinking_blocks + ) + ) if _assistant_content is not None and isinstance(_assistant_content, list): assistants_parts: List[BedrockContentBlock] = [] for element in _assistant_content: if isinstance(element, dict): - if element["type"] == "text": + if element["type"] == "thinking": + thinking_block = BedrockConverseMessagesProcessor.translate_thinking_blocks_to_reasoning_content_blocks( + thinking_blocks=[ + cast(ChatCompletionThinkingBlock, element) + ] + ) + assistants_parts.extend(thinking_block) + elif element["type"] == "text": assistants_part = BedrockContentBlock(text=element["text"]) assistants_parts.append(assistants_part) elif element["type"] == "image_url": @@ -2825,7 +3284,7 @@ def _bedrock_converse_messages_pt( # noqa: PLR0915 image_url = element["image_url"]["url"] else: image_url = element["image_url"] - assistants_part = _process_bedrock_converse_image_block( # type: ignore + assistants_part = BedrockImageProcessor.process_image_sync( # type: ignore image_url=image_url ) assistants_parts.append(assistants_part) diff --git a/litellm/litellm_core_utils/redact_messages.py b/litellm/litellm_core_utils/redact_messages.py index 3d0cec8d72..50e0e0b575 100644 --- a/litellm/litellm_core_utils/redact_messages.py +++ b/litellm/litellm_core_utils/redact_messages.py @@ -73,12 +73,9 @@ def perform_redaction(model_call_details: dict, result): return {"text": "redacted-by-litellm"} -def redact_message_input_output_from_logging( - model_call_details: dict, result, input: 
Optional[Any] = None -): +def should_redact_message_logging(model_call_details: dict) -> bool: """ - Removes messages, prompts, input, response from logging. This modifies the data in-place - only redacts when litellm.turn_off_message_logging == True + Determine if message logging should be redacted. """ _request_headers = ( model_call_details.get("litellm_params", {}).get("metadata", {}) or {} @@ -86,25 +83,48 @@ def redact_message_input_output_from_logging( request_headers = _request_headers.get("headers", {}) + possible_request_headers = [ + "litellm-enable-message-redaction", # old header. maintain backwards compatibility + "x-litellm-enable-message-redaction", # new header + ] + + is_redaction_enabled_via_header = False + for header in possible_request_headers: + if bool(request_headers.get(header, False)): + is_redaction_enabled_via_header = True + break + # check if user opted out of logging message/response to callbacks if ( litellm.turn_off_message_logging is not True - and request_headers.get("litellm-enable-message-redaction", False) is not True + and is_redaction_enabled_via_header is not True and _get_turn_off_message_logging_from_dynamic_params(model_call_details) is not True ): - return result + return False - if request_headers and request_headers.get( - "litellm-disable-message-redaction", False + if request_headers and bool( + request_headers.get("litellm-disable-message-redaction", False) ): - return result + return False # user has OPTED OUT of message redaction if _get_turn_off_message_logging_from_dynamic_params(model_call_details) is False: - return result + return False - return perform_redaction(model_call_details, result) + return True + + +def redact_message_input_output_from_logging( + model_call_details: dict, result, input: Optional[Any] = None +) -> Any: + """ + Removes messages, prompts, input, response from logging. 
This modifies the data in-place + only redacts when litellm.turn_off_message_logging == True + """ + if should_redact_message_logging(model_call_details): + return perform_redaction(model_call_details, result) + return result def _get_turn_off_message_logging_from_dynamic_params( diff --git a/litellm/litellm_core_utils/safe_json_dumps.py b/litellm/litellm_core_utils/safe_json_dumps.py new file mode 100644 index 0000000000..990c0ed561 --- /dev/null +++ b/litellm/litellm_core_utils/safe_json_dumps.py @@ -0,0 +1,50 @@ +import json +from typing import Any, Union + + +def safe_dumps(data: Any, max_depth: int = 10) -> str: + """ + Recursively serialize data while detecting circular references. + If a circular reference is detected then a marker string is returned. + """ + + def _serialize(obj: Any, seen: set, depth: int) -> Any: + # Check for maximum depth. + if depth > max_depth: + return "MaxDepthExceeded" + # Base-case: if it is a primitive, simply return it. + if isinstance(obj, (str, int, float, bool, type(None))): + return obj + # Check for circular reference. + if id(obj) in seen: + return "CircularReference Detected" + seen.add(id(obj)) + result: Union[dict, list, tuple, set, str] + if isinstance(obj, dict): + result = {} + for k, v in obj.items(): + if isinstance(k, (str)): + result[k] = _serialize(v, seen, depth + 1) + seen.remove(id(obj)) + return result + elif isinstance(obj, list): + result = [_serialize(item, seen, depth + 1) for item in obj] + seen.remove(id(obj)) + return result + elif isinstance(obj, tuple): + result = tuple(_serialize(item, seen, depth + 1) for item in obj) + seen.remove(id(obj)) + return result + elif isinstance(obj, set): + result = sorted([_serialize(item, seen, depth + 1) for item in obj]) + seen.remove(id(obj)) + return result + else: + # Fall back to string conversion for non-serializable objects. 
+ try: + return str(obj) + except Exception: + return "Unserializable Object" + + safe_data = _serialize(data, set(), 0) + return json.dumps(safe_data, default=str) diff --git a/litellm/litellm_core_utils/sensitive_data_masker.py b/litellm/litellm_core_utils/sensitive_data_masker.py new file mode 100644 index 0000000000..a1df115ff0 --- /dev/null +++ b/litellm/litellm_core_utils/sensitive_data_masker.py @@ -0,0 +1,81 @@ +from typing import Any, Dict, Optional, Set + + +class SensitiveDataMasker: + def __init__( + self, + sensitive_patterns: Optional[Set[str]] = None, + visible_prefix: int = 4, + visible_suffix: int = 4, + mask_char: str = "*", + ): + self.sensitive_patterns = sensitive_patterns or { + "password", + "secret", + "key", + "token", + "auth", + "credential", + "access", + "private", + "certificate", + } + + self.visible_prefix = visible_prefix + self.visible_suffix = visible_suffix + self.mask_char = mask_char + + def _mask_value(self, value: str) -> str: + if not value or len(str(value)) < (self.visible_prefix + self.visible_suffix): + return value + + value_str = str(value) + masked_length = len(value_str) - (self.visible_prefix + self.visible_suffix) + return f"{value_str[:self.visible_prefix]}{self.mask_char * masked_length}{value_str[-self.visible_suffix:]}" + + def is_sensitive_key(self, key: str) -> bool: + key_lower = str(key).lower() + result = any(pattern in key_lower for pattern in self.sensitive_patterns) + return result + + def mask_dict( + self, data: Dict[str, Any], depth: int = 0, max_depth: int = 10 + ) -> Dict[str, Any]: + if depth >= max_depth: + return data + + masked_data: Dict[str, Any] = {} + for k, v in data.items(): + try: + if isinstance(v, dict): + masked_data[k] = self.mask_dict(v, depth + 1) + elif hasattr(v, "__dict__") and not isinstance(v, type): + masked_data[k] = self.mask_dict(vars(v), depth + 1) + elif self.is_sensitive_key(k): + str_value = str(v) if v is not None else "" + masked_data[k] = self._mask_value(str_value) 
+ else: + masked_data[k] = ( + v if isinstance(v, (int, float, bool, str)) else str(v) + ) + except Exception: + masked_data[k] = "" + + return masked_data + + +# Usage example: +""" +masker = SensitiveDataMasker() +data = { + "api_key": "sk-1234567890abcdef", + "redis_password": "very_secret_pass", + "port": 6379 +} +masked = masker.mask_dict(data) +# Result: { +# "api_key": "sk-1****cdef", +# "redis_password": "very****pass", +# "port": 6379 +# } +""" diff --git a/litellm/litellm_core_utils/streaming_handler.py b/litellm/litellm_core_utils/streaming_handler.py index ba8cb167c8..5d5a8bf256 100644 --- a/litellm/litellm_core_utils/streaming_handler.py +++ b/litellm/litellm_core_utils/streaming_handler.py @@ -5,8 +5,7 @@ import threading import time import traceback import uuid -from concurrent.futures import ThreadPoolExecutor -from typing import Any, Callable, Dict, List, Optional, cast +from typing import Any, Callable, Dict, List, Optional, Union, cast import httpx from pydantic import BaseModel @@ -14,6 +13,9 @@ from pydantic import BaseModel import litellm from litellm import verbose_logger from litellm.litellm_core_utils.redact_messages import LiteLLMLoggingObject +from litellm.litellm_core_utils.thread_pool_executor import executor +from litellm.types.llms.openai import ChatCompletionChunk +from litellm.types.router import GenericLiteLLMParams from litellm.types.utils import Delta from litellm.types.utils import GenericStreamingChunk as GChunk from litellm.types.utils import ( @@ -29,11 +31,6 @@ from .exception_mapping_utils import exception_type from .llm_response_utils.get_api_base import get_api_base from .rules import Rules -MAX_THREADS = 100 - -# Create a ThreadPoolExecutor -executor = ThreadPoolExecutor(max_workers=MAX_THREADS) - def is_async_iterable(obj: Any) -> bool: """ @@ -74,6 +71,17 @@ class CustomStreamWrapper: self.completion_stream = completion_stream self.sent_first_chunk = False self.sent_last_chunk = False + + litellm_params: 
GenericLiteLLMParams = GenericLiteLLMParams( + **self.logging_obj.model_call_details.get("litellm_params", {}) + ) + self.merge_reasoning_content_in_choices: bool = ( + litellm_params.merge_reasoning_content_in_choices or False + ) + self.sent_first_thinking_block = False + self.sent_last_thinking_block = False + self.thinking_content = "" + self.system_fingerprint: Optional[str] = None self.received_finish_reason: Optional[str] = None self.intermittent_finish_reason: Optional[str] = ( @@ -91,12 +99,7 @@ class CustomStreamWrapper: self.holding_chunk = "" self.complete_response = "" self.response_uptil_now = "" - _model_info = ( - self.logging_obj.model_call_details.get("litellm_params", {}).get( - "model_info", {} - ) - or {} - ) + _model_info: Dict = litellm_params.model_info or {} _api_base = get_api_base( model=model or "", @@ -115,7 +118,7 @@ class CustomStreamWrapper: ) # GUARANTEE OPENAI HEADERS IN RESPONSE self._response_headers = _response_headers - self.response_id = None + self.response_id: Optional[str] = None self.logging_loop = None self.rules = Rules() self.stream_options = stream_options or getattr( @@ -471,6 +474,7 @@ class CustomStreamWrapper: finish_reason = None logprobs = None usage = None + if str_line and str_line.choices and len(str_line.choices) > 0: if ( str_line.choices[0].delta is not None @@ -633,7 +637,10 @@ class CustomStreamWrapper: if isinstance(chunk, bytes): chunk = chunk.decode("utf-8") if "text_output" in chunk: - response = chunk.replace("data: ", "").strip() + response = ( + CustomStreamWrapper._strip_sse_data_from_chunk(chunk) or "" + ) + response = response.strip() parsed_response = json.loads(response) else: return { @@ -717,7 +724,7 @@ class CustomStreamWrapper: def is_delta_empty(self, delta: Delta) -> bool: is_empty = True - if delta.content is not None: + if delta.content: is_empty = False elif delta.tool_calls is not None: is_empty = False @@ -725,16 +732,45 @@ class CustomStreamWrapper: is_empty = False return is_empty 
- def return_processed_chunk_logic( # noqa + def set_model_id( + self, id: str, model_response: ModelResponseStream + ) -> ModelResponseStream: + """ + Set the model id and response id to the given id. + + Ensure model id is always the same across all chunks. + + If first chunk sent + id set, use that id for all chunks. + """ + if self.response_id is None: + self.response_id = id + if self.response_id is not None and isinstance(self.response_id, str): + model_response.id = self.response_id + return model_response + + def copy_model_response_level_provider_specific_fields( + self, + original_chunk: Union[ModelResponseStream, ChatCompletionChunk], + model_response: ModelResponseStream, + ) -> ModelResponseStream: + """ + Copy provider_specific_fields from original_chunk to model_response. + """ + provider_specific_fields = getattr( + original_chunk, "provider_specific_fields", None + ) + if provider_specific_fields is not None: + model_response.provider_specific_fields = provider_specific_fields + for k, v in provider_specific_fields.items(): + setattr(model_response, k, v) + return model_response + + def is_chunk_non_empty( self, completion_obj: Dict[str, Any], model_response: ModelResponseStream, response_obj: Dict[str, Any], - ): - - print_verbose( - f"completion_obj: {completion_obj}, model_response.choices[0]: {model_response.choices[0]}, response_obj: {response_obj}" - ) + ) -> bool: if ( "content" in completion_obj and ( @@ -750,12 +786,40 @@ class CustomStreamWrapper: "function_call" in completion_obj and completion_obj["function_call"] is not None ) + or ( + "reasoning_content" in model_response.choices[0].delta + and model_response.choices[0].delta.reasoning_content is not None + ) + or (model_response.choices[0].delta.provider_specific_fields is not None) + or ( + "provider_specific_fields" in model_response + and model_response.choices[0].delta.provider_specific_fields is not None + ) or ( "provider_specific_fields" in response_obj and 
response_obj["provider_specific_fields"] is not None ) - ): # cannot set content of an OpenAI Object to be an empty string + ): + return True + else: + return False + def return_processed_chunk_logic( # noqa + self, + completion_obj: Dict[str, Any], + model_response: ModelResponseStream, + response_obj: Dict[str, Any], + ): + + print_verbose( + f"completion_obj: {completion_obj}, model_response.choices[0]: {model_response.choices[0]}, response_obj: {response_obj}" + ) + is_chunk_non_empty = self.is_chunk_non_empty( + completion_obj, model_response, response_obj + ) + if ( + is_chunk_non_empty + ): # cannot set content of an OpenAI Object to be an empty string self.safety_checker() hold, model_response_str = self.check_special_tokens( chunk=completion_obj["content"], @@ -766,14 +830,12 @@ class CustomStreamWrapper: ## check if openai/azure chunk original_chunk = response_obj.get("original_chunk", None) if original_chunk: - model_response.id = original_chunk.id - self.response_id = original_chunk.id if len(original_chunk.choices) > 0: choices = [] for choice in original_chunk.choices: try: if isinstance(choice, BaseModel): - choice_json = choice.model_dump() + choice_json = choice.model_dump() # type: ignore choice_json.pop( "finish_reason", None ) # for mistral etc. which return a value in their last chunk (not-openai compatible). 
@@ -801,9 +863,10 @@ class CustomStreamWrapper: model_response.choices[0].delta, "role" ): _initial_delta = model_response.choices[0].delta.model_dump() + _initial_delta.pop("role", None) model_response.choices[0].delta = Delta(**_initial_delta) - print_verbose( + verbose_logger.debug( f"model_response.choices[0].delta: {model_response.choices[0].delta}" ) else: @@ -812,11 +875,18 @@ class CustomStreamWrapper: if self.sent_first_chunk is False: completion_obj["role"] = "assistant" self.sent_first_chunk = True - + if response_obj.get("provider_specific_fields") is not None: + completion_obj["provider_specific_fields"] = response_obj[ + "provider_specific_fields" + ] model_response.choices[0].delta = Delta(**completion_obj) _index: Optional[int] = completion_obj.get("index") if _index is not None: model_response.choices[0].index = _index + + self._optional_combine_thinking_block_in_choices( + model_response=model_response + ) print_verbose(f"returning model_response: {model_response}") return model_response else: @@ -842,6 +912,9 @@ class CustomStreamWrapper: _is_delta_empty = self.is_delta_empty(delta=model_response.choices[0].delta) if _is_delta_empty: + model_response.choices[0].delta = Delta( + content=None + ) # ensure empty delta chunk returned # get any function call arguments model_response.choices[0].finish_reason = map_finish_reason( finish_reason=self.received_finish_reason @@ -870,7 +943,49 @@ class CustomStreamWrapper: self.chunks.append(model_response) return - def chunk_creator(self, chunk): # type: ignore # noqa: PLR0915 + def _optional_combine_thinking_block_in_choices( + self, model_response: ModelResponseStream + ) -> None: + """ + UI's Like OpenWebUI expect to get 1 chunk with ... tags in the chunk content + + In place updates the model_response object with reasoning_content in content with ... 
tags + + Enabled when `merge_reasoning_content_in_choices=True` passed in request params + + + """ + if self.merge_reasoning_content_in_choices is True: + reasoning_content = getattr( + model_response.choices[0].delta, "reasoning_content", None + ) + if reasoning_content: + if self.sent_first_thinking_block is False: + model_response.choices[0].delta.content += ( + "" + reasoning_content + ) + self.sent_first_thinking_block = True + elif ( + self.sent_first_thinking_block is True + and hasattr(model_response.choices[0].delta, "reasoning_content") + and model_response.choices[0].delta.reasoning_content + ): + model_response.choices[0].delta.content = reasoning_content + elif ( + self.sent_first_thinking_block is True + and not self.sent_last_thinking_block + and model_response.choices[0].delta.content + ): + model_response.choices[0].delta.content = ( + "" + model_response.choices[0].delta.content + ) + self.sent_last_thinking_block = True + + if hasattr(model_response.choices[0].delta, "reasoning_content"): + del model_response.choices[0].delta.reasoning_content + return + + def chunk_creator(self, chunk: Any): # type: ignore # noqa: PLR0915 model_response = self.model_response_creator() response_obj: Dict[str, Any] = {} @@ -886,16 +1001,13 @@ class CustomStreamWrapper: ) # check if chunk is a generic streaming chunk ) or ( self.custom_llm_provider - and ( - self.custom_llm_provider == "anthropic" - or self.custom_llm_provider in litellm._custom_providers - ) + and self.custom_llm_provider in litellm._custom_providers ): if self.received_finish_reason is not None: if "provider_specific_fields" not in chunk: raise StopIteration - anthropic_response_obj: GChunk = chunk + anthropic_response_obj: GChunk = cast(GChunk, chunk) completion_obj["content"] = anthropic_response_obj["text"] if anthropic_response_obj["is_finished"]: self.received_finish_reason = anthropic_response_obj[ @@ -927,7 +1039,7 @@ class CustomStreamWrapper: ].items(): setattr(model_response, key, 
value) - response_obj = anthropic_response_obj + response_obj = cast(Dict[str, Any], anthropic_response_obj) elif self.model == "replicate" or self.custom_llm_provider == "replicate": response_obj = self.handle_replicate_chunk(chunk) completion_obj["content"] = response_obj["text"] @@ -989,6 +1101,7 @@ class CustomStreamWrapper: try: completion_obj["content"] = chunk.text except Exception as e: + original_exception = e if "Part has no text." in str(e): ## check for function calling function_call = ( @@ -1030,7 +1143,7 @@ class CustomStreamWrapper: _model_response.choices = [_streaming_response] response_obj = {"original_chunk": _model_response} else: - raise e + raise original_exception if ( hasattr(chunk.candidates[0], "finish_reason") and chunk.candidates[0].finish_reason.name @@ -1093,8 +1206,9 @@ class CustomStreamWrapper: total_tokens=response_obj["usage"].total_tokens, ) elif self.custom_llm_provider == "text-completion-codestral": - response_obj = litellm.CodestralTextCompletionConfig()._chunk_parser( - chunk + response_obj = cast( + Dict[str, Any], + litellm.CodestralTextCompletionConfig()._chunk_parser(chunk), ) completion_obj["content"] = response_obj["text"] print_verbose(f"completion obj content: {completion_obj['content']}") @@ -1156,8 +1270,9 @@ class CustomStreamWrapper: self.received_finish_reason = response_obj["finish_reason"] if response_obj.get("original_chunk", None) is not None: if hasattr(response_obj["original_chunk"], "id"): - model_response.id = response_obj["original_chunk"].id - self.response_id = model_response.id + model_response = self.set_model_id( + response_obj["original_chunk"].id, model_response + ) if hasattr(response_obj["original_chunk"], "system_fingerprint"): model_response.system_fingerprint = response_obj[ "original_chunk" @@ -1206,8 +1321,16 @@ class CustomStreamWrapper: ): # function / tool calling branch - only set for openai/azure compatible endpoints # enter this branch when no content has been passed in response 
original_chunk = response_obj.get("original_chunk", None) - model_response.id = original_chunk.id - self.response_id = original_chunk.id + if hasattr(original_chunk, "id"): + model_response = self.set_model_id( + original_chunk.id, model_response + ) + if hasattr(original_chunk, "provider_specific_fields"): + model_response = ( + self.copy_model_response_level_provider_specific_fields( + original_chunk, model_response + ) + ) if original_chunk.choices and len(original_chunk.choices) > 0: delta = original_chunk.choices[0].delta if delta is not None and ( @@ -1566,21 +1689,6 @@ class CustomStreamWrapper: ) if processed_chunk is None: continue - ## LOGGING - ## LOGGING - executor.submit( - self.logging_obj.success_handler, - result=processed_chunk, - start_time=None, - end_time=None, - cache_hit=cache_hit, - ) - - asyncio.create_task( - self.logging_obj.async_success_handler( - processed_chunk, cache_hit=cache_hit - ) - ) if self.logging_obj._llm_caching_handler is not None: asyncio.create_task( @@ -1632,16 +1740,6 @@ class CustomStreamWrapper: ) if processed_chunk is None: continue - ## LOGGING - threading.Thread( - target=self.logging_obj.success_handler, - args=(processed_chunk, None, None, cache_hit), - ).start() # log processed_chunk - asyncio.create_task( - self.logging_obj.async_success_handler( - processed_chunk, cache_hit=cache_hit - ) - ) choice = processed_chunk.choices[0] if isinstance(choice, StreamingChoices): @@ -1669,33 +1767,31 @@ class CustomStreamWrapper: "usage", getattr(complete_streaming_response, "usage"), ) - ## LOGGING - threading.Thread( - target=self.logging_obj.success_handler, - args=(response, None, None, cache_hit), - ).start() # log response - asyncio.create_task( - self.logging_obj.async_success_handler( - response, cache_hit=cache_hit - ) - ) if self.sent_stream_usage is False and self.send_stream_usage is True: self.sent_stream_usage = True return response + + asyncio.create_task( + self.logging_obj.async_success_handler( + 
complete_streaming_response, + cache_hit=cache_hit, + start_time=None, + end_time=None, + ) + ) + + executor.submit( + self.logging_obj.success_handler, + complete_streaming_response, + cache_hit=cache_hit, + start_time=None, + end_time=None, + ) + raise StopAsyncIteration # Re-raise StopIteration else: self.sent_last_chunk = True processed_chunk = self.finish_reason_handler() - ## LOGGING - threading.Thread( - target=self.logging_obj.success_handler, - args=(processed_chunk, None, None, cache_hit), - ).start() # log response - asyncio.create_task( - self.logging_obj.async_success_handler( - processed_chunk, cache_hit=cache_hit - ) - ) return processed_chunk except httpx.TimeoutException as e: # if httpx read timeout error occues traceback_exception = traceback.format_exc() @@ -1735,6 +1831,42 @@ class CustomStreamWrapper: extra_kwargs={}, ) + @staticmethod + def _strip_sse_data_from_chunk(chunk: Optional[str]) -> Optional[str]: + """ + Strips the 'data: ' prefix from Server-Sent Events (SSE) chunks. + + Some providers like sagemaker send it as `data:`, need to handle both + + SSE messages are prefixed with 'data: ' which is part of the protocol, + not the actual content from the LLM. This method removes that prefix + and returns the actual content. + + Args: + chunk: The SSE chunk that may contain the 'data: ' prefix (string or bytes) + + Returns: + The chunk with the 'data: ' prefix removed, or the original chunk + if no prefix was found. Returns None if input is None. 
+ + See OpenAI Python Ref for this: https://github.com/openai/openai-python/blob/041bf5a8ec54da19aad0169671793c2078bd6173/openai/api_requestor.py#L100 + """ + if chunk is None: + return None + + if isinstance(chunk, str): + # OpenAI sends `data: ` + if chunk.startswith("data: "): + # Strip the prefix and any leading whitespace that might follow it + _length_of_sse_data_prefix = len("data: ") + return chunk[_length_of_sse_data_prefix:] + elif chunk.startswith("data:"): + # Sagemaker sends `data:`, no trailing whitespace + _length_of_sse_data_prefix = len("data:") + return chunk[_length_of_sse_data_prefix:] + + return chunk + def calculate_total_usage(chunks: List[ModelResponse]) -> Usage: """Assume most recent usage chunk has total usage uptil then.""" diff --git a/litellm/litellm_core_utils/thread_pool_executor.py b/litellm/litellm_core_utils/thread_pool_executor.py new file mode 100644 index 0000000000..b7c630b20d --- /dev/null +++ b/litellm/litellm_core_utils/thread_pool_executor.py @@ -0,0 +1,5 @@ +from concurrent.futures import ThreadPoolExecutor + +MAX_THREADS = 100 +# Create a ThreadPoolExecutor +executor = ThreadPoolExecutor(max_workers=MAX_THREADS) diff --git a/litellm/llms/aiohttp_openai/chat/transformation.py b/litellm/llms/aiohttp_openai/chat/transformation.py index 53157ad113..625704dbea 100644 --- a/litellm/llms/aiohttp_openai/chat/transformation.py +++ b/litellm/llms/aiohttp_openai/chat/transformation.py @@ -26,7 +26,7 @@ else: class AiohttpOpenAIChatConfig(OpenAILikeChatConfig): def get_complete_url( self, - api_base: str, + api_base: Optional[str], model: str, optional_params: dict, stream: Optional[bool] = None, @@ -35,6 +35,8 @@ class AiohttpOpenAIChatConfig(OpenAILikeChatConfig): Ensure - /v1/chat/completions is at the end of the url """ + if api_base is None: + api_base = "https://api.openai.com" if not api_base.endswith("/chat/completions"): api_base += "/chat/completions" diff --git a/litellm/llms/anthropic/chat/handler.py 
b/litellm/llms/anthropic/chat/handler.py index fdd1d79c7a..f2c5f390d7 100644 --- a/litellm/llms/anthropic/chat/handler.py +++ b/litellm/llms/anthropic/chat/handler.py @@ -4,7 +4,7 @@ Calling + translation logic for anthropic's `/v1/messages` endpoint import copy import json -from typing import Any, Callable, List, Optional, Tuple, Union +from typing import Any, Callable, Dict, List, Optional, Tuple, Union import httpx # type: ignore @@ -30,10 +30,16 @@ from litellm.types.llms.anthropic import ( UsageDelta, ) from litellm.types.llms.openai import ( + ChatCompletionThinkingBlock, ChatCompletionToolCallChunk, ChatCompletionUsageBlock, ) -from litellm.types.utils import GenericStreamingChunk +from litellm.types.utils import ( + Delta, + GenericStreamingChunk, + ModelResponseStream, + StreamingChoices, +) from litellm.utils import CustomStreamWrapper, ModelResponse, ProviderConfigManager from ...base import BaseLLM @@ -468,7 +474,10 @@ class ModelResponseIterator: if len(self.content_blocks) == 0: return False - if self.content_blocks[0]["delta"]["type"] == "text_delta": + if ( + self.content_blocks[0]["delta"]["type"] == "text_delta" + or self.content_blocks[0]["delta"]["type"] == "thinking_delta" + ): return False for block in self.content_blocks: @@ -506,15 +515,76 @@ class ModelResponseIterator: return usage_block - def chunk_parser(self, chunk: dict) -> GenericStreamingChunk: + def _content_block_delta_helper(self, chunk: dict) -> Tuple[ + str, + Optional[ChatCompletionToolCallChunk], + List[ChatCompletionThinkingBlock], + Dict[str, Any], + ]: + """ + Helper function to handle the content block delta + """ + + text = "" + tool_use: Optional[ChatCompletionToolCallChunk] = None + provider_specific_fields = {} + content_block = ContentBlockDelta(**chunk) # type: ignore + thinking_blocks: List[ChatCompletionThinkingBlock] = [] + + self.content_blocks.append(content_block) + if "text" in content_block["delta"]: + text = content_block["delta"]["text"] + elif 
"partial_json" in content_block["delta"]: + tool_use = { + "id": None, + "type": "function", + "function": { + "name": None, + "arguments": content_block["delta"]["partial_json"], + }, + "index": self.tool_index, + } + elif "citation" in content_block["delta"]: + provider_specific_fields["citation"] = content_block["delta"]["citation"] + elif ( + "thinking" in content_block["delta"] + or "signature" in content_block["delta"] + ): + thinking_blocks = [ + ChatCompletionThinkingBlock( + type="thinking", + thinking=content_block["delta"].get("thinking") or "", + signature=content_block["delta"].get("signature"), + ) + ] + provider_specific_fields["thinking_blocks"] = thinking_blocks + return text, tool_use, thinking_blocks, provider_specific_fields + + def _handle_reasoning_content( + self, thinking_blocks: List[ChatCompletionThinkingBlock] + ) -> Optional[str]: + """ + Handle the reasoning content + """ + reasoning_content = None + for block in thinking_blocks: + if reasoning_content is None: + reasoning_content = "" + if "thinking" in block: + reasoning_content += block["thinking"] + return reasoning_content + + def chunk_parser(self, chunk: dict) -> ModelResponseStream: try: type_chunk = chunk.get("type", "") or "" text = "" tool_use: Optional[ChatCompletionToolCallChunk] = None - is_finished = False finish_reason = "" usage: Optional[ChatCompletionUsageBlock] = None + provider_specific_fields: Dict[str, Any] = {} + reasoning_content: Optional[str] = None + thinking_blocks: Optional[List[ChatCompletionThinkingBlock]] = None index = int(chunk.get("index", 0)) if type_chunk == "content_block_delta": @@ -522,20 +592,13 @@ class ModelResponseIterator: Anthropic content chunk chunk = {'type': 'content_block_delta', 'index': 0, 'delta': {'type': 'text_delta', 'text': 'Hello'}} """ - content_block = ContentBlockDelta(**chunk) # type: ignore - self.content_blocks.append(content_block) - if "text" in content_block["delta"]: - text = content_block["delta"]["text"] - elif 
"partial_json" in content_block["delta"]: - tool_use = { - "id": None, - "type": "function", - "function": { - "name": None, - "arguments": content_block["delta"]["partial_json"], - }, - "index": self.tool_index, - } + text, tool_use, thinking_blocks, provider_specific_fields = ( + self._content_block_delta_helper(chunk=chunk) + ) + if thinking_blocks: + reasoning_content = self._handle_reasoning_content( + thinking_blocks=thinking_blocks + ) elif type_chunk == "content_block_start": """ event: content_block_start @@ -557,9 +620,11 @@ class ModelResponseIterator: "index": self.tool_index, } elif type_chunk == "content_block_stop": + ContentBlockStop(**chunk) # type: ignore # check if tool call content block is_empty = self.check_empty_tool_call_args() + if is_empty: tool_use = { "id": None, @@ -582,7 +647,6 @@ class ModelResponseIterator: or "stop" ) usage = self._handle_usage(anthropic_usage_chunk=message_delta["usage"]) - is_finished = True elif type_chunk == "message_start": """ Anthropic @@ -621,13 +685,27 @@ class ModelResponseIterator: text, tool_use = self._handle_json_mode_chunk(text=text, tool_use=tool_use) - returned_chunk = GenericStreamingChunk( - text=text, - tool_use=tool_use, - is_finished=is_finished, - finish_reason=finish_reason, + returned_chunk = ModelResponseStream( + choices=[ + StreamingChoices( + index=index, + delta=Delta( + content=text, + tool_calls=[tool_use] if tool_use is not None else None, + provider_specific_fields=( + provider_specific_fields + if provider_specific_fields + else None + ), + thinking_blocks=( + thinking_blocks if thinking_blocks else None + ), + reasoning_content=reasoning_content, + ), + finish_reason=finish_reason, + ) + ], usage=usage, - index=index, ) return returned_chunk @@ -738,7 +816,7 @@ class ModelResponseIterator: except ValueError as e: raise RuntimeError(f"Error parsing chunk: {e},\nReceived chunk: {chunk}") - def convert_str_chunk_to_generic_chunk(self, chunk: str) -> GenericStreamingChunk: + def 
convert_str_chunk_to_generic_chunk(self, chunk: str) -> ModelResponseStream: """ Convert a string chunk to a GenericStreamingChunk @@ -758,11 +836,4 @@ class ModelResponseIterator: data_json = json.loads(str_line[5:]) return self.chunk_parser(chunk=data_json) else: - return GenericStreamingChunk( - text="", - is_finished=False, - finish_reason="", - usage=None, - index=0, - tool_use=None, - ) + return ModelResponseStream() diff --git a/litellm/llms/anthropic/chat/transformation.py b/litellm/llms/anthropic/chat/transformation.py index 29e4e0fa4e..383c1cd3e5 100644 --- a/litellm/llms/anthropic/chat/transformation.py +++ b/litellm/llms/anthropic/chat/transformation.py @@ -1,6 +1,6 @@ import json import time -from typing import TYPE_CHECKING, Any, Dict, List, Optional, Union, cast +from typing import TYPE_CHECKING, Any, Dict, List, Optional, Tuple, Union, cast import httpx @@ -23,6 +23,7 @@ from litellm.types.llms.openai import ( AllMessageValues, ChatCompletionCachedContent, ChatCompletionSystemMessage, + ChatCompletionThinkingBlock, ChatCompletionToolCallChunk, ChatCompletionToolCallFunctionChunk, ChatCompletionToolParam, @@ -70,7 +71,7 @@ class AnthropicConfig(BaseConfig): metadata: Optional[dict] = None, system: Optional[str] = None, ) -> None: - locals_ = locals() + locals_ = locals().copy() for key, value in locals_.items(): if key != "self" and value is not None: setattr(self.__class__, key, value) @@ -80,7 +81,7 @@ class AnthropicConfig(BaseConfig): return super().get_config() def get_supported_openai_params(self, model: str): - return [ + params = [ "stream", "stop", "temperature", @@ -95,9 +96,15 @@ class AnthropicConfig(BaseConfig): "user", ] + if "claude-3-7-sonnet" in model: + params.append("thinking") + + return params + def get_json_schema_from_pydantic_object( self, response_format: Union[Any, Dict, None] ) -> Optional[dict]: + return type_to_response_format_param( response_format, ref_template="/$defs/{model}" ) # Relevant issue: 
https://github.com/BerriAI/litellm/issues/7755 @@ -116,15 +123,16 @@ class AnthropicConfig(BaseConfig): prompt_caching_set: bool = False, pdf_used: bool = False, is_vertex_request: bool = False, + user_anthropic_beta_headers: Optional[List[str]] = None, ) -> dict: - betas = [] + betas = set() if prompt_caching_set: - betas.append("prompt-caching-2024-07-31") + betas.add("prompt-caching-2024-07-31") if computer_tool_used: - betas.append("computer-use-2024-10-22") + betas.add("computer-use-2024-10-22") if pdf_used: - betas.append("pdfs-2024-09-25") + betas.add("pdfs-2024-09-25") headers = { "anthropic-version": anthropic_version or "2023-06-01", "x-api-key": api_key, @@ -132,6 +140,9 @@ class AnthropicConfig(BaseConfig): "content-type": "application/json", } + if user_anthropic_beta_headers is not None: + betas.update(user_anthropic_beta_headers) + # Don't send any beta headers to Vertex, Vertex has failed requests when they are sent if is_vertex_request is True: pass @@ -282,18 +293,6 @@ class AnthropicConfig(BaseConfig): new_stop = new_v return new_stop - def _add_tools_to_optional_params( - self, optional_params: dict, tools: List[AllAnthropicToolsValues] - ) -> dict: - if "tools" not in optional_params: - optional_params["tools"] = tools - else: - optional_params["tools"] = [ - *optional_params["tools"], - *tools, - ] - return optional_params - def map_openai_params( self, non_default_params: dict, @@ -334,6 +333,10 @@ class AnthropicConfig(BaseConfig): optional_params["top_p"] = value if param == "response_format" and isinstance(value, dict): + ignore_response_format_types = ["text"] + if value["type"] in ignore_response_format_types: # value is a no-op + continue + json_schema: Optional[dict] = None if "response_schema" in value: json_schema = value["response_schema"] @@ -357,7 +360,8 @@ class AnthropicConfig(BaseConfig): optional_params["json_mode"] = True if param == "user": optional_params["metadata"] = {"user_id": value} - + if param == "thinking": + 
optional_params["thinking"] = value return optional_params def _create_json_tool_call_for_response_format( @@ -580,6 +584,50 @@ class AnthropicConfig(BaseConfig): ) return _message + def extract_response_content(self, completion_response: dict) -> Tuple[ + str, + Optional[List[Any]], + Optional[List[ChatCompletionThinkingBlock]], + Optional[str], + List[ChatCompletionToolCallChunk], + ]: + text_content = "" + citations: Optional[List[Any]] = None + thinking_blocks: Optional[List[ChatCompletionThinkingBlock]] = None + reasoning_content: Optional[str] = None + tool_calls: List[ChatCompletionToolCallChunk] = [] + for idx, content in enumerate(completion_response["content"]): + if content["type"] == "text": + text_content += content["text"] + ## TOOL CALLING + elif content["type"] == "tool_use": + tool_calls.append( + ChatCompletionToolCallChunk( + id=content["id"], + type="function", + function=ChatCompletionToolCallFunctionChunk( + name=content["name"], + arguments=json.dumps(content["input"]), + ), + index=idx, + ) + ) + ## CITATIONS + if content.get("citations", None) is not None: + if citations is None: + citations = [] + citations.append(content["citations"]) + if content.get("thinking", None) is not None: + if thinking_blocks is None: + thinking_blocks = [] + thinking_blocks.append(cast(ChatCompletionThinkingBlock, content)) + if thinking_blocks is not None: + reasoning_content = "" + for block in thinking_blocks: + if "thinking" in block: + reasoning_content += block["thinking"] + return text_content, citations, thinking_blocks, reasoning_content, tool_calls + def transform_response( self, model: str, @@ -627,27 +675,24 @@ class AnthropicConfig(BaseConfig): ) else: text_content = "" + citations: Optional[List[Any]] = None + thinking_blocks: Optional[List[ChatCompletionThinkingBlock]] = None + reasoning_content: Optional[str] = None tool_calls: List[ChatCompletionToolCallChunk] = [] - for idx, content in enumerate(completion_response["content"]): - if 
content["type"] == "text": - text_content += content["text"] - ## TOOL CALLING - elif content["type"] == "tool_use": - tool_calls.append( - ChatCompletionToolCallChunk( - id=content["id"], - type="function", - function=ChatCompletionToolCallFunctionChunk( - name=content["name"], - arguments=json.dumps(content["input"]), - ), - index=idx, - ) - ) + + text_content, citations, thinking_blocks, reasoning_content, tool_calls = ( + self.extract_response_content(completion_response=completion_response) + ) _message = litellm.Message( tool_calls=tool_calls, content=text_content or None, + provider_specific_fields={ + "citations": citations, + "thinking_blocks": thinking_blocks, + }, + thinking_blocks=thinking_blocks, + reasoning_content=reasoning_content, ) ## HANDLE JSON MODE - anthropic returns single function call @@ -742,6 +787,13 @@ class AnthropicConfig(BaseConfig): headers=cast(httpx.Headers, headers), ) + def _get_user_anthropic_beta_headers( + self, anthropic_beta_header: Optional[str] + ) -> Optional[List[str]]: + if anthropic_beta_header is None: + return None + return anthropic_beta_header.split(",") + def validate_environment( self, headers: dict, @@ -762,13 +814,18 @@ class AnthropicConfig(BaseConfig): prompt_caching_set = self.is_cache_control_set(messages=messages) computer_tool_used = self.is_computer_tool_used(tools=tools) pdf_used = self.is_pdf_used(messages=messages) + user_anthropic_beta_headers = self._get_user_anthropic_beta_headers( + anthropic_beta_header=headers.get("anthropic-beta") + ) anthropic_headers = self.get_anthropic_headers( computer_tool_used=computer_tool_used, prompt_caching_set=prompt_caching_set, pdf_used=pdf_used, api_key=api_key, is_vertex_request=optional_params.get("is_vertex_request", False), + user_anthropic_beta_headers=user_anthropic_beta_headers, ) headers = {**headers, **anthropic_headers} + return headers diff --git a/litellm/llms/anthropic/completion/transformation.py b/litellm/llms/anthropic/completion/transformation.py 
index e2510d6a98..7a260b6f94 100644 --- a/litellm/llms/anthropic/completion/transformation.py +++ b/litellm/llms/anthropic/completion/transformation.py @@ -72,7 +72,7 @@ class AnthropicTextConfig(BaseConfig): top_k: Optional[int] = None, metadata: Optional[dict] = None, ) -> None: - locals_ = locals() + locals_ = locals().copy() for key, value in locals_.items(): if key != "self" and value is not None: setattr(self.__class__, key, value) diff --git a/litellm/llms/anthropic/experimental_pass_through/messages/handler.py b/litellm/llms/anthropic/experimental_pass_through/messages/handler.py new file mode 100644 index 0000000000..a7dfff74d9 --- /dev/null +++ b/litellm/llms/anthropic/experimental_pass_through/messages/handler.py @@ -0,0 +1,179 @@ +""" +- call /messages on Anthropic API +- Make streaming + non-streaming request - just pass it through direct to Anthropic. No need to do anything special here +- Ensure requests are logged in the DB - stream + non-stream + +""" + +import json +from typing import Any, AsyncIterator, Dict, Optional, Union, cast + +import httpx + +import litellm +from litellm.litellm_core_utils.litellm_logging import Logging as LiteLLMLoggingObj +from litellm.llms.base_llm.anthropic_messages.transformation import ( + BaseAnthropicMessagesConfig, +) +from litellm.llms.custom_httpx.http_handler import ( + AsyncHTTPHandler, + get_async_httpx_client, +) +from litellm.types.router import GenericLiteLLMParams +from litellm.types.utils import ProviderSpecificHeader +from litellm.utils import ProviderConfigManager, client + + +class AnthropicMessagesHandler: + + @staticmethod + async def _handle_anthropic_streaming( + response: httpx.Response, + request_body: dict, + litellm_logging_obj: LiteLLMLoggingObj, + ) -> AsyncIterator: + """Helper function to handle Anthropic streaming responses using the existing logging handlers""" + from datetime import datetime + + from litellm.proxy.pass_through_endpoints.streaming_handler import ( + 
PassThroughStreamingHandler, + ) + from litellm.proxy.pass_through_endpoints.success_handler import ( + PassThroughEndpointLogging, + ) + from litellm.proxy.pass_through_endpoints.types import EndpointType + + # Create success handler object + passthrough_success_handler_obj = PassThroughEndpointLogging() + + # Use the existing streaming handler for Anthropic + start_time = datetime.now() + return PassThroughStreamingHandler.chunk_processor( + response=response, + request_body=request_body, + litellm_logging_obj=litellm_logging_obj, + endpoint_type=EndpointType.ANTHROPIC, + start_time=start_time, + passthrough_success_handler_obj=passthrough_success_handler_obj, + url_route="/v1/messages", + ) + + +@client +async def anthropic_messages( + api_key: str, + model: str, + stream: bool = False, + api_base: Optional[str] = None, + client: Optional[AsyncHTTPHandler] = None, + custom_llm_provider: Optional[str] = None, + **kwargs, +) -> Union[Dict[str, Any], AsyncIterator]: + """ + Makes Anthropic `/v1/messages` API calls In the Anthropic API Spec + """ + # Use provided client or create a new one + optional_params = GenericLiteLLMParams(**kwargs) + model, _custom_llm_provider, dynamic_api_key, dynamic_api_base = ( + litellm.get_llm_provider( + model=model, + custom_llm_provider=custom_llm_provider, + api_base=optional_params.api_base, + api_key=optional_params.api_key, + ) + ) + anthropic_messages_provider_config: Optional[BaseAnthropicMessagesConfig] = ( + ProviderConfigManager.get_provider_anthropic_messages_config( + model=model, + provider=litellm.LlmProviders(_custom_llm_provider), + ) + ) + if anthropic_messages_provider_config is None: + raise ValueError( + f"Anthropic messages provider config not found for model: {model}" + ) + if client is None or not isinstance(client, AsyncHTTPHandler): + async_httpx_client = get_async_httpx_client( + llm_provider=litellm.LlmProviders.ANTHROPIC + ) + else: + async_httpx_client = client + + litellm_logging_obj: LiteLLMLoggingObj 
= kwargs.get("litellm_logging_obj", None) + + # Prepare headers + provider_specific_header = cast( + Optional[ProviderSpecificHeader], kwargs.get("provider_specific_header", None) + ) + extra_headers = ( + provider_specific_header.get("extra_headers", {}) + if provider_specific_header + else {} + ) + headers = anthropic_messages_provider_config.validate_environment( + headers=extra_headers or {}, + model=model, + api_key=api_key, + ) + + litellm_logging_obj.update_environment_variables( + model=model, + optional_params=dict(optional_params), + litellm_params={ + "metadata": kwargs.get("metadata", {}), + "preset_cache_key": None, + "stream_response": {}, + **optional_params.model_dump(exclude_unset=True), + }, + custom_llm_provider=_custom_llm_provider, + ) + litellm_logging_obj.model_call_details.update(kwargs) + + # Prepare request body + request_body = kwargs.copy() + request_body = { + k: v + for k, v in request_body.items() + if k + in anthropic_messages_provider_config.get_supported_anthropic_messages_params( + model=model + ) + } + request_body["stream"] = stream + request_body["model"] = model + litellm_logging_obj.stream = stream + + # Make the request + request_url = anthropic_messages_provider_config.get_complete_url( + api_base=api_base, model=model + ) + + litellm_logging_obj.pre_call( + input=[{"role": "user", "content": json.dumps(request_body)}], + api_key="", + additional_args={ + "complete_input_dict": request_body, + "api_base": str(request_url), + "headers": headers, + }, + ) + + response = await async_httpx_client.post( + url=request_url, + headers=headers, + data=json.dumps(request_body), + stream=stream, + ) + response.raise_for_status() + + # used for logging + cost tracking + litellm_logging_obj.model_call_details["httpx_response"] = response + + if stream: + return await AnthropicMessagesHandler._handle_anthropic_streaming( + response=response, + request_body=request_body, + litellm_logging_obj=litellm_logging_obj, + ) + else: + return 
response.json() diff --git a/litellm/llms/anthropic/experimental_pass_through/messages/transformation.py b/litellm/llms/anthropic/experimental_pass_through/messages/transformation.py new file mode 100644 index 0000000000..e9b598f18d --- /dev/null +++ b/litellm/llms/anthropic/experimental_pass_through/messages/transformation.py @@ -0,0 +1,47 @@ +from typing import Optional + +from litellm.llms.base_llm.anthropic_messages.transformation import ( + BaseAnthropicMessagesConfig, +) + +DEFAULT_ANTHROPIC_API_BASE = "https://api.anthropic.com" +DEFAULT_ANTHROPIC_API_VERSION = "2023-06-01" + + +class AnthropicMessagesConfig(BaseAnthropicMessagesConfig): + def get_supported_anthropic_messages_params(self, model: str) -> list: + return [ + "messages", + "model", + "system", + "max_tokens", + "stop_sequences", + "temperature", + "top_p", + "top_k", + "tools", + "tool_choice", + "thinking", + # TODO: Add Anthropic `metadata` support + # "metadata", + ] + + def get_complete_url(self, api_base: Optional[str], model: str) -> str: + api_base = api_base or DEFAULT_ANTHROPIC_API_BASE + if not api_base.endswith("/v1/messages"): + api_base = f"{api_base}/v1/messages" + return api_base + + def validate_environment( + self, + headers: dict, + model: str, + api_key: Optional[str] = None, + ) -> dict: + if "x-api-key" not in headers: + headers["x-api-key"] = api_key + if "anthropic-version" not in headers: + headers["anthropic-version"] = DEFAULT_ANTHROPIC_API_VERSION + if "content-type" not in headers: + headers["content-type"] = "application/json" + return headers diff --git a/litellm/llms/anthropic/experimental_pass_through/transformation.py b/litellm/llms/anthropic/experimental_pass_through/transformation.py deleted file mode 100644 index b24cf47ad4..0000000000 --- a/litellm/llms/anthropic/experimental_pass_through/transformation.py +++ /dev/null @@ -1,412 +0,0 @@ -import json -from typing import List, Literal, Optional, Tuple, Union - -from openai.types.chat.chat_completion_chunk 
import Choice as OpenAIStreamingChoice - -from litellm.types.llms.anthropic import ( - AllAnthropicToolsValues, - AnthopicMessagesAssistantMessageParam, - AnthropicFinishReason, - AnthropicMessagesRequest, - AnthropicMessagesToolChoice, - AnthropicMessagesUserMessageParam, - AnthropicResponse, - AnthropicResponseContentBlockText, - AnthropicResponseContentBlockToolUse, - AnthropicResponseUsageBlock, - ContentBlockDelta, - ContentJsonBlockDelta, - ContentTextBlockDelta, - MessageBlockDelta, - MessageDelta, - UsageDelta, -) -from litellm.types.llms.openai import ( - AllMessageValues, - ChatCompletionAssistantMessage, - ChatCompletionAssistantToolCall, - ChatCompletionImageObject, - ChatCompletionImageUrlObject, - ChatCompletionRequest, - ChatCompletionSystemMessage, - ChatCompletionTextObject, - ChatCompletionToolCallFunctionChunk, - ChatCompletionToolChoiceFunctionParam, - ChatCompletionToolChoiceObjectParam, - ChatCompletionToolChoiceValues, - ChatCompletionToolMessage, - ChatCompletionToolParam, - ChatCompletionToolParamFunctionChunk, - ChatCompletionUserMessage, -) -from litellm.types.utils import Choices, ModelResponse, Usage - - -class AnthropicExperimentalPassThroughConfig: - def __init__(self): - pass - - ### FOR [BETA] `/v1/messages` endpoint support - - def translatable_anthropic_params(self) -> List: - """ - Which anthropic params, we need to translate to the openai format. 
- """ - return ["messages", "metadata", "system", "tool_choice", "tools"] - - def translate_anthropic_messages_to_openai( # noqa: PLR0915 - self, - messages: List[ - Union[ - AnthropicMessagesUserMessageParam, - AnthopicMessagesAssistantMessageParam, - ] - ], - ) -> List: - new_messages: List[AllMessageValues] = [] - for m in messages: - user_message: Optional[ChatCompletionUserMessage] = None - tool_message_list: List[ChatCompletionToolMessage] = [] - new_user_content_list: List[ - Union[ChatCompletionTextObject, ChatCompletionImageObject] - ] = [] - ## USER MESSAGE ## - if m["role"] == "user": - ## translate user message - message_content = m.get("content") - if message_content and isinstance(message_content, str): - user_message = ChatCompletionUserMessage( - role="user", content=message_content - ) - elif message_content and isinstance(message_content, list): - for content in message_content: - if content["type"] == "text": - text_obj = ChatCompletionTextObject( - type="text", text=content["text"] - ) - new_user_content_list.append(text_obj) - elif content["type"] == "image": - image_url = ChatCompletionImageUrlObject( - url=f"data:{content['type']};base64,{content['source']}" - ) - image_obj = ChatCompletionImageObject( - type="image_url", image_url=image_url - ) - - new_user_content_list.append(image_obj) - elif content["type"] == "tool_result": - if "content" not in content: - tool_result = ChatCompletionToolMessage( - role="tool", - tool_call_id=content["tool_use_id"], - content="", - ) - tool_message_list.append(tool_result) - elif isinstance(content["content"], str): - tool_result = ChatCompletionToolMessage( - role="tool", - tool_call_id=content["tool_use_id"], - content=content["content"], - ) - tool_message_list.append(tool_result) - elif isinstance(content["content"], list): - for c in content["content"]: - if c["type"] == "text": - tool_result = ChatCompletionToolMessage( - role="tool", - tool_call_id=content["tool_use_id"], - content=c["text"], - ) 
- tool_message_list.append(tool_result) - elif c["type"] == "image": - image_str = ( - f"data:{c['type']};base64,{c['source']}" - ) - tool_result = ChatCompletionToolMessage( - role="tool", - tool_call_id=content["tool_use_id"], - content=image_str, - ) - tool_message_list.append(tool_result) - - if user_message is not None: - new_messages.append(user_message) - - if len(new_user_content_list) > 0: - new_messages.append({"role": "user", "content": new_user_content_list}) # type: ignore - - if len(tool_message_list) > 0: - new_messages.extend(tool_message_list) - - ## ASSISTANT MESSAGE ## - assistant_message_str: Optional[str] = None - tool_calls: List[ChatCompletionAssistantToolCall] = [] - if m["role"] == "assistant": - if isinstance(m["content"], str): - assistant_message_str = m["content"] - elif isinstance(m["content"], list): - for content in m["content"]: - if content["type"] == "text": - if assistant_message_str is None: - assistant_message_str = content["text"] - else: - assistant_message_str += content["text"] - elif content["type"] == "tool_use": - function_chunk = ChatCompletionToolCallFunctionChunk( - name=content["name"], - arguments=json.dumps(content["input"]), - ) - - tool_calls.append( - ChatCompletionAssistantToolCall( - id=content["id"], - type="function", - function=function_chunk, - ) - ) - - if assistant_message_str is not None or len(tool_calls) > 0: - assistant_message = ChatCompletionAssistantMessage( - role="assistant", - content=assistant_message_str, - ) - if len(tool_calls) > 0: - assistant_message["tool_calls"] = tool_calls - new_messages.append(assistant_message) - - return new_messages - - def translate_anthropic_tool_choice_to_openai( - self, tool_choice: AnthropicMessagesToolChoice - ) -> ChatCompletionToolChoiceValues: - if tool_choice["type"] == "any": - return "required" - elif tool_choice["type"] == "auto": - return "auto" - elif tool_choice["type"] == "tool": - tc_function_param = ChatCompletionToolChoiceFunctionParam( - 
name=tool_choice.get("name", "") - ) - return ChatCompletionToolChoiceObjectParam( - type="function", function=tc_function_param - ) - else: - raise ValueError( - "Incompatible tool choice param submitted - {}".format(tool_choice) - ) - - def translate_anthropic_tools_to_openai( - self, tools: List[AllAnthropicToolsValues] - ) -> List[ChatCompletionToolParam]: - new_tools: List[ChatCompletionToolParam] = [] - mapped_tool_params = ["name", "input_schema", "description"] - for tool in tools: - function_chunk = ChatCompletionToolParamFunctionChunk( - name=tool["name"], - ) - if "input_schema" in tool: - function_chunk["parameters"] = tool["input_schema"] # type: ignore - if "description" in tool: - function_chunk["description"] = tool["description"] # type: ignore - - for k, v in tool.items(): - if k not in mapped_tool_params: # pass additional computer kwargs - function_chunk.setdefault("parameters", {}).update({k: v}) - new_tools.append( - ChatCompletionToolParam(type="function", function=function_chunk) - ) - - return new_tools - - def translate_anthropic_to_openai( - self, anthropic_message_request: AnthropicMessagesRequest - ) -> ChatCompletionRequest: - """ - This is used by the beta Anthropic Adapter, for translating anthropic `/v1/messages` requests to the openai format. 
- """ - new_messages: List[AllMessageValues] = [] - - ## CONVERT ANTHROPIC MESSAGES TO OPENAI - new_messages = self.translate_anthropic_messages_to_openai( - messages=anthropic_message_request["messages"] - ) - ## ADD SYSTEM MESSAGE TO MESSAGES - if "system" in anthropic_message_request: - new_messages.insert( - 0, - ChatCompletionSystemMessage( - role="system", content=anthropic_message_request["system"] - ), - ) - - new_kwargs: ChatCompletionRequest = { - "model": anthropic_message_request["model"], - "messages": new_messages, - } - ## CONVERT METADATA (user_id) - if "metadata" in anthropic_message_request: - if "user_id" in anthropic_message_request["metadata"]: - new_kwargs["user"] = anthropic_message_request["metadata"]["user_id"] - - # Pass litellm proxy specific metadata - if "litellm_metadata" in anthropic_message_request: - # metadata will be passed to litellm.acompletion(), it's a litellm_param - new_kwargs["metadata"] = anthropic_message_request.pop("litellm_metadata") - - ## CONVERT TOOL CHOICE - if "tool_choice" in anthropic_message_request: - new_kwargs["tool_choice"] = self.translate_anthropic_tool_choice_to_openai( - tool_choice=anthropic_message_request["tool_choice"] - ) - ## CONVERT TOOLS - if "tools" in anthropic_message_request: - new_kwargs["tools"] = self.translate_anthropic_tools_to_openai( - tools=anthropic_message_request["tools"] - ) - - translatable_params = self.translatable_anthropic_params() - for k, v in anthropic_message_request.items(): - if k not in translatable_params: # pass remaining params as is - new_kwargs[k] = v # type: ignore - - return new_kwargs - - def _translate_openai_content_to_anthropic( - self, choices: List[Choices] - ) -> List[ - Union[AnthropicResponseContentBlockText, AnthropicResponseContentBlockToolUse] - ]: - new_content: List[ - Union[ - AnthropicResponseContentBlockText, AnthropicResponseContentBlockToolUse - ] - ] = [] - for choice in choices: - if ( - choice.message.tool_calls is not None - and 
len(choice.message.tool_calls) > 0 - ): - for tool_call in choice.message.tool_calls: - new_content.append( - AnthropicResponseContentBlockToolUse( - type="tool_use", - id=tool_call.id, - name=tool_call.function.name or "", - input=json.loads(tool_call.function.arguments), - ) - ) - elif choice.message.content is not None: - new_content.append( - AnthropicResponseContentBlockText( - type="text", text=choice.message.content - ) - ) - - return new_content - - def _translate_openai_finish_reason_to_anthropic( - self, openai_finish_reason: str - ) -> AnthropicFinishReason: - if openai_finish_reason == "stop": - return "end_turn" - elif openai_finish_reason == "length": - return "max_tokens" - elif openai_finish_reason == "tool_calls": - return "tool_use" - return "end_turn" - - def translate_openai_response_to_anthropic( - self, response: ModelResponse - ) -> AnthropicResponse: - ## translate content block - anthropic_content = self._translate_openai_content_to_anthropic(choices=response.choices) # type: ignore - ## extract finish reason - anthropic_finish_reason = self._translate_openai_finish_reason_to_anthropic( - openai_finish_reason=response.choices[0].finish_reason # type: ignore - ) - # extract usage - usage: Usage = getattr(response, "usage") - anthropic_usage = AnthropicResponseUsageBlock( - input_tokens=usage.prompt_tokens or 0, - output_tokens=usage.completion_tokens or 0, - ) - translated_obj = AnthropicResponse( - id=response.id, - type="message", - role="assistant", - model=response.model or "unknown-model", - stop_sequence=None, - usage=anthropic_usage, - content=anthropic_content, - stop_reason=anthropic_finish_reason, - ) - - return translated_obj - - def _translate_streaming_openai_chunk_to_anthropic( - self, choices: List[OpenAIStreamingChoice] - ) -> Tuple[ - Literal["text_delta", "input_json_delta"], - Union[ContentTextBlockDelta, ContentJsonBlockDelta], - ]: - text: str = "" - partial_json: Optional[str] = None - for choice in choices: - if 
choice.delta.content is not None: - text += choice.delta.content - elif choice.delta.tool_calls is not None: - partial_json = "" - for tool in choice.delta.tool_calls: - if ( - tool.function is not None - and tool.function.arguments is not None - ): - partial_json += tool.function.arguments - - if partial_json is not None: - return "input_json_delta", ContentJsonBlockDelta( - type="input_json_delta", partial_json=partial_json - ) - else: - return "text_delta", ContentTextBlockDelta(type="text_delta", text=text) - - def translate_streaming_openai_response_to_anthropic( - self, response: ModelResponse - ) -> Union[ContentBlockDelta, MessageBlockDelta]: - ## base case - final chunk w/ finish reason - if response.choices[0].finish_reason is not None: - delta = MessageDelta( - stop_reason=self._translate_openai_finish_reason_to_anthropic( - response.choices[0].finish_reason - ), - ) - if getattr(response, "usage", None) is not None: - litellm_usage_chunk: Optional[Usage] = response.usage # type: ignore - elif ( - hasattr(response, "_hidden_params") - and "usage" in response._hidden_params - ): - litellm_usage_chunk = response._hidden_params["usage"] - else: - litellm_usage_chunk = None - if litellm_usage_chunk is not None: - usage_delta = UsageDelta( - input_tokens=litellm_usage_chunk.prompt_tokens or 0, - output_tokens=litellm_usage_chunk.completion_tokens or 0, - ) - else: - usage_delta = UsageDelta(input_tokens=0, output_tokens=0) - return MessageBlockDelta( - type="message_delta", delta=delta, usage=usage_delta - ) - ( - type_of_content, - content_block_delta, - ) = self._translate_streaming_openai_chunk_to_anthropic( - choices=response.choices # type: ignore - ) - return ContentBlockDelta( - type="content_block_delta", - index=response.choices[0].index, - delta=content_block_delta, - ) diff --git a/litellm/llms/azure/azure.py b/litellm/llms/azure/azure.py index f771532133..dcd5af7b96 100644 --- a/litellm/llms/azure/azure.py +++ b/litellm/llms/azure/azure.py @@ 
-2,13 +2,14 @@ import asyncio import json import os import time -from typing import Any, Callable, List, Literal, Optional, Union +from typing import Any, Callable, Dict, List, Literal, Optional, Union import httpx # type: ignore -from openai import AsyncAzureOpenAI, AzureOpenAI +from openai import APITimeoutError, AsyncAzureOpenAI, AzureOpenAI import litellm from litellm.caching.caching import DualCache +from litellm.constants import DEFAULT_MAX_RETRIES from litellm.litellm_core_utils.litellm_logging import Logging as LiteLLMLoggingObj from litellm.llms.custom_httpx.http_handler import ( AsyncHTTPHandler, @@ -98,14 +99,6 @@ class AzureOpenAIAssistantsAPIConfig: def select_azure_base_url_or_endpoint(azure_client_params: dict): - # azure_client_params = { - # "api_version": api_version, - # "azure_endpoint": api_base, - # "azure_deployment": model, - # "http_client": litellm.client_session, - # "max_retries": max_retries, - # "timeout": timeout, - # } azure_endpoint = azure_client_params.get("azure_endpoint", None) if azure_endpoint is not None: # see : https://github.com/openai/openai-python/blob/3d61ed42aba652b547029095a7eb269ad4e1e957/src/openai/lib/azure.py#L192 @@ -217,7 +210,7 @@ class AzureChatCompletion(BaseLLM): def __init__(self) -> None: super().__init__() - def validate_environment(self, api_key, azure_ad_token): + def validate_environment(self, api_key, azure_ad_token, azure_ad_token_provider): headers = { "content-type": "application/json", } @@ -227,6 +220,10 @@ class AzureChatCompletion(BaseLLM): if azure_ad_token.startswith("oidc/"): azure_ad_token = get_azure_ad_token_from_oidc(azure_ad_token) headers["Authorization"] = f"Bearer {azure_ad_token}" + elif azure_ad_token_provider is not None: + azure_ad_token = azure_ad_token_provider() + headers["Authorization"] = f"Bearer {azure_ad_token}" + return headers def _get_sync_azure_client( @@ -235,6 +232,7 @@ class AzureChatCompletion(BaseLLM): api_base: Optional[str], api_key: Optional[str], 
azure_ad_token: Optional[str], + azure_ad_token_provider: Optional[Callable], model: str, max_retries: int, timeout: Union[float, httpx.Timeout], @@ -242,7 +240,7 @@ class AzureChatCompletion(BaseLLM): client_type: Literal["sync", "async"], ): # init AzureOpenAI Client - azure_client_params = { + azure_client_params: Dict[str, Any] = { "api_version": api_version, "azure_endpoint": api_base, "azure_deployment": model, @@ -259,6 +257,8 @@ class AzureChatCompletion(BaseLLM): if azure_ad_token.startswith("oidc/"): azure_ad_token = get_azure_ad_token_from_oidc(azure_ad_token) azure_client_params["azure_ad_token"] = azure_ad_token + elif azure_ad_token_provider is not None: + azure_client_params["azure_ad_token_provider"] = azure_ad_token_provider if client is None: if client_type == "sync": azure_client = AzureOpenAI(**azure_client_params) # type: ignore @@ -305,6 +305,7 @@ class AzureChatCompletion(BaseLLM): - call chat.completions.create.with_raw_response when litellm.return_response_headers is True - call chat.completions.create by default """ + start_time = time.time() try: raw_response = await azure_client.chat.completions.with_raw_response.create( **data, timeout=timeout @@ -313,6 +314,11 @@ class AzureChatCompletion(BaseLLM): headers = dict(raw_response.headers) response = raw_response.parse() return headers, response + except APITimeoutError as e: + end_time = time.time() + time_delta = round(end_time - start_time, 2) + e.message += f" - timeout value={timeout}, time taken={time_delta} seconds" + raise e except Exception as e: raise e @@ -326,6 +332,7 @@ class AzureChatCompletion(BaseLLM): api_version: str, api_type: str, azure_ad_token: str, + azure_ad_token_provider: Callable, dynamic_params: bool, print_verbose: Callable, timeout: Union[float, httpx.Timeout], @@ -345,7 +352,9 @@ class AzureChatCompletion(BaseLLM): status_code=422, message="Missing model or messages" ) - max_retries = optional_params.pop("max_retries", 2) + max_retries = 
optional_params.pop("max_retries", None) + if max_retries is None: + max_retries = DEFAULT_MAX_RETRIES json_mode: Optional[bool] = optional_params.pop("json_mode", False) ### CHECK IF CLOUDFLARE AI GATEWAY ### @@ -373,6 +382,10 @@ class AzureChatCompletion(BaseLLM): ) azure_client_params["azure_ad_token"] = azure_ad_token + elif azure_ad_token_provider is not None: + azure_client_params["azure_ad_token_provider"] = ( + azure_ad_token_provider + ) if acompletion is True: client = AsyncAzureOpenAI(**azure_client_params) @@ -400,8 +413,10 @@ class AzureChatCompletion(BaseLLM): api_key=api_key, api_version=api_version, azure_ad_token=azure_ad_token, + azure_ad_token_provider=azure_ad_token_provider, timeout=timeout, client=client, + max_retries=max_retries, ) else: return self.acompletion( @@ -412,10 +427,12 @@ class AzureChatCompletion(BaseLLM): api_version=api_version, model=model, azure_ad_token=azure_ad_token, + azure_ad_token_provider=azure_ad_token_provider, dynamic_params=dynamic_params, timeout=timeout, client=client, logging_obj=logging_obj, + max_retries=max_retries, convert_tool_call_to_json_mode=json_mode, ) elif "stream" in optional_params and optional_params["stream"] is True: @@ -428,8 +445,10 @@ class AzureChatCompletion(BaseLLM): api_key=api_key, api_version=api_version, azure_ad_token=azure_ad_token, + azure_ad_token_provider=azure_ad_token_provider, timeout=timeout, client=client, + max_retries=max_retries, ) else: ## LOGGING @@ -468,6 +487,10 @@ class AzureChatCompletion(BaseLLM): if azure_ad_token.startswith("oidc/"): azure_ad_token = get_azure_ad_token_from_oidc(azure_ad_token) azure_client_params["azure_ad_token"] = azure_ad_token + elif azure_ad_token_provider is not None: + azure_client_params["azure_ad_token_provider"] = ( + azure_ad_token_provider + ) if ( client is None @@ -517,10 +540,14 @@ class AzureChatCompletion(BaseLLM): status_code = getattr(e, "status_code", 500) error_headers = getattr(e, "headers", None) error_response = getattr(e, 
"response", None) + error_body = getattr(e, "body", None) if error_headers is None and error_response: error_headers = getattr(error_response, "headers", None) raise AzureOpenAIError( - status_code=status_code, message=str(e), headers=error_headers + status_code=status_code, + message=str(e), + headers=error_headers, + body=error_body, ) async def acompletion( @@ -534,18 +561,14 @@ class AzureChatCompletion(BaseLLM): dynamic_params: bool, model_response: ModelResponse, logging_obj: LiteLLMLoggingObj, + max_retries: int, azure_ad_token: Optional[str] = None, + azure_ad_token_provider: Optional[Callable] = None, convert_tool_call_to_json_mode: Optional[bool] = None, client=None, # this is the AsyncAzureOpenAI ): response = None try: - max_retries = data.pop("max_retries", 2) - if not isinstance(max_retries, int): - raise AzureOpenAIError( - status_code=422, message="max retries must be an int" - ) - # init AzureOpenAI Client azure_client_params = { "api_version": api_version, @@ -564,6 +587,8 @@ class AzureChatCompletion(BaseLLM): if azure_ad_token.startswith("oidc/"): azure_ad_token = get_azure_ad_token_from_oidc(azure_ad_token) azure_client_params["azure_ad_token"] = azure_ad_token + elif azure_ad_token_provider is not None: + azure_client_params["azure_ad_token_provider"] = azure_ad_token_provider # setting Azure client if client is None or dynamic_params: @@ -627,6 +652,8 @@ class AzureChatCompletion(BaseLLM): ) raise AzureOpenAIError(status_code=500, message=str(e)) except Exception as e: + message = getattr(e, "message", str(e)) + body = getattr(e, "body", None) ## LOGGING logging_obj.post_call( input=data["messages"], @@ -637,7 +664,7 @@ class AzureChatCompletion(BaseLLM): if hasattr(e, "status_code"): raise e else: - raise AzureOpenAIError(status_code=500, message=str(e)) + raise AzureOpenAIError(status_code=500, message=message, body=body) def streaming( self, @@ -649,14 +676,11 @@ class AzureChatCompletion(BaseLLM): data: dict, model: str, timeout: Any, + 
max_retries: int, azure_ad_token: Optional[str] = None, + azure_ad_token_provider: Optional[Callable] = None, client=None, ): - max_retries = data.pop("max_retries", 2) - if not isinstance(max_retries, int): - raise AzureOpenAIError( - status_code=422, message="max retries must be an int" - ) # init AzureOpenAI Client azure_client_params = { "api_version": api_version, @@ -675,6 +699,8 @@ class AzureChatCompletion(BaseLLM): if azure_ad_token.startswith("oidc/"): azure_ad_token = get_azure_ad_token_from_oidc(azure_ad_token) azure_client_params["azure_ad_token"] = azure_ad_token + elif azure_ad_token_provider is not None: + azure_client_params["azure_ad_token_provider"] = azure_ad_token_provider if client is None or dynamic_params: azure_client = AzureOpenAI(**azure_client_params) @@ -717,7 +743,9 @@ class AzureChatCompletion(BaseLLM): data: dict, model: str, timeout: Any, + max_retries: int, azure_ad_token: Optional[str] = None, + azure_ad_token_provider: Optional[Callable] = None, client=None, ): try: @@ -727,7 +755,7 @@ class AzureChatCompletion(BaseLLM): "azure_endpoint": api_base, "azure_deployment": model, "http_client": litellm.aclient_session, - "max_retries": data.pop("max_retries", 2), + "max_retries": max_retries, "timeout": timeout, } azure_client_params = select_azure_base_url_or_endpoint( @@ -739,6 +767,8 @@ class AzureChatCompletion(BaseLLM): if azure_ad_token.startswith("oidc/"): azure_ad_token = get_azure_ad_token_from_oidc(azure_ad_token) azure_client_params["azure_ad_token"] = azure_ad_token + elif azure_ad_token_provider is not None: + azure_client_params["azure_ad_token_provider"] = azure_ad_token_provider if client is None or dynamic_params: azure_client = AsyncAzureOpenAI(**azure_client_params) else: @@ -779,10 +809,15 @@ class AzureChatCompletion(BaseLLM): status_code = getattr(e, "status_code", 500) error_headers = getattr(e, "headers", None) error_response = getattr(e, "response", None) + message = getattr(e, "message", str(e)) + error_body 
= getattr(e, "body", None) if error_headers is None and error_response: error_headers = getattr(error_response, "headers", None) raise AzureOpenAIError( - status_code=status_code, message=str(e), headers=error_headers + status_code=status_code, + message=message, + headers=error_headers, + body=error_body, ) async def aembedding( @@ -844,6 +879,7 @@ class AzureChatCompletion(BaseLLM): optional_params: dict, api_key: Optional[str] = None, azure_ad_token: Optional[str] = None, + azure_ad_token_provider: Optional[Callable] = None, max_retries: Optional[int] = None, client=None, aembedding=None, @@ -883,6 +919,8 @@ class AzureChatCompletion(BaseLLM): if azure_ad_token.startswith("oidc/"): azure_ad_token = get_azure_ad_token_from_oidc(azure_ad_token) azure_client_params["azure_ad_token"] = azure_ad_token + elif azure_ad_token_provider is not None: + azure_client_params["azure_ad_token_provider"] = azure_ad_token_provider ## LOGGING logging_obj.pre_call( @@ -1240,6 +1278,7 @@ class AzureChatCompletion(BaseLLM): api_version: Optional[str] = None, model_response: Optional[ImageResponse] = None, azure_ad_token: Optional[str] = None, + azure_ad_token_provider: Optional[Callable] = None, client=None, aimg_generation=None, ) -> ImageResponse: @@ -1266,7 +1305,7 @@ class AzureChatCompletion(BaseLLM): ) # init AzureOpenAI Client - azure_client_params = { + azure_client_params: Dict[str, Any] = { "api_version": api_version, "azure_endpoint": api_base, "azure_deployment": model, @@ -1282,6 +1321,8 @@ class AzureChatCompletion(BaseLLM): if azure_ad_token.startswith("oidc/"): azure_ad_token = get_azure_ad_token_from_oidc(azure_ad_token) azure_client_params["azure_ad_token"] = azure_ad_token + elif azure_ad_token_provider is not None: + azure_client_params["azure_ad_token_provider"] = azure_ad_token_provider if aimg_generation is True: return self.aimage_generation(data=data, input=input, logging_obj=logging_obj, model_response=model_response, api_key=api_key, client=client, 
azure_client_params=azure_client_params, timeout=timeout, headers=headers) # type: ignore @@ -1342,6 +1383,7 @@ class AzureChatCompletion(BaseLLM): max_retries: int, timeout: Union[float, httpx.Timeout], azure_ad_token: Optional[str] = None, + azure_ad_token_provider: Optional[Callable] = None, aspeech: Optional[bool] = None, client=None, ) -> HttpxBinaryResponseContent: @@ -1358,6 +1400,7 @@ class AzureChatCompletion(BaseLLM): api_base=api_base, api_version=api_version, azure_ad_token=azure_ad_token, + azure_ad_token_provider=azure_ad_token_provider, max_retries=max_retries, timeout=timeout, client=client, @@ -1368,6 +1411,7 @@ class AzureChatCompletion(BaseLLM): api_version=api_version, api_key=api_key, azure_ad_token=azure_ad_token, + azure_ad_token_provider=azure_ad_token_provider, model=model, max_retries=max_retries, timeout=timeout, @@ -1393,6 +1437,7 @@ class AzureChatCompletion(BaseLLM): api_base: Optional[str], api_version: Optional[str], azure_ad_token: Optional[str], + azure_ad_token_provider: Optional[Callable], max_retries: int, timeout: Union[float, httpx.Timeout], client=None, @@ -1403,6 +1448,7 @@ class AzureChatCompletion(BaseLLM): api_version=api_version, api_key=api_key, azure_ad_token=azure_ad_token, + azure_ad_token_provider=azure_ad_token_provider, model=model, max_retries=max_retries, timeout=timeout, diff --git a/litellm/llms/azure/batches/handler.py b/litellm/llms/azure/batches/handler.py index 5fae527670..d36ae648ab 100644 --- a/litellm/llms/azure/batches/handler.py +++ b/litellm/llms/azure/batches/handler.py @@ -2,7 +2,7 @@ Azure Batches API Handler """ -from typing import Any, Coroutine, Optional, Union +from typing import Any, Coroutine, Optional, Union, cast import httpx @@ -14,6 +14,7 @@ from litellm.types.llms.openai import ( CreateBatchRequest, RetrieveBatchRequest, ) +from litellm.types.utils import LiteLLMBatch class AzureBatchesAPI: @@ -64,9 +65,9 @@ class AzureBatchesAPI: self, create_batch_data: CreateBatchRequest, 
azure_client: AsyncAzureOpenAI, - ) -> Batch: + ) -> LiteLLMBatch: response = await azure_client.batches.create(**create_batch_data) - return response + return LiteLLMBatch(**response.model_dump()) def create_batch( self, @@ -78,7 +79,7 @@ class AzureBatchesAPI: timeout: Union[float, httpx.Timeout], max_retries: Optional[int], client: Optional[Union[AzureOpenAI, AsyncAzureOpenAI]] = None, - ) -> Union[Batch, Coroutine[Any, Any, Batch]]: + ) -> Union[LiteLLMBatch, Coroutine[Any, Any, LiteLLMBatch]]: azure_client: Optional[Union[AzureOpenAI, AsyncAzureOpenAI]] = ( self.get_azure_openai_client( api_key=api_key, @@ -103,16 +104,16 @@ class AzureBatchesAPI: return self.acreate_batch( # type: ignore create_batch_data=create_batch_data, azure_client=azure_client ) - response = azure_client.batches.create(**create_batch_data) - return response + response = cast(AzureOpenAI, azure_client).batches.create(**create_batch_data) + return LiteLLMBatch(**response.model_dump()) async def aretrieve_batch( self, retrieve_batch_data: RetrieveBatchRequest, client: AsyncAzureOpenAI, - ) -> Batch: + ) -> LiteLLMBatch: response = await client.batches.retrieve(**retrieve_batch_data) - return response + return LiteLLMBatch(**response.model_dump()) def retrieve_batch( self, @@ -149,8 +150,10 @@ class AzureBatchesAPI: return self.aretrieve_batch( # type: ignore retrieve_batch_data=retrieve_batch_data, client=azure_client ) - response = azure_client.batches.retrieve(**retrieve_batch_data) - return response + response = cast(AzureOpenAI, azure_client).batches.retrieve( + **retrieve_batch_data + ) + return LiteLLMBatch(**response.model_dump()) async def acancel_batch( self, diff --git a/litellm/llms/azure/chat/gpt_transformation.py b/litellm/llms/azure/chat/gpt_transformation.py index 00e336d69a..7aa4fffab5 100644 --- a/litellm/llms/azure/chat/gpt_transformation.py +++ b/litellm/llms/azure/chat/gpt_transformation.py @@ -11,13 +11,7 @@ from litellm.types.utils import ModelResponse from 
litellm.utils import supports_response_schema from ....exceptions import UnsupportedParamsError -from ....types.llms.openai import ( - AllMessageValues, - ChatCompletionToolChoiceFunctionParam, - ChatCompletionToolChoiceObjectParam, - ChatCompletionToolParam, - ChatCompletionToolParamFunctionChunk, -) +from ....types.llms.openai import AllMessageValues from ...base_llm.chat.transformation import BaseConfig from ..common_utils import AzureOpenAIError @@ -104,6 +98,7 @@ class AzureOpenAIConfig(BaseConfig): "seed", "extra_headers", "parallel_tool_calls", + "prediction", ] def _is_response_format_supported_model(self, model: str) -> bool: @@ -119,6 +114,17 @@ class AzureOpenAIConfig(BaseConfig): return False + def _is_response_format_supported_api_version( + self, api_version_year: str, api_version_month: str + ) -> bool: + """ + - check if api_version is supported for response_format + """ + + is_supported = int(api_version_year) <= 2024 and int(api_version_month) >= 8 + + return is_supported + def map_openai_params( self, non_default_params: dict, @@ -174,49 +180,27 @@ class AzureOpenAIConfig(BaseConfig): else: optional_params["tool_choice"] = value elif param == "response_format" and isinstance(value, dict): - json_schema: Optional[dict] = None - schema_name: str = "" - if "response_schema" in value: - json_schema = value["response_schema"] - schema_name = "json_tool_call" - elif "json_schema" in value: - json_schema = value["json_schema"]["schema"] - schema_name = value["json_schema"]["name"] - """ - Follow similar approach to anthropic - translate to a single tool call. - - When using tools in this way: - https://docs.anthropic.com/en/docs/build-with-claude/tool-use#json-mode - - You usually want to provide a single tool - - You should set tool_choice (see Forcing tool use) to instruct the model to explicitly use that tool - - Remember that the model will pass the input to the tool, so the name of the tool and description should be from the model’s perspective. 
- """ _is_response_format_supported_model = ( self._is_response_format_supported_model(model) ) - if json_schema is not None and ( - (api_version_year <= "2024" and api_version_month < "08") - or not _is_response_format_supported_model - ): # azure api version "2024-08-01-preview" onwards supports 'json_schema' only for gpt-4o/3.5 models - _tool_choice = ChatCompletionToolChoiceObjectParam( - type="function", - function=ChatCompletionToolChoiceFunctionParam( - name=schema_name - ), + is_response_format_supported_api_version = ( + self._is_response_format_supported_api_version( + api_version_year, api_version_month ) - - _tool = ChatCompletionToolParam( - type="function", - function=ChatCompletionToolParamFunctionChunk( - name=schema_name, parameters=json_schema - ), - ) - - optional_params["tools"] = [_tool] - optional_params["tool_choice"] = _tool_choice - optional_params["json_mode"] = True - else: - optional_params["response_format"] = value + ) + is_response_format_supported = ( + is_response_format_supported_api_version + and _is_response_format_supported_model + ) + optional_params = self._add_response_format_to_tools( + optional_params=optional_params, + value=value, + is_response_format_supported=is_response_format_supported, + ) + elif param == "tools" and isinstance(value, list): + optional_params.setdefault("tools", []) + optional_params["tools"].extend(value) elif param in supported_openai_params: optional_params[param] = value diff --git a/litellm/llms/azure/chat/o1_transformation.py b/litellm/llms/azure/chat/o1_transformation.py deleted file mode 100644 index 0b56aa1fb4..0000000000 --- a/litellm/llms/azure/chat/o1_transformation.py +++ /dev/null @@ -1,51 +0,0 @@ -""" -Support for o1 model family - -https://platform.openai.com/docs/guides/reasoning - -Translations handled by LiteLLM: -- modalities: image => drop param (if user opts in to dropping param) -- role: system ==> translate to role 'user' -- streaming => faked by LiteLLM -- Tools, 
response_format => drop param (if user opts in to dropping param) -- Logprobs => drop param (if user opts in to dropping param) -- Temperature => drop param (if user opts in to dropping param) -""" - -from typing import Optional - -from litellm import verbose_logger -from litellm.utils import get_model_info - -from ...openai.chat.o1_transformation import OpenAIO1Config - - -class AzureOpenAIO1Config(OpenAIO1Config): - def should_fake_stream( - self, - model: Optional[str], - stream: Optional[bool], - custom_llm_provider: Optional[str] = None, - ) -> bool: - """ - Currently no Azure OpenAI models support native streaming. - """ - if stream is not True: - return False - - if model is not None: - try: - model_info = get_model_info( - model=model, custom_llm_provider=custom_llm_provider - ) - if model_info.get("supports_native_streaming") is True: - return False - except Exception as e: - verbose_logger.debug( - f"Error getting model info in AzureOpenAIO1Config: {e}" - ) - - return True - - def is_o1_model(self, model: str) -> bool: - return "o1" in model diff --git a/litellm/llms/azure/chat/o1_handler.py b/litellm/llms/azure/chat/o_series_handler.py similarity index 84% rename from litellm/llms/azure/chat/o1_handler.py rename to litellm/llms/azure/chat/o_series_handler.py index 1cb6f888c3..a2042b3e2a 100644 --- a/litellm/llms/azure/chat/o1_handler.py +++ b/litellm/llms/azure/chat/o_series_handler.py @@ -1,7 +1,7 @@ """ -Handler file for calls to Azure OpenAI's o1 family of models +Handler file for calls to Azure OpenAI's o1/o3 family of models -Written separately to handle faking streaming for o1 models. +Written separately to handle faking streaming for o1 and o3 models. 
""" from typing import Optional, Union @@ -36,7 +36,9 @@ class AzureOpenAIO1ChatCompletion(OpenAIChatCompletion): ]: # Override to use Azure-specific client initialization - if isinstance(client, OpenAI) or isinstance(client, AsyncOpenAI): + if not isinstance(client, AzureOpenAI) and not isinstance( + client, AsyncAzureOpenAI + ): client = None return get_azure_openai_client( diff --git a/litellm/llms/azure/chat/o_series_transformation.py b/litellm/llms/azure/chat/o_series_transformation.py new file mode 100644 index 0000000000..0ca3a28d23 --- /dev/null +++ b/litellm/llms/azure/chat/o_series_transformation.py @@ -0,0 +1,75 @@ +""" +Support for o1 and o3 model families + +https://platform.openai.com/docs/guides/reasoning + +Translations handled by LiteLLM: +- modalities: image => drop param (if user opts in to dropping param) +- role: system ==> translate to role 'user' +- streaming => faked by LiteLLM +- Tools, response_format => drop param (if user opts in to dropping param) +- Logprobs => drop param (if user opts in to dropping param) +- Temperature => drop param (if user opts in to dropping param) +""" + +from typing import List, Optional + +from litellm import verbose_logger +from litellm.types.llms.openai import AllMessageValues +from litellm.utils import get_model_info + +from ...openai.chat.o_series_transformation import OpenAIOSeriesConfig + + +class AzureOpenAIO1Config(OpenAIOSeriesConfig): + def should_fake_stream( + self, + model: Optional[str], + stream: Optional[bool], + custom_llm_provider: Optional[str] = None, + ) -> bool: + """ + Currently no Azure O Series models support native streaming. 
+ """ + + if stream is not True: + return False + + if ( + model and "o3" in model + ): # o3 models support streaming - https://github.com/BerriAI/litellm/issues/8274 + return False + + if model is not None: + try: + model_info = get_model_info( + model=model, custom_llm_provider=custom_llm_provider + ) # allow user to override default with model_info={"supports_native_streaming": true} + + if ( + model_info.get("supports_native_streaming") is True + ): # allow user to override default with model_info={"supports_native_streaming": true} + return False + except Exception as e: + verbose_logger.debug( + f"Error getting model info in AzureOpenAIO1Config: {e}" + ) + return True + + def is_o_series_model(self, model: str) -> bool: + return "o1" in model or "o3" in model or "o_series/" in model + + def transform_request( + self, + model: str, + messages: List[AllMessageValues], + optional_params: dict, + litellm_params: dict, + headers: dict, + ) -> dict: + model = model.replace( + "o_series/", "" + ) # handle o_series/my-random-deployment-name + return super().transform_request( + model, messages, optional_params, litellm_params, headers + ) diff --git a/litellm/llms/azure/common_utils.py b/litellm/llms/azure/common_utils.py index 2a96f5c39c..43f3480ed6 100644 --- a/litellm/llms/azure/common_utils.py +++ b/litellm/llms/azure/common_utils.py @@ -17,6 +17,7 @@ class AzureOpenAIError(BaseLLMException): request: Optional[httpx.Request] = None, response: Optional[httpx.Response] = None, headers: Optional[Union[httpx.Headers, dict]] = None, + body: Optional[dict] = None, ): super().__init__( status_code=status_code, @@ -24,6 +25,7 @@ class AzureOpenAIError(BaseLLMException): request=request, response=response, headers=headers, + body=body, ) diff --git a/litellm/llms/azure/completion/handler.py b/litellm/llms/azure/completion/handler.py index 42309bdd23..fafa5665bb 100644 --- a/litellm/llms/azure/completion/handler.py +++ b/litellm/llms/azure/completion/handler.py @@ -49,6 
+49,7 @@ class AzureTextCompletion(BaseLLM): api_version: str, api_type: str, azure_ad_token: str, + azure_ad_token_provider: Optional[Callable], print_verbose: Callable, timeout, logging_obj, @@ -130,6 +131,7 @@ class AzureTextCompletion(BaseLLM): timeout=timeout, client=client, logging_obj=logging_obj, + max_retries=max_retries, ) elif "stream" in optional_params and optional_params["stream"] is True: return self.streaming( @@ -170,6 +172,7 @@ class AzureTextCompletion(BaseLLM): "http_client": litellm.client_session, "max_retries": max_retries, "timeout": timeout, + "azure_ad_token_provider": azure_ad_token_provider, } azure_client_params = select_azure_base_url_or_endpoint( azure_client_params=azure_client_params @@ -234,17 +237,12 @@ class AzureTextCompletion(BaseLLM): timeout: Any, model_response: ModelResponse, logging_obj: Any, + max_retries: int, azure_ad_token: Optional[str] = None, client=None, # this is the AsyncAzureOpenAI ): response = None try: - max_retries = data.pop("max_retries", 2) - if not isinstance(max_retries, int): - raise AzureOpenAIError( - status_code=422, message="max retries must be an int" - ) - # init AzureOpenAI Client azure_client_params = { "api_version": api_version, diff --git a/litellm/llms/azure_ai/chat/transformation.py b/litellm/llms/azure_ai/chat/transformation.py index afedc95001..46a1a6bf9c 100644 --- a/litellm/llms/azure_ai/chat/transformation.py +++ b/litellm/llms/azure_ai/chat/transformation.py @@ -1,4 +1,5 @@ from typing import Any, List, Optional, Tuple, cast +from urllib.parse import urlparse import httpx from httpx import Response @@ -28,16 +29,29 @@ class AzureAIStudioConfig(OpenAIConfig): api_key: Optional[str] = None, api_base: Optional[str] = None, ) -> dict: - if api_base and "services.ai.azure.com" in api_base: + if api_base and self._should_use_api_key_header(api_base): headers["api-key"] = api_key else: headers["Authorization"] = f"Bearer {api_key}" return headers + def _should_use_api_key_header(self, 
api_base: str) -> bool: + """ + Returns True if the request should use `api-key` header for authentication. + """ + parsed_url = urlparse(api_base) + host = parsed_url.hostname + if host and ( + host.endswith(".services.ai.azure.com") + or host.endswith(".openai.azure.com") + ): + return True + return False + def get_complete_url( self, - api_base: str, + api_base: Optional[str], model: str, optional_params: dict, stream: Optional[bool] = None, @@ -58,6 +72,10 @@ class AzureAIStudioConfig(OpenAIConfig): - A complete URL string, e.g., "https://litellm8397336933.services.ai.azure.com/models/chat/completions?api-version=2024-05-01-preview" """ + if api_base is None: + raise ValueError( + f"api_base is required for Azure AI Studio. Please set the api_base parameter. Passed `api_base={api_base}`" + ) original_url = httpx.URL(api_base) # Extract api_version or use default diff --git a/litellm/llms/azure_ai/cost_calculator.py b/litellm/llms/azure_ai/cost_calculator.py deleted file mode 100644 index 96d7018458..0000000000 --- a/litellm/llms/azure_ai/cost_calculator.py +++ /dev/null @@ -1,32 +0,0 @@ -""" -Handles custom cost calculation for Azure AI models. - -Custom cost calculation for Azure AI models only requied for rerank. -""" - -from typing import Tuple - -from litellm.utils import get_model_info - - -def cost_per_query(model: str, num_queries: int = 1) -> Tuple[float, float]: - """ - Calculates the cost per query for a given rerank model. 
- - Input: - - model: str, the model name without provider prefix - - Returns: - Tuple[float, float] - prompt_cost_in_usd, completion_cost_in_usd - """ - model_info = get_model_info(model=model, custom_llm_provider="azure_ai") - - if ( - "input_cost_per_query" not in model_info - or model_info["input_cost_per_query"] is None - ): - return 0.0, 0.0 - - prompt_cost = model_info["input_cost_per_query"] * num_queries - - return prompt_cost, 0.0 diff --git a/litellm/llms/azure_ai/rerank/transformation.py b/litellm/llms/azure_ai/rerank/transformation.py index 4465e0d70a..842511f30d 100644 --- a/litellm/llms/azure_ai/rerank/transformation.py +++ b/litellm/llms/azure_ai/rerank/transformation.py @@ -17,7 +17,6 @@ class AzureAIRerankConfig(CohereRerankConfig): """ Azure AI Rerank - Follows the same Spec as Cohere Rerank """ - def get_complete_url(self, api_base: Optional[str], model: str) -> str: if api_base is None: raise ValueError( diff --git a/litellm/llms/base_llm/anthropic_messages/transformation.py b/litellm/llms/base_llm/anthropic_messages/transformation.py new file mode 100644 index 0000000000..7619ffbbf6 --- /dev/null +++ b/litellm/llms/base_llm/anthropic_messages/transformation.py @@ -0,0 +1,35 @@ +from abc import ABC, abstractmethod +from typing import TYPE_CHECKING, Any, Optional + +if TYPE_CHECKING: + from litellm.litellm_core_utils.litellm_logging import Logging as _LiteLLMLoggingObj + + LiteLLMLoggingObj = _LiteLLMLoggingObj +else: + LiteLLMLoggingObj = Any + + +class BaseAnthropicMessagesConfig(ABC): + @abstractmethod + def validate_environment( + self, + headers: dict, + model: str, + api_key: Optional[str] = None, + ) -> dict: + pass + + @abstractmethod + def get_complete_url(self, api_base: Optional[str], model: str) -> str: + """ + OPTIONAL + + Get the complete url for the request + + Some providers need `model` in `api_base` + """ + return api_base or "" + + @abstractmethod + def get_supported_anthropic_messages_params(self, model: str) -> list: + pass 
diff --git a/litellm/llms/base_llm/base_utils.py b/litellm/llms/base_llm/base_utils.py index 88b3115351..919cdbfd02 100644 --- a/litellm/llms/base_llm/base_utils.py +++ b/litellm/llms/base_llm/base_utils.py @@ -1,20 +1,25 @@ +""" +Utility functions for base LLM classes. +""" + +import copy from abc import ABC, abstractmethod from typing import List, Optional, Type, Union from openai.lib import _parsing, _pydantic from pydantic import BaseModel -from litellm.types.utils import ModelInfoBase +from litellm._logging import verbose_logger +from litellm.types.llms.openai import AllMessageValues +from litellm.types.utils import ProviderSpecificModelInfo class BaseLLMModelInfo(ABC): - @abstractmethod - def get_model_info( + def get_provider_info( self, model: str, - existing_model_info: Optional[ModelInfoBase] = None, - ) -> Optional[ModelInfoBase]: - pass + ) -> Optional[ProviderSpecificModelInfo]: + return None @abstractmethod def get_models(self) -> List[str]: @@ -30,6 +35,58 @@ class BaseLLMModelInfo(ABC): def get_api_base(api_base: Optional[str] = None) -> Optional[str]: pass + @staticmethod + @abstractmethod + def get_base_model(model: str) -> Optional[str]: + """ + Returns the base model name from the given model name. 
+ + Some providers like bedrock - can receive model=`invoke/anthropic.claude-3-opus-20240229-v1:0` or `converse/anthropic.claude-3-opus-20240229-v1:0` + This function will return `anthropic.claude-3-opus-20240229-v1:0` + """ + pass + + +def _dict_to_response_format_helper( + response_format: dict, ref_template: Optional[str] = None +) -> dict: + if ref_template is not None and response_format.get("type") == "json_schema": + # Deep copy to avoid modifying original + modified_format = copy.deepcopy(response_format) + schema = modified_format["json_schema"]["schema"] + + # Update all $ref values in the schema + def update_refs(schema): + stack = [(schema, [])] + visited = set() + + while stack: + obj, path = stack.pop() + obj_id = id(obj) + + if obj_id in visited: + continue + visited.add(obj_id) + + if isinstance(obj, dict): + if "$ref" in obj: + ref_path = obj["$ref"] + model_name = ref_path.split("/")[-1] + obj["$ref"] = ref_template.format(model=model_name) + + for k, v in obj.items(): + if isinstance(v, (dict, list)): + stack.append((v, path + [k])) + + elif isinstance(obj, list): + for i, item in enumerate(obj): + if isinstance(item, (dict, list)): + stack.append((item, path + [i])) + + update_refs(schema) + return modified_format + return response_format + def type_to_response_format_param( response_format: Optional[Union[Type[BaseModel], dict]], @@ -44,7 +101,7 @@ def type_to_response_format_param( return None if isinstance(response_format, dict): - return response_format + return _dict_to_response_format_helper(response_format, ref_template) # type checkers don't narrow the negation of a `TypeGuard` as it isn't # a safe default behaviour but we know that at this point the `response_format` @@ -65,3 +122,21 @@ def type_to_response_format_param( "strict": True, }, } + + +def map_developer_role_to_system_role( + messages: List[AllMessageValues], +) -> List[AllMessageValues]: + """ + Translate `developer` role to `system` role for non-OpenAI providers. 
+ """ + new_messages: List[AllMessageValues] = [] + for m in messages: + if m["role"] == "developer": + verbose_logger.debug( + "Translating developer role to system role for non-OpenAI providers." + ) # ensure user knows what's happening with their input. + new_messages.append({"role": "system", "content": m["content"]}) + else: + new_messages.append(m) + return new_messages diff --git a/litellm/llms/base_llm/chat/transformation.py b/litellm/llms/base_llm/chat/transformation.py index 85ca3fe8b9..8327a10464 100644 --- a/litellm/llms/base_llm/chat/transformation.py +++ b/litellm/llms/base_llm/chat/transformation.py @@ -18,10 +18,22 @@ from typing import ( import httpx from pydantic import BaseModel -from litellm.types.llms.openai import AllMessageValues +from litellm.constants import RESPONSE_FORMAT_TOOL_NAME +from litellm.llms.custom_httpx.http_handler import AsyncHTTPHandler, HTTPHandler +from litellm.types.llms.openai import ( + AllMessageValues, + ChatCompletionToolChoiceFunctionParam, + ChatCompletionToolChoiceObjectParam, + ChatCompletionToolParam, + ChatCompletionToolParamFunctionChunk, +) from litellm.types.utils import ModelResponse +from litellm.utils import CustomStreamWrapper -from ..base_utils import type_to_response_format_param +from ..base_utils import ( + map_developer_role_to_system_role, + type_to_response_format_param, +) if TYPE_CHECKING: from litellm.litellm_core_utils.litellm_logging import Logging as _LiteLLMLoggingObj @@ -39,6 +51,7 @@ class BaseLLMException(Exception): headers: Optional[Union[dict, httpx.Headers]] = None, request: Optional[httpx.Request] = None, response: Optional[httpx.Response] = None, + body: Optional[dict] = None, ): self.status_code = status_code self.message: str = message @@ -55,6 +68,7 @@ class BaseLLMException(Exception): self.response = httpx.Response( status_code=status_code, request=self.request ) + self.body = body super().__init__( self.message ) # Call the base class constructor with the parameters it needs 
@@ -99,6 +113,30 @@ class BaseConfig(ABC): """ return False + def _add_tools_to_optional_params(self, optional_params: dict, tools: List) -> dict: + """ + Helper util to add tools to optional_params. + """ + if "tools" not in optional_params: + optional_params["tools"] = tools + else: + optional_params["tools"] = [ + *optional_params["tools"], + *tools, + ] + return optional_params + + def translate_developer_role_to_system_role( + self, + messages: List[AllMessageValues], + ) -> List[AllMessageValues]: + """ + Translate `developer` role to `system` role for non-OpenAI providers. + + Overriden by OpenAI/Azure + """ + return map_developer_role_to_system_role(messages=messages) + def should_retry_llm_api_inside_llm_translation_on_http_error( self, e: httpx.HTTPStatusError, litellm_params: dict ) -> bool: @@ -130,6 +168,57 @@ class BaseConfig(ABC): def get_supported_openai_params(self, model: str) -> list: pass + def _add_response_format_to_tools( + self, + optional_params: dict, + value: dict, + is_response_format_supported: bool, + enforce_tool_choice: bool = True, + ) -> dict: + """ + Follow similar approach to anthropic - translate to a single tool call. + + When using tools in this way: - https://docs.anthropic.com/en/docs/build-with-claude/tool-use#json-mode + - You usually want to provide a single tool + - You should set tool_choice (see Forcing tool use) to instruct the model to explicitly use that tool + - Remember that the model will pass the input to the tool, so the name of the tool and description should be from the model’s perspective. + + Add response format to tools + + This is used to translate response_format to a tool call, for models/APIs that don't support response_format directly. 
+ """ + json_schema: Optional[dict] = None + if "response_schema" in value: + json_schema = value["response_schema"] + elif "json_schema" in value: + json_schema = value["json_schema"]["schema"] + + if json_schema and not is_response_format_supported: + + _tool_choice = ChatCompletionToolChoiceObjectParam( + type="function", + function=ChatCompletionToolChoiceFunctionParam( + name=RESPONSE_FORMAT_TOOL_NAME + ), + ) + + _tool = ChatCompletionToolParam( + type="function", + function=ChatCompletionToolParamFunctionChunk( + name=RESPONSE_FORMAT_TOOL_NAME, parameters=json_schema + ), + ) + + optional_params.setdefault("tools", []) + optional_params["tools"].append(_tool) + if enforce_tool_choice: + optional_params["tool_choice"] = _tool_choice + + optional_params["json_mode"] = True + elif is_response_format_supported: + optional_params["response_format"] = value + return optional_params + @abstractmethod def map_openai_params( self, @@ -152,9 +241,33 @@ class BaseConfig(ABC): ) -> dict: pass + def sign_request( + self, + headers: dict, + optional_params: dict, + request_data: dict, + api_base: str, + model: Optional[str] = None, + stream: Optional[bool] = None, + fake_stream: Optional[bool] = None, + ) -> dict: + """ + Some providers like Bedrock require signing the request. The sign request funtion needs access to `request_data` and `complete_url` + Args: + headers: dict + optional_params: dict + request_data: dict - the request body being sent in http request + api_base: str - the complete url being sent in http request + Returns: + dict - the signed headers + + Update the headers with the signed headers in this function. The return values will be sent as headers in the http request. 
+ """ + return headers + def get_complete_url( self, - api_base: str, + api_base: Optional[str], model: str, optional_params: dict, stream: Optional[bool] = None, @@ -166,6 +279,8 @@ class BaseConfig(ABC): Some providers need `model` in `api_base` """ + if api_base is None: + raise ValueError("api_base is required") return api_base @abstractmethod @@ -209,3 +324,48 @@ class BaseConfig(ABC): json_mode: Optional[bool] = False, ) -> Any: pass + + def get_async_custom_stream_wrapper( + self, + model: str, + custom_llm_provider: str, + logging_obj: LiteLLMLoggingObj, + api_base: str, + headers: dict, + data: dict, + messages: list, + client: Optional[AsyncHTTPHandler] = None, + json_mode: Optional[bool] = None, + ) -> CustomStreamWrapper: + raise NotImplementedError + + def get_sync_custom_stream_wrapper( + self, + model: str, + custom_llm_provider: str, + logging_obj: LiteLLMLoggingObj, + api_base: str, + headers: dict, + data: dict, + messages: list, + client: Optional[Union[HTTPHandler, AsyncHTTPHandler]] = None, + json_mode: Optional[bool] = None, + ) -> CustomStreamWrapper: + raise NotImplementedError + + @property + def custom_llm_provider(self) -> Optional[str]: + return None + + @property + def has_custom_stream_wrapper(self) -> bool: + return False + + @property + def supports_stream_param_in_request_body(self) -> bool: + """ + Some providers like Bedrock invoke do not support the stream parameter in the request body. + + By default, this is true for almost all providers. 
+ """ + return True diff --git a/litellm/llms/base_llm/rerank/transformation.py b/litellm/llms/base_llm/rerank/transformation.py index d956c9a555..8701fe57bf 100644 --- a/litellm/llms/base_llm/rerank/transformation.py +++ b/litellm/llms/base_llm/rerank/transformation.py @@ -1,9 +1,10 @@ from abc import ABC, abstractmethod -from typing import TYPE_CHECKING, Any, Dict, List, Optional, Union +from typing import TYPE_CHECKING, Any, Dict, List, Optional, Tuple, Union import httpx -from litellm.types.rerank import OptionalRerankParams, RerankResponse +from litellm.types.rerank import OptionalRerankParams, RerankBilledUnits, RerankResponse +from litellm.types.utils import ModelInfo from ..chat.transformation import BaseLLMException @@ -66,7 +67,7 @@ class BaseRerankConfig(ABC): @abstractmethod def map_cohere_rerank_params( self, - non_default_params: Optional[dict], + non_default_params: dict, model: str, drop_params: bool, query: str, @@ -76,11 +77,52 @@ class BaseRerankConfig(ABC): rank_fields: Optional[List[str]] = None, return_documents: Optional[bool] = True, max_chunks_per_doc: Optional[int] = None, + max_tokens_per_doc: Optional[int] = None, ) -> OptionalRerankParams: pass - @abstractmethod def get_error_class( self, error_message: str, status_code: int, headers: Union[dict, httpx.Headers] ) -> BaseLLMException: - pass + raise BaseLLMException( + status_code=status_code, + message=error_message, + headers=headers, + ) + + def calculate_rerank_cost( + self, + model: str, + custom_llm_provider: Optional[str] = None, + billed_units: Optional[RerankBilledUnits] = None, + model_info: Optional[ModelInfo] = None, + ) -> Tuple[float, float]: + """ + Calculates the cost per query for a given rerank model. + + Input: + - model: str, the model name without provider prefix + - custom_llm_provider: str, the provider used for the model. If provided, used to check if the litellm model info is for that provider. 
+ - num_queries: int, the number of queries to calculate the cost for + - model_info: ModelInfo, the model info for the given model + + Returns: + Tuple[float, float] - prompt_cost_in_usd, completion_cost_in_usd + """ + + if ( + model_info is None + or "input_cost_per_query" not in model_info + or model_info["input_cost_per_query"] is None + or billed_units is None + ): + return 0.0, 0.0 + + search_units = billed_units.get("search_units") + + if search_units is None: + return 0.0, 0.0 + + prompt_cost = model_info["input_cost_per_query"] * search_units + + return prompt_cost, 0.0 diff --git a/litellm/llms/baseten.py b/litellm/llms/baseten.py index 7bcf2fbafb..e1d513d6d1 100644 --- a/litellm/llms/baseten.py +++ b/litellm/llms/baseten.py @@ -142,7 +142,7 @@ def completion( sum_logprob = 0 for token in completion_response[0]["details"]["tokens"]: sum_logprob += token["logprob"] - model_response.choices[0].logprobs = sum_logprob + model_response.choices[0].logprobs = sum_logprob # type: ignore else: raise BasetenError( message=f"Unable to parse response. 
Original response: {response.text}", diff --git a/litellm/llms/bedrock/base_aws_llm.py b/litellm/llms/bedrock/base_aws_llm.py index 8c64203fd7..86b47675d4 100644 --- a/litellm/llms/bedrock/base_aws_llm.py +++ b/litellm/llms/bedrock/base_aws_llm.py @@ -2,14 +2,16 @@ import hashlib import json import os from datetime import datetime -from typing import TYPE_CHECKING, Any, Dict, List, Optional, Tuple +from typing import TYPE_CHECKING, Any, Dict, List, Optional, Tuple, cast, get_args import httpx from pydantic import BaseModel from litellm._logging import verbose_logger from litellm.caching.caching import DualCache -from litellm.secret_managers.main import get_secret, get_secret_str +from litellm.constants import BEDROCK_INVOKE_PROVIDERS_LITERAL +from litellm.litellm_core_utils.dd_tracing import tracer +from litellm.secret_managers.main import get_secret if TYPE_CHECKING: from botocore.awsrequest import AWSPreparedRequest @@ -42,6 +44,18 @@ class BaseAWSLLM: def __init__(self) -> None: self.iam_cache = DualCache() super().__init__() + self.aws_authentication_params = [ + "aws_access_key_id", + "aws_secret_access_key", + "aws_session_token", + "aws_region_name", + "aws_session_name", + "aws_profile_name", + "aws_role_name", + "aws_web_identity_token", + "aws_sts_endpoint", + "aws_bedrock_runtime_endpoint", + ] def get_cache_key(self, credential_args: Dict[str, Optional[str]]) -> str: """ @@ -51,6 +65,7 @@ class BaseAWSLLM: credential_str = json.dumps(credential_args, sort_keys=True) return hashlib.sha256(credential_str.encode()).hexdigest() + @tracer.wrap() def get_credentials( self, aws_access_key_id: Optional[str] = None, @@ -67,17 +82,6 @@ class BaseAWSLLM: Return a boto3.Credentials object """ ## CHECK IS 'os.environ/' passed in - param_names = [ - "aws_access_key_id", - "aws_secret_access_key", - "aws_session_token", - "aws_region_name", - "aws_session_name", - "aws_profile_name", - "aws_role_name", - "aws_web_identity_token", - "aws_sts_endpoint", - ] 
params_to_check: List[Optional[str]] = [ aws_access_key_id, aws_secret_access_key, @@ -97,7 +101,7 @@ class BaseAWSLLM: if _v is not None and isinstance(_v, str): params_to_check[i] = _v elif param is None: # check if uppercase value in env - key = param_names[i] + key = self.aws_authentication_params[i] if key.upper() in os.environ: params_to_check[i] = os.getenv(key) @@ -199,6 +203,116 @@ class BaseAWSLLM: self.iam_cache.set_cache(cache_key, credentials, ttl=_cache_ttl) return credentials + def _get_aws_region_from_model_arn(self, model: Optional[str]) -> Optional[str]: + try: + # First check if the string contains the expected prefix + if not isinstance(model, str) or "arn:aws:bedrock" not in model: + return None + + # Split the ARN and check if we have enough parts + parts = model.split(":") + if len(parts) < 4: + return None + + # Get the region from the correct position + region = parts[3] + if not region: # Check if region is empty + return None + + return region + except Exception: + # Catch any unexpected errors and return None + return None + + @staticmethod + def _get_provider_from_model_path( + model_path: str, + ) -> Optional[BEDROCK_INVOKE_PROVIDERS_LITERAL]: + """ + Helper function to get the provider from a model path with format: provider/model-name + + Args: + model_path (str): The model path (e.g., 'llama/arn:aws:bedrock:us-east-1:086734376398:imported-model/r4c4kewx2s0n' or 'anthropic/model-name') + + Returns: + Optional[str]: The provider name, or None if no valid provider found + """ + parts = model_path.split("/") + if len(parts) >= 1: + provider = parts[0] + if provider in get_args(BEDROCK_INVOKE_PROVIDERS_LITERAL): + return cast(BEDROCK_INVOKE_PROVIDERS_LITERAL, provider) + return None + + @staticmethod + def get_bedrock_invoke_provider( + model: str, + ) -> Optional[BEDROCK_INVOKE_PROVIDERS_LITERAL]: + """ + Helper function to get the bedrock provider from the model + + handles 3 scenarions: + 1. 
model=invoke/anthropic.claude-3-5-sonnet-20240620-v1:0 -> Returns `anthropic` + 2. model=anthropic.claude-3-5-sonnet-20240620-v1:0 -> Returns `anthropic` + 3. model=llama/arn:aws:bedrock:us-east-1:086734376398:imported-model/r4c4kewx2s0n -> Returns `llama` + 4. model=us.amazon.nova-pro-v1:0 -> Returns `nova` + """ + if model.startswith("invoke/"): + model = model.replace("invoke/", "", 1) + + _split_model = model.split(".")[0] + if _split_model in get_args(BEDROCK_INVOKE_PROVIDERS_LITERAL): + return cast(BEDROCK_INVOKE_PROVIDERS_LITERAL, _split_model) + + # If not a known provider, check for pattern with two slashes + provider = BaseAWSLLM._get_provider_from_model_path(model) + if provider is not None: + return provider + + # check if provider == "nova" + if "nova" in model: + return "nova" + else: + for provider in get_args(BEDROCK_INVOKE_PROVIDERS_LITERAL): + if provider in model: + return provider + return None + + def _get_aws_region_name( + self, optional_params: dict, model: Optional[str] = None + ) -> str: + """ + Get the AWS region name from the environment variables + """ + aws_region_name = optional_params.get("aws_region_name", None) + ### SET REGION NAME ### + if aws_region_name is None: + # check model arn # + aws_region_name = self._get_aws_region_from_model_arn(model) + # check env # + litellm_aws_region_name = get_secret("AWS_REGION_NAME", None) + + if ( + aws_region_name is None + and litellm_aws_region_name is not None + and isinstance(litellm_aws_region_name, str) + ): + aws_region_name = litellm_aws_region_name + + standard_aws_region_name = get_secret("AWS_REGION", None) + if ( + aws_region_name is None + and standard_aws_region_name is not None + and isinstance(standard_aws_region_name, str) + ): + aws_region_name = standard_aws_region_name + + if aws_region_name is None: + aws_region_name = "us-west-2" + + return aws_region_name + + @tracer.wrap() def _auth_with_web_identity_token( self, aws_web_identity_token: str, @@ -229,11 +343,12 @@ 
class BaseAWSLLM: status_code=401, ) - sts_client = boto3.client( - "sts", - region_name=aws_region_name, - endpoint_url=sts_endpoint, - ) + with tracer.trace("boto3.client(sts)"): + sts_client = boto3.client( + "sts", + region_name=aws_region_name, + endpoint_url=sts_endpoint, + ) # https://docs.aws.amazon.com/STS/latest/APIReference/API_AssumeRoleWithWebIdentity.html # https://boto3.amazonaws.com/v1/documentation/api/latest/reference/services/sts/client/assume_role_with_web_identity.html @@ -257,11 +372,13 @@ class BaseAWSLLM: f"The policy size is greater than 75% of the allowed size, PackedPolicySize: {sts_response['PackedPolicySize']}" ) - session = boto3.Session(**iam_creds_dict) + with tracer.trace("boto3.Session(**iam_creds_dict)"): + session = boto3.Session(**iam_creds_dict) iam_creds = session.get_credentials() return iam_creds, self._get_default_ttl_for_boto3_credentials() + @tracer.wrap() def _auth_with_aws_role( self, aws_access_key_id: Optional[str], @@ -275,11 +392,12 @@ class BaseAWSLLM: import boto3 from botocore.credentials import Credentials - sts_client = boto3.client( - "sts", - aws_access_key_id=aws_access_key_id, # [OPTIONAL] - aws_secret_access_key=aws_secret_access_key, # [OPTIONAL] - ) + with tracer.trace("boto3.client(sts)"): + sts_client = boto3.client( + "sts", + aws_access_key_id=aws_access_key_id, # [OPTIONAL] + aws_secret_access_key=aws_secret_access_key, # [OPTIONAL] + ) sts_response = sts_client.assume_role( RoleArn=aws_role_name, RoleSessionName=aws_session_name @@ -287,7 +405,6 @@ class BaseAWSLLM: # Extract the credentials from the response and convert to Session Credentials sts_credentials = sts_response["Credentials"] - credentials = Credentials( access_key=sts_credentials["AccessKeyId"], secret_key=sts_credentials["SecretAccessKey"], @@ -300,6 +417,7 @@ class BaseAWSLLM: sts_ttl = (sts_expiry - current_time).total_seconds() - 60 return credentials, sts_ttl + @tracer.wrap() def _auth_with_aws_profile( self, aws_profile_name: 
str ) -> Tuple[Credentials, Optional[int]]: @@ -309,9 +427,11 @@ class BaseAWSLLM: import boto3 # uses auth values from AWS profile usually stored in ~/.aws/credentials - client = boto3.Session(profile_name=aws_profile_name) - return client.get_credentials(), None + with tracer.trace("boto3.Session(profile_name=aws_profile_name)"): + client = boto3.Session(profile_name=aws_profile_name) + return client.get_credentials(), None + @tracer.wrap() def _auth_with_aws_session_token( self, aws_access_key_id: str, @@ -332,6 +452,7 @@ class BaseAWSLLM: return credentials, None + @tracer.wrap() def _auth_with_access_key_and_secret_key( self, aws_access_key_id: str, @@ -344,26 +465,31 @@ class BaseAWSLLM: import boto3 # Check if credentials are already in cache. These credentials have no expiry time. - - session = boto3.Session( - aws_access_key_id=aws_access_key_id, - aws_secret_access_key=aws_secret_access_key, - region_name=aws_region_name, - ) + with tracer.trace( + "boto3.Session(aws_access_key_id=aws_access_key_id, aws_secret_access_key=aws_secret_access_key, region_name=aws_region_name)" + ): + session = boto3.Session( + aws_access_key_id=aws_access_key_id, + aws_secret_access_key=aws_secret_access_key, + region_name=aws_region_name, + ) credentials = session.get_credentials() return credentials, self._get_default_ttl_for_boto3_credentials() + @tracer.wrap() def _auth_with_env_vars(self) -> Tuple[Credentials, Optional[int]]: """ Authenticate with AWS Environment Variables """ import boto3 - session = boto3.Session() - credentials = session.get_credentials() - return credentials, None + with tracer.trace("boto3.Session()"): + session = boto3.Session() + credentials = session.get_credentials() + return credentials, None + @tracer.wrap() def _get_default_ttl_for_boto3_credentials(self) -> int: """ Get the default TTL for boto3 credentials @@ -407,7 +533,7 @@ class BaseAWSLLM: return endpoint_url, proxy_endpoint_url def _get_boto_credentials_from_optional_params( - self, 
optional_params: dict + self, optional_params: dict, model: Optional[str] = None ) -> Boto3CredentialsInfo: """ Get boto3 credentials from optional params @@ -427,7 +553,8 @@ class BaseAWSLLM: aws_secret_access_key = optional_params.pop("aws_secret_access_key", None) aws_access_key_id = optional_params.pop("aws_access_key_id", None) aws_session_token = optional_params.pop("aws_session_token", None) - aws_region_name = optional_params.pop("aws_region_name", None) + aws_region_name = self._get_aws_region_name(optional_params, model) + optional_params.pop("aws_region_name", None) aws_role_name = optional_params.pop("aws_role_name", None) aws_session_name = optional_params.pop("aws_session_name", None) aws_profile_name = optional_params.pop("aws_profile_name", None) @@ -437,25 +564,6 @@ class BaseAWSLLM: "aws_bedrock_runtime_endpoint", None ) # https://bedrock-runtime.{region_name}.amazonaws.com - ### SET REGION NAME ### - if aws_region_name is None: - # check env # - litellm_aws_region_name = get_secret_str("AWS_REGION_NAME", None) - - if litellm_aws_region_name is not None and isinstance( - litellm_aws_region_name, str - ): - aws_region_name = litellm_aws_region_name - - standard_aws_region_name = get_secret_str("AWS_REGION", None) - if standard_aws_region_name is not None and isinstance( - standard_aws_region_name, str - ): - aws_region_name = standard_aws_region_name - - if aws_region_name is None: - aws_region_name = "us-west-2" - credentials: Credentials = self.get_credentials( aws_access_key_id=aws_access_key_id, aws_secret_access_key=aws_secret_access_key, @@ -474,6 +582,7 @@ class BaseAWSLLM: aws_bedrock_runtime_endpoint=aws_bedrock_runtime_endpoint, ) + @tracer.wrap() def get_request_headers( self, credentials: Credentials, diff --git a/litellm/llms/bedrock/chat/converse_handler.py b/litellm/llms/bedrock/chat/converse_handler.py index 57cccad7e0..b70c15b3e1 100644 --- a/litellm/llms/bedrock/chat/converse_handler.py +++ 
b/litellm/llms/bedrock/chat/converse_handler.py @@ -1,6 +1,6 @@ import json import urllib -from typing import Any, Callable, Optional, Union +from typing import Any, Optional, Union import httpx @@ -60,7 +60,6 @@ def make_sync_call( api_key="", data=data, messages=messages, - print_verbose=litellm.print_verbose, encoding=litellm.encoding, ) # type: ignore completion_stream: Any = MockResponseIterator( @@ -102,7 +101,6 @@ class BedrockConverseLLM(BaseAWSLLM): messages: list, api_base: str, model_response: ModelResponse, - print_verbose: Callable, timeout: Optional[Union[float, httpx.Timeout]], encoding, logging_obj, @@ -170,7 +168,6 @@ class BedrockConverseLLM(BaseAWSLLM): messages: list, api_base: str, model_response: ModelResponse, - print_verbose: Callable, timeout: Optional[Union[float, httpx.Timeout]], encoding, logging_obj: LiteLLMLoggingObject, @@ -247,7 +244,6 @@ class BedrockConverseLLM(BaseAWSLLM): api_key="", data=data, messages=messages, - print_verbose=print_verbose, optional_params=optional_params, encoding=encoding, ) @@ -259,7 +255,6 @@ class BedrockConverseLLM(BaseAWSLLM): api_base: Optional[str], custom_prompt_dict: dict, model_response: ModelResponse, - print_verbose: Callable, encoding, logging_obj: LiteLLMLoggingObject, optional_params: dict, @@ -271,11 +266,6 @@ class BedrockConverseLLM(BaseAWSLLM): client: Optional[Union[AsyncHTTPHandler, HTTPHandler]] = None, ): - try: - from botocore.credentials import Credentials - except ImportError: - raise ImportError("Missing boto3 to call bedrock. 
Run 'pip install boto3'.") - ## SETUP ## stream = optional_params.pop("stream", None) modelId = optional_params.pop("model_id", None) @@ -367,7 +357,6 @@ class BedrockConverseLLM(BaseAWSLLM): messages=messages, api_base=proxy_endpoint_url, model_response=model_response, - print_verbose=print_verbose, encoding=encoding, logging_obj=logging_obj, optional_params=optional_params, @@ -387,7 +376,6 @@ class BedrockConverseLLM(BaseAWSLLM): messages=messages, api_base=proxy_endpoint_url, model_response=model_response, - print_verbose=print_verbose, encoding=encoding, logging_obj=logging_obj, optional_params=optional_params, @@ -489,7 +477,6 @@ class BedrockConverseLLM(BaseAWSLLM): api_key="", data=data, messages=messages, - print_verbose=print_verbose, optional_params=optional_params, encoding=encoding, ) diff --git a/litellm/llms/bedrock/chat/converse_like/handler.py b/litellm/llms/bedrock/chat/converse_like/handler.py new file mode 100644 index 0000000000..c26886b713 --- /dev/null +++ b/litellm/llms/bedrock/chat/converse_like/handler.py @@ -0,0 +1,5 @@ +""" +Uses base_llm_http_handler to call the 'converse like' endpoint. + +Relevant issue: https://github.com/BerriAI/litellm/issues/8085 +""" diff --git a/litellm/llms/bedrock/chat/converse_like/transformation.py b/litellm/llms/bedrock/chat/converse_like/transformation.py new file mode 100644 index 0000000000..7833202242 --- /dev/null +++ b/litellm/llms/bedrock/chat/converse_like/transformation.py @@ -0,0 +1,3 @@ +""" +Uses `converse_transformation.py` to transform the messages to the format required by Bedrock Converse. 
+""" diff --git a/litellm/llms/bedrock/chat/converse_transformation.py b/litellm/llms/bedrock/chat/converse_transformation.py index 52c42b790f..0b0d55f23d 100644 --- a/litellm/llms/bedrock/chat/converse_transformation.py +++ b/litellm/llms/bedrock/chat/converse_transformation.py @@ -5,23 +5,25 @@ Translating between OpenAI's `/chat/completion` format and Amazon's `/converse` import copy import time import types -from typing import List, Literal, Optional, Tuple, Union, overload +from typing import List, Literal, Optional, Tuple, Union, cast, overload import httpx import litellm -from litellm.litellm_core_utils.asyncify import asyncify from litellm.litellm_core_utils.core_helpers import map_finish_reason from litellm.litellm_core_utils.litellm_logging import Logging from litellm.litellm_core_utils.prompt_templates.factory import ( + BedrockConverseMessagesProcessor, _bedrock_converse_messages_pt, _bedrock_tools_pt, ) +from litellm.llms.base_llm.chat.transformation import BaseConfig, BaseLLMException from litellm.types.llms.bedrock import * from litellm.types.llms.openai import ( AllMessageValues, ChatCompletionResponseMessage, ChatCompletionSystemMessage, + ChatCompletionThinkingBlock, ChatCompletionToolCallChunk, ChatCompletionToolCallFunctionChunk, ChatCompletionToolParam, @@ -30,19 +32,12 @@ from litellm.types.llms.openai import ( OpenAIMessageContentListBlock, ) from litellm.types.utils import ModelResponse, Usage -from litellm.utils import CustomStreamWrapper, add_dummy_tool, has_tool_call_blocks +from litellm.utils import add_dummy_tool, has_tool_call_blocks -from ..common_utils import ( - AmazonBedrockGlobalConfig, - BedrockError, - get_bedrock_tool_name, -) - -global_config = AmazonBedrockGlobalConfig() -all_global_regions = global_config.get_all_regions() +from ..common_utils import BedrockError, BedrockModelInfo, get_bedrock_tool_name -class AmazonConverseConfig: +class AmazonConverseConfig(BaseConfig): """ Reference - 
https://docs.aws.amazon.com/bedrock/latest/APIReference/API_runtime_Converse.html #2 - https://docs.aws.amazon.com/bedrock/latest/userguide/conversation-inference.html#conversation-inference-supported-models-features @@ -62,11 +57,15 @@ class AmazonConverseConfig: topP: Optional[int] = None, topK: Optional[int] = None, ) -> None: - locals_ = locals() + locals_ = locals().copy() for key, value in locals_.items(): if key != "self" and value is not None: setattr(self.__class__, key, value) + @property + def custom_llm_provider(self) -> Optional[str]: + return "bedrock_converse" + @classmethod def get_config(cls): return { @@ -99,7 +98,7 @@ class AmazonConverseConfig: ] ## Filter out 'cross-region' from model name - base_model = self._get_base_model(model) + base_model = BedrockModelInfo.get_base_model(model) if ( base_model.startswith("anthropic") @@ -107,14 +106,21 @@ class AmazonConverseConfig: or base_model.startswith("cohere") or base_model.startswith("meta.llama3-1") or base_model.startswith("meta.llama3-2") + or base_model.startswith("meta.llama3-3") or base_model.startswith("amazon.nova") ): supported_params.append("tools") - if base_model.startswith("anthropic") or base_model.startswith("mistral"): + if litellm.utils.supports_tool_choice( + model=model, custom_llm_provider=self.custom_llm_provider + ): # only anthropic and mistral support tool choice config. otherwise (E.g. 
cohere) will fail the call - https://docs.aws.amazon.com/bedrock/latest/APIReference/API_runtime_ToolChoice.html supported_params.append("tool_choice") + if ( + "claude-3-7" in model + ): # [TODO]: move to a 'supports_reasoning_content' param from model cost map + supported_params.append("thinking") return supported_params def map_tool_choice_values( @@ -154,10 +160,14 @@ class AmazonConverseConfig: def get_supported_document_types(self) -> List[str]: return ["pdf", "csv", "doc", "docx", "xls", "xlsx", "html", "txt", "md"] + def get_all_supported_content_types(self) -> List[str]: + return self.get_supported_image_types() + self.get_supported_document_types() + def _create_json_tool_call_for_response_format( self, json_schema: Optional[dict] = None, schema_name: str = "json_tool_call", + description: Optional[str] = None, ) -> ChatCompletionToolParam: """ Handles creating a tool call for getting responses in JSON format. @@ -180,32 +190,47 @@ class AmazonConverseConfig: else: _input_schema = json_schema + tool_param_function_chunk = ChatCompletionToolParamFunctionChunk( + name=schema_name, parameters=_input_schema + ) + if description: + tool_param_function_chunk["description"] = description + _tool = ChatCompletionToolParam( type="function", - function=ChatCompletionToolParamFunctionChunk( - name=schema_name, parameters=_input_schema - ), + function=tool_param_function_chunk, ) return _tool def map_openai_params( self, - model: str, non_default_params: dict, optional_params: dict, + model: str, drop_params: bool, messages: Optional[List[AllMessageValues]] = None, ) -> dict: for param, value in non_default_params.items(): - if param == "response_format": + if param == "response_format" and isinstance(value, dict): + + ignore_response_format_types = ["text"] + if value["type"] in ignore_response_format_types: # value is a no-op + continue + json_schema: Optional[dict] = None schema_name: str = "" + description: Optional[str] = None if "response_schema" in value: 
json_schema = value["response_schema"] schema_name = "json_tool_call" elif "json_schema" in value: json_schema = value["json_schema"]["schema"] schema_name = value["json_schema"]["name"] + description = value["json_schema"].get("description") + + if "type" in value and value["type"] == "text": + continue + """ Follow similar approach to anthropic - translate to a single tool call. @@ -214,17 +239,22 @@ class AmazonConverseConfig: - You should set tool_choice (see Forcing tool use) to instruct the model to explicitly use that tool - Remember that the model will pass the input to the tool, so the name of the tool and description should be from the model’s perspective. """ - _tool_choice = {"name": schema_name, "type": "tool"} _tool = self._create_json_tool_call_for_response_format( json_schema=json_schema, schema_name=schema_name if schema_name != "" else "json_tool_call", + description=description, ) - optional_params["tools"] = [_tool] - optional_params["tool_choice"] = ToolChoiceValuesBlock( - tool=SpecificToolChoiceBlock( - name=schema_name if schema_name != "" else "json_tool_call" + optional_params = self._add_tools_to_optional_params( + optional_params=optional_params, tools=[_tool] + ) + if litellm.utils.supports_tool_choice( + model=model, custom_llm_provider=self.custom_llm_provider + ): + optional_params["tool_choice"] = ToolChoiceValuesBlock( + tool=SpecificToolChoiceBlock( + name=schema_name if schema_name != "" else "json_tool_call" + ) ) - ) optional_params["json_mode"] = True if non_default_params.get("stream", False) is True: optional_params["fake_stream"] = True @@ -242,34 +272,18 @@ class AmazonConverseConfig: optional_params["temperature"] = value if param == "top_p": optional_params["topP"] = value - if param == "tools": - optional_params["tools"] = value + if param == "tools" and isinstance(value, list): + optional_params = self._add_tools_to_optional_params( + optional_params=optional_params, tools=value + ) if param == "tool_choice": 
_tool_choice_value = self.map_tool_choice_values( model=model, tool_choice=value, drop_params=drop_params # type: ignore ) if _tool_choice_value is not None: optional_params["tool_choice"] = _tool_choice_value - - ## VALIDATE REQUEST - """ - Bedrock doesn't support tool calling without `tools=` param specified. - """ - if ( - "tools" not in non_default_params - and messages is not None - and has_tool_call_blocks(messages) - ): - if litellm.modify_params: - optional_params["tools"] = add_dummy_tool( - custom_llm_provider="bedrock_converse" - ) - else: - raise litellm.UnsupportedParamsError( - message="Bedrock doesn't support tool calling without `tools=` param specified. Pass `tools=` param OR set `litellm.modify_params = True` // `litellm_settings::modify_params: True` to add dummy tool to the request.", - model="", - llm_provider="bedrock", - ) + if param == "thinking": + optional_params["thinking"] = value return optional_params @overload @@ -348,39 +362,76 @@ class AmazonConverseConfig: inference_params["topK"] = inference_params.pop("top_k") return InferenceConfig(**inference_params) + def _handle_top_k_value(self, model: str, inference_params: dict) -> dict: + base_model = BedrockModelInfo.get_base_model(model) + + val_top_k = None + if "topK" in inference_params: + val_top_k = inference_params.pop("topK") + elif "top_k" in inference_params: + val_top_k = inference_params.pop("top_k") + + if val_top_k: + if base_model.startswith("anthropic"): + return {"top_k": val_top_k} + if base_model.startswith("amazon.nova"): + return {"inferenceConfig": {"topK": val_top_k}} + + return {} + def _transform_request_helper( - self, system_content_blocks: List[SystemContentBlock], optional_params: dict + self, + model: str, + system_content_blocks: List[SystemContentBlock], + optional_params: dict, + messages: Optional[List[AllMessageValues]] = None, ) -> CommonRequestObject: + + ## VALIDATE REQUEST + """ + Bedrock doesn't support tool calling without `tools=` param 
specified. + """ + if ( + "tools" not in optional_params + and messages is not None + and has_tool_call_blocks(messages) + ): + if litellm.modify_params: + optional_params["tools"] = add_dummy_tool( + custom_llm_provider="bedrock_converse" + ) + else: + raise litellm.UnsupportedParamsError( + message="Bedrock doesn't support tool calling without `tools=` param specified. Pass `tools=` param OR set `litellm.modify_params = True` // `litellm_settings::modify_params: True` to add dummy tool to the request.", + model="", + llm_provider="bedrock", + ) + inference_params = copy.deepcopy(optional_params) - additional_request_keys = [] - additional_request_params = {} supported_converse_params = list( AmazonConverseConfig.__annotations__.keys() ) + ["top_k"] supported_tool_call_params = ["tools", "tool_choice"] supported_guardrail_params = ["guardrailConfig"] + total_supported_params = ( + supported_converse_params + + supported_tool_call_params + + supported_guardrail_params + ) inference_params.pop("json_mode", None) # used for handling json_schema - # send all model-specific params in 'additional_request_params' - for k, v in inference_params.items(): - if ( - k not in supported_converse_params - and k not in supported_tool_call_params - and k not in supported_guardrail_params - ): - additional_request_params[k] = v - additional_request_keys.append(k) - for key in additional_request_keys: - inference_params.pop(key, None) + # keep supported params in 'inference_params', and set all model-specific params in 'additional_request_params' + additional_request_params = { + k: v for k, v in inference_params.items() if k not in total_supported_params + } + inference_params = { + k: v for k, v in inference_params.items() if k in total_supported_params + } - if "topK" in inference_params: - additional_request_params["inferenceConfig"] = { - "topK": inference_params.pop("topK") - } - elif "top_k" in inference_params: - additional_request_params["inferenceConfig"] = { - "topK": 
inference_params.pop("top_k") - } + # Only set the topK value in for models that support it + additional_request_params.update( + self._handle_top_k_value(model, inference_params) + ) bedrock_tools: List[ToolBlock] = _bedrock_tools_pt( inference_params.pop("tools", []) @@ -426,24 +477,45 @@ class AmazonConverseConfig: ) -> RequestObject: messages, system_content_blocks = self._transform_system_message(messages) ## TRANSFORMATION ## - bedrock_messages: List[MessageBlock] = await asyncify( - _bedrock_converse_messages_pt - )( - messages=messages, - model=model, - llm_provider="bedrock_converse", - user_continue_message=litellm_params.pop("user_continue_message", None), - ) _data: CommonRequestObject = self._transform_request_helper( + model=model, system_content_blocks=system_content_blocks, optional_params=optional_params, + messages=messages, + ) + + bedrock_messages = ( + await BedrockConverseMessagesProcessor._bedrock_converse_messages_pt_async( + messages=messages, + model=model, + llm_provider="bedrock_converse", + user_continue_message=litellm_params.pop("user_continue_message", None), + ) ) data: RequestObject = {"messages": bedrock_messages, **_data} return data + def transform_request( + self, + model: str, + messages: List[AllMessageValues], + optional_params: dict, + litellm_params: dict, + headers: dict, + ) -> dict: + return cast( + dict, + self._transform_request( + model=model, + messages=messages, + optional_params=optional_params, + litellm_params=litellm_params, + ), + ) + def _transform_request( self, model: str, @@ -452,6 +524,14 @@ class AmazonConverseConfig: litellm_params: dict, ) -> RequestObject: messages, system_content_blocks = self._transform_system_message(messages) + + _data: CommonRequestObject = self._transform_request_helper( + model=model, + system_content_blocks=system_content_blocks, + optional_params=optional_params, + messages=messages, + ) + ## TRANSFORMATION ## bedrock_messages: List[MessageBlock] = 
_bedrock_converse_messages_pt( messages=messages, @@ -460,15 +540,68 @@ class AmazonConverseConfig: user_continue_message=litellm_params.pop("user_continue_message", None), ) - _data: CommonRequestObject = self._transform_request_helper( - system_content_blocks=system_content_blocks, - optional_params=optional_params, - ) - data: RequestObject = {"messages": bedrock_messages, **_data} return data + def transform_response( + self, + model: str, + raw_response: httpx.Response, + model_response: ModelResponse, + logging_obj: Logging, + request_data: dict, + messages: List[AllMessageValues], + optional_params: dict, + litellm_params: dict, + encoding: Any, + api_key: Optional[str] = None, + json_mode: Optional[bool] = None, + ) -> ModelResponse: + return self._transform_response( + model=model, + response=raw_response, + model_response=model_response, + stream=optional_params.get("stream", False), + logging_obj=logging_obj, + optional_params=optional_params, + api_key=api_key, + data=request_data, + messages=messages, + encoding=encoding, + ) + + def _transform_reasoning_content( + self, reasoning_content_blocks: List[BedrockConverseReasoningContentBlock] + ) -> str: + """ + Extract the reasoning text from the reasoning content blocks + + Ensures deepseek reasoning content compatible output. 
+ """ + reasoning_content_str = "" + for block in reasoning_content_blocks: + if "reasoningText" in block: + reasoning_content_str += block["reasoningText"]["text"] + return reasoning_content_str + + def _transform_thinking_blocks( + self, thinking_blocks: List[BedrockConverseReasoningContentBlock] + ) -> List[ChatCompletionThinkingBlock]: + """Return a consistent format for thinking blocks between Anthropic and Bedrock.""" + thinking_blocks_list: List[ChatCompletionThinkingBlock] = [] + for block in thinking_blocks: + if "reasoningText" in block: + _thinking_block = ChatCompletionThinkingBlock(type="thinking") + _text = block["reasoningText"].get("text") + _signature = block["reasoningText"].get("signature") + if _text is not None: + _thinking_block["thinking"] = _text + if _signature is not None: + _thinking_block["signature"] = _signature + thinking_blocks_list.append(_thinking_block) + return thinking_blocks_list + def _transform_response( self, model: str, @@ -477,12 +610,11 @@ class AmazonConverseConfig: stream: bool, logging_obj: Optional[Logging], optional_params: dict, - api_key: str, + api_key: Optional[str], data: Union[dict, str], messages: List, - print_verbose, encoding, - ) -> Union[ModelResponse, CustomStreamWrapper]: + ) -> ModelResponse: ## LOGGING if logging_obj is not None: logging_obj.post_call( @@ -491,7 +623,7 @@ class AmazonConverseConfig: original_response=response.text, additional_args={"complete_input_dict": data}, ) - print_verbose(f"raw model_response: {response.text}") + json_mode: Optional[bool] = optional_params.pop("json_mode", None) ## RESPONSE OBJECT try: @@ -543,6 +675,10 @@ class AmazonConverseConfig: chat_completion_message: ChatCompletionResponseMessage = {"role": "assistant"} content_str = "" tools: List[ChatCompletionToolCallChunk] = [] + reasoningContentBlocks: Optional[List[BedrockConverseReasoningContentBlock]] = ( + None + ) + if message is not None: for idx, content in enumerate(message["content"]): """ @@ -569,8 
+705,22 @@ class AmazonConverseConfig: index=idx, ) tools.append(_tool_response_chunk) - chat_completion_message["content"] = content_str + if "reasoningContent" in content: + if reasoningContentBlocks is None: + reasoningContentBlocks = [] + reasoningContentBlocks.append(content["reasoningContent"]) + if reasoningContentBlocks is not None: + chat_completion_message["provider_specific_fields"] = { + "reasoningContentBlocks": reasoningContentBlocks, + } + chat_completion_message["reasoning_content"] = ( + self._transform_reasoning_content(reasoningContentBlocks) + ) + chat_completion_message["thinking_blocks"] = ( + self._transform_thinking_blocks(reasoningContentBlocks) + ) + chat_completion_message["content"] = content_str if json_mode is True and tools is not None and len(tools) == 1: # to support 'json_schema' logic on bedrock models json_mode_content_str: Optional[str] = tools[0]["function"].get("arguments") @@ -606,37 +756,24 @@ class AmazonConverseConfig: return model_response - def _supported_cross_region_inference_region(self) -> List[str]: - """ - Abbreviations of regions AWS Bedrock supports for cross region inference - """ - return ["us", "eu", "apac"] + def get_error_class( + self, error_message: str, status_code: int, headers: Union[dict, httpx.Headers] + ) -> BaseLLMException: + return BedrockError( + message=error_message, + status_code=status_code, + headers=headers, + ) - def _get_base_model(self, model: str) -> str: - """ - Get the base model from the given model name. 
- - Handle model names like - "us.meta.llama3-2-11b-instruct-v1:0" -> "meta.llama3-2-11b-instruct-v1" - AND "meta.llama3-2-11b-instruct-v1:0" -> "meta.llama3-2-11b-instruct-v1" - """ - - if model.startswith("bedrock/"): - model = model.split("/", 1)[1] - - if model.startswith("converse/"): - model = model.split("/", 1)[1] - - potential_region = model.split(".", 1)[0] - - alt_potential_region = model.split("/", 1)[ - 0 - ] # in model cost map we store regional information like `/us-west-2/bedrock-model` - - if potential_region in self._supported_cross_region_inference_region(): - return model.split(".", 1)[1] - elif ( - alt_potential_region in all_global_regions and len(model.split("/", 1)) > 1 - ): - return model.split("/", 1)[1] - - return model + def validate_environment( + self, + headers: dict, + model: str, + messages: List[AllMessageValues], + optional_params: dict, + api_key: Optional[str] = None, + api_base: Optional[str] = None, + ) -> dict: + if api_key: + headers["Authorization"] = f"Bearer {api_key}" + return headers diff --git a/litellm/llms/bedrock/chat/invoke_handler.py b/litellm/llms/bedrock/chat/invoke_handler.py index 5ade1dc2dc..27289164f7 100644 --- a/litellm/llms/bedrock/chat/invoke_handler.py +++ b/litellm/llms/bedrock/chat/invoke_handler.py @@ -1,5 +1,5 @@ """ -Manages calling Bedrock's `/converse` API + `/invoke` API +TODO: DELETE FILE. Bedrock LLM is no longer used. 
Goto `litellm/llms/bedrock/chat/invoke_transformations/base_invoke_transformation.py` """ import copy @@ -19,6 +19,7 @@ from typing import ( Tuple, Union, cast, + get_args, ) import httpx # type: ignore @@ -38,6 +39,9 @@ from litellm.litellm_core_utils.prompt_templates.factory import ( parse_xml_params, prompt_factory, ) +from litellm.llms.anthropic.chat.handler import ( + ModelResponseIterator as AnthropicModelResponseIterator, +) from litellm.llms.custom_httpx.http_handler import ( AsyncHTTPHandler, HTTPHandler, @@ -46,13 +50,19 @@ from litellm.llms.custom_httpx.http_handler import ( ) from litellm.types.llms.bedrock import * from litellm.types.llms.openai import ( + ChatCompletionThinkingBlock, ChatCompletionToolCallChunk, ChatCompletionToolCallFunctionChunk, ChatCompletionUsageBlock, ) -from litellm.types.utils import ChatCompletionMessageToolCall, Choices +from litellm.types.utils import ChatCompletionMessageToolCall, Choices, Delta from litellm.types.utils import GenericStreamingChunk as GChunk -from litellm.types.utils import ModelResponse, Usage +from litellm.types.utils import ( + ModelResponse, + ModelResponseStream, + StreamingChoices, + Usage, +) from litellm.utils import CustomStreamWrapper, get_secret from ..base_aws_llm import BaseAWSLLM @@ -101,7 +111,7 @@ class AmazonCohereChatConfig: stop_sequences: Optional[str] = None, raw_prompting: Optional[bool] = None, ) -> None: - locals_ = locals() + locals_ = locals().copy() for key, value in locals_.items(): if key != "self" and value is not None: setattr(self.__class__, key, value) @@ -175,6 +185,7 @@ async def make_call( logging_obj: Logging, fake_stream: bool = False, json_mode: Optional[bool] = False, + bedrock_invoke_provider: Optional[litellm.BEDROCK_INVOKE_PROVIDERS_LITERAL] = None, ): try: if client is None: @@ -206,12 +217,28 @@ async def make_call( api_key="", data=data, messages=messages, - print_verbose=litellm.print_verbose, encoding=litellm.encoding, ) # type: ignore completion_stream: Any 
= MockResponseIterator( model_response=model_response, json_mode=json_mode ) + elif bedrock_invoke_provider == "anthropic": + decoder: AWSEventStreamDecoder = AmazonAnthropicClaudeStreamDecoder( + model=model, + sync_stream=False, + json_mode=json_mode, + ) + completion_stream = decoder.aiter_bytes( + response.aiter_bytes(chunk_size=1024) + ) + elif bedrock_invoke_provider == "deepseek_r1": + decoder = AmazonDeepSeekR1StreamDecoder( + model=model, + sync_stream=False, + ) + completion_stream = decoder.aiter_bytes( + response.aiter_bytes(chunk_size=1024) + ) else: decoder = AWSEventStreamDecoder(model=model) completion_stream = decoder.aiter_bytes( @@ -236,6 +263,86 @@ async def make_call( raise BedrockError(status_code=500, message=str(e)) +def make_sync_call( + client: Optional[HTTPHandler], + api_base: str, + headers: dict, + data: str, + model: str, + messages: list, + logging_obj: Logging, + fake_stream: bool = False, + json_mode: Optional[bool] = False, + bedrock_invoke_provider: Optional[litellm.BEDROCK_INVOKE_PROVIDERS_LITERAL] = None, +): + try: + if client is None: + client = _get_httpx_client(params={}) + + response = client.post( + api_base, + headers=headers, + data=data, + stream=not fake_stream, + logging_obj=logging_obj, + ) + + if response.status_code != 200: + raise BedrockError(status_code=response.status_code, message=response.text) + + if fake_stream: + model_response: ( + ModelResponse + ) = litellm.AmazonConverseConfig()._transform_response( + model=model, + response=response, + model_response=litellm.ModelResponse(), + stream=True, + logging_obj=logging_obj, + optional_params={}, + api_key="", + data=data, + messages=messages, + encoding=litellm.encoding, + ) # type: ignore + completion_stream: Any = MockResponseIterator( + model_response=model_response, json_mode=json_mode + ) + elif bedrock_invoke_provider == "anthropic": + decoder: AWSEventStreamDecoder = AmazonAnthropicClaudeStreamDecoder( + model=model, + sync_stream=True, + 
json_mode=json_mode, + ) + completion_stream = decoder.iter_bytes(response.iter_bytes(chunk_size=1024)) + elif bedrock_invoke_provider == "deepseek_r1": + decoder = AmazonDeepSeekR1StreamDecoder( + model=model, + sync_stream=True, + ) + completion_stream = decoder.iter_bytes(response.iter_bytes(chunk_size=1024)) + else: + decoder = AWSEventStreamDecoder(model=model) + completion_stream = decoder.iter_bytes(response.iter_bytes(chunk_size=1024)) + + # LOGGING + logging_obj.post_call( + input=messages, + api_key="", + original_response="first stream response received", + additional_args={"complete_input_dict": data}, + ) + + return completion_stream + except httpx.HTTPStatusError as err: + error_code = err.response.status_code + raise BedrockError(status_code=error_code, message=err.response.text) + except httpx.TimeoutException: + raise BedrockError(status_code=408, message="Timeout error occurred.") + except Exception as e: + raise BedrockError(status_code=500, message=str(e)) + + class BedrockLLM(BaseAWSLLM): """ Example call @@ -286,7 +393,7 @@ class BedrockLLM(BaseAWSLLM): prompt = prompt_factory( model=model, messages=messages, custom_llm_provider="bedrock" ) - elif provider == "meta": + elif provider == "meta" or provider == "llama": prompt = prompt_factory( model=model, messages=messages, custom_llm_provider="bedrock" ) @@ -309,7 +416,7 @@ class BedrockLLM(BaseAWSLLM): model: str, response: httpx.Response, model_response: ModelResponse, - stream: bool, + stream: Optional[bool], logging_obj: Logging, optional_params: dict, api_key: str, @@ -318,7 +425,7 @@ class BedrockLLM(BaseAWSLLM): print_verbose, encoding, ) -> Union[ModelResponse, CustomStreamWrapper]: - provider = model.split(".")[0] + provider = self.get_bedrock_invoke_provider(model) ## LOGGING logging_obj.post_call( input=messages, @@ -423,7 +530,7 @@ class BedrockLLM(BaseAWSLLM): ].message.tool_calls: _tool_call = {**tool_call.dict(), "index": 0} _tool_calls.append(_tool_call) - delta_obj = 
litellm.utils.Delta( + delta_obj = Delta( content=getattr( model_response.choices[0].message, "content", None ), @@ -465,7 +572,7 @@ class BedrockLLM(BaseAWSLLM): outputText = ( completion_response.get("completions")[0].get("data").get("text") ) - elif provider == "meta": + elif provider == "meta" or provider == "llama": outputText = completion_response["generation"] elif provider == "mistral": outputText = completion_response["outputs"][0]["text"] @@ -597,13 +704,13 @@ class BedrockLLM(BaseAWSLLM): ## SETUP ## stream = optional_params.pop("stream", None) - modelId = optional_params.pop("model_id", None) - if modelId is not None: - modelId = self.encode_model_id(model_id=modelId) - else: - modelId = model - provider = model.split(".")[0] + provider = self.get_bedrock_invoke_provider(model) + modelId = self.get_bedrock_model_id( + model=model, + provider=provider, + optional_params=optional_params, + ) ## CREDENTIALS ## # pop aws_secret_access_key, aws_access_key_id, aws_session_token, aws_region_name from kwargs, since completion calls fail with them @@ -785,7 +892,7 @@ class BedrockLLM(BaseAWSLLM): "textGenerationConfig": inference_params, } ) - elif provider == "meta": + elif provider == "meta" or provider == "llama": ## LOAD CONFIG config = litellm.AmazonLlamaConfig.get_config() for k, v in config.items(): @@ -1032,7 +1139,7 @@ class BedrockLLM(BaseAWSLLM): client=client, api_base=api_base, headers=headers, - data=data, + data=data, # type: ignore model=model, messages=messages, logging_obj=logging_obj, @@ -1044,6 +1151,53 @@ class BedrockLLM(BaseAWSLLM): ) return streaming_response + @staticmethod + def _get_provider_from_model_path( + model_path: str, + ) -> Optional[litellm.BEDROCK_INVOKE_PROVIDERS_LITERAL]: + """ + Helper function to get the provider from a model path with format: provider/model-name + + Args: + model_path (str): The model path (e.g., 'llama/arn:aws:bedrock:us-east-1:086734376398:imported-model/r4c4kewx2s0n' or 'anthropic/model-name') + + 
Returns: + Optional[str]: The provider name, or None if no valid provider found + """ + parts = model_path.split("/") + if len(parts) >= 1: + provider = parts[0] + if provider in get_args(litellm.BEDROCK_INVOKE_PROVIDERS_LITERAL): + return cast(litellm.BEDROCK_INVOKE_PROVIDERS_LITERAL, provider) + return None + + def get_bedrock_model_id( + self, + optional_params: dict, + provider: Optional[litellm.BEDROCK_INVOKE_PROVIDERS_LITERAL], + model: str, + ) -> str: + modelId = optional_params.pop("model_id", None) + if modelId is not None: + modelId = self.encode_model_id(model_id=modelId) + else: + modelId = model + + if provider == "llama" and "llama/" in modelId: + modelId = self._get_model_id_for_llama_like_model(modelId) + + return modelId + + def _get_model_id_for_llama_like_model( + self, + model: str, + ) -> str: + """ + Remove `llama` from modelID since `llama` is simply a spec to follow for custom bedrock models + """ + model_id = model.replace("llama/", "") + return self.encode_model_id(model_id=model_id) + def get_response_stream_shape(): global _response_stream_shape_cache @@ -1088,14 +1242,40 @@ class AWSEventStreamDecoder: return True return False - def converse_chunk_parser(self, chunk_data: dict) -> GChunk: + def extract_reasoning_content_str( + self, reasoning_content_block: BedrockConverseReasoningContentBlockDelta + ) -> Optional[str]: + if "text" in reasoning_content_block: + return reasoning_content_block["text"] + return None + + def translate_thinking_blocks( + self, thinking_block: BedrockConverseReasoningContentBlockDelta + ) -> Optional[List[ChatCompletionThinkingBlock]]: + """ + Translate the thinking blocks to a string + """ + + thinking_blocks_list: List[ChatCompletionThinkingBlock] = [] + _thinking_block = ChatCompletionThinkingBlock(type="thinking") + if "text" in thinking_block: + _thinking_block["thinking"] = thinking_block["text"] + elif "signature" in thinking_block: + _thinking_block["signature"] = thinking_block["signature"] + 
_thinking_block["thinking"] = "" # consistent with anthropic response + thinking_blocks_list.append(_thinking_block) + return thinking_blocks_list + + def converse_chunk_parser(self, chunk_data: dict) -> ModelResponseStream: try: verbose_logger.debug("\n\nRaw Chunk: {}\n\n".format(chunk_data)) text = "" tool_use: Optional[ChatCompletionToolCallChunk] = None - is_finished = False finish_reason = "" usage: Optional[ChatCompletionUsageBlock] = None + provider_specific_fields: dict = {} + reasoning_content: Optional[str] = None + thinking_blocks: Optional[List[ChatCompletionThinkingBlock]] = None index = int(chunk_data.get("contentBlockIndex", 0)) if "start" in chunk_data: @@ -1135,6 +1315,22 @@ class AWSEventStreamDecoder: }, "index": index, } + elif "reasoningContent" in delta_obj: + provider_specific_fields = { + "reasoningContent": delta_obj["reasoningContent"], + } + reasoning_content = self.extract_reasoning_content_str( + delta_obj["reasoningContent"] + ) + thinking_blocks = self.translate_thinking_blocks( + delta_obj["reasoningContent"] + ) + if ( + thinking_blocks + and len(thinking_blocks) > 0 + and reasoning_content is None + ): + reasoning_content = "" # set to non-empty string to ensure consistency with Anthropic elif ( "contentBlockIndex" in chunk_data ): # stop block, no 'start' or 'delta' object @@ -1151,7 +1347,6 @@ class AWSEventStreamDecoder: } elif "stopReason" in chunk_data: finish_reason = map_finish_reason(chunk_data.get("stopReason", "stop")) - is_finished = True elif "usage" in chunk_data: usage = ChatCompletionUsageBlock( prompt_tokens=chunk_data.get("inputTokens", 0), @@ -1159,23 +1354,38 @@ class AWSEventStreamDecoder: total_tokens=chunk_data.get("totalTokens", 0), ) - response = GChunk( - text=text, - tool_use=tool_use, - is_finished=is_finished, - finish_reason=finish_reason, - usage=usage, - index=index, - ) - + model_response_provider_specific_fields = {} if "trace" in chunk_data: trace = chunk_data.get("trace") - 
response["provider_specific_fields"] = {"trace": trace} + model_response_provider_specific_fields["trace"] = trace + response = ModelResponseStream( + choices=[ + StreamingChoices( + finish_reason=finish_reason, + index=index, + delta=Delta( + content=text, + role="assistant", + tool_calls=[tool_use] if tool_use else None, + provider_specific_fields=( + provider_specific_fields + if provider_specific_fields + else None + ), + thinking_blocks=thinking_blocks, + reasoning_content=reasoning_content, + ), + ) + ], + usage=usage, + provider_specific_fields=model_response_provider_specific_fields, + ) + return response except Exception as e: raise Exception("Received streaming error - {}".format(str(e))) - def _chunk_parser(self, chunk_data: dict) -> GChunk: + def _chunk_parser(self, chunk_data: dict) -> Union[GChunk, ModelResponseStream]: text = "" is_finished = False finish_reason = "" @@ -1186,7 +1396,7 @@ class AWSEventStreamDecoder: text = chunk_data.get("completions")[0].get("data").get("text") # type: ignore is_finished = True finish_reason = "stop" - ######## bedrock.anthropic mappings ############### + ######## /bedrock/converse mappings ############### elif ( "contentBlockIndex" in chunk_data or "stopReason" in chunk_data @@ -1194,6 +1404,11 @@ class AWSEventStreamDecoder: or "trace" in chunk_data ): return self.converse_chunk_parser(chunk_data=chunk_data) + ######### /bedrock/invoke nova mappings ############### + elif "contentBlockDelta" in chunk_data: + # when using /bedrock/invoke/nova, the chunk_data is nested under "contentBlockDelta" + _chunk_data = chunk_data.get("contentBlockDelta", None) + return self.converse_chunk_parser(chunk_data=_chunk_data) ######## bedrock.mistral mappings ############### elif "outputs" in chunk_data: if ( @@ -1228,7 +1443,9 @@ class AWSEventStreamDecoder: tool_use=None, ) - def iter_bytes(self, iterator: Iterator[bytes]) -> Iterator[GChunk]: + def iter_bytes( + self, iterator: Iterator[bytes] + ) -> Iterator[Union[GChunk, 
ModelResponseStream]]: """Given an iterator that yields lines, iterate over it & yield every event encountered""" from botocore.eventstream import EventStreamBuffer @@ -1244,7 +1461,7 @@ class AWSEventStreamDecoder: async def aiter_bytes( self, iterator: AsyncIterator[bytes] - ) -> AsyncIterator[GChunk]: + ) -> AsyncIterator[Union[GChunk, ModelResponseStream]]: """Given an async iterator that yields lines, iterate over it & yield every event encountered""" from botocore.eventstream import EventStreamBuffer @@ -1292,6 +1509,50 @@ class AWSEventStreamDecoder: return chunk.decode() # type: ignore[no-any-return] +class AmazonAnthropicClaudeStreamDecoder(AWSEventStreamDecoder): + def __init__( + self, + model: str, + sync_stream: bool, + json_mode: Optional[bool] = None, + ) -> None: + """ + Child class of AWSEventStreamDecoder that handles the streaming response from the Anthropic family of models + + The only difference between AWSEventStreamDecoder and AmazonAnthropicClaudeStreamDecoder is the `chunk_parser` method + """ + super().__init__(model=model) + self.anthropic_model_response_iterator = AnthropicModelResponseIterator( + streaming_response=None, + sync_stream=sync_stream, + json_mode=json_mode, + ) + + def _chunk_parser(self, chunk_data: dict) -> ModelResponseStream: + return self.anthropic_model_response_iterator.chunk_parser(chunk=chunk_data) + + +class AmazonDeepSeekR1StreamDecoder(AWSEventStreamDecoder): + def __init__( + self, + model: str, + sync_stream: bool, + ) -> None: + + super().__init__(model=model) + from litellm.llms.bedrock.chat.invoke_transformations.amazon_deepseek_transformation import ( + AmazonDeepseekR1ResponseIterator, + ) + + self.deepseek_model_response_iterator = AmazonDeepseekR1ResponseIterator( + streaming_response=None, + sync_stream=sync_stream, + ) + + def _chunk_parser(self, chunk_data: dict) -> Union[GChunk, ModelResponseStream]: + return self.deepseek_model_response_iterator.chunk_parser(chunk=chunk_data) + + class 
MockResponseIterator: # for returning ai21 streaming responses def __init__(self, model_response, json_mode: Optional[bool] = False): self.model_response = model_response diff --git a/litellm/llms/bedrock/chat/invoke_transformations/amazon_ai21_transformation.py b/litellm/llms/bedrock/chat/invoke_transformations/amazon_ai21_transformation.py new file mode 100644 index 0000000000..50fa6f170b --- /dev/null +++ b/litellm/llms/bedrock/chat/invoke_transformations/amazon_ai21_transformation.py @@ -0,0 +1,99 @@ +import types +from typing import List, Optional + +from litellm.llms.base_llm.chat.transformation import BaseConfig +from litellm.llms.bedrock.chat.invoke_transformations.base_invoke_transformation import ( + AmazonInvokeConfig, +) + + +class AmazonAI21Config(AmazonInvokeConfig, BaseConfig): + """ + Reference: https://us-west-2.console.aws.amazon.com/bedrock/home?region=us-west-2#/providers?model=j2-ultra + + Supported Params for the Amazon / AI21 models: + + - `maxTokens` (int32): The maximum number of tokens to generate per result. Optional, default is 16. If no `stopSequences` are given, generation stops after producing `maxTokens`. + + - `temperature` (float): Modifies the distribution from which tokens are sampled. Optional, default is 0.7. A value of 0 essentially disables sampling and results in greedy decoding. + + - `topP` (float): Used for sampling tokens from the corresponding top percentile of probability mass. Optional, default is 1. For instance, a value of 0.9 considers only tokens comprising the top 90% probability mass. + + - `stopSequences` (array of strings): Stops decoding if any of the input strings is generated. Optional. + + - `frequencyPenalty` (object): Placeholder for frequency penalty object. + + - `presencePenalty` (object): Placeholder for presence penalty object. + + - `countPenalty` (object): Placeholder for count penalty object. 
+ """ + + maxTokens: Optional[int] = None + temperature: Optional[float] = None + topP: Optional[float] = None + stopSequences: Optional[list] = None + frequencePenalty: Optional[dict] = None + presencePenalty: Optional[dict] = None + countPenalty: Optional[dict] = None + + def __init__( + self, + maxTokens: Optional[int] = None, + temperature: Optional[float] = None, + topP: Optional[float] = None, + stopSequences: Optional[list] = None, + frequencePenalty: Optional[dict] = None, + presencePenalty: Optional[dict] = None, + countPenalty: Optional[dict] = None, + ) -> None: + locals_ = locals().copy() + for key, value in locals_.items(): + if key != "self" and value is not None: + setattr(self.__class__, key, value) + + AmazonInvokeConfig.__init__(self) + + @classmethod + def get_config(cls): + return { + k: v + for k, v in cls.__dict__.items() + if not k.startswith("__") + and not k.startswith("_abc") + and not isinstance( + v, + ( + types.FunctionType, + types.BuiltinFunctionType, + classmethod, + staticmethod, + ), + ) + and v is not None + } + + def get_supported_openai_params(self, model: str) -> List: + return [ + "max_tokens", + "temperature", + "top_p", + "stream", + ] + + def map_openai_params( + self, + non_default_params: dict, + optional_params: dict, + model: str, + drop_params: bool, + ) -> dict: + for k, v in non_default_params.items(): + if k == "max_tokens": + optional_params["maxTokens"] = v + if k == "temperature": + optional_params["temperature"] = v + if k == "top_p": + optional_params["topP"] = v + if k == "stream": + optional_params["stream"] = v + return optional_params diff --git a/litellm/llms/bedrock/chat/invoke_transformations/amazon_cohere_transformation.py b/litellm/llms/bedrock/chat/invoke_transformations/amazon_cohere_transformation.py new file mode 100644 index 0000000000..e9479c8f32 --- /dev/null +++ b/litellm/llms/bedrock/chat/invoke_transformations/amazon_cohere_transformation.py @@ -0,0 +1,78 @@ +import types +from typing import 
List, Optional + +from litellm.llms.base_llm.chat.transformation import BaseConfig +from litellm.llms.bedrock.chat.invoke_transformations.base_invoke_transformation import ( + AmazonInvokeConfig, +) + + +class AmazonCohereConfig(AmazonInvokeConfig, BaseConfig): + """ + Reference: https://us-west-2.console.aws.amazon.com/bedrock/home?region=us-west-2#/providers?model=command + + Supported Params for the Amazon / Cohere models: + + - `max_tokens` (integer) max tokens, + - `temperature` (float) model temperature, + - `return_likelihood` (string) n/a + """ + + max_tokens: Optional[int] = None + temperature: Optional[float] = None + return_likelihood: Optional[str] = None + + def __init__( + self, + max_tokens: Optional[int] = None, + temperature: Optional[float] = None, + return_likelihood: Optional[str] = None, + ) -> None: + locals_ = locals().copy() + for key, value in locals_.items(): + if key != "self" and value is not None: + setattr(self.__class__, key, value) + + AmazonInvokeConfig.__init__(self) + + @classmethod + def get_config(cls): + return { + k: v + for k, v in cls.__dict__.items() + if not k.startswith("__") + and not k.startswith("_abc") + and not isinstance( + v, + ( + types.FunctionType, + types.BuiltinFunctionType, + classmethod, + staticmethod, + ), + ) + and v is not None + } + + def get_supported_openai_params(self, model: str) -> List[str]: + return [ + "max_tokens", + "temperature", + "stream", + ] + + def map_openai_params( + self, + non_default_params: dict, + optional_params: dict, + model: str, + drop_params: bool, + ) -> dict: + for k, v in non_default_params.items(): + if k == "stream": + optional_params["stream"] = v + if k == "temperature": + optional_params["temperature"] = v + if k == "max_tokens": + optional_params["max_tokens"] = v + return optional_params diff --git a/litellm/llms/bedrock/chat/invoke_transformations/amazon_deepseek_transformation.py b/litellm/llms/bedrock/chat/invoke_transformations/amazon_deepseek_transformation.py 
new file mode 100644 index 0000000000..d7ceec1f1c --- /dev/null +++ b/litellm/llms/bedrock/chat/invoke_transformations/amazon_deepseek_transformation.py @@ -0,0 +1,135 @@ +from typing import Any, List, Optional, cast + +from httpx import Response + +from litellm import verbose_logger +from litellm.litellm_core_utils.llm_response_utils.convert_dict_to_response import ( + _parse_content_for_reasoning, +) +from litellm.llms.base_llm.base_model_iterator import BaseModelResponseIterator +from litellm.llms.bedrock.chat.invoke_transformations.base_invoke_transformation import ( + LiteLLMLoggingObj, +) +from litellm.types.llms.bedrock import AmazonDeepSeekR1StreamingResponse +from litellm.types.llms.openai import AllMessageValues +from litellm.types.utils import ( + ChatCompletionUsageBlock, + Choices, + Delta, + Message, + ModelResponse, + ModelResponseStream, + StreamingChoices, +) + +from .amazon_llama_transformation import AmazonLlamaConfig + + +class AmazonDeepSeekR1Config(AmazonLlamaConfig): + def transform_response( + self, + model: str, + raw_response: Response, + model_response: ModelResponse, + logging_obj: LiteLLMLoggingObj, + request_data: dict, + messages: List[AllMessageValues], + optional_params: dict, + litellm_params: dict, + encoding: Any, + api_key: Optional[str] = None, + json_mode: Optional[bool] = None, + ) -> ModelResponse: + """ + Extract the reasoning content, and return it as a separate field in the response. 
+ """ + response = super().transform_response( + model, + raw_response, + model_response, + logging_obj, + request_data, + messages, + optional_params, + litellm_params, + encoding, + api_key, + json_mode, + ) + prompt = cast(Optional[str], request_data.get("prompt")) + message_content = cast( + Optional[str], cast(Choices, response.choices[0]).message.get("content") + ) + if prompt and prompt.strip().endswith("<think>") and message_content: + message_content_with_reasoning_token = "<think>" + message_content + reasoning, content = _parse_content_for_reasoning( + message_content_with_reasoning_token + ) + provider_specific_fields = ( + cast(Choices, response.choices[0]).message.provider_specific_fields + or {} + ) + if reasoning: + provider_specific_fields["reasoning_content"] = reasoning + + message = Message( + **{ + **cast(Choices, response.choices[0]).message.model_dump(), + "content": content, + "provider_specific_fields": provider_specific_fields, + } + ) + cast(Choices, response.choices[0]).message = message + return response + + +class AmazonDeepseekR1ResponseIterator(BaseModelResponseIterator): + def __init__(self, streaming_response: Any, sync_stream: bool) -> None: + super().__init__(streaming_response=streaming_response, sync_stream=sync_stream) + self.has_finished_thinking = False + + def chunk_parser(self, chunk: dict) -> ModelResponseStream: + """ + Deepseek r1 starts by thinking, then it generates the response. 
+ """ + try: + typed_chunk = AmazonDeepSeekR1StreamingResponse(**chunk) # type: ignore + generated_content = typed_chunk["generation"] + if generated_content == "</think>" and not self.has_finished_thinking: + verbose_logger.debug( + "Deepseek r1: </think> received, setting has_finished_thinking to True" + ) + generated_content = "" + self.has_finished_thinking = True + + prompt_token_count = typed_chunk.get("prompt_token_count") or 0 + generation_token_count = typed_chunk.get("generation_token_count") or 0 + usage = ChatCompletionUsageBlock( + prompt_tokens=prompt_token_count, + completion_tokens=generation_token_count, + total_tokens=prompt_token_count + generation_token_count, + ) + + return ModelResponseStream( + choices=[ + StreamingChoices( + finish_reason=typed_chunk["stop_reason"], + delta=Delta( + content=( + generated_content + if self.has_finished_thinking + else None + ), + reasoning_content=( + generated_content + if not self.has_finished_thinking + else None + ), + ), + ) + ], + usage=usage, + ) + + except Exception as e: + raise e diff --git a/litellm/llms/bedrock/chat/invoke_transformations/amazon_llama_transformation.py b/litellm/llms/bedrock/chat/invoke_transformations/amazon_llama_transformation.py new file mode 100644 index 0000000000..9f84844fcb --- /dev/null +++ b/litellm/llms/bedrock/chat/invoke_transformations/amazon_llama_transformation.py @@ -0,0 +1,80 @@ +import types +from typing import List, Optional + +from litellm.llms.base_llm.chat.transformation import BaseConfig +from litellm.llms.bedrock.chat.invoke_transformations.base_invoke_transformation import ( + AmazonInvokeConfig, +) + + +class AmazonLlamaConfig(AmazonInvokeConfig, BaseConfig): + """ + Reference: https://us-west-2.console.aws.amazon.com/bedrock/home?region=us-west-2#/providers?model=meta.llama2-13b-chat-v1 + + Supported Params for the Amazon / Meta Llama models: + + - `max_gen_len` (integer) max tokens, + - `temperature` (float) temperature for model, + - `top_p` (float) top p for model + 
""" + + max_gen_len: Optional[int] = None + temperature: Optional[float] = None + topP: Optional[float] = None + + def __init__( + self, + maxTokenCount: Optional[int] = None, + temperature: Optional[float] = None, + topP: Optional[int] = None, + ) -> None: + locals_ = locals().copy() + for key, value in locals_.items(): + if key != "self" and value is not None: + setattr(self.__class__, key, value) + AmazonInvokeConfig.__init__(self) + + @classmethod + def get_config(cls): + return { + k: v + for k, v in cls.__dict__.items() + if not k.startswith("__") + and not k.startswith("_abc") + and not isinstance( + v, + ( + types.FunctionType, + types.BuiltinFunctionType, + classmethod, + staticmethod, + ), + ) + and v is not None + } + + def get_supported_openai_params(self, model: str) -> List: + return [ + "max_tokens", + "temperature", + "top_p", + "stream", + ] + + def map_openai_params( + self, + non_default_params: dict, + optional_params: dict, + model: str, + drop_params: bool, + ) -> dict: + for k, v in non_default_params.items(): + if k == "max_tokens": + optional_params["max_gen_len"] = v + if k == "temperature": + optional_params["temperature"] = v + if k == "top_p": + optional_params["top_p"] = v + if k == "stream": + optional_params["stream"] = v + return optional_params diff --git a/litellm/llms/bedrock/chat/invoke_transformations/amazon_mistral_transformation.py b/litellm/llms/bedrock/chat/invoke_transformations/amazon_mistral_transformation.py new file mode 100644 index 0000000000..ef3c237f9d --- /dev/null +++ b/litellm/llms/bedrock/chat/invoke_transformations/amazon_mistral_transformation.py @@ -0,0 +1,83 @@ +import types +from typing import List, Optional + +from litellm.llms.base_llm.chat.transformation import BaseConfig +from litellm.llms.bedrock.chat.invoke_transformations.base_invoke_transformation import ( + AmazonInvokeConfig, +) + + +class AmazonMistralConfig(AmazonInvokeConfig, BaseConfig): + """ + Reference: 
https://docs.aws.amazon.com/bedrock/latest/userguide/model-parameters-mistral.html + Supported Params for the Amazon / Mistral models: + + - `max_tokens` (integer) max tokens, + - `temperature` (float) temperature for model, + - `top_p` (float) top p for model + - `stop` [string] A list of stop sequences that if generated by the model, stops the model from generating further output. + - `top_k` (float) top k for model + """ + + max_tokens: Optional[int] = None + temperature: Optional[float] = None + top_p: Optional[float] = None + top_k: Optional[float] = None + stop: Optional[List[str]] = None + + def __init__( + self, + max_tokens: Optional[int] = None, + temperature: Optional[float] = None, + top_p: Optional[int] = None, + top_k: Optional[float] = None, + stop: Optional[List[str]] = None, + ) -> None: + locals_ = locals().copy() + for key, value in locals_.items(): + if key != "self" and value is not None: + setattr(self.__class__, key, value) + + AmazonInvokeConfig.__init__(self) + + @classmethod + def get_config(cls): + return { + k: v + for k, v in cls.__dict__.items() + if not k.startswith("__") + and not k.startswith("_abc") + and not isinstance( + v, + ( + types.FunctionType, + types.BuiltinFunctionType, + classmethod, + staticmethod, + ), + ) + and v is not None + } + + def get_supported_openai_params(self, model: str) -> List[str]: + return ["max_tokens", "temperature", "top_p", "stop", "stream"] + + def map_openai_params( + self, + non_default_params: dict, + optional_params: dict, + model: str, + drop_params: bool, + ) -> dict: + for k, v in non_default_params.items(): + if k == "max_tokens": + optional_params["max_tokens"] = v + if k == "temperature": + optional_params["temperature"] = v + if k == "top_p": + optional_params["top_p"] = v + if k == "stop": + optional_params["stop"] = v + if k == "stream": + optional_params["stream"] = v + return optional_params diff --git a/litellm/llms/bedrock/chat/invoke_transformations/amazon_nova_transformation.py 
b/litellm/llms/bedrock/chat/invoke_transformations/amazon_nova_transformation.py new file mode 100644 index 0000000000..9d41beceff --- /dev/null +++ b/litellm/llms/bedrock/chat/invoke_transformations/amazon_nova_transformation.py @@ -0,0 +1,70 @@ +""" +Handles transforming requests for `bedrock/invoke/{nova} models` + +Inherits from `AmazonConverseConfig` + +Nova + Invoke API Tutorial: https://docs.aws.amazon.com/nova/latest/userguide/using-invoke-api.html +""" + +from typing import List + +import litellm +from litellm.types.llms.bedrock import BedrockInvokeNovaRequest +from litellm.types.llms.openai import AllMessageValues + + +class AmazonInvokeNovaConfig(litellm.AmazonConverseConfig): + """ + Config for sending `nova` requests to `/bedrock/invoke/` + """ + + def __init__(self, **kwargs): + super().__init__(**kwargs) + + def transform_request( + self, + model: str, + messages: List[AllMessageValues], + optional_params: dict, + litellm_params: dict, + headers: dict, + ) -> dict: + _transformed_nova_request = super().transform_request( + model=model, + messages=messages, + optional_params=optional_params, + litellm_params=litellm_params, + headers=headers, + ) + _bedrock_invoke_nova_request = BedrockInvokeNovaRequest( + **_transformed_nova_request + ) + self._remove_empty_system_messages(_bedrock_invoke_nova_request) + bedrock_invoke_nova_request = self._filter_allowed_fields( + _bedrock_invoke_nova_request + ) + return bedrock_invoke_nova_request + + def _filter_allowed_fields( + self, bedrock_invoke_nova_request: BedrockInvokeNovaRequest + ) -> dict: + """ + Filter out fields that are not allowed in the `BedrockInvokeNovaRequest` dataclass. 
+ """ + allowed_fields = set(BedrockInvokeNovaRequest.__annotations__.keys()) + return { + k: v for k, v in bedrock_invoke_nova_request.items() if k in allowed_fields + } + + def _remove_empty_system_messages( + self, bedrock_invoke_nova_request: BedrockInvokeNovaRequest + ) -> None: + """ + In-place remove empty `system` messages from the request. + + /bedrock/invoke/ does not allow empty `system` messages. + """ + _system_message = bedrock_invoke_nova_request.get("system", None) + if isinstance(_system_message, list) and len(_system_message) == 0: + bedrock_invoke_nova_request.pop("system", None) + return diff --git a/litellm/llms/bedrock/chat/invoke_transformations/amazon_titan_transformation.py b/litellm/llms/bedrock/chat/invoke_transformations/amazon_titan_transformation.py new file mode 100644 index 0000000000..367fb84d1a --- /dev/null +++ b/litellm/llms/bedrock/chat/invoke_transformations/amazon_titan_transformation.py @@ -0,0 +1,116 @@ +import re +import types +from typing import List, Optional, Union + +import litellm +from litellm.llms.base_llm.chat.transformation import BaseConfig +from litellm.llms.bedrock.chat.invoke_transformations.base_invoke_transformation import ( + AmazonInvokeConfig, +) + + +class AmazonTitanConfig(AmazonInvokeConfig, BaseConfig): + """ + Reference: https://us-west-2.console.aws.amazon.com/bedrock/home?region=us-west-2#/providers?model=titan-text-express-v1 + + Supported Params for the Amazon Titan models: + + - `maxTokenCount` (integer) max tokens, + - `stopSequences` (string[]) list of stop sequence strings + - `temperature` (float) temperature for model, + - `topP` (int) top p for model + """ + + maxTokenCount: Optional[int] = None + stopSequences: Optional[list] = None + temperature: Optional[float] = None + topP: Optional[int] = None + + def __init__( + self, + maxTokenCount: Optional[int] = None, + stopSequences: Optional[list] = None, + temperature: Optional[float] = None, + topP: Optional[int] = None, + ) -> None: + 
locals_ = locals().copy() + for key, value in locals_.items(): + if key != "self" and value is not None: + setattr(self.__class__, key, value) + + AmazonInvokeConfig.__init__(self) + + @classmethod + def get_config(cls): + return { + k: v + for k, v in cls.__dict__.items() + if not k.startswith("__") + and not k.startswith("_abc") + and not isinstance( + v, + ( + types.FunctionType, + types.BuiltinFunctionType, + classmethod, + staticmethod, + ), + ) + and v is not None + } + + def _map_and_modify_arg( + self, + supported_params: dict, + provider: str, + model: str, + stop: Union[List[str], str], + ): + """ + filter params to fit the required provider format, drop those that don't fit if user sets `litellm.drop_params = True`. + """ + filtered_stop = None + if "stop" in supported_params and litellm.drop_params: + if provider == "bedrock" and "amazon" in model: + filtered_stop = [] + if isinstance(stop, list): + for s in stop: + if re.match(r"^(\|+|User:)$", s): + filtered_stop.append(s) + if filtered_stop is not None: + supported_params["stop"] = filtered_stop + + return supported_params + + def get_supported_openai_params(self, model: str) -> List[str]: + return [ + "max_tokens", + "max_completion_tokens", + "stop", + "temperature", + "top_p", + "stream", + ] + + def map_openai_params( + self, + non_default_params: dict, + optional_params: dict, + model: str, + drop_params: bool, + ) -> dict: + for k, v in non_default_params.items(): + if k == "max_tokens" or k == "max_completion_tokens": + optional_params["maxTokenCount"] = v + if k == "temperature": + optional_params["temperature"] = v + if k == "stop": + filtered_stop = self._map_and_modify_arg( + {"stop": v}, provider="bedrock", model=model, stop=v + ) + optional_params["stopSequences"] = filtered_stop["stop"] + if k == "top_p": + optional_params["topP"] = v + if k == "stream": + optional_params["stream"] = v + return optional_params diff --git 
a/litellm/llms/bedrock/chat/invoke_transformations/anthropic_claude2_transformation.py b/litellm/llms/bedrock/chat/invoke_transformations/anthropic_claude2_transformation.py new file mode 100644 index 0000000000..d0d06ef2b2 --- /dev/null +++ b/litellm/llms/bedrock/chat/invoke_transformations/anthropic_claude2_transformation.py @@ -0,0 +1,90 @@ +import types +from typing import Optional + +import litellm + +from .base_invoke_transformation import AmazonInvokeConfig + + +class AmazonAnthropicConfig(AmazonInvokeConfig): + """ + Reference: https://us-west-2.console.aws.amazon.com/bedrock/home?region=us-west-2#/providers?model=claude + + Supported Params for the Amazon / Anthropic models: + + - `max_tokens_to_sample` (integer) max tokens, + - `temperature` (float) model temperature, + - `top_k` (integer) top k, + - `top_p` (integer) top p, + - `stop_sequences` (string[]) list of stop sequences - e.g. ["\\n\\nHuman:"], + - `anthropic_version` (string) version of anthropic for bedrock - e.g. "bedrock-2023-05-31" + """ + + max_tokens_to_sample: Optional[int] = litellm.max_tokens + stop_sequences: Optional[list] = None + temperature: Optional[float] = None + top_k: Optional[int] = None + top_p: Optional[int] = None + anthropic_version: Optional[str] = None + + def __init__( + self, + max_tokens_to_sample: Optional[int] = None, + stop_sequences: Optional[list] = None, + temperature: Optional[float] = None, + top_k: Optional[int] = None, + top_p: Optional[int] = None, + anthropic_version: Optional[str] = None, + ) -> None: + locals_ = locals().copy() + for key, value in locals_.items(): + if key != "self" and value is not None: + setattr(self.__class__, key, value) + + @classmethod + def get_config(cls): + return { + k: v + for k, v in cls.__dict__.items() + if not k.startswith("__") + and not isinstance( + v, + ( + types.FunctionType, + types.BuiltinFunctionType, + classmethod, + staticmethod, + ), + ) + and v is not None + } + + def get_supported_openai_params(self, model: 
str): + return [ + "max_tokens", + "max_completion_tokens", + "temperature", + "stop", + "top_p", + "stream", + ] + + def map_openai_params( + self, + non_default_params: dict, + optional_params: dict, + model: str, + drop_params: bool, + ): + for param, value in non_default_params.items(): + if param == "max_tokens" or param == "max_completion_tokens": + optional_params["max_tokens_to_sample"] = value + if param == "temperature": + optional_params["temperature"] = value + if param == "top_p": + optional_params["top_p"] = value + if param == "stop": + optional_params["stop_sequences"] = value + if param == "stream" and value is True: + optional_params["stream"] = value + return optional_params diff --git a/litellm/llms/bedrock/chat/invoke_transformations/anthropic_claude3_transformation.py b/litellm/llms/bedrock/chat/invoke_transformations/anthropic_claude3_transformation.py new file mode 100644 index 0000000000..0cac339a3c --- /dev/null +++ b/litellm/llms/bedrock/chat/invoke_transformations/anthropic_claude3_transformation.py @@ -0,0 +1,100 @@ +from typing import TYPE_CHECKING, Any, List, Optional + +import httpx + +from litellm.llms.anthropic.chat.transformation import AnthropicConfig +from litellm.llms.bedrock.chat.invoke_transformations.base_invoke_transformation import ( + AmazonInvokeConfig, +) +from litellm.types.llms.openai import AllMessageValues +from litellm.types.utils import ModelResponse + +if TYPE_CHECKING: + from litellm.litellm_core_utils.litellm_logging import Logging as _LiteLLMLoggingObj + + LiteLLMLoggingObj = _LiteLLMLoggingObj +else: + LiteLLMLoggingObj = Any + + +class AmazonAnthropicClaude3Config(AmazonInvokeConfig, AnthropicConfig): + """ + Reference: + https://us-west-2.console.aws.amazon.com/bedrock/home?region=us-west-2#/providers?model=claude + https://docs.anthropic.com/claude/docs/models-overview#model-comparison + + Supported Params for the Amazon / Anthropic Claude 3 models: + """ + + anthropic_version: str = "bedrock-2023-05-31" + 
+ def get_supported_openai_params(self, model: str) -> List[str]: + return AnthropicConfig.get_supported_openai_params(self, model) + + def map_openai_params( + self, + non_default_params: dict, + optional_params: dict, + model: str, + drop_params: bool, + ) -> dict: + return AnthropicConfig.map_openai_params( + self, + non_default_params, + optional_params, + model, + drop_params, + ) + + def transform_request( + self, + model: str, + messages: List[AllMessageValues], + optional_params: dict, + litellm_params: dict, + headers: dict, + ) -> dict: + _anthropic_request = AnthropicConfig.transform_request( + self, + model=model, + messages=messages, + optional_params=optional_params, + litellm_params=litellm_params, + headers=headers, + ) + + _anthropic_request.pop("model", None) + _anthropic_request.pop("stream", None) + if "anthropic_version" not in _anthropic_request: + _anthropic_request["anthropic_version"] = self.anthropic_version + + return _anthropic_request + + def transform_response( + self, + model: str, + raw_response: httpx.Response, + model_response: ModelResponse, + logging_obj: LiteLLMLoggingObj, + request_data: dict, + messages: List[AllMessageValues], + optional_params: dict, + litellm_params: dict, + encoding: Any, + api_key: Optional[str] = None, + json_mode: Optional[bool] = None, + ) -> ModelResponse: + return AnthropicConfig.transform_response( + self, + model=model, + raw_response=raw_response, + model_response=model_response, + logging_obj=logging_obj, + request_data=request_data, + messages=messages, + optional_params=optional_params, + litellm_params=litellm_params, + encoding=encoding, + api_key=api_key, + json_mode=json_mode, + ) diff --git a/litellm/llms/bedrock/chat/invoke_transformations/base_invoke_transformation.py b/litellm/llms/bedrock/chat/invoke_transformations/base_invoke_transformation.py new file mode 100644 index 0000000000..5414429d4c --- /dev/null +++ 
b/litellm/llms/bedrock/chat/invoke_transformations/base_invoke_transformation.py @@ -0,0 +1,677 @@ +import copy +import json +import time +import urllib.parse +from functools import partial +from typing import TYPE_CHECKING, Any, List, Optional, Tuple, Union, cast, get_args + +import httpx + +import litellm +from litellm._logging import verbose_logger +from litellm.litellm_core_utils.core_helpers import map_finish_reason +from litellm.litellm_core_utils.logging_utils import track_llm_api_timing +from litellm.litellm_core_utils.prompt_templates.factory import ( + cohere_message_pt, + custom_prompt, + deepseek_r1_pt, + prompt_factory, +) +from litellm.llms.base_llm.chat.transformation import BaseConfig, BaseLLMException +from litellm.llms.bedrock.chat.invoke_handler import make_call, make_sync_call +from litellm.llms.bedrock.common_utils import BedrockError +from litellm.llms.custom_httpx.http_handler import ( + AsyncHTTPHandler, + HTTPHandler, + _get_httpx_client, +) +from litellm.types.llms.openai import AllMessageValues +from litellm.types.utils import ModelResponse, Usage +from litellm.utils import CustomStreamWrapper + +if TYPE_CHECKING: + from litellm.litellm_core_utils.litellm_logging import Logging as _LiteLLMLoggingObj + + LiteLLMLoggingObj = _LiteLLMLoggingObj +else: + LiteLLMLoggingObj = Any + +from litellm.llms.bedrock.base_aws_llm import BaseAWSLLM + + +class AmazonInvokeConfig(BaseConfig, BaseAWSLLM): + def __init__(self, **kwargs): + BaseConfig.__init__(self, **kwargs) + BaseAWSLLM.__init__(self, **kwargs) + + def get_supported_openai_params(self, model: str) -> List[str]: + """ + This is a base invoke model mapping. For Invoke - define a bedrock provider specific config that extends this class. + """ + return [ + "max_tokens", + "max_completion_tokens", + "stream", + ] + + def map_openai_params( + self, + non_default_params: dict, + optional_params: dict, + model: str, + drop_params: bool, + ) -> dict: + """ + This is a base invoke model mapping. 
For Invoke - define a bedrock provider specific config that extends this class. + """ + for param, value in non_default_params.items(): + if param == "max_tokens" or param == "max_completion_tokens": + optional_params["max_tokens"] = value + if param == "stream": + optional_params["stream"] = value + return optional_params + + def get_complete_url( + self, + api_base: Optional[str], + model: str, + optional_params: dict, + stream: Optional[bool] = None, + ) -> str: + """ + Get the complete url for the request + """ + provider = self.get_bedrock_invoke_provider(model) + modelId = self.get_bedrock_model_id( + model=model, + provider=provider, + optional_params=optional_params, + ) + ### SET RUNTIME ENDPOINT ### + aws_bedrock_runtime_endpoint = optional_params.get( + "aws_bedrock_runtime_endpoint", None + ) # https://bedrock-runtime.{region_name}.amazonaws.com + endpoint_url, proxy_endpoint_url = self.get_runtime_endpoint( + api_base=api_base, + aws_bedrock_runtime_endpoint=aws_bedrock_runtime_endpoint, + aws_region_name=self._get_aws_region_name( + optional_params=optional_params, model=model + ), + ) + + if (stream is not None and stream is True) and provider != "ai21": + endpoint_url = f"{endpoint_url}/model/{modelId}/invoke-with-response-stream" + proxy_endpoint_url = ( + f"{proxy_endpoint_url}/model/{modelId}/invoke-with-response-stream" + ) + else: + endpoint_url = f"{endpoint_url}/model/{modelId}/invoke" + proxy_endpoint_url = f"{proxy_endpoint_url}/model/{modelId}/invoke" + + return endpoint_url + + def sign_request( + self, + headers: dict, + optional_params: dict, + request_data: dict, + api_base: str, + model: Optional[str] = None, + stream: Optional[bool] = None, + fake_stream: Optional[bool] = None, + ) -> dict: + try: + from botocore.auth import SigV4Auth + from botocore.awsrequest import AWSRequest + from botocore.credentials import Credentials + except ImportError: + raise ImportError("Missing boto3 to call bedrock. 
Run 'pip install boto3'.") + + ## CREDENTIALS ## + # pop aws_secret_access_key, aws_access_key_id, aws_session_token, aws_region_name from kwargs, since completion calls fail with them + aws_secret_access_key = optional_params.get("aws_secret_access_key", None) + aws_access_key_id = optional_params.get("aws_access_key_id", None) + aws_session_token = optional_params.get("aws_session_token", None) + aws_role_name = optional_params.get("aws_role_name", None) + aws_session_name = optional_params.get("aws_session_name", None) + aws_profile_name = optional_params.get("aws_profile_name", None) + aws_web_identity_token = optional_params.get("aws_web_identity_token", None) + aws_sts_endpoint = optional_params.get("aws_sts_endpoint", None) + aws_region_name = self._get_aws_region_name( + optional_params=optional_params, model=model + ) + + credentials: Credentials = self.get_credentials( + aws_access_key_id=aws_access_key_id, + aws_secret_access_key=aws_secret_access_key, + aws_session_token=aws_session_token, + aws_region_name=aws_region_name, + aws_session_name=aws_session_name, + aws_profile_name=aws_profile_name, + aws_role_name=aws_role_name, + aws_web_identity_token=aws_web_identity_token, + aws_sts_endpoint=aws_sts_endpoint, + ) + + sigv4 = SigV4Auth(credentials, "bedrock", aws_region_name) + if headers is not None: + headers = {"Content-Type": "application/json", **headers} + else: + headers = {"Content-Type": "application/json"} + + request = AWSRequest( + method="POST", + url=api_base, + data=json.dumps(request_data), + headers=headers, + ) + sigv4.add_auth(request) + + request_headers_dict = dict(request.headers) + if ( + headers is not None and "Authorization" in headers + ): # prevent sigv4 from overwriting the auth header + request_headers_dict["Authorization"] = headers["Authorization"] + return request_headers_dict + + def transform_request( + self, + model: str, + messages: List[AllMessageValues], + optional_params: dict, + litellm_params: dict, + headers: 
dict, + ) -> dict: + ## SETUP ## + stream = optional_params.pop("stream", None) + custom_prompt_dict: dict = litellm_params.pop("custom_prompt_dict", None) or {} + hf_model_name = litellm_params.get("hf_model_name", None) + + provider = self.get_bedrock_invoke_provider(model) + + prompt, chat_history = self.convert_messages_to_prompt( + model=hf_model_name or model, + messages=messages, + provider=provider, + custom_prompt_dict=custom_prompt_dict, + ) + inference_params = copy.deepcopy(optional_params) + inference_params = { + k: v + for k, v in inference_params.items() + if k not in self.aws_authentication_params + } + request_data: dict = {} + if provider == "cohere": + if model.startswith("cohere.command-r"): + ## LOAD CONFIG + config = litellm.AmazonCohereChatConfig().get_config() + for k, v in config.items(): + if ( + k not in inference_params + ): # completion(top_k=3) > anthropic_config(top_k=3) <- allows for dynamic variables to be passed in + inference_params[k] = v + _data = {"message": prompt, **inference_params} + if chat_history is not None: + _data["chat_history"] = chat_history + request_data = _data + else: + ## LOAD CONFIG + config = litellm.AmazonCohereConfig.get_config() + for k, v in config.items(): + if ( + k not in inference_params + ): # completion(top_k=3) > anthropic_config(top_k=3) <- allows for dynamic variables to be passed in + inference_params[k] = v + if stream is True: + inference_params["stream"] = ( + True # cohere requires stream = True in inference params + ) + request_data = {"prompt": prompt, **inference_params} + elif provider == "anthropic": + return litellm.AmazonAnthropicClaude3Config().transform_request( + model=model, + messages=messages, + optional_params=optional_params, + litellm_params=litellm_params, + headers=headers, + ) + elif provider == "nova": + return litellm.AmazonInvokeNovaConfig().transform_request( + model=model, + messages=messages, + optional_params=optional_params, + litellm_params=litellm_params, + 
headers=headers, + ) + elif provider == "ai21": + ## LOAD CONFIG + config = litellm.AmazonAI21Config.get_config() + for k, v in config.items(): + if ( + k not in inference_params + ): # completion(top_k=3) > anthropic_config(top_k=3) <- allows for dynamic variables to be passed in + inference_params[k] = v + + request_data = {"prompt": prompt, **inference_params} + elif provider == "mistral": + ## LOAD CONFIG + config = litellm.AmazonMistralConfig.get_config() + for k, v in config.items(): + if ( + k not in inference_params + ): # completion(top_k=3) > amazon_config(top_k=3) <- allows for dynamic variables to be passed in + inference_params[k] = v + + request_data = {"prompt": prompt, **inference_params} + elif provider == "amazon": # amazon titan + ## LOAD CONFIG + config = litellm.AmazonTitanConfig.get_config() + for k, v in config.items(): + if ( + k not in inference_params + ): # completion(top_k=3) > amazon_config(top_k=3) <- allows for dynamic variables to be passed in + inference_params[k] = v + + request_data = { + "inputText": prompt, + "textGenerationConfig": inference_params, + } + elif provider == "meta" or provider == "llama" or provider == "deepseek_r1": + ## LOAD CONFIG + config = litellm.AmazonLlamaConfig.get_config() + for k, v in config.items(): + if ( + k not in inference_params + ): # completion(top_k=3) > anthropic_config(top_k=3) <- allows for dynamic variables to be passed in + inference_params[k] = v + request_data = {"prompt": prompt, **inference_params} + else: + raise BedrockError( + status_code=404, + message="Bedrock Invoke HTTPX: Unknown provider={}, model={}. 
Try calling via converse route - `bedrock/converse/`.".format( + provider, model + ), + ) + + return request_data + + def transform_response( # noqa: PLR0915 + self, + model: str, + raw_response: httpx.Response, + model_response: ModelResponse, + logging_obj: LiteLLMLoggingObj, + request_data: dict, + messages: List[AllMessageValues], + optional_params: dict, + litellm_params: dict, + encoding: Any, + api_key: Optional[str] = None, + json_mode: Optional[bool] = None, + ) -> ModelResponse: + + try: + completion_response = raw_response.json() + except Exception: + raise BedrockError( + message=raw_response.text, status_code=raw_response.status_code + ) + verbose_logger.debug( + "bedrock invoke response % s", + json.dumps(completion_response, indent=4, default=str), + ) + provider = self.get_bedrock_invoke_provider(model) + outputText: Optional[str] = None + try: + if provider == "cohere": + if "text" in completion_response: + outputText = completion_response["text"] # type: ignore + elif "generations" in completion_response: + outputText = completion_response["generations"][0]["text"] + model_response.choices[0].finish_reason = map_finish_reason( + completion_response["generations"][0]["finish_reason"] + ) + elif provider == "anthropic": + return litellm.AmazonAnthropicClaude3Config().transform_response( + model=model, + raw_response=raw_response, + model_response=model_response, + logging_obj=logging_obj, + request_data=request_data, + messages=messages, + optional_params=optional_params, + litellm_params=litellm_params, + encoding=encoding, + api_key=api_key, + json_mode=json_mode, + ) + elif provider == "nova": + return litellm.AmazonInvokeNovaConfig().transform_response( + model=model, + raw_response=raw_response, + model_response=model_response, + logging_obj=logging_obj, + request_data=request_data, + messages=messages, + optional_params=optional_params, + litellm_params=litellm_params, + encoding=encoding, + ) + elif provider == "ai21": + outputText = ( + 
completion_response.get("completions")[0].get("data").get("text") + ) + elif provider == "meta" or provider == "llama" or provider == "deepseek_r1": + outputText = completion_response["generation"] + elif provider == "mistral": + outputText = completion_response["outputs"][0]["text"] + model_response.choices[0].finish_reason = completion_response[ + "outputs" + ][0]["stop_reason"] + else: # amazon titan + outputText = completion_response.get("results")[0].get("outputText") + except Exception as e: + raise BedrockError( + message="Error processing={}, Received error={}".format( + raw_response.text, str(e) + ), + status_code=422, + ) + + try: + if ( + outputText is not None + and len(outputText) > 0 + and hasattr(model_response.choices[0], "message") + and getattr(model_response.choices[0].message, "tool_calls", None) # type: ignore + is None + ): + model_response.choices[0].message.content = outputText # type: ignore + elif ( + hasattr(model_response.choices[0], "message") + and getattr(model_response.choices[0].message, "tool_calls", None) # type: ignore + is not None + ): + pass + else: + raise Exception() + except Exception as e: + raise BedrockError( + message="Error parsing received text={}.\nError-{}".format( + outputText, str(e) + ), + status_code=raw_response.status_code, + ) + + ## CALCULATING USAGE - bedrock returns usage in the headers + bedrock_input_tokens = raw_response.headers.get( + "x-amzn-bedrock-input-token-count", None + ) + bedrock_output_tokens = raw_response.headers.get( + "x-amzn-bedrock-output-token-count", None + ) + + prompt_tokens = int( + bedrock_input_tokens or litellm.token_counter(messages=messages) + ) + + completion_tokens = int( + bedrock_output_tokens + or litellm.token_counter( + text=model_response.choices[0].message.content, # type: ignore + count_response_tokens=True, + ) + ) + + model_response.created = int(time.time()) + model_response.model = model + usage = Usage( + prompt_tokens=prompt_tokens, + 
completion_tokens=completion_tokens, + total_tokens=prompt_tokens + completion_tokens, + ) + setattr(model_response, "usage", usage) + + return model_response + + def validate_environment( + self, + headers: dict, + model: str, + messages: List[AllMessageValues], + optional_params: dict, + api_key: Optional[str] = None, + api_base: Optional[str] = None, + ) -> dict: + return headers + + def get_error_class( + self, error_message: str, status_code: int, headers: Union[dict, httpx.Headers] + ) -> BaseLLMException: + return BedrockError(status_code=status_code, message=error_message) + + @track_llm_api_timing() + def get_async_custom_stream_wrapper( + self, + model: str, + custom_llm_provider: str, + logging_obj: LiteLLMLoggingObj, + api_base: str, + headers: dict, + data: dict, + messages: list, + client: Optional[AsyncHTTPHandler] = None, + json_mode: Optional[bool] = None, + ) -> CustomStreamWrapper: + streaming_response = CustomStreamWrapper( + completion_stream=None, + make_call=partial( + make_call, + client=client, + api_base=api_base, + headers=headers, + data=json.dumps(data), + model=model, + messages=messages, + logging_obj=logging_obj, + fake_stream=True if "ai21" in api_base else False, + bedrock_invoke_provider=self.get_bedrock_invoke_provider(model), + json_mode=json_mode, + ), + model=model, + custom_llm_provider="bedrock", + logging_obj=logging_obj, + ) + return streaming_response + + @track_llm_api_timing() + def get_sync_custom_stream_wrapper( + self, + model: str, + custom_llm_provider: str, + logging_obj: LiteLLMLoggingObj, + api_base: str, + headers: dict, + data: dict, + messages: list, + client: Optional[Union[HTTPHandler, AsyncHTTPHandler]] = None, + json_mode: Optional[bool] = None, + ) -> CustomStreamWrapper: + if client is None or isinstance(client, AsyncHTTPHandler): + client = _get_httpx_client(params={}) + streaming_response = CustomStreamWrapper( + completion_stream=None, + make_call=partial( + make_sync_call, + client=client, + 
api_base=api_base, + headers=headers, + data=json.dumps(data), + model=model, + messages=messages, + logging_obj=logging_obj, + fake_stream=True if "ai21" in api_base else False, + bedrock_invoke_provider=self.get_bedrock_invoke_provider(model), + json_mode=json_mode, + ), + model=model, + custom_llm_provider="bedrock", + logging_obj=logging_obj, + ) + return streaming_response + + @property + def has_custom_stream_wrapper(self) -> bool: + return True + + @property + def supports_stream_param_in_request_body(self) -> bool: + """ + Bedrock invoke does not allow passing `stream` in the request body. + """ + return False + + @staticmethod + def get_bedrock_invoke_provider( + model: str, + ) -> Optional[litellm.BEDROCK_INVOKE_PROVIDERS_LITERAL]: + """ + Helper function to get the bedrock provider from the model + + handles 4 scenarios: + 1. model=invoke/anthropic.claude-3-5-sonnet-20240620-v1:0 -> Returns `anthropic` + 2. model=anthropic.claude-3-5-sonnet-20240620-v1:0 -> Returns `anthropic` + 3. model=llama/arn:aws:bedrock:us-east-1:086734376398:imported-model/r4c4kewx2s0n -> Returns `llama` + 4. 
model=us.amazon.nova-pro-v1:0 -> Returns `nova` + """ + if model.startswith("invoke/"): + model = model.replace("invoke/", "", 1) + + _split_model = model.split(".")[0] + if _split_model in get_args(litellm.BEDROCK_INVOKE_PROVIDERS_LITERAL): + return cast(litellm.BEDROCK_INVOKE_PROVIDERS_LITERAL, _split_model) + + # If not a known provider, check for pattern with two slashes + provider = AmazonInvokeConfig._get_provider_from_model_path(model) + if provider is not None: + return provider + + # check if provider == "nova" + if "nova" in model: + return "nova" + + for provider in get_args(litellm.BEDROCK_INVOKE_PROVIDERS_LITERAL): + if provider in model: + return provider + return None + + @staticmethod + def _get_provider_from_model_path( + model_path: str, + ) -> Optional[litellm.BEDROCK_INVOKE_PROVIDERS_LITERAL]: + """ + Helper function to get the provider from a model path with format: provider/model-name + + Args: + model_path (str): The model path (e.g., 'llama/arn:aws:bedrock:us-east-1:086734376398:imported-model/r4c4kewx2s0n' or 'anthropic/model-name') + + Returns: + Optional[str]: The provider name, or None if no valid provider found + """ + parts = model_path.split("/") + if len(parts) >= 1: + provider = parts[0] + if provider in get_args(litellm.BEDROCK_INVOKE_PROVIDERS_LITERAL): + return cast(litellm.BEDROCK_INVOKE_PROVIDERS_LITERAL, provider) + return None + + def get_bedrock_model_id( + self, + optional_params: dict, + provider: Optional[litellm.BEDROCK_INVOKE_PROVIDERS_LITERAL], + model: str, + ) -> str: + modelId = optional_params.pop("model_id", None) + if modelId is not None: + modelId = self.encode_model_id(model_id=modelId) + else: + modelId = model + + modelId = modelId.replace("invoke/", "", 1) + if provider == "llama" and "llama/" in modelId: + modelId = self._get_model_id_from_model_with_spec(modelId, spec="llama") + elif provider == "deepseek_r1" and "deepseek_r1/" in modelId: + modelId = self._get_model_id_from_model_with_spec( + modelId, 
spec="deepseek_r1" + ) + return modelId + + def _get_model_id_from_model_with_spec( + self, + model: str, + spec: str, + ) -> str: + """ + Remove `llama` from modelID since `llama` is simply a spec to follow for custom bedrock models + """ + model_id = model.replace(spec + "/", "") + return self.encode_model_id(model_id=model_id) + + def encode_model_id(self, model_id: str) -> str: + """ + Double encode the model ID to ensure it matches the expected double-encoded format. + Args: + model_id (str): The model ID to encode. + Returns: + str: The double-encoded model ID. + """ + return urllib.parse.quote(model_id, safe="") + + def convert_messages_to_prompt( + self, model, messages, provider, custom_prompt_dict + ) -> Tuple[str, Optional[list]]: + # handle anthropic prompts and amazon titan prompts + prompt = "" + chat_history: Optional[list] = None + ## CUSTOM PROMPT + if model in custom_prompt_dict: + # check if the model has a registered custom prompt + model_prompt_details = custom_prompt_dict[model] + prompt = custom_prompt( + role_dict=model_prompt_details["roles"], + initial_prompt_value=model_prompt_details.get( + "initial_prompt_value", "" + ), + final_prompt_value=model_prompt_details.get("final_prompt_value", ""), + messages=messages, + ) + return prompt, None + ## ELSE + if provider == "anthropic" or provider == "amazon": + prompt = prompt_factory( + model=model, messages=messages, custom_llm_provider="bedrock" + ) + elif provider == "mistral": + prompt = prompt_factory( + model=model, messages=messages, custom_llm_provider="bedrock" + ) + elif provider == "meta" or provider == "llama": + prompt = prompt_factory( + model=model, messages=messages, custom_llm_provider="bedrock" + ) + elif provider == "cohere": + prompt, chat_history = cohere_message_pt(messages=messages) + elif provider == "deepseek_r1": + prompt = deepseek_r1_pt(messages=messages) + else: + prompt = "" + for message in messages: + if "role" in message: + if message["role"] == "user": + 
prompt += f"{message['content']}" + else: + prompt += f"{message['content']}" + else: + prompt += f"{message['content']}" + return prompt, chat_history # type: ignore diff --git a/litellm/llms/bedrock/common_utils.py b/litellm/llms/bedrock/common_utils.py index 7b3040f91a..54be359897 100644 --- a/litellm/llms/bedrock/common_utils.py +++ b/litellm/llms/bedrock/common_utils.py @@ -3,22 +3,14 @@ Common utilities used across bedrock chat/embedding/image generation """ import os -import re -import types -from enum import Enum -from typing import Any, List, Optional, Union +from typing import List, Literal, Optional, Union import httpx import litellm -from litellm.llms.base_llm.chat.transformation import ( - BaseConfig, - BaseLLMException, - LiteLLMLoggingObj, -) +from litellm.llms.base_llm.base_utils import BaseLLMModelInfo +from litellm.llms.base_llm.chat.transformation import BaseLLMException from litellm.secret_managers.main import get_secret -from litellm.types.llms.openai import AllMessageValues -from litellm.types.utils import ModelResponse class BedrockError(BaseLLMException): @@ -84,642 +76,6 @@ class AmazonBedrockGlobalConfig: ] -class AmazonInvokeMixin: - """ - Base class for bedrock models going through invoke_handler.py - """ - - def get_error_class( - self, error_message: str, status_code: int, headers: Union[dict, httpx.Headers] - ) -> BaseLLMException: - return BedrockError( - message=error_message, - status_code=status_code, - headers=headers, - ) - - def transform_request( - self, - model: str, - messages: List[AllMessageValues], - optional_params: dict, - litellm_params: dict, - headers: dict, - ) -> dict: - raise NotImplementedError( - "transform_request not implemented for config. 
Done in invoke_handler.py" - ) - - def transform_response( - self, - model: str, - raw_response: httpx.Response, - model_response: ModelResponse, - logging_obj: LiteLLMLoggingObj, - request_data: dict, - messages: List[AllMessageValues], - optional_params: dict, - litellm_params: dict, - encoding: Any, - api_key: Optional[str] = None, - json_mode: Optional[bool] = None, - ) -> ModelResponse: - raise NotImplementedError( - "transform_response not implemented for config. Done in invoke_handler.py" - ) - - def validate_environment( - self, - headers: dict, - model: str, - messages: List[AllMessageValues], - optional_params: dict, - api_key: Optional[str] = None, - api_base: Optional[str] = None, - ) -> dict: - raise NotImplementedError( - "validate_environment not implemented for config. Done in invoke_handler.py" - ) - - -class AmazonTitanConfig(AmazonInvokeMixin, BaseConfig): - """ - Reference: https://us-west-2.console.aws.amazon.com/bedrock/home?region=us-west-2#/providers?model=titan-text-express-v1 - - Supported Params for the Amazon Titan models: - - - `maxTokenCount` (integer) max tokens, - - `stopSequences` (string[]) list of stop sequence strings - - `temperature` (float) temperature for model, - - `topP` (int) top p for model - """ - - maxTokenCount: Optional[int] = None - stopSequences: Optional[list] = None - temperature: Optional[float] = None - topP: Optional[int] = None - - def __init__( - self, - maxTokenCount: Optional[int] = None, - stopSequences: Optional[list] = None, - temperature: Optional[float] = None, - topP: Optional[int] = None, - ) -> None: - locals_ = locals() - for key, value in locals_.items(): - if key != "self" and value is not None: - setattr(self.__class__, key, value) - - @classmethod - def get_config(cls): - return { - k: v - for k, v in cls.__dict__.items() - if not k.startswith("__") - and not k.startswith("_abc") - and not isinstance( - v, - ( - types.FunctionType, - types.BuiltinFunctionType, - classmethod, - staticmethod, - 
), - ) - and v is not None - } - - def _map_and_modify_arg( - self, - supported_params: dict, - provider: str, - model: str, - stop: Union[List[str], str], - ): - """ - filter params to fit the required provider format, drop those that don't fit if user sets `litellm.drop_params = True`. - """ - filtered_stop = None - if "stop" in supported_params and litellm.drop_params: - if provider == "bedrock" and "amazon" in model: - filtered_stop = [] - if isinstance(stop, list): - for s in stop: - if re.match(r"^(\|+|User:)$", s): - filtered_stop.append(s) - if filtered_stop is not None: - supported_params["stop"] = filtered_stop - - return supported_params - - def get_supported_openai_params(self, model: str) -> List[str]: - return [ - "max_tokens", - "max_completion_tokens", - "stop", - "temperature", - "top_p", - "stream", - ] - - def map_openai_params( - self, - non_default_params: dict, - optional_params: dict, - model: str, - drop_params: bool, - ) -> dict: - for k, v in non_default_params.items(): - if k == "max_tokens" or k == "max_completion_tokens": - optional_params["maxTokenCount"] = v - if k == "temperature": - optional_params["temperature"] = v - if k == "stop": - filtered_stop = self._map_and_modify_arg( - {"stop": v}, provider="bedrock", model=model, stop=v - ) - optional_params["stopSequences"] = filtered_stop["stop"] - if k == "top_p": - optional_params["topP"] = v - if k == "stream": - optional_params["stream"] = v - return optional_params - - -class AmazonAnthropicClaude3Config: - """ - Reference: - https://us-west-2.console.aws.amazon.com/bedrock/home?region=us-west-2#/providers?model=claude - https://docs.anthropic.com/claude/docs/models-overview#model-comparison - - Supported Params for the Amazon / Anthropic Claude 3 models: - - - `max_tokens` Required (integer) max tokens. Default is 4096 - - `anthropic_version` Required (string) version of anthropic for bedrock - e.g. 
"bedrock-2023-05-31" - - `system` Optional (string) the system prompt, conversion from openai format to this is handled in factory.py - - `temperature` Optional (float) The amount of randomness injected into the response - - `top_p` Optional (float) Use nucleus sampling. - - `top_k` Optional (int) Only sample from the top K options for each subsequent token - - `stop_sequences` Optional (List[str]) Custom text sequences that cause the model to stop generating - """ - - max_tokens: Optional[int] = 4096 # Opus, Sonnet, and Haiku default - anthropic_version: Optional[str] = "bedrock-2023-05-31" - system: Optional[str] = None - temperature: Optional[float] = None - top_p: Optional[float] = None - top_k: Optional[int] = None - stop_sequences: Optional[List[str]] = None - - def __init__( - self, - max_tokens: Optional[int] = None, - anthropic_version: Optional[str] = None, - ) -> None: - locals_ = locals() - for key, value in locals_.items(): - if key != "self" and value is not None: - setattr(self.__class__, key, value) - - @classmethod - def get_config(cls): - return { - k: v - for k, v in cls.__dict__.items() - if not k.startswith("__") - and not isinstance( - v, - ( - types.FunctionType, - types.BuiltinFunctionType, - classmethod, - staticmethod, - ), - ) - and v is not None - } - - def get_supported_openai_params(self): - return [ - "max_tokens", - "max_completion_tokens", - "tools", - "tool_choice", - "stream", - "stop", - "temperature", - "top_p", - "extra_headers", - ] - - def map_openai_params(self, non_default_params: dict, optional_params: dict): - for param, value in non_default_params.items(): - if param == "max_tokens" or param == "max_completion_tokens": - optional_params["max_tokens"] = value - if param == "tools": - optional_params["tools"] = value - if param == "stream": - optional_params["stream"] = value - if param == "stop": - optional_params["stop_sequences"] = value - if param == "temperature": - optional_params["temperature"] = value - if param == 
"top_p": - optional_params["top_p"] = value - return optional_params - - -class AmazonAnthropicConfig: - """ - Reference: https://us-west-2.console.aws.amazon.com/bedrock/home?region=us-west-2#/providers?model=claude - - Supported Params for the Amazon / Anthropic models: - - - `max_tokens_to_sample` (integer) max tokens, - - `temperature` (float) model temperature, - - `top_k` (integer) top k, - - `top_p` (integer) top p, - - `stop_sequences` (string[]) list of stop sequences - e.g. ["\\n\\nHuman:"], - - `anthropic_version` (string) version of anthropic for bedrock - e.g. "bedrock-2023-05-31" - """ - - max_tokens_to_sample: Optional[int] = litellm.max_tokens - stop_sequences: Optional[list] = None - temperature: Optional[float] = None - top_k: Optional[int] = None - top_p: Optional[int] = None - anthropic_version: Optional[str] = None - - def __init__( - self, - max_tokens_to_sample: Optional[int] = None, - stop_sequences: Optional[list] = None, - temperature: Optional[float] = None, - top_k: Optional[int] = None, - top_p: Optional[int] = None, - anthropic_version: Optional[str] = None, - ) -> None: - locals_ = locals() - for key, value in locals_.items(): - if key != "self" and value is not None: - setattr(self.__class__, key, value) - - @classmethod - def get_config(cls): - return { - k: v - for k, v in cls.__dict__.items() - if not k.startswith("__") - and not isinstance( - v, - ( - types.FunctionType, - types.BuiltinFunctionType, - classmethod, - staticmethod, - ), - ) - and v is not None - } - - def get_supported_openai_params( - self, - ): - return [ - "max_tokens", - "max_completion_tokens", - "temperature", - "stop", - "top_p", - "stream", - ] - - def map_openai_params(self, non_default_params: dict, optional_params: dict): - for param, value in non_default_params.items(): - if param == "max_tokens" or param == "max_completion_tokens": - optional_params["max_tokens_to_sample"] = value - if param == "temperature": - optional_params["temperature"] = value - 
if param == "top_p": - optional_params["top_p"] = value - if param == "stop": - optional_params["stop_sequences"] = value - if param == "stream" and value is True: - optional_params["stream"] = value - return optional_params - - -class AmazonCohereConfig(AmazonInvokeMixin, BaseConfig): - """ - Reference: https://us-west-2.console.aws.amazon.com/bedrock/home?region=us-west-2#/providers?model=command - - Supported Params for the Amazon / Cohere models: - - - `max_tokens` (integer) max tokens, - - `temperature` (float) model temperature, - - `return_likelihood` (string) n/a - """ - - max_tokens: Optional[int] = None - temperature: Optional[float] = None - return_likelihood: Optional[str] = None - - def __init__( - self, - max_tokens: Optional[int] = None, - temperature: Optional[float] = None, - return_likelihood: Optional[str] = None, - ) -> None: - locals_ = locals() - for key, value in locals_.items(): - if key != "self" and value is not None: - setattr(self.__class__, key, value) - - @classmethod - def get_config(cls): - return { - k: v - for k, v in cls.__dict__.items() - if not k.startswith("__") - and not k.startswith("_abc") - and not isinstance( - v, - ( - types.FunctionType, - types.BuiltinFunctionType, - classmethod, - staticmethod, - ), - ) - and v is not None - } - - def get_supported_openai_params(self, model: str) -> List[str]: - return [ - "max_tokens", - "temperature", - "stream", - ] - - def map_openai_params( - self, - non_default_params: dict, - optional_params: dict, - model: str, - drop_params: bool, - ) -> dict: - for k, v in non_default_params.items(): - if k == "stream": - optional_params["stream"] = v - if k == "temperature": - optional_params["temperature"] = v - if k == "max_tokens": - optional_params["max_tokens"] = v - return optional_params - - -class AmazonAI21Config(AmazonInvokeMixin, BaseConfig): - """ - Reference: https://us-west-2.console.aws.amazon.com/bedrock/home?region=us-west-2#/providers?model=j2-ultra - - Supported Params for 
the Amazon / AI21 models: - - - `maxTokens` (int32): The maximum number of tokens to generate per result. Optional, default is 16. If no `stopSequences` are given, generation stops after producing `maxTokens`. - - - `temperature` (float): Modifies the distribution from which tokens are sampled. Optional, default is 0.7. A value of 0 essentially disables sampling and results in greedy decoding. - - - `topP` (float): Used for sampling tokens from the corresponding top percentile of probability mass. Optional, default is 1. For instance, a value of 0.9 considers only tokens comprising the top 90% probability mass. - - - `stopSequences` (array of strings): Stops decoding if any of the input strings is generated. Optional. - - - `frequencyPenalty` (object): Placeholder for frequency penalty object. - - - `presencePenalty` (object): Placeholder for presence penalty object. - - - `countPenalty` (object): Placeholder for count penalty object. - """ - - maxTokens: Optional[int] = None - temperature: Optional[float] = None - topP: Optional[float] = None - stopSequences: Optional[list] = None - frequencePenalty: Optional[dict] = None - presencePenalty: Optional[dict] = None - countPenalty: Optional[dict] = None - - def __init__( - self, - maxTokens: Optional[int] = None, - temperature: Optional[float] = None, - topP: Optional[float] = None, - stopSequences: Optional[list] = None, - frequencePenalty: Optional[dict] = None, - presencePenalty: Optional[dict] = None, - countPenalty: Optional[dict] = None, - ) -> None: - locals_ = locals() - for key, value in locals_.items(): - if key != "self" and value is not None: - setattr(self.__class__, key, value) - - @classmethod - def get_config(cls): - return { - k: v - for k, v in cls.__dict__.items() - if not k.startswith("__") - and not k.startswith("_abc") - and not isinstance( - v, - ( - types.FunctionType, - types.BuiltinFunctionType, - classmethod, - staticmethod, - ), - ) - and v is not None - } - - def 
get_supported_openai_params(self, model: str) -> List: - return [ - "max_tokens", - "temperature", - "top_p", - "stream", - ] - - def map_openai_params( - self, - non_default_params: dict, - optional_params: dict, - model: str, - drop_params: bool, - ) -> dict: - for k, v in non_default_params.items(): - if k == "max_tokens": - optional_params["maxTokens"] = v - if k == "temperature": - optional_params["temperature"] = v - if k == "top_p": - optional_params["topP"] = v - if k == "stream": - optional_params["stream"] = v - return optional_params - - -class AnthropicConstants(Enum): - HUMAN_PROMPT = "\n\nHuman: " - AI_PROMPT = "\n\nAssistant: " - - -class AmazonLlamaConfig(AmazonInvokeMixin, BaseConfig): - """ - Reference: https://us-west-2.console.aws.amazon.com/bedrock/home?region=us-west-2#/providers?model=meta.llama2-13b-chat-v1 - - Supported Params for the Amazon / Meta Llama models: - - - `max_gen_len` (integer) max tokens, - - `temperature` (float) temperature for model, - - `top_p` (float) top p for model - """ - - max_gen_len: Optional[int] = None - temperature: Optional[float] = None - topP: Optional[float] = None - - def __init__( - self, - maxTokenCount: Optional[int] = None, - temperature: Optional[float] = None, - topP: Optional[int] = None, - ) -> None: - locals_ = locals() - for key, value in locals_.items(): - if key != "self" and value is not None: - setattr(self.__class__, key, value) - - @classmethod - def get_config(cls): - return { - k: v - for k, v in cls.__dict__.items() - if not k.startswith("__") - and not k.startswith("_abc") - and not isinstance( - v, - ( - types.FunctionType, - types.BuiltinFunctionType, - classmethod, - staticmethod, - ), - ) - and v is not None - } - - def get_supported_openai_params(self, model: str) -> List: - return [ - "max_tokens", - "temperature", - "top_p", - "stream", - ] - - def map_openai_params( - self, - non_default_params: dict, - optional_params: dict, - model: str, - drop_params: bool, - ) -> dict: - for 
k, v in non_default_params.items(): - if k == "max_tokens": - optional_params["max_gen_len"] = v - if k == "temperature": - optional_params["temperature"] = v - if k == "top_p": - optional_params["top_p"] = v - if k == "stream": - optional_params["stream"] = v - return optional_params - - -class AmazonMistralConfig(AmazonInvokeMixin, BaseConfig): - """ - Reference: https://docs.aws.amazon.com/bedrock/latest/userguide/model-parameters-mistral.html - Supported Params for the Amazon / Mistral models: - - - `max_tokens` (integer) max tokens, - - `temperature` (float) temperature for model, - - `top_p` (float) top p for model - - `stop` [string] A list of stop sequences that if generated by the model, stops the model from generating further output. - - `top_k` (float) top k for model - """ - - max_tokens: Optional[int] = None - temperature: Optional[float] = None - top_p: Optional[float] = None - top_k: Optional[float] = None - stop: Optional[List[str]] = None - - def __init__( - self, - max_tokens: Optional[int] = None, - temperature: Optional[float] = None, - top_p: Optional[int] = None, - top_k: Optional[float] = None, - stop: Optional[List[str]] = None, - ) -> None: - locals_ = locals() - for key, value in locals_.items(): - if key != "self" and value is not None: - setattr(self.__class__, key, value) - - @classmethod - def get_config(cls): - return { - k: v - for k, v in cls.__dict__.items() - if not k.startswith("__") - and not k.startswith("_abc") - and not isinstance( - v, - ( - types.FunctionType, - types.BuiltinFunctionType, - classmethod, - staticmethod, - ), - ) - and v is not None - } - - def get_supported_openai_params(self, model: str) -> List[str]: - return ["max_tokens", "temperature", "top_p", "stop", "stream"] - - def map_openai_params( - self, - non_default_params: dict, - optional_params: dict, - model: str, - drop_params: bool, - ) -> dict: - for k, v in non_default_params.items(): - if k == "max_tokens": - optional_params["max_tokens"] = v - if k 
== "temperature": - optional_params["temperature"] = v - if k == "top_p": - optional_params["top_p"] = v - if k == "stop": - optional_params["stop"] = v - if k == "stream": - optional_params["stream"] = v - return optional_params - - def add_custom_header(headers): """Closure to capture the headers and add them.""" @@ -955,3 +311,87 @@ def get_bedrock_tool_name(response_tool_name: str) -> str: response_tool_name ] return response_tool_name + + +class BedrockModelInfo(BaseLLMModelInfo): + + global_config = AmazonBedrockGlobalConfig() + all_global_regions = global_config.get_all_regions() + + @staticmethod + def extract_model_name_from_arn(model: str) -> str: + """ + Extract the model name from an AWS Bedrock ARN. + Returns the string after the last '/' if 'arn' is in the input string. + + Args: + arn (str): The ARN string to parse + + Returns: + str: The extracted model name if 'arn' is in the string, + otherwise returns the original string + """ + if "arn" in model.lower(): + return model.split("/")[-1] + return model + + @staticmethod + def get_base_model(model: str) -> str: + """ + Get the base model from the given model name. 
+ + Handle model names like - "us.meta.llama3-2-11b-instruct-v1:0" -> "meta.llama3-2-11b-instruct-v1" + AND "meta.llama3-2-11b-instruct-v1:0" -> "meta.llama3-2-11b-instruct-v1" + """ + if model.startswith("bedrock/"): + model = model.split("/", 1)[1] + + if model.startswith("converse/"): + model = model.split("/", 1)[1] + + if model.startswith("invoke/"): + model = model.split("/", 1)[1] + + model = BedrockModelInfo.extract_model_name_from_arn(model) + + potential_region = model.split(".", 1)[0] + + alt_potential_region = model.split("/", 1)[ + 0 + ] # in model cost map we store regional information like `/us-west-2/bedrock-model` + + if ( + potential_region + in BedrockModelInfo._supported_cross_region_inference_region() + ): + return model.split(".", 1)[1] + elif ( + alt_potential_region in BedrockModelInfo.all_global_regions + and len(model.split("/", 1)) > 1 + ): + return model.split("/", 1)[1] + + return model + + @staticmethod + def _supported_cross_region_inference_region() -> List[str]: + """ + Abbreviations of regions AWS Bedrock supports for cross region inference + """ + return ["us", "eu", "apac"] + + @staticmethod + def get_bedrock_route(model: str) -> Literal["converse", "invoke", "converse_like"]: + """ + Get the bedrock route for the given model. 
+ """ + base_model = BedrockModelInfo.get_base_model(model) + if "invoke/" in model: + return "invoke" + elif "converse_like" in model: + return "converse_like" + elif "converse/" in model: + return "converse" + elif base_model in litellm.bedrock_converse_models: + return "converse" + return "invoke" diff --git a/litellm/llms/bedrock/embed/amazon_titan_g1_transformation.py b/litellm/llms/bedrock/embed/amazon_titan_g1_transformation.py index 63219868f4..2747551af8 100644 --- a/litellm/llms/bedrock/embed/amazon_titan_g1_transformation.py +++ b/litellm/llms/bedrock/embed/amazon_titan_g1_transformation.py @@ -27,7 +27,7 @@ class AmazonTitanG1Config: def __init__( self, ) -> None: - locals_ = locals() + locals_ = locals().copy() for key, value in locals_.items(): if key != "self" and value is not None: setattr(self.__class__, key, value) diff --git a/litellm/llms/bedrock/embed/amazon_titan_multimodal_transformation.py b/litellm/llms/bedrock/embed/amazon_titan_multimodal_transformation.py index 7aa42b0bf2..6c1147f24a 100644 --- a/litellm/llms/bedrock/embed/amazon_titan_multimodal_transformation.py +++ b/litellm/llms/bedrock/embed/amazon_titan_multimodal_transformation.py @@ -1,5 +1,5 @@ """ -Transformation logic from OpenAI /v1/embeddings format to Bedrock Amazon Titan multimodal /invoke format. +Transformation logic from OpenAI /v1/embeddings format to Bedrock Amazon Titan multimodal /invoke format. Why separate file? Make it easy to see how transformation works diff --git a/litellm/llms/bedrock/embed/amazon_titan_v2_transformation.py b/litellm/llms/bedrock/embed/amazon_titan_v2_transformation.py index 8244a9a334..8056e9e9b2 100644 --- a/litellm/llms/bedrock/embed/amazon_titan_v2_transformation.py +++ b/litellm/llms/bedrock/embed/amazon_titan_v2_transformation.py @@ -1,5 +1,5 @@ """ -Transformation logic from OpenAI /v1/embeddings format to Bedrock Amazon Titan V2 /invoke format. 
+Transformation logic from OpenAI /v1/embeddings format to Bedrock Amazon Titan V2 /invoke format. Why separate file? Make it easy to see how transformation works @@ -33,7 +33,7 @@ class AmazonTitanV2Config: def __init__( self, normalize: Optional[bool] = None, dimensions: Optional[int] = None ) -> None: - locals_ = locals() + locals_ = locals().copy() for key, value in locals_.items(): if key != "self" and value is not None: setattr(self.__class__, key, value) diff --git a/litellm/llms/bedrock/embed/embedding.py b/litellm/llms/bedrock/embed/embedding.py index 659dbc6715..9e4e4e22d0 100644 --- a/litellm/llms/bedrock/embed/embedding.py +++ b/litellm/llms/bedrock/embed/embedding.py @@ -1,5 +1,5 @@ """ -Handles embedding calls to Bedrock's `/invoke` endpoint +Handles embedding calls to Bedrock's `/invoke` endpoint """ import copy @@ -350,6 +350,11 @@ class BedrockEmbedding(BaseAWSLLM): ### TRANSFORMATION ### provider = model.split(".")[0] inference_params = copy.deepcopy(optional_params) + inference_params = { + k: v + for k, v in inference_params.items() + if k.lower() not in self.aws_authentication_params + } inference_params.pop( "user", None ) # make sure user is not passed in for bedrock call diff --git a/litellm/llms/bedrock/image/amazon_stability1_transformation.py b/litellm/llms/bedrock/image/amazon_stability1_transformation.py index 880881e971..698ecca94b 100644 --- a/litellm/llms/bedrock/image/amazon_stability1_transformation.py +++ b/litellm/llms/bedrock/image/amazon_stability1_transformation.py @@ -49,7 +49,7 @@ class AmazonStabilityConfig: width: Optional[int] = None, height: Optional[int] = None, ) -> None: - locals_ = locals() + locals_ = locals().copy() for key, value in locals_.items(): if key != "self" and value is not None: setattr(self.__class__, key, value) diff --git a/litellm/llms/bedrock/image/image_handler.py b/litellm/llms/bedrock/image/image_handler.py index 5b14833f42..59a80b2222 100644 --- a/litellm/llms/bedrock/image/image_handler.py +++ 
b/litellm/llms/bedrock/image/image_handler.py @@ -10,6 +10,8 @@ import litellm from litellm._logging import verbose_logger from litellm.litellm_core_utils.litellm_logging import Logging as LitellmLogging from litellm.llms.custom_httpx.http_handler import ( + AsyncHTTPHandler, + HTTPHandler, _get_httpx_client, get_async_httpx_client, ) @@ -51,6 +53,7 @@ class BedrockImageGeneration(BaseAWSLLM): aimg_generation: bool = False, api_base: Optional[str] = None, extra_headers: Optional[dict] = None, + client: Optional[Union[HTTPHandler, AsyncHTTPHandler]] = None, ): prepared_request = self._prepare_request( model=model, @@ -69,9 +72,15 @@ class BedrockImageGeneration(BaseAWSLLM): logging_obj=logging_obj, prompt=prompt, model_response=model_response, + client=( + client + if client is not None and isinstance(client, AsyncHTTPHandler) + else None + ), ) - client = _get_httpx_client() + if client is None or not isinstance(client, HTTPHandler): + client = _get_httpx_client() try: response = client.post(url=prepared_request.endpoint_url, headers=prepared_request.prepped.headers, data=prepared_request.body) # type: ignore response.raise_for_status() @@ -99,13 +108,14 @@ class BedrockImageGeneration(BaseAWSLLM): logging_obj: LitellmLogging, prompt: str, model_response: ImageResponse, + client: Optional[AsyncHTTPHandler] = None, ) -> ImageResponse: """ Asynchronous handler for bedrock image generation Awaits the response from the bedrock image generation endpoint """ - async_client = get_async_httpx_client( + async_client = client or get_async_httpx_client( llm_provider=litellm.LlmProviders.BEDROCK, params={"timeout": timeout}, ) @@ -163,7 +173,7 @@ class BedrockImageGeneration(BaseAWSLLM): except ImportError: raise ImportError("Missing boto3 to call bedrock. 
Run 'pip install boto3'.") boto3_credentials_info = self._get_boto_credentials_from_optional_params( - optional_params + optional_params, model ) ### SET RUNTIME ENDPOINT ### diff --git a/litellm/llms/bedrock/rerank/handler.py b/litellm/llms/bedrock/rerank/handler.py index 3683be06b6..cd8be6912c 100644 --- a/litellm/llms/bedrock/rerank/handler.py +++ b/litellm/llms/bedrock/rerank/handler.py @@ -6,6 +6,8 @@ import httpx import litellm from litellm.litellm_core_utils.litellm_logging import Logging as LitellmLogging from litellm.llms.custom_httpx.http_handler import ( + AsyncHTTPHandler, + HTTPHandler, _get_httpx_client, get_async_httpx_client, ) @@ -27,8 +29,10 @@ class BedrockRerankHandler(BaseAWSLLM): async def arerank( self, prepared_request: BedrockPreparedRequest, + client: Optional[AsyncHTTPHandler] = None, ): - client = get_async_httpx_client(llm_provider=litellm.LlmProviders.BEDROCK) + if client is None: + client = get_async_httpx_client(llm_provider=litellm.LlmProviders.BEDROCK) try: response = await client.post(url=prepared_request["endpoint_url"], headers=prepared_request["prepped"].headers, data=prepared_request["body"]) # type: ignore response.raise_for_status() @@ -54,7 +58,9 @@ class BedrockRerankHandler(BaseAWSLLM): _is_async: Optional[bool] = False, api_base: Optional[str] = None, extra_headers: Optional[dict] = None, + client: Optional[Union[HTTPHandler, AsyncHTTPHandler]] = None, ) -> RerankResponse: + request_data = RerankRequest( model=model, query=query, @@ -66,6 +72,7 @@ class BedrockRerankHandler(BaseAWSLLM): data = BedrockRerankConfig()._transform_request(request_data) prepared_request = self._prepare_request( + model=model, optional_params=optional_params, api_base=api_base, extra_headers=extra_headers, @@ -83,9 +90,10 @@ class BedrockRerankHandler(BaseAWSLLM): ) if _is_async: - return self.arerank(prepared_request) # type: ignore + return self.arerank(prepared_request, client=client if client is not None and isinstance(client, 
AsyncHTTPHandler) else None) # type: ignore - client = _get_httpx_client() + if client is None or not isinstance(client, HTTPHandler): + client = _get_httpx_client() try: response = client.post(url=prepared_request["endpoint_url"], headers=prepared_request["prepped"].headers, data=prepared_request["body"]) # type: ignore response.raise_for_status() @@ -95,10 +103,18 @@ class BedrockRerankHandler(BaseAWSLLM): except httpx.TimeoutException: raise BedrockError(status_code=408, message="Timeout error occurred.") - return BedrockRerankConfig()._transform_response(response.json()) + logging_obj.post_call( + original_response=response.text, + api_key="", + ) + + response_json = response.json() + + return BedrockRerankConfig()._transform_response(response_json) def _prepare_request( self, + model: str, api_base: Optional[str], extra_headers: Optional[dict], data: dict, @@ -110,7 +126,7 @@ class BedrockRerankHandler(BaseAWSLLM): except ImportError: raise ImportError("Missing boto3 to call bedrock. 
Run 'pip install boto3'.") boto3_credentials_info = self._get_boto_credentials_from_optional_params( - optional_params + optional_params, model ) ### SET RUNTIME ENDPOINT ### diff --git a/litellm/llms/bedrock/rerank/transformation.py b/litellm/llms/bedrock/rerank/transformation.py index 7dc9b0aab1..a5380febe9 100644 --- a/litellm/llms/bedrock/rerank/transformation.py +++ b/litellm/llms/bedrock/rerank/transformation.py @@ -91,7 +91,9 @@ class BedrockRerankConfig: example input: {"results":[{"index":0,"relevanceScore":0.6847912669181824},{"index":1,"relevanceScore":0.5980774760246277}]} """ - _billed_units = RerankBilledUnits(**response.get("usage", {})) + _billed_units = RerankBilledUnits( + **response.get("usage", {"search_units": 1}) + ) # by default 1 search unit _tokens = RerankTokens(**response.get("usage", {})) rerank_meta = RerankResponseMeta(billed_units=_billed_units, tokens=_tokens) diff --git a/litellm/llms/clarifai/chat/transformation.py b/litellm/llms/clarifai/chat/transformation.py index 299dd8637c..916da73883 100644 --- a/litellm/llms/clarifai/chat/transformation.py +++ b/litellm/llms/clarifai/chat/transformation.py @@ -45,7 +45,7 @@ class ClarifaiConfig(BaseConfig): temperature: Optional[int] = None, top_k: Optional[int] = None, ) -> None: - locals_ = locals() + locals_ = locals().copy() for key, value in locals_.items(): if key != "self" and value is not None: setattr(self.__class__, key, value) diff --git a/litellm/llms/cloudflare/chat/transformation.py b/litellm/llms/cloudflare/chat/transformation.py index ba1e0697ed..555e3c21f4 100644 --- a/litellm/llms/cloudflare/chat/transformation.py +++ b/litellm/llms/cloudflare/chat/transformation.py @@ -11,6 +11,7 @@ from litellm.llms.base_llm.chat.transformation import ( BaseLLMException, LiteLLMLoggingObj, ) +from litellm.secret_managers.main import get_secret_str from litellm.types.llms.openai import AllMessageValues from litellm.types.utils import ( ChatCompletionToolCallChunk, @@ -44,7 +45,7 @@ class 
CloudflareChatConfig(BaseConfig): max_tokens: Optional[int] = None, stream: Optional[bool] = None, ) -> None: - locals_ = locals() + locals_ = locals().copy() for key, value in locals_.items(): if key != "self" and value is not None: setattr(self.__class__, key, value) @@ -75,11 +76,16 @@ class CloudflareChatConfig(BaseConfig): def get_complete_url( self, - api_base: str, + api_base: Optional[str], model: str, optional_params: dict, stream: Optional[bool] = None, ) -> str: + if api_base is None: + account_id = get_secret_str("CLOUDFLARE_ACCOUNT_ID") + api_base = ( + f"https://api.cloudflare.com/client/v4/accounts/{account_id}/ai/run/" + ) return api_base + model def get_supported_openai_params(self, model: str) -> List[str]: diff --git a/litellm/llms/codestral/completion/transformation.py b/litellm/llms/codestral/completion/transformation.py index 261744d885..5955e91deb 100644 --- a/litellm/llms/codestral/completion/transformation.py +++ b/litellm/llms/codestral/completion/transformation.py @@ -5,6 +5,7 @@ import litellm from litellm.llms.openai.completion.transformation import OpenAITextCompletionConfig from litellm.types.llms.databricks import GenericStreamingChunk + class CodestralTextCompletionConfig(OpenAITextCompletionConfig): """ Reference: https://docs.mistral.ai/api/#operation/createFIMCompletion @@ -77,12 +78,15 @@ class CodestralTextCompletionConfig(OpenAITextCompletionConfig): return optional_params def _chunk_parser(self, chunk_data: str) -> GenericStreamingChunk: + text = "" is_finished = False finish_reason = None logprobs = None - chunk_data = chunk_data.replace("data:", "") + chunk_data = ( + litellm.CustomStreamWrapper._strip_sse_data_from_chunk(chunk_data) or "" + ) chunk_data = chunk_data.strip() if len(chunk_data) == 0 or chunk_data == "[DONE]": return { @@ -90,7 +94,15 @@ class CodestralTextCompletionConfig(OpenAITextCompletionConfig): "is_finished": is_finished, "finish_reason": finish_reason, } - chunk_data_dict = json.loads(chunk_data) + 
try: + chunk_data_dict = json.loads(chunk_data) + except json.JSONDecodeError: + return { + "text": "", + "is_finished": is_finished, + "finish_reason": finish_reason, + } + original_chunk = litellm.ModelResponse(**chunk_data_dict, stream=True) _choices = chunk_data_dict.get("choices", []) or [] _choice = _choices[0] diff --git a/litellm/llms/cohere/chat/transformation.py b/litellm/llms/cohere/chat/transformation.py index 1d68735224..3ceec2dbba 100644 --- a/litellm/llms/cohere/chat/transformation.py +++ b/litellm/llms/cohere/chat/transformation.py @@ -104,7 +104,7 @@ class CohereChatConfig(BaseConfig): tool_results: Optional[list] = None, seed: Optional[int] = None, ) -> None: - locals_ = locals() + locals_ = locals().copy() for key, value in locals_.items(): if key != "self" and value is not None: setattr(self.__class__, key, value) diff --git a/litellm/llms/cohere/completion/transformation.py b/litellm/llms/cohere/completion/transformation.py index 7c01523571..bdfcda020e 100644 --- a/litellm/llms/cohere/completion/transformation.py +++ b/litellm/llms/cohere/completion/transformation.py @@ -86,7 +86,7 @@ class CohereTextConfig(BaseConfig): return_likelihoods: Optional[str] = None, logit_bias: Optional[dict] = None, ) -> None: - locals_ = locals() + locals_ = locals().copy() for key, value in locals_.items(): if key != "self" and value is not None: setattr(self.__class__, key, value) diff --git a/litellm/llms/cohere/cost_calculator.py b/litellm/llms/cohere/cost_calculator.py deleted file mode 100644 index 224dd5cfa8..0000000000 --- a/litellm/llms/cohere/cost_calculator.py +++ /dev/null @@ -1,31 +0,0 @@ -""" -Custom cost calculator for Cohere rerank models -""" - -from typing import Tuple - -from litellm.utils import get_model_info - - -def cost_per_query(model: str, num_queries: int = 1) -> Tuple[float, float]: - """ - Calculates the cost per query for a given rerank model. 
- - Input: - - model: str, the model name without provider prefix - - Returns: - Tuple[float, float] - prompt_cost_in_usd, completion_cost_in_usd - """ - - model_info = get_model_info(model=model, custom_llm_provider="cohere") - - if ( - "input_cost_per_query" not in model_info - or model_info["input_cost_per_query"] is None - ): - return 0.0, 0.0 - - prompt_cost = model_info["input_cost_per_query"] * num_queries - - return prompt_cost, 0.0 diff --git a/litellm/llms/cohere/rerank/transformation.py b/litellm/llms/cohere/rerank/transformation.py index e0836a71f7..f3624d9216 100644 --- a/litellm/llms/cohere/rerank/transformation.py +++ b/litellm/llms/cohere/rerank/transformation.py @@ -52,6 +52,7 @@ class CohereRerankConfig(BaseRerankConfig): rank_fields: Optional[List[str]] = None, return_documents: Optional[bool] = True, max_chunks_per_doc: Optional[int] = None, + max_tokens_per_doc: Optional[int] = None, ) -> OptionalRerankParams: """ Map Cohere rerank params @@ -147,4 +148,4 @@ class CohereRerankConfig(BaseRerankConfig): def get_error_class( self, error_message: str, status_code: int, headers: Union[dict, httpx.Headers] ) -> BaseLLMException: - return CohereError(message=error_message, status_code=status_code) + return CohereError(message=error_message, status_code=status_code) \ No newline at end of file diff --git a/litellm/llms/cohere/rerank_v2/transformation.py b/litellm/llms/cohere/rerank_v2/transformation.py new file mode 100644 index 0000000000..a93cb982a7 --- /dev/null +++ b/litellm/llms/cohere/rerank_v2/transformation.py @@ -0,0 +1,80 @@ +from typing import Any, Dict, List, Optional, Union + +from litellm.llms.cohere.rerank.transformation import CohereRerankConfig +from litellm.types.rerank import OptionalRerankParams, RerankRequest + +class CohereRerankV2Config(CohereRerankConfig): + """ + Reference: https://docs.cohere.com/v2/reference/rerank + """ + + def __init__(self) -> None: + pass + + def get_complete_url(self, api_base: Optional[str], model: str) 
-> str: + if api_base: + # Remove trailing slashes and ensure clean base URL + api_base = api_base.rstrip("/") + if not api_base.endswith("/v2/rerank"): + api_base = f"{api_base}/v2/rerank" + return api_base + return "https://api.cohere.ai/v2/rerank" + + def get_supported_cohere_rerank_params(self, model: str) -> list: + return [ + "query", + "documents", + "top_n", + "max_tokens_per_doc", + "rank_fields", + "return_documents", + ] + + def map_cohere_rerank_params( + self, + non_default_params: Optional[dict], + model: str, + drop_params: bool, + query: str, + documents: List[Union[str, Dict[str, Any]]], + custom_llm_provider: Optional[str] = None, + top_n: Optional[int] = None, + rank_fields: Optional[List[str]] = None, + return_documents: Optional[bool] = True, + max_chunks_per_doc: Optional[int] = None, + max_tokens_per_doc: Optional[int] = None, + ) -> OptionalRerankParams: + """ + Map Cohere rerank params + + No mapping required - returns all supported params + """ + return OptionalRerankParams( + query=query, + documents=documents, + top_n=top_n, + rank_fields=rank_fields, + return_documents=return_documents, + max_tokens_per_doc=max_tokens_per_doc, + ) + + def transform_rerank_request( + self, + model: str, + optional_rerank_params: OptionalRerankParams, + headers: dict, + ) -> dict: + if "query" not in optional_rerank_params: + raise ValueError("query is required for Cohere rerank") + if "documents" not in optional_rerank_params: + raise ValueError("documents is required for Cohere rerank") + rerank_request = RerankRequest( + model=model, + query=optional_rerank_params["query"], + documents=optional_rerank_params["documents"], + top_n=optional_rerank_params.get("top_n", None), + rank_fields=optional_rerank_params.get("rank_fields", None), + return_documents=optional_rerank_params.get("return_documents", None), + max_tokens_per_doc=optional_rerank_params.get("max_tokens_per_doc", None), + ) + return rerank_request.model_dump(exclude_none=True) \ No newline 
at end of file diff --git a/litellm/llms/custom_httpx/http_handler.py b/litellm/llms/custom_httpx/http_handler.py index 517cad25b0..736b85dc53 100644 --- a/litellm/llms/custom_httpx/http_handler.py +++ b/litellm/llms/custom_httpx/http_handler.py @@ -1,5 +1,6 @@ import asyncio import os +import time from typing import TYPE_CHECKING, Any, Callable, List, Mapping, Optional, Union import httpx @@ -179,6 +180,7 @@ class AsyncHTTPHandler: stream: bool = False, logging_obj: Optional[LiteLLMLoggingObject] = None, ): + start_time = time.time() try: if timeout is None: timeout = self.timeout @@ -207,6 +209,8 @@ class AsyncHTTPHandler: finally: await new_client.aclose() except httpx.TimeoutException as e: + end_time = time.time() + time_delta = round(end_time - start_time, 3) headers = {} error_response = getattr(e, "response", None) if error_response is not None: @@ -214,7 +218,7 @@ class AsyncHTTPHandler: headers["response_headers-{}".format(key)] = value raise litellm.Timeout( - message=f"Connection timed out after {timeout} seconds.", + message=f"Connection timed out. Timeout passed={timeout}, time taken={time_delta} seconds", model="default-model-name", llm_provider="litellm-httpx-handler", headers=headers, diff --git a/litellm/llms/custom_httpx/llm_http_handler.py b/litellm/llms/custom_httpx/llm_http_handler.py index 71a8a8168b..9d67fd1a85 100644 --- a/litellm/llms/custom_httpx/llm_http_handler.py +++ b/litellm/llms/custom_httpx/llm_http_handler.py @@ -40,6 +40,7 @@ class BaseLLMHTTPHandler: data: dict, timeout: Union[float, httpx.Timeout], litellm_params: dict, + logging_obj: LiteLLMLoggingObj, stream: bool = False, ) -> httpx.Response: """Common implementation across stream + non-stream calls. 
Meant to ensure consistent error-handling.""" @@ -56,6 +57,7 @@ class BaseLLMHTTPHandler: data=json.dumps(data), timeout=timeout, stream=stream, + logging_obj=logging_obj, ) except httpx.HTTPStatusError as e: hit_max_retry = i + 1 == max_retry_on_unprocessable_entity_error @@ -93,6 +95,7 @@ class BaseLLMHTTPHandler: data: dict, timeout: Union[float, httpx.Timeout], litellm_params: dict, + logging_obj: LiteLLMLoggingObj, stream: bool = False, ) -> httpx.Response: @@ -110,6 +113,7 @@ class BaseLLMHTTPHandler: data=json.dumps(data), timeout=timeout, stream=stream, + logging_obj=logging_obj, ) except httpx.HTTPStatusError as e: hit_max_retry = i + 1 == max_retry_on_unprocessable_entity_error @@ -155,6 +159,7 @@ class BaseLLMHTTPHandler: encoding: Any, api_key: Optional[str] = None, client: Optional[AsyncHTTPHandler] = None, + json_mode: bool = False, ): if client is None: async_httpx_client = get_async_httpx_client( @@ -173,6 +178,7 @@ class BaseLLMHTTPHandler: timeout=timeout, litellm_params=litellm_params, stream=False, + logging_obj=logging_obj, ) return provider_config.transform_response( model=model, @@ -185,6 +191,7 @@ class BaseLLMHTTPHandler: optional_params=optional_params, litellm_params=litellm_params, encoding=encoding, + json_mode=json_mode, ) def completion( @@ -206,9 +213,12 @@ class BaseLLMHTTPHandler: headers: Optional[dict] = {}, client: Optional[Union[HTTPHandler, AsyncHTTPHandler]] = None, ): + json_mode: bool = optional_params.pop("json_mode", False) + provider_config = ProviderConfigManager.get_provider_chat_config( model=model, provider=litellm.LlmProviders(custom_llm_provider) ) + # get config from model, custom llm provider headers = provider_config.validate_environment( api_key=api_key, @@ -234,6 +244,16 @@ class BaseLLMHTTPHandler: headers=headers, ) + headers = provider_config.sign_request( + headers=headers, + optional_params=optional_params, + request_data=data, + api_base=api_base, + stream=stream, + fake_stream=fake_stream, + 
model=model, + ) + ## LOGGING logging_obj.pre_call( input=messages, @@ -247,8 +267,11 @@ class BaseLLMHTTPHandler: if acompletion is True: if stream is True: - if fake_stream is not True: - data["stream"] = stream + data = self._add_stream_param_to_request_body( + data=data, + provider_config=provider_config, + fake_stream=fake_stream, + ) return self.acompletion_stream_function( model=model, messages=messages, @@ -266,6 +289,7 @@ class BaseLLMHTTPHandler: else None ), litellm_params=litellm_params, + json_mode=json_mode, ) else: @@ -289,11 +313,27 @@ class BaseLLMHTTPHandler: if client is not None and isinstance(client, AsyncHTTPHandler) else None ), + json_mode=json_mode, ) if stream is True: - if fake_stream is not True: - data["stream"] = stream + data = self._add_stream_param_to_request_body( + data=data, + provider_config=provider_config, + fake_stream=fake_stream, + ) + if provider_config.has_custom_stream_wrapper is True: + return provider_config.get_sync_custom_stream_wrapper( + model=model, + custom_llm_provider=custom_llm_provider, + logging_obj=logging_obj, + api_base=api_base, + headers=headers, + data=data, + messages=messages, + client=client, + json_mode=json_mode, + ) completion_stream, headers = self.make_sync_call( provider_config=provider_config, api_base=api_base, @@ -333,6 +373,7 @@ class BaseLLMHTTPHandler: data=data, timeout=timeout, litellm_params=litellm_params, + logging_obj=logging_obj, ) return provider_config.transform_response( model=model, @@ -345,6 +386,7 @@ class BaseLLMHTTPHandler: optional_params=optional_params, litellm_params=litellm_params, encoding=encoding, + json_mode=json_mode, ) def make_sync_call( @@ -382,6 +424,7 @@ class BaseLLMHTTPHandler: timeout=timeout, litellm_params=litellm_params, stream=stream, + logging_obj=logging_obj, ) if fake_stream is True: @@ -417,7 +460,21 @@ class BaseLLMHTTPHandler: litellm_params: dict, fake_stream: bool = False, client: Optional[AsyncHTTPHandler] = None, + json_mode: Optional[bool] 
= None, ): + if provider_config.has_custom_stream_wrapper is True: + return provider_config.get_async_custom_stream_wrapper( + model=model, + custom_llm_provider=custom_llm_provider, + logging_obj=logging_obj, + api_base=api_base, + headers=headers, + data=data, + messages=messages, + client=client, + json_mode=json_mode, + ) + completion_stream, _response_headers = await self.make_async_call_stream_helper( custom_llm_provider=custom_llm_provider, provider_config=provider_config, @@ -478,6 +535,7 @@ class BaseLLMHTTPHandler: timeout=timeout, litellm_params=litellm_params, stream=stream, + logging_obj=logging_obj, ) if fake_stream is True: @@ -498,6 +556,21 @@ class BaseLLMHTTPHandler: return completion_stream, response.headers + def _add_stream_param_to_request_body( + self, + data: dict, + provider_config: BaseConfig, + fake_stream: bool, + ) -> dict: + """ + Some providers like Bedrock invoke do not support the stream parameter in the request body, we only pass `stream` in the request body the provider supports it. 
+ """ + if fake_stream is True: + return data + if provider_config.supports_stream_param_in_request_body is True: + data["stream"] = True + return data + def embedding( self, model: str, @@ -646,6 +719,7 @@ class BaseLLMHTTPHandler: model: str, custom_llm_provider: str, logging_obj: LiteLLMLoggingObj, + provider_config: BaseRerankConfig, optional_rerank_params: OptionalRerankParams, timeout: Optional[Union[float, httpx.Timeout]], model_response: RerankResponse, @@ -656,9 +730,6 @@ class BaseLLMHTTPHandler: client: Optional[Union[HTTPHandler, AsyncHTTPHandler]] = None, ) -> RerankResponse: - provider_config = ProviderConfigManager.get_provider_rerank_config( - model=model, provider=litellm.LlmProviders(custom_llm_provider) - ) # get config from model, custom llm provider headers = provider_config.validate_environment( api_key=api_key, @@ -802,7 +873,9 @@ class BaseLLMHTTPHandler: elif isinstance(audio_file, bytes): # Assume it's already binary data binary_data = audio_file - elif isinstance(audio_file, io.BufferedReader): + elif isinstance(audio_file, io.BufferedReader) or isinstance( + audio_file, io.BytesIO + ): # Handle file-like objects binary_data = audio_file.read() diff --git a/litellm/llms/databricks/chat/transformation.py b/litellm/llms/databricks/chat/transformation.py index b1f79d565b..94e0203459 100644 --- a/litellm/llms/databricks/chat/transformation.py +++ b/litellm/llms/databricks/chat/transformation.py @@ -37,7 +37,7 @@ class DatabricksConfig(OpenAILikeChatConfig): stop: Optional[Union[List[str], str]] = None, n: Optional[int] = None, ) -> None: - locals_ = locals() + locals_ = locals().copy() for key, value in locals_.items(): if key != "self" and value is not None: setattr(self.__class__, key, value) @@ -73,6 +73,8 @@ class DatabricksConfig(OpenAILikeChatConfig): "max_completion_tokens", "n", "response_format", + "tools", + "tool_choice", ] def _should_fake_stream(self, optional_params: dict) -> bool: diff --git 
a/litellm/llms/databricks/embed/transformation.py b/litellm/llms/databricks/embed/transformation.py index 8c7e119714..53e3b30dd2 100644 --- a/litellm/llms/databricks/embed/transformation.py +++ b/litellm/llms/databricks/embed/transformation.py @@ -16,7 +16,7 @@ class DatabricksEmbeddingConfig: ) def __init__(self, instruction: Optional[str] = None) -> None: - locals_ = locals() + locals_ = locals().copy() for key, value in locals_.items(): if key != "self" and value is not None: setattr(self.__class__, key, value) diff --git a/litellm/llms/databricks/streaming_utils.py b/litellm/llms/databricks/streaming_utils.py index 0deaa06988..2db53df908 100644 --- a/litellm/llms/databricks/streaming_utils.py +++ b/litellm/llms/databricks/streaming_utils.py @@ -89,7 +89,7 @@ class ModelResponseIterator: raise RuntimeError(f"Error receiving chunk from stream: {e}") try: - chunk = chunk.replace("data:", "") + chunk = litellm.CustomStreamWrapper._strip_sse_data_from_chunk(chunk) or "" chunk = chunk.strip() if len(chunk) > 0: json_chunk = json.loads(chunk) @@ -134,7 +134,7 @@ class ModelResponseIterator: raise RuntimeError(f"Error receiving chunk from stream: {e}") try: - chunk = chunk.replace("data:", "") + chunk = litellm.CustomStreamWrapper._strip_sse_data_from_chunk(chunk) or "" chunk = chunk.strip() if chunk == "[DONE]": raise StopAsyncIteration diff --git a/litellm/llms/deepseek/chat/transformation.py b/litellm/llms/deepseek/chat/transformation.py index e6704de1a1..747129ddd8 100644 --- a/litellm/llms/deepseek/chat/transformation.py +++ b/litellm/llms/deepseek/chat/transformation.py @@ -34,3 +34,21 @@ class DeepSeekChatConfig(OpenAIGPTConfig): ) # type: ignore dynamic_api_key = api_key or get_secret_str("DEEPSEEK_API_KEY") return api_base, dynamic_api_key + + def get_complete_url( + self, + api_base: Optional[str], + model: str, + optional_params: dict, + stream: Optional[bool] = None, + ) -> str: + """ + If api_base is not provided, use the default DeepSeek /chat/completions 
endpoint. + """ + if not api_base: + api_base = "https://api.deepseek.com/beta" + + if not api_base.endswith("/chat/completions"): + api_base = f"{api_base}/chat/completions" + + return api_base diff --git a/litellm/llms/deprecated_providers/aleph_alpha.py b/litellm/llms/deprecated_providers/aleph_alpha.py index a4c5d155f4..81ad134641 100644 --- a/litellm/llms/deprecated_providers/aleph_alpha.py +++ b/litellm/llms/deprecated_providers/aleph_alpha.py @@ -145,7 +145,7 @@ class AlephAlphaConfig: contextual_control_threshold: Optional[int] = None, control_log_additive: Optional[bool] = None, ) -> None: - locals_ = locals() + locals_ = locals().copy() for key, value in locals_.items(): if key != "self" and value is not None: setattr(self.__class__, key, value) diff --git a/litellm/llms/deprecated_providers/palm.py b/litellm/llms/deprecated_providers/palm.py index 4afc952a51..3039222c0e 100644 --- a/litellm/llms/deprecated_providers/palm.py +++ b/litellm/llms/deprecated_providers/palm.py @@ -63,7 +63,7 @@ class PalmConfig: top_p: Optional[float] = None, max_output_tokens: Optional[int] = None, ) -> None: - locals_ = locals() + locals_ = locals().copy() for key, value in locals_.items(): if key != "self" and value is not None: setattr(self.__class__, key, value) diff --git a/litellm/llms/fireworks_ai/chat/transformation.py b/litellm/llms/fireworks_ai/chat/transformation.py index 30de3c3ed0..1c82f24ac0 100644 --- a/litellm/llms/fireworks_ai/chat/transformation.py +++ b/litellm/llms/fireworks_ai/chat/transformation.py @@ -3,7 +3,7 @@ from typing import List, Literal, Optional, Tuple, Union, cast import litellm from litellm.secret_managers.main import get_secret_str from litellm.types.llms.openai import AllMessageValues, ChatCompletionImageObject -from litellm.types.utils import ModelInfoBase, ProviderSpecificModelInfo +from litellm.types.utils import ProviderSpecificModelInfo from ...openai.chat.gpt_transformation import OpenAIGPTConfig @@ -90,6 +90,11 @@ class 
FireworksAIConfig(OpenAIGPTConfig): ) -> dict: supported_openai_params = self.get_supported_openai_params(model=model) + is_tools_set = any( + param == "tools" and value is not None + for param, value in non_default_params.items() + ) + for param, value in non_default_params.items(): if param == "tool_choice": if value == "required": @@ -98,18 +103,30 @@ class FireworksAIConfig(OpenAIGPTConfig): else: # pass through the value of tool choice optional_params["tool_choice"] = value - elif ( - param == "response_format" and value.get("type", None) == "json_schema" - ): - optional_params["response_format"] = { - "type": "json_object", - "schema": value["json_schema"]["schema"], - } + elif param == "response_format": + + if ( + is_tools_set + ): # fireworks ai doesn't support tools and response_format together + optional_params = self._add_response_format_to_tools( + optional_params=optional_params, + value=value, + is_response_format_supported=False, + enforce_tool_choice=False, # tools and response_format are both set, don't enforce tool_choice + ) + elif "json_schema" in value: + optional_params["response_format"] = { + "type": "json_object", + "schema": value["json_schema"]["schema"], + } + else: + optional_params["response_format"] = value elif param == "max_completion_tokens": optional_params["max_tokens"] = value elif param in supported_openai_params: if value is not None: optional_params[param] = value + return optional_params def _add_transform_inline_image_block( @@ -159,30 +176,14 @@ class FireworksAIConfig(OpenAIGPTConfig): ) return messages - def get_model_info( - self, model: str, existing_model_info: Optional[ModelInfoBase] = None - ) -> ModelInfoBase: + def get_provider_info(self, model: str) -> ProviderSpecificModelInfo: provider_specific_model_info = ProviderSpecificModelInfo( supports_function_calling=True, supports_prompt_caching=True, # https://docs.fireworks.ai/guides/prompt-caching supports_pdf_input=True, # via document inlining 
supports_vision=True, # via document inlining ) - if existing_model_info is not None: - return ModelInfoBase( - **{**existing_model_info, **provider_specific_model_info} - ) - return ModelInfoBase( - key=model, - litellm_provider="fireworks_ai", - mode="chat", - input_cost_per_token=0.0, - output_cost_per_token=0.0, - max_tokens=None, - max_input_tokens=None, - max_output_tokens=None, - **provider_specific_model_info, - ) + return provider_specific_model_info def transform_request( self, diff --git a/litellm/llms/gemini/chat/transformation.py b/litellm/llms/gemini/chat/transformation.py index 313bb99af7..fbc1916dcc 100644 --- a/litellm/llms/gemini/chat/transformation.py +++ b/litellm/llms/gemini/chat/transformation.py @@ -57,7 +57,7 @@ class GoogleAIStudioGeminiConfig(VertexGeminiConfig): candidate_count: Optional[int] = None, stop_sequences: Optional[list] = None, ) -> None: - locals_ = locals() + locals_ = locals().copy() for key, value in locals_.items(): if key != "self" and value is not None: setattr(self.__class__, key, value) @@ -114,12 +114,16 @@ class GoogleAIStudioGeminiConfig(VertexGeminiConfig): if element.get("type") == "image_url": img_element = element _image_url: Optional[str] = None + format: Optional[str] = None if isinstance(img_element.get("image_url"), dict): _image_url = img_element["image_url"].get("url") # type: ignore + format = img_element["image_url"].get("format") # type: ignore else: _image_url = img_element.get("image_url") # type: ignore if _image_url and "https://" in _image_url: - image_obj = convert_to_anthropic_image_obj(_image_url) + image_obj = convert_to_anthropic_image_obj( + _image_url, format=format + ) img_element["image_url"] = ( # type: ignore convert_generic_image_chunk_to_openai_image_obj( image_obj diff --git a/litellm/llms/huggingface/chat/transformation.py b/litellm/llms/huggingface/chat/transformation.py index 2f9824b677..858fda473e 100644 --- a/litellm/llms/huggingface/chat/transformation.py +++ 
b/litellm/llms/huggingface/chat/transformation.py @@ -77,7 +77,7 @@ class HuggingfaceChatConfig(BaseConfig): typical_p: Optional[float] = None, watermark: Optional[bool] = None, ) -> None: - locals_ = locals() + locals_ = locals().copy() for key, value in locals_.items(): if key != "self" and value is not None: setattr(self.__class__, key, value) diff --git a/litellm/llms/infinity/rerank/transformation.py b/litellm/llms/infinity/rerank/transformation.py index 2d34e5299a..1e7234ab17 100644 --- a/litellm/llms/infinity/rerank/transformation.py +++ b/litellm/llms/infinity/rerank/transformation.py @@ -13,13 +13,28 @@ import litellm from litellm.litellm_core_utils.litellm_logging import Logging as LiteLLMLoggingObj from litellm.llms.cohere.rerank.transformation import CohereRerankConfig from litellm.secret_managers.main import get_secret_str -from litellm.types.rerank import RerankBilledUnits, RerankResponseMeta, RerankTokens -from litellm.types.utils import RerankResponse +from litellm.types.rerank import ( + RerankBilledUnits, + RerankResponse, + RerankResponseDocument, + RerankResponseMeta, + RerankResponseResult, + RerankTokens, +) from .common_utils import InfinityError class InfinityRerankConfig(CohereRerankConfig): + def get_complete_url(self, api_base: Optional[str], model: str) -> str: + if api_base is None: + raise ValueError("api_base is required for Infinity rerank") + # Remove trailing slashes and ensure clean base URL + api_base = api_base.rstrip("/") + if not api_base.endswith("/rerank"): + api_base = f"{api_base}/rerank" + return api_base + def validate_environment( self, headers: dict, @@ -79,13 +94,23 @@ class InfinityRerankConfig(CohereRerankConfig): ) rerank_meta = RerankResponseMeta(billed_units=_billed_units, tokens=_tokens) - _results: Optional[List[dict]] = raw_response_json.get("results") - - if _results is None: + cohere_results: List[RerankResponseResult] = [] + if raw_response_json.get("results"): + for result in 
raw_response_json.get("results"): + _rerank_response = RerankResponseResult( + index=result.get("index"), + relevance_score=result.get("relevance_score"), + ) + if result.get("document"): + _rerank_response["document"] = RerankResponseDocument( + text=result.get("document") + ) + cohere_results.append(_rerank_response) + if cohere_results is None: raise ValueError(f"No results found in the response={raw_response_json}") return RerankResponse( id=raw_response_json.get("id") or str(uuid.uuid4()), - results=_results, # type: ignore + results=cohere_results, meta=rerank_meta, ) # Return response diff --git a/litellm/llms/jina_ai/embedding/transformation.py b/litellm/llms/jina_ai/embedding/transformation.py index a8fca20100..5263be900f 100644 --- a/litellm/llms/jina_ai/embedding/transformation.py +++ b/litellm/llms/jina_ai/embedding/transformation.py @@ -21,7 +21,7 @@ class JinaAIEmbeddingConfig: def __init__( self, ) -> None: - locals_ = locals() + locals_ = locals().copy() for key, value in locals_.items(): if key != "self" and value is not None: setattr(self.__class__, key, value) diff --git a/litellm/llms/jina_ai/rerank/handler.py b/litellm/llms/jina_ai/rerank/handler.py index 355624cd2a..94076da4f3 100644 --- a/litellm/llms/jina_ai/rerank/handler.py +++ b/litellm/llms/jina_ai/rerank/handler.py @@ -1,92 +1,3 @@ """ -Re rank api - -LiteLLM supports the re rank API format, no paramter transformation occurs +HTTP calling migrated to `llm_http_handler.py` """ - -from typing import Any, Dict, List, Optional, Union - -import litellm -from litellm.llms.base import BaseLLM -from litellm.llms.custom_httpx.http_handler import ( - _get_httpx_client, - get_async_httpx_client, -) -from litellm.llms.jina_ai.rerank.transformation import JinaAIRerankConfig -from litellm.types.rerank import RerankRequest, RerankResponse - - -class JinaAIRerank(BaseLLM): - def rerank( - self, - model: str, - api_key: str, - query: str, - documents: List[Union[str, Dict[str, Any]]], - top_n: 
Optional[int] = None, - rank_fields: Optional[List[str]] = None, - return_documents: Optional[bool] = True, - max_chunks_per_doc: Optional[int] = None, - _is_async: Optional[bool] = False, - ) -> RerankResponse: - client = _get_httpx_client() - - request_data = RerankRequest( - model=model, - query=query, - top_n=top_n, - documents=documents, - rank_fields=rank_fields, - return_documents=return_documents, - ) - - # exclude None values from request_data - request_data_dict = request_data.dict(exclude_none=True) - - if _is_async: - return self.async_rerank(request_data_dict, api_key) # type: ignore # Call async method - - response = client.post( - "https://api.jina.ai/v1/rerank", - headers={ - "accept": "application/json", - "content-type": "application/json", - "authorization": f"Bearer {api_key}", - }, - json=request_data_dict, - ) - - if response.status_code != 200: - raise Exception(response.text) - - _json_response = response.json() - - return JinaAIRerankConfig()._transform_response(_json_response) - - async def async_rerank( # New async method - self, - request_data_dict: Dict[str, Any], - api_key: str, - ) -> RerankResponse: - client = get_async_httpx_client( - llm_provider=litellm.LlmProviders.JINA_AI - ) # Use async client - - response = await client.post( - "https://api.jina.ai/v1/rerank", - headers={ - "accept": "application/json", - "content-type": "application/json", - "authorization": f"Bearer {api_key}", - }, - json=request_data_dict, - ) - - if response.status_code != 200: - raise Exception(response.text) - - _json_response = response.json() - - return JinaAIRerankConfig()._transform_response(_json_response) - - pass diff --git a/litellm/llms/jina_ai/rerank/transformation.py b/litellm/llms/jina_ai/rerank/transformation.py index a6c0a810c7..8d0a9b1431 100644 --- a/litellm/llms/jina_ai/rerank/transformation.py +++ b/litellm/llms/jina_ai/rerank/transformation.py @@ -7,30 +7,137 @@ Docs - https://jina.ai/reranker """ import uuid -from typing import List, 
Optional +from typing import Any, Dict, List, Optional, Tuple, Union +from httpx import URL, Response + +from litellm.llms.base_llm.chat.transformation import LiteLLMLoggingObj +from litellm.llms.base_llm.rerank.transformation import BaseRerankConfig from litellm.types.rerank import ( + OptionalRerankParams, RerankBilledUnits, RerankResponse, RerankResponseMeta, RerankTokens, ) +from litellm.types.utils import ModelInfo -class JinaAIRerankConfig: - def _transform_response(self, response: dict) -> RerankResponse: +class JinaAIRerankConfig(BaseRerankConfig): + def get_supported_cohere_rerank_params(self, model: str) -> list: + return [ + "query", + "top_n", + "documents", + "return_documents", + ] - _billed_units = RerankBilledUnits(**response.get("usage", {})) - _tokens = RerankTokens(**response.get("usage", {})) + def map_cohere_rerank_params( + self, + non_default_params: dict, + model: str, + drop_params: bool, + query: str, + documents: List[Union[str, Dict[str, Any]]], + custom_llm_provider: Optional[str] = None, + top_n: Optional[int] = None, + rank_fields: Optional[List[str]] = None, + return_documents: Optional[bool] = True, + max_chunks_per_doc: Optional[int] = None, + max_tokens_per_doc: Optional[int] = None, + ) -> OptionalRerankParams: + optional_params = {} + supported_params = self.get_supported_cohere_rerank_params(model) + for k, v in non_default_params.items(): + if k in supported_params: + optional_params[k] = v + return OptionalRerankParams( + **optional_params, + ) + + def get_complete_url(self, api_base: Optional[str], model: str) -> str: + base_path = "/v1/rerank" + + if api_base is None: + return "https://api.jina.ai/v1/rerank" + base = URL(api_base) + # Reconstruct URL with cleaned path + cleaned_base = str(base.copy_with(path=base_path)) + + return cleaned_base + + def transform_rerank_request( + self, model: str, optional_rerank_params: OptionalRerankParams, headers: Dict + ) -> Dict: + return {"model": model, **optional_rerank_params} + + 
def transform_rerank_response( + self, + model: str, + raw_response: Response, + model_response: RerankResponse, + logging_obj: LiteLLMLoggingObj, + api_key: Optional[str] = None, + request_data: Dict = {}, + optional_params: Dict = {}, + litellm_params: Dict = {}, + ) -> RerankResponse: + if raw_response.status_code != 200: + raise Exception(raw_response.text) + + logging_obj.post_call(original_response=raw_response.text) + + _json_response = raw_response.json() + + _billed_units = RerankBilledUnits(**_json_response.get("usage", {})) + _tokens = RerankTokens(**_json_response.get("usage", {})) rerank_meta = RerankResponseMeta(billed_units=_billed_units, tokens=_tokens) - _results: Optional[List[dict]] = response.get("results") + _results: Optional[List[dict]] = _json_response.get("results") if _results is None: - raise ValueError(f"No results found in the response={response}") + raise ValueError(f"No results found in the response={_json_response}") return RerankResponse( - id=response.get("id") or str(uuid.uuid4()), + id=_json_response.get("id") or str(uuid.uuid4()), results=_results, # type: ignore meta=rerank_meta, ) # Return response + + def validate_environment( + self, headers: Dict, model: str, api_key: Optional[str] = None + ) -> Dict: + if api_key is None: + raise ValueError( + "api_key is required. Set via `api_key` parameter or `JINA_API_KEY` environment variable." + ) + return { + "accept": "application/json", + "content-type": "application/json", + "authorization": f"Bearer {api_key}", + } + + def calculate_rerank_cost( + self, + model: str, + custom_llm_provider: Optional[str] = None, + billed_units: Optional[RerankBilledUnits] = None, + model_info: Optional[ModelInfo] = None, + ) -> Tuple[float, float]: + """ + Jina AI reranker is priced at $0.000000018 per token. 
+ """ + if ( + model_info is None + or "input_cost_per_token" not in model_info + or model_info["input_cost_per_token"] is None + or billed_units is None + ): + return 0.0, 0.0 + + total_tokens = billed_units.get("total_tokens") + if total_tokens is None: + return 0.0, 0.0 + + input_cost = model_info["input_cost_per_token"] * total_tokens + return input_cost, 0.0 diff --git a/litellm/llms/lm_studio/embed/transformation.py b/litellm/llms/lm_studio/embed/transformation.py index 5ef121ea7a..1285550c30 100644 --- a/litellm/llms/lm_studio/embed/transformation.py +++ b/litellm/llms/lm_studio/embed/transformation.py @@ -18,7 +18,7 @@ class LmStudioEmbeddingConfig: def __init__( self, ) -> None: - locals_ = locals() + locals_ = locals().copy() for key, value in locals_.items(): if key != "self" and value is not None: setattr(self.__class__, key, value) diff --git a/litellm/llms/maritalk.py b/litellm/llms/maritalk.py index 62fa0113eb..5f2b8d71bc 100644 --- a/litellm/llms/maritalk.py +++ b/litellm/llms/maritalk.py @@ -33,7 +33,7 @@ class MaritalkConfig(OpenAIGPTConfig): tools: Optional[List[dict]] = None, tool_choice: Optional[Union[str, dict]] = None, ) -> None: - locals_ = locals() + locals_ = locals().copy() for key, value in locals_.items(): if key != "self" and value is not None: setattr(self.__class__, key, value) diff --git a/litellm/llms/nlp_cloud/chat/transformation.py b/litellm/llms/nlp_cloud/chat/transformation.py index 35ced50242..b7967249ab 100644 --- a/litellm/llms/nlp_cloud/chat/transformation.py +++ b/litellm/llms/nlp_cloud/chat/transformation.py @@ -78,7 +78,7 @@ class NLPCloudConfig(BaseConfig): num_beams: Optional[int] = None, num_return_sequences: Optional[int] = None, ) -> None: - locals_ = locals() + locals_ = locals().copy() for key, value in locals_.items(): if key != "self" and value is not None: setattr(self.__class__, key, value) diff --git a/litellm/llms/nvidia_nim/embed.py b/litellm/llms/nvidia_nim/embed.py index bf5d4d4ae6..24c6cc34e4 100644 --- 
a/litellm/llms/nvidia_nim/embed.py +++ b/litellm/llms/nvidia_nim/embed.py @@ -32,7 +32,7 @@ class NvidiaNimEmbeddingConfig: input_type: Optional[str] = None, truncate: Optional[str] = None, ) -> None: - locals_ = locals() + locals_ = locals().copy() for key, value in locals_.items(): if key != "self" and value is not None: setattr(self.__class__, key, value) @@ -58,7 +58,7 @@ class NvidiaNimEmbeddingConfig: def get_supported_openai_params( self, ): - return ["encoding_format", "user"] + return ["encoding_format", "user", "dimensions"] def map_openai_params( self, @@ -73,6 +73,8 @@ class NvidiaNimEmbeddingConfig: optional_params["extra_body"].update({"input_type": v}) elif k == "truncate": optional_params["extra_body"].update({"truncate": v}) + else: + optional_params[k] = v if kwargs is not None: # pass kwargs in extra_body diff --git a/litellm/llms/ollama/completion/transformation.py b/litellm/llms/ollama/completion/transformation.py index fcd198b01a..283b2a2437 100644 --- a/litellm/llms/ollama/completion/transformation.py +++ b/litellm/llms/ollama/completion/transformation.py @@ -117,7 +117,7 @@ class OllamaConfig(BaseConfig): system: Optional[str] = None, template: Optional[str] = None, ) -> None: - locals_ = locals() + locals_ = locals().copy() for key, value in locals_.items(): if key != "self" and value is not None: setattr(self.__class__, key, value) @@ -353,7 +353,7 @@ class OllamaConfig(BaseConfig): def get_complete_url( self, - api_base: str, + api_base: Optional[str], model: str, optional_params: dict, stream: Optional[bool] = None, @@ -365,6 +365,8 @@ class OllamaConfig(BaseConfig): Some providers need `model` in `api_base` """ + if api_base is None: + api_base = "http://localhost:11434" if api_base.endswith("/api/generate"): url = api_base else: diff --git a/litellm/llms/ollama_chat.py b/litellm/llms/ollama_chat.py index 38fe549ca6..6f421680b4 100644 --- a/litellm/llms/ollama_chat.py +++ b/litellm/llms/ollama_chat.py @@ -1,7 +1,7 @@ import json import 
time import uuid -from typing import Any, List, Optional +from typing import Any, List, Optional, Union import aiohttp import httpx @@ -9,7 +9,11 @@ from pydantic import BaseModel import litellm from litellm import verbose_logger -from litellm.llms.custom_httpx.http_handler import get_async_httpx_client +from litellm.llms.custom_httpx.http_handler import ( + AsyncHTTPHandler, + HTTPHandler, + get_async_httpx_client, +) from litellm.llms.openai.chat.gpt_transformation import OpenAIGPTConfig from litellm.types.llms.ollama import OllamaToolCall, OllamaToolCallFunction from litellm.types.llms.openai import ChatCompletionAssistantToolCall @@ -105,7 +109,7 @@ class OllamaChatConfig(OpenAIGPTConfig): system: Optional[str] = None, template: Optional[str] = None, ) -> None: - locals_ = locals() + locals_ = locals().copy() for key, value in locals_.items(): if key != "self" and value is not None: setattr(self.__class__, key, value) @@ -205,6 +209,7 @@ def get_ollama_response( # noqa: PLR0915 api_key: Optional[str] = None, acompletion: bool = False, encoding=None, + client: Optional[Union[HTTPHandler, AsyncHTTPHandler]] = None, ): if api_base.endswith("/api/chat"): url = api_base @@ -301,7 +306,11 @@ def get_ollama_response( # noqa: PLR0915 headers: Optional[dict] = None if api_key is not None: headers = {"Authorization": "Bearer {}".format(api_key)} - response = litellm.module_level_client.post( + + sync_client = litellm.module_level_client + if client is not None and isinstance(client, HTTPHandler): + sync_client = client + response = sync_client.post( url=url, json=data, headers=headers, @@ -508,6 +517,7 @@ async def ollama_async_streaming( verbose_logger.exception( "LiteLLM.ollama(): Exception occured - {}".format(str(e)) ) + raise e async def ollama_acompletion( diff --git a/litellm/llms/openai/chat/gpt_transformation.py b/litellm/llms/openai/chat/gpt_transformation.py index 63d75eff8c..1f34d63681 100644 --- a/litellm/llms/openai/chat/gpt_transformation.py +++ 
b/litellm/llms/openai/chat/gpt_transformation.py @@ -2,16 +2,31 @@ Support for gpt model family """ -from typing import TYPE_CHECKING, Any, List, Optional, Union, cast +from typing import ( + TYPE_CHECKING, + Any, + AsyncIterator, + Iterator, + List, + Optional, + Union, + cast, +) import httpx import litellm +from litellm.llms.base_llm.base_model_iterator import BaseModelResponseIterator from litellm.llms.base_llm.base_utils import BaseLLMModelInfo from litellm.llms.base_llm.chat.transformation import BaseConfig, BaseLLMException from litellm.secret_managers.main import get_secret_str -from litellm.types.llms.openai import AllMessageValues -from litellm.types.utils import ModelInfoBase, ModelResponse +from litellm.types.llms.openai import ( + AllMessageValues, + ChatCompletionImageObject, + ChatCompletionImageUrlObject, +) +from litellm.types.utils import ModelResponse, ModelResponseStream +from litellm.utils import convert_to_model_response_object from ..common_utils import OpenAIError @@ -167,6 +182,27 @@ class OpenAIGPTConfig(BaseLLMModelInfo, BaseConfig): def _transform_messages( self, messages: List[AllMessageValues], model: str ) -> List[AllMessageValues]: + """OpenAI no longer supports image_url as a string, so we need to convert it to a dict""" + for message in messages: + message_content = message.get("content") + if message_content and isinstance(message_content, list): + for content_item in message_content: + if content_item.get("type") == "image_url": + content_item = cast(ChatCompletionImageObject, content_item) + if isinstance(content_item["image_url"], str): + content_item["image_url"] = { + "url": content_item["image_url"], + } + elif isinstance(content_item["image_url"], dict): + litellm_specific_params = {"format"} + new_image_url_obj = ChatCompletionImageUrlObject( + **{ # type: ignore + k: v + for k, v in content_item["image_url"].items() + if k not in litellm_specific_params + } + ) + content_item["image_url"] = new_image_url_obj return 
messages def transform_request( @@ -210,7 +246,36 @@ class OpenAIGPTConfig(BaseLLMModelInfo, BaseConfig): Returns: dict: The transformed response. """ - raise NotImplementedError + + ## LOGGING + logging_obj.post_call( + input=messages, + api_key=api_key, + original_response=raw_response.text, + additional_args={"complete_input_dict": request_data}, + ) + + ## RESPONSE OBJECT + try: + completion_response = raw_response.json() + except Exception as e: + response_headers = getattr(raw_response, "headers", None) + raise OpenAIError( + message="Unable to get json response - {}, Original Response: {}".format( + str(e), raw_response.text + ), + status_code=raw_response.status_code, + headers=response_headers, + ) + raw_response_headers = dict(raw_response.headers) + final_response_obj = convert_to_model_response_object( + response_object=completion_response, + model_response_object=model_response, + hidden_params={"headers": raw_response_headers}, + _response_headers=raw_response_headers, + ) + + return cast(ModelResponse, final_response_obj) def get_error_class( self, error_message: str, status_code: int, headers: Union[dict, httpx.Headers] @@ -221,6 +286,32 @@ class OpenAIGPTConfig(BaseLLMModelInfo, BaseConfig): headers=cast(httpx.Headers, headers), ) + def get_complete_url( + self, + api_base: Optional[str], + model: str, + optional_params: dict, + stream: Optional[bool] = None, + ) -> str: + """ + Get the complete URL for the API call. + + Returns: + str: The complete URL for the API call. 
+ """ + if api_base is None: + api_base = "https://api.openai.com" + endpoint = "chat/completions" + + # Remove trailing slash from api_base if present + api_base = api_base.rstrip("/") + + # Check if endpoint is already in the api_base + if endpoint in api_base: + return api_base + + return f"{api_base}/{endpoint}" + def validate_environment( self, headers: dict, @@ -230,7 +321,14 @@ class OpenAIGPTConfig(BaseLLMModelInfo, BaseConfig): api_key: Optional[str] = None, api_base: Optional[str] = None, ) -> dict: - raise NotImplementedError + if api_key is not None: + headers["Authorization"] = f"Bearer {api_key}" + + # Ensure Content-Type is set to application/json + if "content-type" not in headers and "Content-Type" not in headers: + headers["Content-Type"] = "application/json" + + return headers def get_models( self, api_key: Optional[str] = None, api_base: Optional[str] = None @@ -255,23 +353,6 @@ class OpenAIGPTConfig(BaseLLMModelInfo, BaseConfig): models = response.json()["data"] return [model["id"] for model in models] - def get_model_info( - self, model: str, existing_model_info: Optional[ModelInfoBase] = None - ) -> ModelInfoBase: - - if existing_model_info is not None: - return existing_model_info - return ModelInfoBase( - key=model, - litellm_provider="openai", - mode="chat", - input_cost_per_token=0.0, - output_cost_per_token=0.0, - max_tokens=None, - max_input_tokens=None, - max_output_tokens=None, - ) - @staticmethod def get_api_key(api_key: Optional[str] = None) -> Optional[str]: return ( @@ -289,3 +370,34 @@ class OpenAIGPTConfig(BaseLLMModelInfo, BaseConfig): or get_secret_str("OPENAI_API_BASE") or "https://api.openai.com/v1" ) + + @staticmethod + def get_base_model(model: str) -> str: + return model + + def get_model_response_iterator( + self, + streaming_response: Union[Iterator[str], AsyncIterator[str], ModelResponse], + sync_stream: bool, + json_mode: Optional[bool] = False, + ) -> Any: + return OpenAIChatCompletionStreamingHandler( + 
streaming_response=streaming_response, + sync_stream=sync_stream, + json_mode=json_mode, + ) + + +class OpenAIChatCompletionStreamingHandler(BaseModelResponseIterator): + + def chunk_parser(self, chunk: dict) -> ModelResponseStream: + try: + return ModelResponseStream( + id=chunk["id"], + object="chat.completion.chunk", + created=chunk["created"], + model=chunk["model"], + choices=chunk["choices"], + ) + except Exception as e: + raise e diff --git a/litellm/llms/openai/chat/o1_handler.py b/litellm/llms/openai/chat/o_series_handler.py similarity index 100% rename from litellm/llms/openai/chat/o1_handler.py rename to litellm/llms/openai/chat/o_series_handler.py diff --git a/litellm/llms/openai/chat/o1_transformation.py b/litellm/llms/openai/chat/o_series_transformation.py similarity index 80% rename from litellm/llms/openai/chat/o1_transformation.py rename to litellm/llms/openai/chat/o_series_transformation.py index f19472982b..b2ffda6e7d 100644 --- a/litellm/llms/openai/chat/o1_transformation.py +++ b/litellm/llms/openai/chat/o_series_transformation.py @@ -1,5 +1,5 @@ """ -Support for o1 model family +Support for o1/o3 model family https://platform.openai.com/docs/guides/reasoning @@ -19,6 +19,7 @@ from litellm.litellm_core_utils.get_llm_provider_logic import get_llm_provider from litellm.types.llms.openai import AllMessageValues, ChatCompletionUserMessage from litellm.utils import ( supports_function_calling, + supports_parallel_function_calling, supports_response_schema, supports_system_messages, ) @@ -26,7 +27,7 @@ from litellm.utils import ( from .gpt_transformation import OpenAIGPTConfig -class OpenAIO1Config(OpenAIGPTConfig): +class OpenAIOSeriesConfig(OpenAIGPTConfig): """ Reference: https://platform.openai.com/docs/guides/reasoning """ @@ -35,22 +36,13 @@ class OpenAIO1Config(OpenAIGPTConfig): def get_config(cls): return super().get_config() - def should_fake_stream( - self, - model: Optional[str], - stream: Optional[bool], - custom_llm_provider: 
Optional[str] = None, - ) -> bool: - if stream is not True: - return False - - if model is None: - return True - supported_stream_models = ["o1-mini", "o1-preview"] - for supported_model in supported_stream_models: - if supported_model in model: - return False - return True + def translate_developer_role_to_system_role( + self, messages: List[AllMessageValues] + ) -> List[AllMessageValues]: + """ + O-series models support `developer` role. + """ + return messages def get_supported_openai_params(self, model: str) -> list: """ @@ -67,6 +59,10 @@ class OpenAIO1Config(OpenAIGPTConfig): "top_logprobs", ] + o_series_only_param = ["reasoning_effort"] + + all_openai_params.extend(o_series_only_param) + try: model, custom_llm_provider, api_base, api_key = get_llm_provider( model=model @@ -81,14 +77,19 @@ class OpenAIO1Config(OpenAIGPTConfig): model, custom_llm_provider ) _supports_response_schema = supports_response_schema(model, custom_llm_provider) + _supports_parallel_tool_calls = supports_parallel_function_calling( + model, custom_llm_provider + ) if not _supports_function_calling: non_supported_params.append("tools") non_supported_params.append("tool_choice") - non_supported_params.append("parallel_tool_calls") non_supported_params.append("function_call") non_supported_params.append("functions") + if not _supports_parallel_tool_calls: + non_supported_params.append("parallel_tool_calls") + if not _supports_response_schema: non_supported_params.append("response_format") @@ -118,7 +119,7 @@ class OpenAIO1Config(OpenAIGPTConfig): pass else: raise litellm.utils.UnsupportedParamsError( - message="O-1 doesn't support temperature={}. To drop unsupported openai params from the call, set `litellm.drop_params = True`".format( + message="O-series models don't support temperature={}. Only temperature=1 is supported. 
To drop unsupported openai params from the call, set `litellm.drop_params = True`".format( temperature_value ), status_code=400, @@ -128,8 +129,10 @@ class OpenAIO1Config(OpenAIGPTConfig): non_default_params, optional_params, model, drop_params ) - def is_model_o1_reasoning_model(self, model: str) -> bool: - if model in litellm.open_ai_chat_completion_models and "o1" in model: + def is_model_o_series_model(self, model: str) -> bool: + if model in litellm.open_ai_chat_completion_models and ( + "o1" in model or "o3" in model + ): return True return False @@ -149,4 +152,5 @@ class OpenAIO1Config(OpenAIGPTConfig): ) messages[i] = new_message # Replace the old message with the new one + messages = super()._transform_messages(messages, model) return messages diff --git a/litellm/llms/openai/common_utils.py b/litellm/llms/openai/common_utils.py index 98a55b4bd3..a8412f867b 100644 --- a/litellm/llms/openai/common_utils.py +++ b/litellm/llms/openai/common_utils.py @@ -19,6 +19,7 @@ class OpenAIError(BaseLLMException): request: Optional[httpx.Request] = None, response: Optional[httpx.Response] = None, headers: Optional[Union[dict, httpx.Headers]] = None, + body: Optional[dict] = None, ): self.status_code = status_code self.message = message @@ -39,6 +40,7 @@ class OpenAIError(BaseLLMException): headers=self.headers, request=self.request, response=self.response, + body=body, ) diff --git a/litellm/llms/openai/completion/transformation.py b/litellm/llms/openai/completion/transformation.py index e26b5eb195..1aef72d3fa 100644 --- a/litellm/llms/openai/completion/transformation.py +++ b/litellm/llms/openai/completion/transformation.py @@ -94,7 +94,10 @@ class OpenAITextCompletionConfig(BaseTextCompletionConfig, OpenAIGPTConfig): role="assistant", ) choice = Choices( - finish_reason=choice["finish_reason"], index=idx, message=message + finish_reason=choice["finish_reason"], + index=idx, + message=message, + logprobs=choice.get("logprobs", None), ) choice_list.append(choice) 
model_response_object.choices = choice_list diff --git a/litellm/llms/openai/openai.py b/litellm/llms/openai/openai.py index aa361422fe..7935c46293 100644 --- a/litellm/llms/openai/openai.py +++ b/litellm/llms/openai/openai.py @@ -14,6 +14,7 @@ from typing import ( Union, cast, ) +from urllib.parse import urlparse import httpx import openai @@ -26,6 +27,7 @@ from typing_extensions import overload import litellm from litellm import LlmProviders from litellm._logging import verbose_logger +from litellm.constants import DEFAULT_MAX_RETRIES from litellm.litellm_core_utils.litellm_logging import Logging as LiteLLMLoggingObj from litellm.litellm_core_utils.logging_utils import track_llm_api_timing from litellm.llms.base_llm.base_model_iterator import BaseModelResponseIterator @@ -35,6 +37,7 @@ from litellm.llms.custom_httpx.http_handler import _DEFAULT_TTL_FOR_HTTPX_CLIENT from litellm.types.utils import ( EmbeddingResponse, ImageResponse, + LiteLLMBatch, ModelResponse, ModelResponseStream, ) @@ -46,8 +49,11 @@ from litellm.utils import ( from ...types.llms.openai import * from ..base import BaseLLM +from .chat.o_series_transformation import OpenAIOSeriesConfig from .common_utils import OpenAIError, drop_params_from_unprocessable_entity_error +openaiOSeriesConfig = OpenAIOSeriesConfig() + class MistralEmbeddingConfig: """ @@ -173,8 +179,8 @@ class OpenAIConfig(BaseConfig): Returns: list: List of supported openai parameters """ - if litellm.openAIO1Config.is_model_o1_reasoning_model(model=model): - return litellm.openAIO1Config.get_supported_openai_params(model=model) + if openaiOSeriesConfig.is_model_o_series_model(model=model): + return openaiOSeriesConfig.get_supported_openai_params(model=model) elif litellm.openAIGPTAudioConfig.is_model_gpt_audio_model(model=model): return litellm.openAIGPTAudioConfig.get_supported_openai_params(model=model) else: @@ -202,8 +208,8 @@ class OpenAIConfig(BaseConfig): drop_params: bool, ) -> dict: """ """ - if 
litellm.openAIO1Config.is_model_o1_reasoning_model(model=model): - return litellm.openAIO1Config.map_openai_params( + if openaiOSeriesConfig.is_model_o_series_model(model=model): + return openaiOSeriesConfig.map_openai_params( non_default_params=non_default_params, optional_params=optional_params, model=model, @@ -316,6 +322,17 @@ class OpenAIChatCompletion(BaseLLM): def __init__(self) -> None: super().__init__() + def _set_dynamic_params_on_client( + self, + client: Union[OpenAI, AsyncOpenAI], + organization: Optional[str] = None, + max_retries: Optional[int] = None, + ): + if organization is not None: + client.organization = organization + if max_retries is not None: + client.max_retries = max_retries + def _get_openai_client( self, is_async: bool, @@ -323,11 +340,10 @@ class OpenAIChatCompletion(BaseLLM): api_base: Optional[str] = None, api_version: Optional[str] = None, timeout: Union[float, httpx.Timeout] = httpx.Timeout(None), - max_retries: Optional[int] = 2, + max_retries: Optional[int] = DEFAULT_MAX_RETRIES, organization: Optional[str] = None, client: Optional[Union[OpenAI, AsyncOpenAI]] = None, ): - args = locals() if client is None: if not isinstance(max_retries, int): raise OpenAIError( @@ -360,7 +376,6 @@ class OpenAIChatCompletion(BaseLLM): organization=organization, ) else: - _new_client = OpenAI( api_key=api_key, base_url=api_base, @@ -379,6 +394,11 @@ class OpenAIChatCompletion(BaseLLM): return _new_client else: + self._set_dynamic_params_on_client( + client=client, + organization=organization, + max_retries=max_retries, + ) return client @track_llm_api_timing() @@ -712,10 +732,14 @@ class OpenAIChatCompletion(BaseLLM): error_headers = getattr(e, "headers", None) error_text = getattr(e, "text", str(e)) error_response = getattr(e, "response", None) + error_body = getattr(e, "body", None) if error_headers is None and error_response: error_headers = getattr(error_response, "headers", None) raise OpenAIError( - status_code=status_code, 
message=error_text, headers=error_headers + status_code=status_code, + message=error_text, + headers=error_headers, + body=error_body, ) async def acompletion( @@ -808,13 +832,17 @@ class OpenAIChatCompletion(BaseLLM): except Exception as e: exception_response = getattr(e, "response", None) status_code = getattr(e, "status_code", 500) + exception_body = getattr(e, "body", None) error_headers = getattr(e, "headers", None) if error_headers is None and exception_response: error_headers = getattr(exception_response, "headers", None) message = getattr(e, "message", str(e)) raise OpenAIError( - status_code=status_code, message=message, headers=error_headers + status_code=status_code, + message=message, + headers=error_headers, + body=exception_body, ) def streaming( @@ -833,8 +861,9 @@ class OpenAIChatCompletion(BaseLLM): stream_options: Optional[dict] = None, ): data["stream"] = True - if stream_options is not None: - data["stream_options"] = stream_options + data.update( + self.get_stream_options(stream_options=stream_options, api_base=api_base) + ) openai_client: OpenAI = self._get_openai_client( # type: ignore is_async=False, @@ -893,8 +922,9 @@ class OpenAIChatCompletion(BaseLLM): ): response = None data["stream"] = True - if stream_options is not None: - data["stream_options"] = stream_options + data.update( + self.get_stream_options(stream_options=stream_options, api_base=api_base) + ) for _ in range(2): try: openai_aclient: AsyncOpenAI = self._get_openai_client( # type: ignore @@ -951,6 +981,7 @@ class OpenAIChatCompletion(BaseLLM): error_headers = getattr(e, "headers", None) status_code = getattr(e, "status_code", 500) error_response = getattr(e, "response", None) + exception_body = getattr(e, "body", None) if error_headers is None and error_response: error_headers = getattr(error_response, "headers", None) if response is not None and hasattr(response, "text"): @@ -958,6 +989,7 @@ class OpenAIChatCompletion(BaseLLM): status_code=status_code, 
message=f"{str(e)}\n\nOriginal Response: {response.text}", # type: ignore headers=error_headers, + body=exception_body, ) else: if type(e).__name__ == "ReadTimeout": @@ -965,18 +997,37 @@ class OpenAIChatCompletion(BaseLLM): status_code=408, message=f"{type(e).__name__}", headers=error_headers, + body=exception_body, ) elif hasattr(e, "status_code"): raise OpenAIError( status_code=getattr(e, "status_code", 500), message=str(e), headers=error_headers, + body=exception_body, ) else: raise OpenAIError( - status_code=500, message=f"{str(e)}", headers=error_headers + status_code=500, + message=f"{str(e)}", + headers=error_headers, + body=exception_body, ) + def get_stream_options( + self, stream_options: Optional[dict], api_base: Optional[str] + ) -> dict: + """ + Pass `stream_options` to the data dict for OpenAI requests + """ + if stream_options is not None: + return {"stream_options": stream_options} + else: + # by default litellm will include usage for openai endpoints + if api_base is None or urlparse(api_base).hostname == "api.openai.com": + return {"stream_options": {"include_usage": True}} + return {} + # Embedding @track_llm_api_timing() async def make_openai_embedding_request( @@ -1720,9 +1771,9 @@ class OpenAIBatchesAPI(BaseLLM): self, create_batch_data: CreateBatchRequest, openai_client: AsyncOpenAI, - ) -> Batch: + ) -> LiteLLMBatch: response = await openai_client.batches.create(**create_batch_data) - return response + return LiteLLMBatch(**response.model_dump()) def create_batch( self, @@ -1734,7 +1785,7 @@ class OpenAIBatchesAPI(BaseLLM): max_retries: Optional[int], organization: Optional[str], client: Optional[Union[OpenAI, AsyncOpenAI]] = None, - ) -> Union[Batch, Coroutine[Any, Any, Batch]]: + ) -> Union[LiteLLMBatch, Coroutine[Any, Any, LiteLLMBatch]]: openai_client: Optional[Union[OpenAI, AsyncOpenAI]] = self.get_openai_client( api_key=api_key, api_base=api_base, @@ -1757,17 +1808,18 @@ class OpenAIBatchesAPI(BaseLLM): return self.acreate_batch( # 
type: ignore create_batch_data=create_batch_data, openai_client=openai_client ) - response = openai_client.batches.create(**create_batch_data) - return response + response = cast(OpenAI, openai_client).batches.create(**create_batch_data) + + return LiteLLMBatch(**response.model_dump()) async def aretrieve_batch( self, retrieve_batch_data: RetrieveBatchRequest, openai_client: AsyncOpenAI, - ) -> Batch: + ) -> LiteLLMBatch: verbose_logger.debug("retrieving batch, args= %s", retrieve_batch_data) response = await openai_client.batches.retrieve(**retrieve_batch_data) - return response + return LiteLLMBatch(**response.model_dump()) def retrieve_batch( self, @@ -1802,8 +1854,8 @@ class OpenAIBatchesAPI(BaseLLM): return self.aretrieve_batch( # type: ignore retrieve_batch_data=retrieve_batch_data, openai_client=openai_client ) - response = openai_client.batches.retrieve(**retrieve_batch_data) - return response + response = cast(OpenAI, openai_client).batches.retrieve(**retrieve_batch_data) + return LiteLLMBatch(**response.model_dump()) async def acancel_batch( self, diff --git a/litellm/llms/openai/transcriptions/handler.py b/litellm/llms/openai/transcriptions/handler.py index 5e1746319e..d9dd3c123b 100644 --- a/litellm/llms/openai/transcriptions/handler.py +++ b/litellm/llms/openai/transcriptions/handler.py @@ -112,6 +112,7 @@ class OpenAIAudioTranscription(OpenAIChatCompletion): api_base=api_base, timeout=timeout, max_retries=max_retries, + client=client, ) ## LOGGING diff --git a/litellm/llms/openai_like/chat/handler.py b/litellm/llms/openai_like/chat/handler.py index c34bbeabf3..ac886e915c 100644 --- a/litellm/llms/openai_like/chat/handler.py +++ b/litellm/llms/openai_like/chat/handler.py @@ -337,6 +337,7 @@ class OpenAILikeChatHandler(OpenAILikeBase): timeout=timeout, base_model=base_model, client=client, + json_mode=json_mode ) else: ## COMPLETION CALL diff --git a/litellm/llms/openrouter/chat/transformation.py b/litellm/llms/openrouter/chat/transformation.py index 
5a4c2ff209..4b95ec87cf 100644 --- a/litellm/llms/openrouter/chat/transformation.py +++ b/litellm/llms/openrouter/chat/transformation.py @@ -6,7 +6,16 @@ Calls done in OpenAI/openai.py as OpenRouter is openai-compatible. Docs: https://openrouter.ai/docs/parameters """ +from typing import Any, AsyncIterator, Iterator, Optional, Union + +import httpx + +from litellm.llms.base_llm.base_model_iterator import BaseModelResponseIterator +from litellm.llms.base_llm.chat.transformation import BaseLLMException +from litellm.types.utils import ModelResponse, ModelResponseStream + from ...openai.chat.gpt_transformation import OpenAIGPTConfig +from ..common_utils import OpenRouterException class OpenrouterConfig(OpenAIGPTConfig): @@ -37,3 +46,43 @@ class OpenrouterConfig(OpenAIGPTConfig): extra_body # openai client supports `extra_body` param ) return mapped_openai_params + + def get_error_class( + self, error_message: str, status_code: int, headers: Union[dict, httpx.Headers] + ) -> BaseLLMException: + return OpenRouterException( + message=error_message, + status_code=status_code, + headers=headers, + ) + + def get_model_response_iterator( + self, + streaming_response: Union[Iterator[str], AsyncIterator[str], ModelResponse], + sync_stream: bool, + json_mode: Optional[bool] = False, + ) -> Any: + return OpenRouterChatCompletionStreamingHandler( + streaming_response=streaming_response, + sync_stream=sync_stream, + json_mode=json_mode, + ) + + +class OpenRouterChatCompletionStreamingHandler(BaseModelResponseIterator): + + def chunk_parser(self, chunk: dict) -> ModelResponseStream: + try: + new_choices = [] + for choice in chunk["choices"]: + choice["delta"]["reasoning_content"] = choice["delta"].get("reasoning") + new_choices.append(choice) + return ModelResponseStream( + id=chunk["id"], + object="chat.completion.chunk", + created=chunk["created"], + model=chunk["model"], + choices=new_choices, + ) + except Exception as e: + raise e diff --git 
a/litellm/llms/openrouter/common_utils.py b/litellm/llms/openrouter/common_utils.py new file mode 100644 index 0000000000..96e53a5aae --- /dev/null +++ b/litellm/llms/openrouter/common_utils.py @@ -0,0 +1,5 @@ +from litellm.llms.base_llm.chat.transformation import BaseLLMException + + +class OpenRouterException(BaseLLMException): + pass diff --git a/litellm/llms/perplexity/chat/transformation.py b/litellm/llms/perplexity/chat/transformation.py index afa5008b79..dab64283ec 100644 --- a/litellm/llms/perplexity/chat/transformation.py +++ b/litellm/llms/perplexity/chat/transformation.py @@ -20,3 +20,24 @@ class PerplexityChatConfig(OpenAIGPTConfig): or get_secret_str("PERPLEXITY_API_KEY") ) return api_base, dynamic_api_key + + def get_supported_openai_params(self, model: str) -> list: + """ + Perplexity supports a subset of OpenAI params + + Ref: https://docs.perplexity.ai/api-reference/chat-completions + + Eg. Perplexity does not support tools, tool_choice, function_call, functions, etc. + """ + return [ + "frequency_penalty", + "max_tokens", + "max_completion_tokens", + "presence_penalty", + "response_format", + "stream", + "temperature", + "top_p", + "max_retries", + "extra_headers", + ] diff --git a/litellm/llms/petals/completion/transformation.py b/litellm/llms/petals/completion/transformation.py index dec3f69416..08ec15de33 100644 --- a/litellm/llms/petals/completion/transformation.py +++ b/litellm/llms/petals/completion/transformation.py @@ -58,7 +58,7 @@ class PetalsConfig(BaseConfig): top_p: Optional[float] = None, repetition_penalty: Optional[float] = None, ) -> None: - locals_ = locals() + locals_ = locals().copy() for key, value in locals_.items(): if key != "self" and value is not None: setattr(self.__class__, key, value) diff --git a/litellm/llms/predibase/chat/transformation.py b/litellm/llms/predibase/chat/transformation.py index b9ca0ff693..f574238696 100644 --- a/litellm/llms/predibase/chat/transformation.py +++ 
b/litellm/llms/predibase/chat/transformation.py @@ -59,7 +59,7 @@ class PredibaseConfig(BaseConfig): typical_p: Optional[float] = None, watermark: Optional[bool] = None, ) -> None: - locals_ = locals() + locals_ = locals().copy() for key, value in locals_.items(): if key != "self" and value is not None: setattr(self.__class__, key, value) diff --git a/litellm/llms/replicate/chat/transformation.py b/litellm/llms/replicate/chat/transformation.py index 310193ea66..39aaad6808 100644 --- a/litellm/llms/replicate/chat/transformation.py +++ b/litellm/llms/replicate/chat/transformation.py @@ -73,7 +73,7 @@ class ReplicateConfig(BaseConfig): seed: Optional[int] = None, debug: Optional[bool] = None, ) -> None: - locals_ = locals() + locals_ = locals().copy() for key, value in locals_.items(): if key != "self" and value is not None: setattr(self.__class__, key, value) @@ -138,7 +138,7 @@ class ReplicateConfig(BaseConfig): def get_complete_url( self, - api_base: str, + api_base: Optional[str], model: str, optional_params: dict, stream: Optional[bool] = None, diff --git a/litellm/llms/sagemaker/common_utils.py b/litellm/llms/sagemaker/common_utils.py index 49e4989ff1..9884f420c3 100644 --- a/litellm/llms/sagemaker/common_utils.py +++ b/litellm/llms/sagemaker/common_utils.py @@ -3,6 +3,7 @@ from typing import AsyncIterator, Iterator, List, Optional, Union import httpx +import litellm from litellm import verbose_logger from litellm.llms.base_llm.chat.transformation import BaseLLMException from litellm.types.utils import GenericStreamingChunk as GChunk @@ -78,7 +79,11 @@ class AWSEventStreamDecoder: message = self._parse_message_from_event(event) if message: # remove data: prefix and "\n\n" at the end - message = message.replace("data:", "").replace("\n\n", "") + message = ( + litellm.CustomStreamWrapper._strip_sse_data_from_chunk(message) + or "" + ) + message = message.replace("\n\n", "") # Accumulate JSON data accumulated_json += message @@ -127,7 +132,11 @@ class 
AWSEventStreamDecoder: if message: verbose_logger.debug("sagemaker parsed chunk bytes %s", message) # remove data: prefix and "\n\n" at the end - message = message.replace("data:", "").replace("\n\n", "") + message = ( + litellm.CustomStreamWrapper._strip_sse_data_from_chunk(message) + or "" + ) + message = message.replace("\n\n", "") # Accumulate JSON data accumulated_json += message diff --git a/litellm/llms/sagemaker/completion/handler.py b/litellm/llms/sagemaker/completion/handler.py index 0a403dc484..4aff5f5d71 100644 --- a/litellm/llms/sagemaker/completion/handler.py +++ b/litellm/llms/sagemaker/completion/handler.py @@ -433,6 +433,10 @@ class SagemakerLLM(BaseAWSLLM): "messages": messages, } prepared_request = await asyncified_prepare_request(**prepared_request_args) + if model_id is not None: # Fixes https://github.com/BerriAI/litellm/issues/8889 + prepared_request.headers.update( + {"X-Amzn-SageMaker-Inference-Component": model_id} + ) completion_stream = await self.make_async_call( api_base=prepared_request.url, headers=prepared_request.headers, # type: ignore @@ -511,7 +515,7 @@ class SagemakerLLM(BaseAWSLLM): # Add model_id as InferenceComponentName header # boto3 doc: https://docs.aws.amazon.com/sagemaker/latest/APIReference/API_runtime_InvokeEndpoint.html prepared_request.headers.update( - {"X-Amzn-SageMaker-Inference-Componen": model_id} + {"X-Amzn-SageMaker-Inference-Component": model_id} ) # make async httpx post request here try: diff --git a/litellm/llms/sagemaker/completion/transformation.py b/litellm/llms/sagemaker/completion/transformation.py index 4ee4d2ce6a..d0ab5d0697 100644 --- a/litellm/llms/sagemaker/completion/transformation.py +++ b/litellm/llms/sagemaker/completion/transformation.py @@ -47,7 +47,7 @@ class SagemakerConfig(BaseConfig): temperature: Optional[float] = None, return_full_text: Optional[bool] = None, ) -> None: - locals_ = locals() + locals_ = locals().copy() for key, value in locals_.items(): if key != "self" and value is 
not None: setattr(self.__class__, key, value) diff --git a/litellm/llms/sambanova/chat.py b/litellm/llms/sambanova/chat.py index 4eea1914ce..abf55d44fb 100644 --- a/litellm/llms/sambanova/chat.py +++ b/litellm/llms/sambanova/chat.py @@ -11,7 +11,7 @@ from litellm.llms.openai.chat.gpt_transformation import OpenAIGPTConfig class SambanovaConfig(OpenAIGPTConfig): """ - Reference: https://community.sambanova.ai/t/create-chat-completion-api/ + Reference: https://docs.sambanova.ai/cloud/api-reference/ Below are the parameters: """ diff --git a/litellm/llms/together_ai/rerank/transformation.py b/litellm/llms/together_ai/rerank/transformation.py index b74e0b6c00..4714376979 100644 --- a/litellm/llms/together_ai/rerank/transformation.py +++ b/litellm/llms/together_ai/rerank/transformation.py @@ -10,7 +10,9 @@ from typing import List, Optional from litellm.types.rerank import ( RerankBilledUnits, RerankResponse, + RerankResponseDocument, RerankResponseMeta, + RerankResponseResult, RerankTokens, ) @@ -27,8 +29,35 @@ class TogetherAIRerankConfig: if _results is None: raise ValueError(f"No results found in the response={response}") + rerank_results: List[RerankResponseResult] = [] + + for result in _results: + # Validate required fields exist + if not all(key in result for key in ["index", "relevance_score"]): + raise ValueError(f"Missing required fields in the result={result}") + + # Get document data if it exists + document_data = result.get("document", {}) + document = ( + RerankResponseDocument(text=str(document_data.get("text", ""))) + if document_data + else None + ) + + # Create typed result + rerank_result = RerankResponseResult( + index=int(result["index"]), + relevance_score=float(result["relevance_score"]), + ) + + # Only add document if it exists + if document: + rerank_result["document"] = document + + rerank_results.append(rerank_result) + return RerankResponse( id=response.get("id") or str(uuid.uuid4()), - results=_results, # type: ignore + 
results=rerank_results, meta=rerank_meta, ) # Return response diff --git a/litellm/llms/topaz/common_utils.py b/litellm/llms/topaz/common_utils.py index 9e63f31c8f..4ef2315db4 100644 --- a/litellm/llms/topaz/common_utils.py +++ b/litellm/llms/topaz/common_utils.py @@ -1,7 +1,6 @@ from typing import List, Optional from litellm.secret_managers.main import get_secret_str -from litellm.types.utils import ModelInfoBase from ..base_llm.base_utils import BaseLLMModelInfo from ..base_llm.chat.transformation import BaseLLMException @@ -12,11 +11,6 @@ class TopazException(BaseLLMException): class TopazModelInfo(BaseLLMModelInfo): - def get_model_info( - self, model: str, existing_model_info: Optional[ModelInfoBase] = None - ) -> Optional[ModelInfoBase]: - return existing_model_info - def get_models(self) -> List[str]: return [ "topaz/Standard V2", @@ -35,3 +29,7 @@ class TopazModelInfo(BaseLLMModelInfo): return ( api_base or get_secret_str("TOPAZ_API_BASE") or "https://api.topazlabs.com" ) + + @staticmethod + def get_base_model(model: str) -> str: + return model diff --git a/litellm/llms/triton/completion/transformation.py b/litellm/llms/triton/completion/transformation.py index 0cd6940063..4037c32365 100644 --- a/litellm/llms/triton/completion/transformation.py +++ b/litellm/llms/triton/completion/transformation.py @@ -3,7 +3,7 @@ Translates from OpenAI's `/v1/chat/completions` endpoint to Triton's `/generate` """ import json -from typing import Any, Dict, List, Literal, Optional, Union +from typing import Any, AsyncIterator, Dict, Iterator, List, Literal, Optional, Union from httpx import Headers, Response @@ -67,6 +67,20 @@ class TritonConfig(BaseConfig): optional_params[param] = value return optional_params + def get_complete_url( + self, + api_base: Optional[str], + model: str, + optional_params: dict, + stream: Optional[bool] = None, + ) -> str: + if api_base is None: + raise ValueError("api_base is required") + llm_type = self._get_triton_llm_type(api_base) + if 
llm_type == "generate" and stream: + return api_base + "_stream" + return api_base + def transform_response( self, model: str, @@ -149,6 +163,18 @@ class TritonConfig(BaseConfig): else: raise ValueError(f"Invalid Triton API base: {api_base}") + def get_model_response_iterator( + self, + streaming_response: Union[Iterator[str], AsyncIterator[str], ModelResponse], + sync_stream: bool, + json_mode: Optional[bool] = False, + ) -> Any: + return TritonResponseIterator( + streaming_response=streaming_response, + sync_stream=sync_stream, + json_mode=json_mode, + ) + class TritonGenerateConfig(TritonConfig): """ @@ -204,7 +230,7 @@ class TritonGenerateConfig(TritonConfig): return model_response -class TritonInferConfig(TritonGenerateConfig): +class TritonInferConfig(TritonConfig): """ Transformations for triton /infer endpoint (his is an infer model with a custom model on triton) """ diff --git a/litellm/llms/vertex_ai/batches/handler.py b/litellm/llms/vertex_ai/batches/handler.py index 0274cd5b05..b82268bef6 100644 --- a/litellm/llms/vertex_ai/batches/handler.py +++ b/litellm/llms/vertex_ai/batches/handler.py @@ -9,8 +9,12 @@ from litellm.llms.custom_httpx.http_handler import ( get_async_httpx_client, ) from litellm.llms.vertex_ai.gemini.vertex_and_google_ai_studio_gemini import VertexLLM -from litellm.types.llms.openai import Batch, CreateBatchRequest -from litellm.types.llms.vertex_ai import VertexAIBatchPredictionJob +from litellm.types.llms.openai import CreateBatchRequest +from litellm.types.llms.vertex_ai import ( + VERTEX_CREDENTIALS_TYPES, + VertexAIBatchPredictionJob, +) +from litellm.types.utils import LiteLLMBatch from .transformation import VertexAIBatchTransformation @@ -25,12 +29,12 @@ class VertexAIBatchPrediction(VertexLLM): _is_async: bool, create_batch_data: CreateBatchRequest, api_base: Optional[str], - vertex_credentials: Optional[str], + vertex_credentials: Optional[VERTEX_CREDENTIALS_TYPES], vertex_project: Optional[str], vertex_location: 
Optional[str], timeout: Union[float, httpx.Timeout], max_retries: Optional[int], - ) -> Union[Batch, Coroutine[Any, Any, Batch]]: + ) -> Union[LiteLLMBatch, Coroutine[Any, Any, LiteLLMBatch]]: sync_handler = _get_httpx_client() @@ -98,7 +102,7 @@ class VertexAIBatchPrediction(VertexLLM): vertex_batch_request: VertexAIBatchPredictionJob, api_base: str, headers: Dict[str, str], - ) -> Batch: + ) -> LiteLLMBatch: client = get_async_httpx_client( llm_provider=litellm.LlmProviders.VERTEX_AI, ) @@ -130,12 +134,12 @@ class VertexAIBatchPrediction(VertexLLM): _is_async: bool, batch_id: str, api_base: Optional[str], - vertex_credentials: Optional[str], + vertex_credentials: Optional[VERTEX_CREDENTIALS_TYPES], vertex_project: Optional[str], vertex_location: Optional[str], timeout: Union[float, httpx.Timeout], max_retries: Optional[int], - ) -> Union[Batch, Coroutine[Any, Any, Batch]]: + ) -> Union[LiteLLMBatch, Coroutine[Any, Any, LiteLLMBatch]]: sync_handler = _get_httpx_client() access_token, project_id = self._ensure_access_token( @@ -196,7 +200,7 @@ class VertexAIBatchPrediction(VertexLLM): self, api_base: str, headers: Dict[str, str], - ) -> Batch: + ) -> LiteLLMBatch: client = get_async_httpx_client( llm_provider=litellm.LlmProviders.VERTEX_AI, ) diff --git a/litellm/llms/vertex_ai/batches/transformation.py b/litellm/llms/vertex_ai/batches/transformation.py index 32cabdcf56..a97f312d48 100644 --- a/litellm/llms/vertex_ai/batches/transformation.py +++ b/litellm/llms/vertex_ai/batches/transformation.py @@ -4,8 +4,9 @@ from typing import Dict from litellm.llms.vertex_ai.common_utils import ( _convert_vertex_datetime_to_openai_datetime, ) -from litellm.types.llms.openai import Batch, BatchJobStatus, CreateBatchRequest +from litellm.types.llms.openai import BatchJobStatus, CreateBatchRequest from litellm.types.llms.vertex_ai import * +from litellm.types.utils import LiteLLMBatch class VertexAIBatchTransformation: @@ -47,8 +48,8 @@ class VertexAIBatchTransformation: 
@classmethod def transform_vertex_ai_batch_response_to_openai_batch_response( cls, response: VertexBatchPredictionResponse - ) -> Batch: - return Batch( + ) -> LiteLLMBatch: + return LiteLLMBatch( id=cls._get_batch_id_from_vertex_ai_batch_response(response), completion_window="24hrs", created_at=_convert_vertex_datetime_to_openai_datetime( diff --git a/litellm/llms/vertex_ai/common_utils.py b/litellm/llms/vertex_ai/common_utils.py index a412a1f0db..f7149c349a 100644 --- a/litellm/llms/vertex_ai/common_utils.py +++ b/litellm/llms/vertex_ai/common_utils.py @@ -170,6 +170,9 @@ def _build_vertex_schema(parameters: dict): strip_field( parameters, field_name="$schema" ) # 5. Remove $schema - json schema value, not supported by OpenAPI - causes vertex errors. + strip_field( + parameters, field_name="$id" + ) # 6. Remove id - json schema value, not supported by OpenAPI - causes vertex errors. return parameters diff --git a/litellm/llms/vertex_ai/files/handler.py b/litellm/llms/vertex_ai/files/handler.py index 4bae106045..266169cdfb 100644 --- a/litellm/llms/vertex_ai/files/handler.py +++ b/litellm/llms/vertex_ai/files/handler.py @@ -9,6 +9,7 @@ from litellm.integrations.gcs_bucket.gcs_bucket_base import ( ) from litellm.llms.custom_httpx.http_handler import get_async_httpx_client from litellm.types.llms.openai import CreateFileRequest, FileObject +from litellm.types.llms.vertex_ai import VERTEX_CREDENTIALS_TYPES from .transformation import VertexAIFilesTransformation @@ -34,7 +35,7 @@ class VertexAIFilesHandler(GCSBucketBase): self, create_file_data: CreateFileRequest, api_base: Optional[str], - vertex_credentials: Optional[str], + vertex_credentials: Optional[VERTEX_CREDENTIALS_TYPES], vertex_project: Optional[str], vertex_location: Optional[str], timeout: Union[float, httpx.Timeout], @@ -70,7 +71,7 @@ class VertexAIFilesHandler(GCSBucketBase): _is_async: bool, create_file_data: CreateFileRequest, api_base: Optional[str], - vertex_credentials: Optional[str], + 
vertex_credentials: Optional[VERTEX_CREDENTIALS_TYPES], vertex_project: Optional[str], vertex_location: Optional[str], timeout: Union[float, httpx.Timeout], diff --git a/litellm/llms/vertex_ai/fine_tuning/handler.py b/litellm/llms/vertex_ai/fine_tuning/handler.py index 8564b8cb69..3cf409c78e 100644 --- a/litellm/llms/vertex_ai/fine_tuning/handler.py +++ b/litellm/llms/vertex_ai/fine_tuning/handler.py @@ -13,6 +13,7 @@ from litellm.llms.vertex_ai.gemini.vertex_and_google_ai_studio_gemini import Ver from litellm.types.fine_tuning import OpenAIFineTuningHyperparameters from litellm.types.llms.openai import FineTuningJobCreate from litellm.types.llms.vertex_ai import ( + VERTEX_CREDENTIALS_TYPES, FineTuneHyperparameters, FineTuneJobCreate, FineTunesupervisedTuningSpec, @@ -222,7 +223,7 @@ class VertexFineTuningAPI(VertexLLM): create_fine_tuning_job_data: FineTuningJobCreate, vertex_project: Optional[str], vertex_location: Optional[str], - vertex_credentials: Optional[str], + vertex_credentials: Optional[VERTEX_CREDENTIALS_TYPES], api_base: Optional[str], timeout: Union[float, httpx.Timeout], kwargs: Optional[dict] = None, diff --git a/litellm/llms/vertex_ai/gemini/transformation.py b/litellm/llms/vertex_ai/gemini/transformation.py index 8109c8bf61..d6bafc7c60 100644 --- a/litellm/llms/vertex_ai/gemini/transformation.py +++ b/litellm/llms/vertex_ai/gemini/transformation.py @@ -55,10 +55,11 @@ else: LiteLLMLoggingObj = Any -def _process_gemini_image(image_url: str) -> PartType: +def _process_gemini_image(image_url: str, format: Optional[str] = None) -> PartType: """ Given an image URL, return the appropriate PartType for Gemini """ + try: # GCS URIs if "gs://" in image_url: @@ -66,25 +67,30 @@ def _process_gemini_image(image_url: str) -> PartType: extension_with_dot = os.path.splitext(image_url)[-1] # Ex: ".png" extension = extension_with_dot[1:] # Ex: "png" - file_type = get_file_type_from_extension(extension) + if not format: + file_type = 
get_file_type_from_extension(extension) - # Validate the file type is supported by Gemini - if not is_gemini_1_5_accepted_file_type(file_type): - raise Exception(f"File type not supported by gemini - {file_type}") + # Validate the file type is supported by Gemini + if not is_gemini_1_5_accepted_file_type(file_type): + raise Exception(f"File type not supported by gemini - {file_type}") - mime_type = get_file_mime_type_for_file_type(file_type) + mime_type = get_file_mime_type_for_file_type(file_type) + else: + mime_type = format file_data = FileDataType(mime_type=mime_type, file_uri=image_url) return PartType(file_data=file_data) elif ( "https://" in image_url - and (image_type := _get_image_mime_type_from_url(image_url)) is not None + and (image_type := format or _get_image_mime_type_from_url(image_url)) + is not None ): + file_data = FileDataType(file_uri=image_url, mime_type=image_type) return PartType(file_data=file_data) elif "http://" in image_url or "https://" in image_url or "base64" in image_url: # https links for unsupported mime types and base64 images - image = convert_to_anthropic_image_obj(image_url) + image = convert_to_anthropic_image_obj(image_url, format=format) _blob = BlobType(data=image["data"], mime_type=image["media_type"]) return PartType(inline_data=_blob) raise Exception("Invalid image received - {}".format(image_url)) @@ -159,11 +165,15 @@ def _gemini_convert_messages_with_history( # noqa: PLR0915 elif element["type"] == "image_url": element = cast(ChatCompletionImageObject, element) img_element = element + format: Optional[str] = None if isinstance(img_element["image_url"], dict): image_url = img_element["image_url"]["url"] + format = img_element["image_url"].get("format") else: image_url = img_element["image_url"] - _part = _process_gemini_image(image_url=image_url) + _part = _process_gemini_image( + image_url=image_url, format=format + ) _parts.append(_part) user_content.extend(_parts) elif ( diff --git 
a/litellm/llms/vertex_ai/gemini/vertex_and_google_ai_studio_gemini.py b/litellm/llms/vertex_ai/gemini/vertex_and_google_ai_studio_gemini.py index 294c815016..294939a3c5 100644 --- a/litellm/llms/vertex_ai/gemini/vertex_and_google_ai_studio_gemini.py +++ b/litellm/llms/vertex_ai/gemini/vertex_and_google_ai_studio_gemini.py @@ -40,6 +40,7 @@ from litellm.types.llms.openai import ( ChatCompletionUsageBlock, ) from litellm.types.llms.vertex_ai import ( + VERTEX_CREDENTIALS_TYPES, Candidates, ContentType, FunctionCallingConfig, @@ -179,7 +180,7 @@ class VertexGeminiConfig(VertexAIBaseConfig, BaseConfig): presence_penalty: Optional[float] = None, seed: Optional[int] = None, ) -> None: - locals_ = locals() + locals_ = locals().copy() for key, value in locals_.items(): if key != "self" and value is not None: setattr(self.__class__, key, value) @@ -930,7 +931,7 @@ class VertexLLM(VertexBase): client: Optional[AsyncHTTPHandler] = None, vertex_project: Optional[str] = None, vertex_location: Optional[str] = None, - vertex_credentials: Optional[str] = None, + vertex_credentials: Optional[VERTEX_CREDENTIALS_TYPES] = None, gemini_api_key: Optional[str] = None, extra_headers: Optional[dict] = None, ) -> CustomStreamWrapper: @@ -1018,11 +1019,10 @@ class VertexLLM(VertexBase): client: Optional[AsyncHTTPHandler] = None, vertex_project: Optional[str] = None, vertex_location: Optional[str] = None, - vertex_credentials: Optional[str] = None, + vertex_credentials: Optional[VERTEX_CREDENTIALS_TYPES] = None, gemini_api_key: Optional[str] = None, extra_headers: Optional[dict] = None, ) -> Union[ModelResponse, CustomStreamWrapper]: - should_use_v1beta1_features = self.is_using_v1beta1_features( optional_params=optional_params ) @@ -1123,7 +1123,7 @@ class VertexLLM(VertexBase): timeout: Optional[Union[float, httpx.Timeout]], vertex_project: Optional[str], vertex_location: Optional[str], - vertex_credentials: Optional[str], + vertex_credentials: Optional[VERTEX_CREDENTIALS_TYPES], 
gemini_api_key: Optional[str], litellm_params: dict, logger_fn=None, @@ -1408,7 +1408,8 @@ class ModelResponseIterator: return self.chunk_parser(chunk=json_chunk) def handle_accumulated_json_chunk(self, chunk: str) -> GenericStreamingChunk: - message = chunk.replace("data:", "").replace("\n\n", "") + chunk = litellm.CustomStreamWrapper._strip_sse_data_from_chunk(chunk) or "" + message = chunk.replace("\n\n", "") # Accumulate JSON data self.accumulated_json += message @@ -1431,7 +1432,7 @@ class ModelResponseIterator: def _common_chunk_parsing_logic(self, chunk: str) -> GenericStreamingChunk: try: - chunk = chunk.replace("data:", "") + chunk = litellm.CustomStreamWrapper._strip_sse_data_from_chunk(chunk) or "" if len(chunk) > 0: """ Check if initial chunk valid json diff --git a/litellm/llms/vertex_ai/image_generation/image_generation_handler.py b/litellm/llms/vertex_ai/image_generation/image_generation_handler.py index bb39fcb1ad..1d5322c08d 100644 --- a/litellm/llms/vertex_ai/image_generation/image_generation_handler.py +++ b/litellm/llms/vertex_ai/image_generation/image_generation_handler.py @@ -11,6 +11,7 @@ from litellm.llms.custom_httpx.http_handler import ( get_async_httpx_client, ) from litellm.llms.vertex_ai.gemini.vertex_and_google_ai_studio_gemini import VertexLLM +from litellm.types.llms.vertex_ai import VERTEX_CREDENTIALS_TYPES from litellm.types.utils import ImageResponse @@ -44,7 +45,7 @@ class VertexImageGeneration(VertexLLM): prompt: str, vertex_project: Optional[str], vertex_location: Optional[str], - vertex_credentials: Optional[str], + vertex_credentials: Optional[VERTEX_CREDENTIALS_TYPES], model_response: ImageResponse, logging_obj: Any, model: Optional[ @@ -139,7 +140,7 @@ class VertexImageGeneration(VertexLLM): prompt: str, vertex_project: Optional[str], vertex_location: Optional[str], - vertex_credentials: Optional[str], + vertex_credentials: Optional[VERTEX_CREDENTIALS_TYPES], model_response: litellm.ImageResponse, logging_obj: Any, model: 
Optional[ diff --git a/litellm/llms/vertex_ai/text_to_speech/text_to_speech_handler.py b/litellm/llms/vertex_ai/text_to_speech/text_to_speech_handler.py index 10c73e815c..18bc72db46 100644 --- a/litellm/llms/vertex_ai/text_to_speech/text_to_speech_handler.py +++ b/litellm/llms/vertex_ai/text_to_speech/text_to_speech_handler.py @@ -9,6 +9,7 @@ from litellm.llms.custom_httpx.http_handler import ( ) from litellm.llms.openai.openai import HttpxBinaryResponseContent from litellm.llms.vertex_ai.gemini.vertex_and_google_ai_studio_gemini import VertexLLM +from litellm.types.llms.vertex_ai import VERTEX_CREDENTIALS_TYPES class VertexInput(TypedDict, total=False): @@ -45,7 +46,7 @@ class VertexTextToSpeechAPI(VertexLLM): logging_obj, vertex_project: Optional[str], vertex_location: Optional[str], - vertex_credentials: Optional[str], + vertex_credentials: Optional[VERTEX_CREDENTIALS_TYPES], api_base: Optional[str], timeout: Union[float, httpx.Timeout], model: str, diff --git a/litellm/llms/vertex_ai/vertex_ai_partner_models/ai21/transformation.py b/litellm/llms/vertex_ai/vertex_ai_partner_models/ai21/transformation.py index 7ddd1cf89f..d87b2e0311 100644 --- a/litellm/llms/vertex_ai/vertex_ai_partner_models/ai21/transformation.py +++ b/litellm/llms/vertex_ai/vertex_ai_partner_models/ai21/transformation.py @@ -17,7 +17,7 @@ class VertexAIAi21Config: self, max_tokens: Optional[int] = None, ) -> None: - locals_ = locals() + locals_ = locals().copy() for key, value in locals_.items(): if key != "self" and value is not None: setattr(self.__class__, key, value) diff --git a/litellm/llms/vertex_ai/vertex_ai_partner_models/llama3/transformation.py b/litellm/llms/vertex_ai/vertex_ai_partner_models/llama3/transformation.py index 331d378c84..cf46f4a742 100644 --- a/litellm/llms/vertex_ai/vertex_ai_partner_models/llama3/transformation.py +++ b/litellm/llms/vertex_ai/vertex_ai_partner_models/llama3/transformation.py @@ -1,10 +1,10 @@ import types from typing import Optional -import litellm 
+from litellm.llms.openai.chat.gpt_transformation import OpenAIGPTConfig -class VertexAILlama3Config: +class VertexAILlama3Config(OpenAIGPTConfig): """ Reference:https://cloud.google.com/vertex-ai/generative-ai/docs/partner-models/llama#streaming @@ -21,7 +21,7 @@ class VertexAILlama3Config: self, max_tokens: Optional[int] = None, ) -> None: - locals_ = locals() + locals_ = locals().copy() for key, value in locals_.items(): if key == "max_tokens" and value is None: value = self.max_tokens @@ -46,8 +46,13 @@ class VertexAILlama3Config: and v is not None } - def get_supported_openai_params(self): - return litellm.OpenAIConfig().get_supported_openai_params(model="gpt-3.5-turbo") + def get_supported_openai_params(self, model: str): + supported_params = super().get_supported_openai_params(model=model) + try: + supported_params.remove("max_retries") + except KeyError: + pass + return supported_params def map_openai_params( self, @@ -60,7 +65,7 @@ class VertexAILlama3Config: non_default_params["max_tokens"] = non_default_params.pop( "max_completion_tokens" ) - return litellm.OpenAIConfig().map_openai_params( + return super().map_openai_params( non_default_params=non_default_params, optional_params=optional_params, model=model, diff --git a/litellm/llms/vertex_ai/vertex_ai_partner_models/main.py b/litellm/llms/vertex_ai/vertex_ai_partner_models/main.py index ad52472130..fb2393631b 100644 --- a/litellm/llms/vertex_ai/vertex_ai_partner_models/main.py +++ b/litellm/llms/vertex_ai/vertex_ai_partner_models/main.py @@ -160,7 +160,8 @@ class VertexAIPartnerModels(VertexBase): url=default_api_base, ) - model = model.split("@")[0] + if "codestral" in model or "mistral" in model: + model = model.split("@")[0] if "codestral" in model and litellm_params.get("text_completion") is True: optional_params["model"] = model diff --git a/litellm/llms/vertex_ai/vertex_embeddings/embedding_handler.py b/litellm/llms/vertex_ai/vertex_embeddings/embedding_handler.py index 0f73db30a0..3ef40703e8 
100644 --- a/litellm/llms/vertex_ai/vertex_embeddings/embedding_handler.py +++ b/litellm/llms/vertex_ai/vertex_embeddings/embedding_handler.py @@ -41,7 +41,7 @@ class VertexEmbedding(VertexBase): client: Optional[Union[AsyncHTTPHandler, HTTPHandler]] = None, vertex_project: Optional[str] = None, vertex_location: Optional[str] = None, - vertex_credentials: Optional[str] = None, + vertex_credentials: Optional[VERTEX_CREDENTIALS_TYPES] = None, gemini_api_key: Optional[str] = None, extra_headers: Optional[dict] = None, ) -> EmbeddingResponse: @@ -148,7 +148,7 @@ class VertexEmbedding(VertexBase): client: Optional[AsyncHTTPHandler] = None, vertex_project: Optional[str] = None, vertex_location: Optional[str] = None, - vertex_credentials: Optional[str] = None, + vertex_credentials: Optional[VERTEX_CREDENTIALS_TYPES] = None, gemini_api_key: Optional[str] = None, extra_headers: Optional[dict] = None, encoding=None, diff --git a/litellm/llms/vertex_ai/vertex_embeddings/transformation.py b/litellm/llms/vertex_ai/vertex_embeddings/transformation.py index 41eb65be69..0e9c073f8d 100644 --- a/litellm/llms/vertex_ai/vertex_embeddings/transformation.py +++ b/litellm/llms/vertex_ai/vertex_embeddings/transformation.py @@ -48,7 +48,7 @@ class VertexAITextEmbeddingConfig(BaseModel): ] = None, title: Optional[str] = None, ) -> None: - locals_ = locals() + locals_ = locals().copy() for key, value in locals_.items(): if key != "self" and value is not None: setattr(self.__class__, key, value) diff --git a/litellm/llms/vertex_ai/vertex_llm_base.py b/litellm/llms/vertex_ai/vertex_llm_base.py index 71346a2e01..8286cb515f 100644 --- a/litellm/llms/vertex_ai/vertex_llm_base.py +++ b/litellm/llms/vertex_ai/vertex_llm_base.py @@ -12,6 +12,7 @@ from litellm._logging import verbose_logger from litellm.litellm_core_utils.asyncify import asyncify from litellm.llms.base import BaseLLM from litellm.llms.custom_httpx.http_handler import AsyncHTTPHandler +from litellm.types.llms.vertex_ai import 
VERTEX_CREDENTIALS_TYPES from .common_utils import _get_gemini_url, _get_vertex_url, all_gemini_url_modes @@ -34,7 +35,7 @@ class VertexBase(BaseLLM): return vertex_region or "us-central1" def load_auth( - self, credentials: Optional[str], project_id: Optional[str] + self, credentials: Optional[VERTEX_CREDENTIALS_TYPES], project_id: Optional[str] ) -> Tuple[Any, str]: import google.auth as google_auth from google.auth import identity_pool @@ -42,29 +43,36 @@ class VertexBase(BaseLLM): Request, # type: ignore[import-untyped] ) - if credentials is not None and isinstance(credentials, str): + if credentials is not None: import google.oauth2.service_account - verbose_logger.debug( - "Vertex: Loading vertex credentials from %s", credentials - ) - verbose_logger.debug( - "Vertex: checking if credentials is a valid path, os.path.exists(%s)=%s, current dir %s", - credentials, - os.path.exists(credentials), - os.getcwd(), - ) + if isinstance(credentials, str): + verbose_logger.debug( + "Vertex: Loading vertex credentials from %s", credentials + ) + verbose_logger.debug( + "Vertex: checking if credentials is a valid path, os.path.exists(%s)=%s, current dir %s", + credentials, + os.path.exists(credentials), + os.getcwd(), + ) - try: - if os.path.exists(credentials): - json_obj = json.load(open(credentials)) - else: - json_obj = json.loads(credentials) - except Exception: - raise Exception( - "Unable to load vertex credentials from environment. Got={}".format( - credentials + try: + if os.path.exists(credentials): + json_obj = json.load(open(credentials)) + else: + json_obj = json.loads(credentials) + except Exception: + raise Exception( + "Unable to load vertex credentials from environment. 
Got={}".format( + credentials + ) ) + elif isinstance(credentials, dict): + json_obj = credentials + else: + raise ValueError( + "Invalid credentials type: {}".format(type(credentials)) ) # Check if the JSON object contains Workload Identity Federation configuration @@ -109,7 +117,7 @@ class VertexBase(BaseLLM): def _ensure_access_token( self, - credentials: Optional[str], + credentials: Optional[VERTEX_CREDENTIALS_TYPES], project_id: Optional[str], custom_llm_provider: Literal[ "vertex_ai", "vertex_ai_beta", "gemini" @@ -202,7 +210,7 @@ class VertexBase(BaseLLM): gemini_api_key: Optional[str], vertex_project: Optional[str], vertex_location: Optional[str], - vertex_credentials: Optional[str], + vertex_credentials: Optional[VERTEX_CREDENTIALS_TYPES], stream: Optional[bool], custom_llm_provider: Literal["vertex_ai", "vertex_ai_beta", "gemini"], api_base: Optional[str], @@ -253,7 +261,7 @@ class VertexBase(BaseLLM): async def _ensure_access_token_async( self, - credentials: Optional[str], + credentials: Optional[VERTEX_CREDENTIALS_TYPES], project_id: Optional[str], custom_llm_provider: Literal[ "vertex_ai", "vertex_ai_beta", "gemini" diff --git a/litellm/llms/watsonx/chat/transformation.py b/litellm/llms/watsonx/chat/transformation.py index 208da82ef5..d5e0ed6544 100644 --- a/litellm/llms/watsonx/chat/transformation.py +++ b/litellm/llms/watsonx/chat/transformation.py @@ -80,7 +80,7 @@ class IBMWatsonXChatConfig(IBMWatsonXMixin, OpenAIGPTConfig): def get_complete_url( self, - api_base: str, + api_base: Optional[str], model: str, optional_params: dict, stream: Optional[bool] = None, diff --git a/litellm/llms/watsonx/completion/transformation.py b/litellm/llms/watsonx/completion/transformation.py index 7e6a8a525d..7a4df23944 100644 --- a/litellm/llms/watsonx/completion/transformation.py +++ b/litellm/llms/watsonx/completion/transformation.py @@ -108,7 +108,7 @@ class IBMWatsonXAIConfig(IBMWatsonXMixin, BaseConfig): stream: Optional[bool] = None, **kwargs, ) -> None: - 
locals_ = locals() + locals_ = locals().copy() for key, value in locals_.items(): if key != "self" and value is not None: setattr(self.__class__, key, value) @@ -315,7 +315,7 @@ class IBMWatsonXAIConfig(IBMWatsonXMixin, BaseConfig): def get_complete_url( self, - api_base: str, + api_base: Optional[str], model: str, optional_params: dict, stream: Optional[bool] = None, diff --git a/litellm/main.py b/litellm/main.py index 93cf16c601..b90030a6bb 100644 --- a/litellm/main.py +++ b/litellm/main.py @@ -50,6 +50,7 @@ from litellm import ( # type: ignore get_litellm_params, get_optional_params, ) +from litellm.exceptions import LiteLLMUnknownProvider from litellm.integrations.custom_logger import CustomLogger from litellm.litellm_core_utils.audio_utils.utils import get_audio_file_for_health_check from litellm.litellm_core_utils.health_check_utils import ( @@ -67,6 +68,8 @@ from litellm.litellm_core_utils.mock_functions import ( from litellm.litellm_core_utils.prompt_templates.common_utils import ( get_content_from_model_response, ) +from litellm.llms.base_llm.chat.transformation import BaseConfig +from litellm.llms.bedrock.common_utils import BedrockModelInfo from litellm.llms.custom_httpx.http_handler import AsyncHTTPHandler, HTTPHandler from litellm.realtime_api.main import _realtime_health_check from litellm.secret_managers.main import get_secret_str @@ -75,6 +78,7 @@ from litellm.utils import ( CustomStreamWrapper, ProviderConfigManager, Usage, + add_openai_metadata, async_mock_completion_streaming_obj, convert_to_model_response_object, create_pretrained_tokenizer, @@ -90,7 +94,7 @@ from litellm.utils import ( read_config_args, supports_httpx_timeout, token_counter, - validate_chat_completion_messages, + validate_and_fix_openai_messages, validate_chat_completion_tool_choice, ) @@ -114,7 +118,7 @@ from .llms import baseten, maritalk, ollama_chat from .llms.anthropic.chat import AnthropicChatCompletion from .llms.azure.audio_transcriptions import AzureAudioTranscription 
from .llms.azure.azure import AzureChatCompletion, _check_dynamic_azure_params -from .llms.azure.chat.o1_handler import AzureOpenAIO1ChatCompletion +from .llms.azure.chat.o_series_handler import AzureOpenAIO1ChatCompletion from .llms.azure.completion.handler import AzureTextCompletion from .llms.azure_ai.embed import AzureAIEmbedding from .llms.bedrock.chat import BedrockConverseLLM, BedrockLLM @@ -162,6 +166,7 @@ from .llms.vertex_ai.vertex_model_garden.main import VertexAIModelGardenModels from .llms.vllm.completion import handler as vllm_handler from .llms.watsonx.chat.handler import WatsonXChatHandler from .llms.watsonx.common_utils import IBMWatsonXMixin +from .types.llms.anthropic import AnthropicThinkingParam from .types.llms.openai import ( ChatCompletionAssistantMessage, ChatCompletionAudioParam, @@ -212,7 +217,6 @@ azure_audio_transcriptions = AzureAudioTranscription() huggingface = Huggingface() predibase_chat_completions = PredibaseChatCompletion() codestral_text_completions = CodestralTextCompletion() -bedrock_chat_completion = BedrockLLM() bedrock_converse_chat_completion = BedrockConverseLLM() bedrock_embedding = BedrockEmbedding() bedrock_image_generation = BedrockImageGeneration() @@ -330,6 +334,7 @@ async def acompletion( logprobs: Optional[bool] = None, top_logprobs: Optional[int] = None, deployment_id=None, + reasoning_effort: Optional[Literal["low", "medium", "high"]] = None, # set api_base, api_version, api_key base_url: Optional[str] = None, api_version: Optional[str] = None, @@ -337,6 +342,7 @@ async def acompletion( model_list: Optional[list] = None, # pass in a list of api_base,keys, etc. extra_headers: Optional[dict] = None, # Optional liteLLM function params + thinking: Optional[AnthropicThinkingParam] = None, **kwargs, ) -> Union[ModelResponse, CustomStreamWrapper]: """ @@ -383,6 +389,10 @@ async def acompletion( - If `stream` is True, the function returns an async generator that yields completion lines. 
""" fallbacks = kwargs.get("fallbacks", None) + mock_timeout = kwargs.get("mock_timeout", None) + + if mock_timeout is True: + await _handle_mock_timeout_async(mock_timeout, timeout, model) loop = asyncio.get_event_loop() custom_llm_provider = kwargs.get("custom_llm_provider", None) @@ -420,8 +430,10 @@ async def acompletion( "api_version": api_version, "api_key": api_key, "model_list": model_list, + "reasoning_effort": reasoning_effort, "extra_headers": extra_headers, "acompletion": True, # assuming this is a required parameter + "thinking": thinking, } if custom_llm_provider is None: _, custom_llm_provider, _, _ = get_llm_provider( @@ -565,12 +577,7 @@ def _handle_mock_timeout( model: str, ): if mock_timeout is True and timeout is not None: - if isinstance(timeout, float): - time.sleep(timeout) - elif isinstance(timeout, str): - time.sleep(float(timeout)) - elif isinstance(timeout, httpx.Timeout) and timeout.connect is not None: - time.sleep(timeout.connect) + _sleep_for_timeout(timeout) raise litellm.Timeout( message="This is a mock timeout error", llm_provider="openai", @@ -578,6 +585,38 @@ def _handle_mock_timeout( ) +async def _handle_mock_timeout_async( + mock_timeout: Optional[bool], + timeout: Optional[Union[float, str, httpx.Timeout]], + model: str, +): + if mock_timeout is True and timeout is not None: + await _sleep_for_timeout_async(timeout) + raise litellm.Timeout( + message="This is a mock timeout error", + llm_provider="openai", + model=model, + ) + + +def _sleep_for_timeout(timeout: Union[float, str, httpx.Timeout]): + if isinstance(timeout, float): + time.sleep(timeout) + elif isinstance(timeout, str): + time.sleep(float(timeout)) + elif isinstance(timeout, httpx.Timeout) and timeout.connect is not None: + time.sleep(timeout.connect) + + +async def _sleep_for_timeout_async(timeout: Union[float, str, httpx.Timeout]): + if isinstance(timeout, float): + await asyncio.sleep(timeout) + elif isinstance(timeout, str): + await 
asyncio.sleep(float(timeout)) + elif isinstance(timeout, httpx.Timeout) and timeout.connect is not None: + await asyncio.sleep(timeout.connect) + + def mock_completion( model: str, messages: List, @@ -745,6 +784,7 @@ def completion( # type: ignore # noqa: PLR0915 logit_bias: Optional[dict] = None, user: Optional[str] = None, # openai v1.0+ new params + reasoning_effort: Optional[Literal["low", "medium", "high"]] = None, response_format: Optional[Union[dict, Type[BaseModel]]] = None, seed: Optional[int] = None, tools: Optional[List] = None, @@ -763,6 +803,7 @@ def completion( # type: ignore # noqa: PLR0915 api_key: Optional[str] = None, model_list: Optional[list] = None, # pass in a list of api_base,keys, etc. # Optional liteLLM function params + thinking: Optional[AnthropicThinkingParam] = None, **kwargs, ) -> Union[ModelResponse, CustomStreamWrapper]: """ @@ -814,7 +855,7 @@ def completion( # type: ignore # noqa: PLR0915 if model is None: raise ValueError("model param not passed in.") # validate messages - messages = validate_chat_completion_messages(messages=messages) + messages = validate_and_fix_openai_messages(messages=messages) # validate tool_choice tool_choice = validate_chat_completion_tool_choice(tool_choice=tool_choice) ######### unpacking kwargs ##################### @@ -837,6 +878,7 @@ def completion( # type: ignore # noqa: PLR0915 Optional[ProviderSpecificHeader], kwargs.get("provider_specific_header", None) ) headers = kwargs.get("headers", None) or extra_headers + ensure_alternating_roles: Optional[bool] = kwargs.get( "ensure_alternating_roles", None ) @@ -848,6 +890,8 @@ def completion( # type: ignore # noqa: PLR0915 ) if headers is None: headers = {} + if extra_headers is not None: + headers.update(extra_headers) num_retries = kwargs.get( "num_retries", None ) ## alt. param for 'max_retries'. Use this to pass retries w/ instructor. 
@@ -1011,6 +1055,19 @@ def completion( # type: ignore # noqa: PLR0915 if eos_token: custom_prompt_dict[model]["eos_token"] = eos_token + provider_config: Optional[BaseConfig] = None + if custom_llm_provider is not None and custom_llm_provider in [ + provider.value for provider in LlmProviders + ]: + provider_config = ProviderConfigManager.get_provider_chat_config( + model=model, provider=LlmProviders(custom_llm_provider) + ) + + if provider_config is not None: + messages = provider_config.translate_developer_role_to_system_role( + messages=messages + ) + if ( supports_system_message is not None and isinstance(supports_system_message, bool) @@ -1052,14 +1109,11 @@ def completion( # type: ignore # noqa: PLR0915 api_version=api_version, parallel_tool_calls=parallel_tool_calls, messages=messages, - extra_headers=extra_headers, + reasoning_effort=reasoning_effort, + thinking=thinking, **non_default_params, ) - extra_headers = optional_params.pop("extra_headers", None) - if extra_headers is not None: - headers.update(extra_headers) - if litellm.add_function_to_prompt and optional_params.get( "functions_unsupported_model", None ): # if user opts to add it to prompt, when API doesn't support function calling @@ -1105,6 +1159,9 @@ def completion( # type: ignore # noqa: PLR0915 prompt_id=prompt_id, prompt_variables=prompt_variables, ssl_verify=ssl_verify, + merge_reasoning_content_in_choices=kwargs.get( + "merge_reasoning_content_in_choices", None + ), ) logging.update_environment_variables( model=model, @@ -1166,12 +1223,19 @@ def completion( # type: ignore # noqa: PLR0915 "azure_ad_token", None ) or get_secret("AZURE_AD_TOKEN") + azure_ad_token_provider = litellm_params.get( + "azure_ad_token_provider", None + ) + headers = headers or litellm.headers if extra_headers is not None: optional_params["extra_headers"] = extra_headers + if max_retries is not None: + optional_params["max_retries"] = max_retries + + if litellm.AzureOpenAIO1Config().is_o_series_model(model=model): - 
if litellm.AzureOpenAIO1Config().is_o1_model(model=model): ## LOAD CONFIG - if set config = litellm.AzureOpenAIO1Config.get_config() for k, v in config.items(): @@ -1220,6 +1284,7 @@ def completion( # type: ignore # noqa: PLR0915 api_type=api_type, dynamic_params=dynamic_params, azure_ad_token=azure_ad_token, + azure_ad_token_provider=azure_ad_token_provider, model_response=model_response, print_verbose=print_verbose, optional_params=optional_params, @@ -1265,6 +1330,10 @@ def completion( # type: ignore # noqa: PLR0915 "azure_ad_token", None ) or get_secret("AZURE_AD_TOKEN") + azure_ad_token_provider = litellm_params.get( + "azure_ad_token_provider", None + ) + headers = headers or litellm.headers if extra_headers is not None: @@ -1288,6 +1357,7 @@ def completion( # type: ignore # noqa: PLR0915 api_version=api_version, api_type=api_type, azure_ad_token=azure_ad_token, + azure_ad_token_provider=azure_ad_token_provider, model_response=model_response, print_verbose=print_verbose, optional_params=optional_params, @@ -1311,6 +1381,36 @@ def completion( # type: ignore # noqa: PLR0915 "api_base": api_base, }, ) + elif custom_llm_provider == "deepseek": + ## COMPLETION CALL + try: + response = base_llm_http_handler.completion( + model=model, + messages=messages, + headers=headers, + model_response=model_response, + api_key=api_key, + api_base=api_base, + acompletion=acompletion, + logging_obj=logging, + optional_params=optional_params, + litellm_params=litellm_params, + timeout=timeout, # type: ignore + client=client, + custom_llm_provider=custom_llm_provider, + encoding=encoding, + stream=stream, + ) + except Exception as e: + ## LOGGING - log the original exception returned + logging.post_call( + input=messages, + api_key=api_key, + original_response=str(e), + additional_args={"headers": headers}, + ) + raise e + elif custom_llm_provider == "azure_ai": api_base = ( api_base # for deepinfra/perplexity/anyscale/groq/friendliai we check in get_llm_provider and pass in the 
api base from there @@ -1552,7 +1652,6 @@ def completion( # type: ignore # noqa: PLR0915 or custom_llm_provider == "cerebras" or custom_llm_provider == "sambanova" or custom_llm_provider == "volcengine" - or custom_llm_provider == "deepseek" or custom_llm_provider == "anyscale" or custom_llm_provider == "mistral" or custom_llm_provider == "openai" @@ -1588,6 +1687,11 @@ def completion( # type: ignore # noqa: PLR0915 if extra_headers is not None: optional_params["extra_headers"] = extra_headers + if ( + litellm.enable_preview_features and metadata is not None + ): # [PREVIEW] allow metadata to be passed to OPENAI + optional_params["metadata"] = add_openai_metadata(metadata) + ## LOAD CONFIG - if set config = litellm.OpenAIConfig.get_config() for k, v in config.items(): @@ -2170,21 +2274,22 @@ def completion( # type: ignore # noqa: PLR0915 data = {"model": model, "messages": messages, **optional_params} ## COMPLETION CALL - response = openai_chat_completions.completion( + response = base_llm_http_handler.completion( model=model, + stream=stream, messages=messages, - headers=headers, - api_key=api_key, + acompletion=acompletion, api_base=api_base, model_response=model_response, - print_verbose=print_verbose, optional_params=optional_params, litellm_params=litellm_params, - logger_fn=logger_fn, - logging_obj=logging, - acompletion=acompletion, - timeout=timeout, # type: ignore custom_llm_provider="openrouter", + timeout=timeout, + headers=headers, + encoding=encoding, + api_key=api_key, + logging_obj=logging, # model call logging done inside the class as we make need to modify I/O to fit aleph alpha's requirements + client=client, ) ## LOGGING logging.post_call( @@ -2477,6 +2582,7 @@ def completion( # type: ignore # noqa: PLR0915 print_verbose=print_verbose, optional_params=optional_params, litellm_params=litellm_params, + timeout=timeout, custom_prompt_dict=custom_prompt_dict, logger_fn=logger_fn, encoding=encoding, @@ -2531,18 +2637,14 @@ def completion( # type: 
ignore # noqa: PLR0915 aws_bedrock_client.meta.region_name ) - base_model = litellm.AmazonConverseConfig()._get_base_model(model) - - if base_model in litellm.bedrock_converse_models or model.startswith( - "converse/" - ): + bedrock_route = BedrockModelInfo.get_bedrock_route(model) + if bedrock_route == "converse": model = model.replace("converse/", "") response = bedrock_converse_chat_completion.completion( model=model, messages=messages, custom_prompt_dict=custom_prompt_dict, model_response=model_response, - print_verbose=print_verbose, optional_params=optional_params, litellm_params=litellm_params, # type: ignore logger_fn=logger_fn, @@ -2554,36 +2656,43 @@ def completion( # type: ignore # noqa: PLR0915 client=client, api_base=api_base, ) - else: - model = model.replace("invoke/", "") - response = bedrock_chat_completion.completion( + elif bedrock_route == "converse_like": + model = model.replace("converse_like/", "") + response = base_llm_http_handler.completion( model=model, + stream=stream, messages=messages, - custom_prompt_dict=custom_prompt_dict, + acompletion=acompletion, + api_base=api_base, model_response=model_response, - print_verbose=print_verbose, optional_params=optional_params, litellm_params=litellm_params, - logger_fn=logger_fn, - encoding=encoding, - logging_obj=logging, - extra_headers=extra_headers, + custom_llm_provider="bedrock", timeout=timeout, - acompletion=acompletion, + headers=headers, + encoding=encoding, + api_key=api_key, + logging_obj=logging, # model call logging done inside the class as we make need to modify I/O to fit aleph alpha's requirements client=client, + ) + else: + response = base_llm_http_handler.completion( + model=model, + stream=stream, + messages=messages, + acompletion=acompletion, api_base=api_base, + model_response=model_response, + optional_params=optional_params, + litellm_params=litellm_params, + custom_llm_provider="bedrock", + timeout=timeout, + headers=headers, + encoding=encoding, + api_key=api_key, + 
logging_obj=logging, + client=client, ) - - if optional_params.get("stream", False): - ## LOGGING - logging.post_call( - input=messages, - api_key=None, - original_response=response, - ) - - ## RESPONSE OBJECT - response = response elif custom_llm_provider == "watsonx": response = watsonx_chat_completion.completion( model=model, @@ -2746,6 +2855,7 @@ def completion( # type: ignore # noqa: PLR0915 acompletion=acompletion, model_response=model_response, encoding=encoding, + client=client, ) if acompletion is True or optional_params.get("stream", False) is True: return generator @@ -2935,8 +3045,8 @@ def completion( # type: ignore # noqa: PLR0915 custom_handler = item["custom_handler"] if custom_handler is None: - raise ValueError( - f"Unable to map your input to a model. Check your input - {args}" + raise LiteLLMUnknownProvider( + model=model, custom_llm_provider=custom_llm_provider ) ## ROUTE LLM CALL ## @@ -2974,8 +3084,8 @@ def completion( # type: ignore # noqa: PLR0915 ) else: - raise ValueError( - f"Unable to map your input to a model. 
Check your input - {args}" + raise LiteLLMUnknownProvider( + model=model, custom_llm_provider=custom_llm_provider ) return response except Exception as e: @@ -3162,16 +3272,10 @@ def embedding( # noqa: PLR0915 """ azure = kwargs.get("azure", None) client = kwargs.pop("client", None) - rpm = kwargs.pop("rpm", None) - tpm = kwargs.pop("tpm", None) max_retries = kwargs.get("max_retries", None) litellm_logging_obj: LiteLLMLoggingObj = kwargs.get("litellm_logging_obj") # type: ignore - cooldown_time = kwargs.get("cooldown_time", None) mock_response: Optional[List[float]] = kwargs.get("mock_response", None) # type: ignore - max_parallel_requests = kwargs.pop("max_parallel_requests", None) - model_info = kwargs.get("model_info", None) - metadata = kwargs.get("metadata", None) - proxy_server_request = kwargs.get("proxy_server_request", None) + azure_ad_token_provider = kwargs.pop("azure_ad_token_provider", None) aembedding = kwargs.get("aembedding", None) extra_headers = kwargs.get("extra_headers", None) headers = kwargs.get("headers", None) @@ -3224,8 +3328,6 @@ def embedding( # noqa: PLR0915 **non_default_params, ) - if mock_response is not None: - return mock_embedding(model=model, mock_response=mock_response) ### REGISTER CUSTOM MODEL PRICING -- IF GIVEN ### if input_cost_per_token is not None and output_cost_per_token is not None: litellm.register_model( @@ -3248,31 +3350,24 @@ def embedding( # noqa: PLR0915 } } ) + litellm_params_dict = get_litellm_params(**kwargs) + + logging: Logging = litellm_logging_obj # type: ignore + logging.update_environment_variables( + model=model, + user=user, + optional_params=optional_params, + litellm_params=litellm_params_dict, + custom_llm_provider=custom_llm_provider, + ) + + if mock_response is not None: + return mock_embedding(model=model, mock_response=mock_response) try: response: Optional[EmbeddingResponse] = None - logging: Logging = litellm_logging_obj # type: ignore - logging.update_environment_variables( - model=model, - 
user=user, - optional_params=optional_params, - litellm_params={ - "timeout": timeout, - "azure": azure, - "litellm_call_id": litellm_call_id, - "logger_fn": logger_fn, - "proxy_server_request": proxy_server_request, - "model_info": model_info, - "metadata": metadata, - "aembedding": aembedding, - "preset_cache_key": None, - "stream_response": {}, - "cooldown_time": cooldown_time, - }, - custom_llm_provider=custom_llm_provider, - ) + if azure is True or custom_llm_provider == "azure": # azure configs - api_type = get_secret_str("AZURE_API_TYPE") or "azure" api_base = api_base or litellm.api_base or get_secret_str("AZURE_API_BASE") @@ -3307,6 +3402,7 @@ def embedding( # noqa: PLR0915 api_key=api_key, api_version=api_version, azure_ad_token=azure_ad_token, + azure_ad_token_provider=azure_ad_token_provider, logging_obj=logging, timeout=timeout, model_response=EmbeddingResponse(), @@ -3321,6 +3417,7 @@ def embedding( # noqa: PLR0915 or custom_llm_provider == "openai" or custom_llm_provider == "together_ai" or custom_llm_provider == "nvidia_nim" + or custom_llm_provider == "litellm_proxy" ): api_base = ( api_base @@ -3344,7 +3441,6 @@ def embedding( # noqa: PLR0915 if extra_headers is not None: optional_params["extra_headers"] = extra_headers - api_type = "openai" api_version = None ## EMBEDDING CALL @@ -3398,7 +3494,8 @@ def embedding( # noqa: PLR0915 # set API KEY if api_key is None: api_key = ( - litellm.api_key + api_key + or litellm.api_key or litellm.openai_like_key or get_secret_str("OPENAI_LIKE_API_KEY") ) @@ -3755,14 +3852,16 @@ def embedding( # noqa: PLR0915 aembedding=aembedding, ) else: - args = locals() - raise ValueError(f"No valid embedding model args passed in - {args}") + raise LiteLLMUnknownProvider( + model=model, custom_llm_provider=custom_llm_provider + ) if response is not None and hasattr(response, "_hidden_params"): response._hidden_params["custom_llm_provider"] = custom_llm_provider if response is None: - args = locals() - raise ValueError(f"No 
valid embedding model args passed in - {args}") + raise LiteLLMUnknownProvider( + model=model, custom_llm_provider=custom_llm_provider + ) return response except Exception as e: ## LOGGING @@ -3801,42 +3900,19 @@ async def atext_completion( ctx = contextvars.copy_context() func_with_context = partial(ctx.run, func) - _, custom_llm_provider, _, _ = get_llm_provider( - model=model, api_base=kwargs.get("api_base", None) - ) - - if ( - custom_llm_provider == "openai" - or custom_llm_provider == "azure" - or custom_llm_provider == "azure_text" - or custom_llm_provider == "custom_openai" - or custom_llm_provider == "anyscale" - or custom_llm_provider == "mistral" - or custom_llm_provider == "openrouter" - or custom_llm_provider == "deepinfra" - or custom_llm_provider == "perplexity" - or custom_llm_provider == "groq" - or custom_llm_provider == "nvidia_nim" - or custom_llm_provider == "cerebras" - or custom_llm_provider == "sambanova" - or custom_llm_provider == "ai21_chat" - or custom_llm_provider == "ai21" - or custom_llm_provider == "volcengine" - or custom_llm_provider == "text-completion-codestral" - or custom_llm_provider == "deepseek" - or custom_llm_provider == "text-completion-openai" - or custom_llm_provider == "huggingface" - or custom_llm_provider == "ollama" - or custom_llm_provider == "vertex_ai" - or custom_llm_provider in litellm.openai_compatible_providers - ): # currently implemented aiohttp calls for just azure and openai, soon all. 
- # Await normally - response = await loop.run_in_executor(None, func_with_context) - if asyncio.iscoroutine(response): - response = await response + init_response = await loop.run_in_executor(None, func_with_context) + if isinstance(init_response, dict) or isinstance( + init_response, TextCompletionResponse + ): ## CACHING SCENARIO + if isinstance(init_response, dict): + response = TextCompletionResponse(**init_response) + else: + response = init_response + elif asyncio.iscoroutine(init_response): + response = await init_response else: - # Call the synchronous function using run_in_executor - response = await loop.run_in_executor(None, func_with_context) + response = init_response # type: ignore + if ( kwargs.get("stream", False) is True or isinstance(response, TextCompletionStreamWrapper) @@ -3851,6 +3927,7 @@ async def atext_completion( ), model=model, custom_llm_provider=custom_llm_provider, + stream_options=kwargs.get("stream_options"), ) else: ## OpenAI / Azure Text Completion Returns here @@ -4382,6 +4459,7 @@ def image_generation( # noqa: PLR0915 logger_fn = kwargs.get("logger_fn", None) mock_response: Optional[str] = kwargs.get("mock_response", None) # type: ignore proxy_server_request = kwargs.get("proxy_server_request", None) + azure_ad_token_provider = kwargs.get("azure_ad_token_provider", None) model_info = kwargs.get("model_info", None) metadata = kwargs.get("metadata", {}) litellm_logging_obj: LiteLLMLoggingObj = kwargs.get("litellm_logging_obj") # type: ignore @@ -4423,6 +4501,7 @@ def image_generation( # noqa: PLR0915 non_default_params = { k: v for k, v in kwargs.items() if k not in default_params } # model-specific params - pass them straight to the model/provider + optional_params = get_optional_params_image_gen( model=model, n=n, @@ -4434,6 +4513,7 @@ def image_generation( # noqa: PLR0915 custom_llm_provider=custom_llm_provider, **non_default_params, ) + logging: Logging = litellm_logging_obj logging.update_environment_variables( model=model, 
@@ -4495,6 +4575,8 @@ def image_generation( # noqa: PLR0915 timeout=timeout, api_key=api_key, api_base=api_base, + azure_ad_token=azure_ad_token, + azure_ad_token_provider=azure_ad_token_provider, logging_obj=litellm_logging_obj, optional_params=optional_params, model_response=model_response, @@ -4503,7 +4585,10 @@ def image_generation( # noqa: PLR0915 client=client, headers=headers, ) - elif custom_llm_provider == "openai": + elif ( + custom_llm_provider == "openai" + or custom_llm_provider in litellm.openai_compatible_providers + ): model_response = openai_chat_completions.image_generation( model=model, prompt=prompt, @@ -4527,6 +4612,7 @@ def image_generation( # noqa: PLR0915 optional_params=optional_params, model_response=model_response, aimg_generation=aimg_generation, + client=client, ) elif custom_llm_provider == "vertex_ai": vertex_ai_project = ( @@ -4568,8 +4654,8 @@ def image_generation( # noqa: PLR0915 custom_handler = item["custom_handler"] if custom_handler is None: - raise ValueError( - f"Unable to map your input to a model. 
Check your input - {args}" + raise LiteLLMUnknownProvider( + model=model, custom_llm_provider=custom_llm_provider ) ## ROUTE LLM CALL ## @@ -4949,8 +5035,7 @@ def transcription( ) elif ( custom_llm_provider == "openai" - or custom_llm_provider == "groq" - or custom_llm_provider == "fireworks_ai" + or custom_llm_provider in litellm.openai_compatible_providers ): api_base = ( api_base @@ -5108,7 +5193,10 @@ def speech( custom_llm_provider=custom_llm_provider, ) response: Optional[HttpxBinaryResponseContent] = None - if custom_llm_provider == "openai": + if ( + custom_llm_provider == "openai" + or custom_llm_provider in litellm.openai_compatible_providers + ): if voice is None or not (isinstance(voice, str)): raise litellm.BadRequestError( message="'voice' is required to be passed as a string for OpenAI TTS", @@ -5184,6 +5272,7 @@ def speech( ) or get_secret( "AZURE_AD_TOKEN" ) + azure_ad_token_provider = kwargs.get("azure_ad_token_provider", None) if extra_headers: optional_params["extra_headers"] = extra_headers @@ -5197,6 +5286,7 @@ def speech( api_base=api_base, api_version=api_version, azure_ad_token=azure_ad_token, + azure_ad_token_provider=azure_ad_token_provider, organization=organization, max_retries=max_retries, timeout=timeout, diff --git a/litellm/model_prices_and_context_window_backup.json b/litellm/model_prices_and_context_window_backup.json index 9022558c43..04913d4c63 100644 --- a/litellm/model_prices_and_context_window_backup.json +++ b/litellm/model_prices_and_context_window_backup.json @@ -6,7 +6,7 @@ "input_cost_per_token": 0.0000, "output_cost_per_token": 0.000, "litellm_provider": "one of https://docs.litellm.ai/docs/providers", - "mode": "one of chat, embedding, completion, image_generation, audio_transcription, audio_speech", + "mode": "one of: chat, embedding, completion, image_generation, audio_transcription, audio_speech, image_generation, moderation, rerank", "supports_function_calling": true, "supports_parallel_function_calling": true, 
"supports_vision": true, @@ -14,7 +14,8 @@ "supports_audio_output": true, "supports_prompt_caching": true, "supports_response_schema": true, - "supports_system_messages": true + "supports_system_messages": true, + "deprecation_date": "date when the model becomes deprecated in the format YYYY-MM-DD" }, "omni-moderation-latest": { "max_tokens": 32768, @@ -53,7 +54,8 @@ "mode": "chat", "supports_function_calling": true, "supports_prompt_caching": true, - "supports_system_messages": true + "supports_system_messages": true, + "supports_tool_choice": true }, "gpt-4o": { "max_tokens": 16384, @@ -71,7 +73,46 @@ "supports_response_schema": true, "supports_vision": true, "supports_prompt_caching": true, - "supports_system_messages": true + "supports_system_messages": true, + "supports_tool_choice": true + }, + "gpt-4.5-preview": { + "max_tokens": 16384, + "max_input_tokens": 128000, + "max_output_tokens": 16384, + "input_cost_per_token": 0.000075, + "output_cost_per_token": 0.00015, + "input_cost_per_token_batches": 0.0000375, + "output_cost_per_token_batches": 0.000075, + "cache_read_input_token_cost": 0.0000375, + "litellm_provider": "openai", + "mode": "chat", + "supports_function_calling": true, + "supports_parallel_function_calling": true, + "supports_response_schema": true, + "supports_vision": true, + "supports_prompt_caching": true, + "supports_system_messages": true, + "supports_tool_choice": true + }, + "gpt-4.5-preview-2025-02-27": { + "max_tokens": 16384, + "max_input_tokens": 128000, + "max_output_tokens": 16384, + "input_cost_per_token": 0.000075, + "output_cost_per_token": 0.00015, + "input_cost_per_token_batches": 0.0000375, + "output_cost_per_token_batches": 0.000075, + "cache_read_input_token_cost": 0.0000375, + "litellm_provider": "openai", + "mode": "chat", + "supports_function_calling": true, + "supports_parallel_function_calling": true, + "supports_response_schema": true, + "supports_vision": true, + "supports_prompt_caching": true, + 
"supports_system_messages": true, + "supports_tool_choice": true }, "gpt-4o-audio-preview": { "max_tokens": 16384, @@ -87,7 +128,8 @@ "supports_parallel_function_calling": true, "supports_audio_input": true, "supports_audio_output": true, - "supports_system_messages": true + "supports_system_messages": true, + "supports_tool_choice": true }, "gpt-4o-audio-preview-2024-12-17": { "max_tokens": 16384, @@ -103,7 +145,8 @@ "supports_parallel_function_calling": true, "supports_audio_input": true, "supports_audio_output": true, - "supports_system_messages": true + "supports_system_messages": true, + "supports_tool_choice": true }, "gpt-4o-audio-preview-2024-10-01": { "max_tokens": 16384, @@ -119,7 +162,8 @@ "supports_parallel_function_calling": true, "supports_audio_input": true, "supports_audio_output": true, - "supports_system_messages": true + "supports_system_messages": true, + "supports_tool_choice": true }, "gpt-4o-mini-audio-preview-2024-12-17": { "max_tokens": 16384, @@ -135,7 +179,8 @@ "supports_parallel_function_calling": true, "supports_audio_input": true, "supports_audio_output": true, - "supports_system_messages": true + "supports_system_messages": true, + "supports_tool_choice": true }, "gpt-4o-mini": { "max_tokens": 16384, @@ -153,7 +198,8 @@ "supports_response_schema": true, "supports_vision": true, "supports_prompt_caching": true, - "supports_system_messages": true + "supports_system_messages": true, + "supports_tool_choice": true }, "gpt-4o-mini-2024-07-18": { "max_tokens": 16384, @@ -171,7 +217,8 @@ "supports_response_schema": true, "supports_vision": true, "supports_prompt_caching": true, - "supports_system_messages": true + "supports_system_messages": true, + "supports_tool_choice": true }, "o1": { "max_tokens": 100000, @@ -187,20 +234,53 @@ "supports_vision": true, "supports_prompt_caching": true, "supports_system_messages": true, - "supports_response_schema": true + "supports_response_schema": true, + "supports_tool_choice": true }, "o1-mini": { 
"max_tokens": 65536, "max_input_tokens": 128000, "max_output_tokens": 65536, - "input_cost_per_token": 0.000003, - "output_cost_per_token": 0.000012, - "cache_read_input_token_cost": 0.0000015, + "input_cost_per_token": 0.0000011, + "output_cost_per_token": 0.0000044, + "cache_read_input_token_cost": 0.00000055, "litellm_provider": "openai", "mode": "chat", "supports_vision": true, "supports_prompt_caching": true }, + "o3-mini": { + "max_tokens": 100000, + "max_input_tokens": 200000, + "max_output_tokens": 100000, + "input_cost_per_token": 0.0000011, + "output_cost_per_token": 0.0000044, + "cache_read_input_token_cost": 0.00000055, + "litellm_provider": "openai", + "mode": "chat", + "supports_function_calling": true, + "supports_parallel_function_calling": false, + "supports_vision": false, + "supports_prompt_caching": true, + "supports_response_schema": true, + "supports_tool_choice": true + }, + "o3-mini-2025-01-31": { + "max_tokens": 100000, + "max_input_tokens": 200000, + "max_output_tokens": 100000, + "input_cost_per_token": 0.0000011, + "output_cost_per_token": 0.0000044, + "cache_read_input_token_cost": 0.00000055, + "litellm_provider": "openai", + "mode": "chat", + "supports_function_calling": true, + "supports_parallel_function_calling": false, + "supports_vision": false, + "supports_prompt_caching": true, + "supports_response_schema": true, + "supports_tool_choice": true + }, "o1-mini-2024-09-12": { "max_tokens": 65536, "max_input_tokens": 128000, @@ -251,7 +331,8 @@ "supports_vision": true, "supports_prompt_caching": true, "supports_system_messages": true, - "supports_response_schema": true + "supports_response_schema": true, + "supports_tool_choice": true }, "chatgpt-4o-latest": { "max_tokens": 4096, @@ -265,7 +346,8 @@ "supports_parallel_function_calling": true, "supports_vision": true, "supports_prompt_caching": true, - "supports_system_messages": true + "supports_system_messages": true, + "supports_tool_choice": true }, "gpt-4o-2024-05-13": { 
"max_tokens": 4096, @@ -281,7 +363,8 @@ "supports_parallel_function_calling": true, "supports_vision": true, "supports_prompt_caching": true, - "supports_system_messages": true + "supports_system_messages": true, + "supports_tool_choice": true }, "gpt-4o-2024-08-06": { "max_tokens": 16384, @@ -299,7 +382,8 @@ "supports_response_schema": true, "supports_vision": true, "supports_prompt_caching": true, - "supports_system_messages": true + "supports_system_messages": true, + "supports_tool_choice": true }, "gpt-4o-2024-11-20": { "max_tokens": 16384, @@ -317,7 +401,8 @@ "supports_response_schema": true, "supports_vision": true, "supports_prompt_caching": true, - "supports_system_messages": true + "supports_system_messages": true, + "supports_tool_choice": true }, "gpt-4o-realtime-preview-2024-10-01": { "max_tokens": 4096, @@ -335,7 +420,8 @@ "supports_parallel_function_calling": true, "supports_audio_input": true, "supports_audio_output": true, - "supports_system_messages": true + "supports_system_messages": true, + "supports_tool_choice": true }, "gpt-4o-realtime-preview": { "max_tokens": 4096, @@ -352,7 +438,8 @@ "supports_parallel_function_calling": true, "supports_audio_input": true, "supports_audio_output": true, - "supports_system_messages": true + "supports_system_messages": true, + "supports_tool_choice": true }, "gpt-4o-realtime-preview-2024-12-17": { "max_tokens": 4096, @@ -369,7 +456,8 @@ "supports_parallel_function_calling": true, "supports_audio_input": true, "supports_audio_output": true, - "supports_system_messages": true + "supports_system_messages": true, + "supports_tool_choice": true }, "gpt-4o-mini-realtime-preview": { "max_tokens": 4096, @@ -387,7 +475,8 @@ "supports_parallel_function_calling": true, "supports_audio_input": true, "supports_audio_output": true, - "supports_system_messages": true + "supports_system_messages": true, + "supports_tool_choice": true }, "gpt-4o-mini-realtime-preview-2024-12-17": { "max_tokens": 4096, @@ -405,7 +494,8 @@ 
"supports_parallel_function_calling": true, "supports_audio_input": true, "supports_audio_output": true, - "supports_system_messages": true + "supports_system_messages": true, + "supports_tool_choice": true }, "gpt-4-turbo-preview": { "max_tokens": 4096, @@ -418,7 +508,8 @@ "supports_function_calling": true, "supports_parallel_function_calling": true, "supports_prompt_caching": true, - "supports_system_messages": true + "supports_system_messages": true, + "supports_tool_choice": true }, "gpt-4-0314": { "max_tokens": 4096, @@ -429,7 +520,8 @@ "litellm_provider": "openai", "mode": "chat", "supports_prompt_caching": true, - "supports_system_messages": true + "supports_system_messages": true, + "supports_tool_choice": true }, "gpt-4-0613": { "max_tokens": 4096, @@ -441,7 +533,9 @@ "mode": "chat", "supports_function_calling": true, "supports_prompt_caching": true, - "supports_system_messages": true + "supports_system_messages": true, + "deprecation_date": "2025-06-06", + "supports_tool_choice": true }, "gpt-4-32k": { "max_tokens": 4096, @@ -452,7 +546,8 @@ "litellm_provider": "openai", "mode": "chat", "supports_prompt_caching": true, - "supports_system_messages": true + "supports_system_messages": true, + "supports_tool_choice": true }, "gpt-4-32k-0314": { "max_tokens": 4096, @@ -463,7 +558,8 @@ "litellm_provider": "openai", "mode": "chat", "supports_prompt_caching": true, - "supports_system_messages": true + "supports_system_messages": true, + "supports_tool_choice": true }, "gpt-4-32k-0613": { "max_tokens": 4096, @@ -474,7 +570,8 @@ "litellm_provider": "openai", "mode": "chat", "supports_prompt_caching": true, - "supports_system_messages": true + "supports_system_messages": true, + "supports_tool_choice": true }, "gpt-4-turbo": { "max_tokens": 4096, @@ -488,7 +585,8 @@ "supports_parallel_function_calling": true, "supports_vision": true, "supports_prompt_caching": true, - "supports_system_messages": true + "supports_system_messages": true, + "supports_tool_choice": 
true }, "gpt-4-turbo-2024-04-09": { "max_tokens": 4096, @@ -502,7 +600,8 @@ "supports_parallel_function_calling": true, "supports_vision": true, "supports_prompt_caching": true, - "supports_system_messages": true + "supports_system_messages": true, + "supports_tool_choice": true }, "gpt-4-1106-preview": { "max_tokens": 4096, @@ -515,7 +614,8 @@ "supports_function_calling": true, "supports_parallel_function_calling": true, "supports_prompt_caching": true, - "supports_system_messages": true + "supports_system_messages": true, + "supports_tool_choice": true }, "gpt-4-0125-preview": { "max_tokens": 4096, @@ -528,7 +628,8 @@ "supports_function_calling": true, "supports_parallel_function_calling": true, "supports_prompt_caching": true, - "supports_system_messages": true + "supports_system_messages": true, + "supports_tool_choice": true }, "gpt-4-vision-preview": { "max_tokens": 4096, @@ -540,7 +641,9 @@ "mode": "chat", "supports_vision": true, "supports_prompt_caching": true, - "supports_system_messages": true + "supports_system_messages": true, + "deprecation_date": "2024-12-06", + "supports_tool_choice": true }, "gpt-4-1106-vision-preview": { "max_tokens": 4096, @@ -552,7 +655,9 @@ "mode": "chat", "supports_vision": true, "supports_prompt_caching": true, - "supports_system_messages": true + "supports_system_messages": true, + "deprecation_date": "2024-12-06", + "supports_tool_choice": true }, "gpt-3.5-turbo": { "max_tokens": 4097, @@ -564,7 +669,8 @@ "mode": "chat", "supports_function_calling": true, "supports_prompt_caching": true, - "supports_system_messages": true + "supports_system_messages": true, + "supports_tool_choice": true }, "gpt-3.5-turbo-0301": { "max_tokens": 4097, @@ -575,7 +681,8 @@ "litellm_provider": "openai", "mode": "chat", "supports_prompt_caching": true, - "supports_system_messages": true + "supports_system_messages": true, + "supports_tool_choice": true }, "gpt-3.5-turbo-0613": { "max_tokens": 4097, @@ -587,7 +694,8 @@ "mode": "chat", 
"supports_function_calling": true, "supports_prompt_caching": true, - "supports_system_messages": true + "supports_system_messages": true, + "supports_tool_choice": true }, "gpt-3.5-turbo-1106": { "max_tokens": 16385, @@ -600,7 +708,8 @@ "supports_function_calling": true, "supports_parallel_function_calling": true, "supports_prompt_caching": true, - "supports_system_messages": true + "supports_system_messages": true, + "supports_tool_choice": true }, "gpt-3.5-turbo-0125": { "max_tokens": 16385, @@ -613,7 +722,8 @@ "supports_function_calling": true, "supports_parallel_function_calling": true, "supports_prompt_caching": true, - "supports_system_messages": true + "supports_system_messages": true, + "supports_tool_choice": true }, "gpt-3.5-turbo-16k": { "max_tokens": 16385, @@ -624,7 +734,8 @@ "litellm_provider": "openai", "mode": "chat", "supports_prompt_caching": true, - "supports_system_messages": true + "supports_system_messages": true, + "supports_tool_choice": true }, "gpt-3.5-turbo-16k-0613": { "max_tokens": 16385, @@ -635,7 +746,8 @@ "litellm_provider": "openai", "mode": "chat", "supports_prompt_caching": true, - "supports_system_messages": true + "supports_system_messages": true, + "supports_tool_choice": true }, "ft:gpt-3.5-turbo": { "max_tokens": 4096, @@ -647,7 +759,8 @@ "output_cost_per_token_batches": 0.000003, "litellm_provider": "openai", "mode": "chat", - "supports_system_messages": true + "supports_system_messages": true, + "supports_tool_choice": true }, "ft:gpt-3.5-turbo-0125": { "max_tokens": 4096, @@ -657,7 +770,8 @@ "output_cost_per_token": 0.000006, "litellm_provider": "openai", "mode": "chat", - "supports_system_messages": true + "supports_system_messages": true, + "supports_tool_choice": true }, "ft:gpt-3.5-turbo-1106": { "max_tokens": 4096, @@ -667,7 +781,8 @@ "output_cost_per_token": 0.000006, "litellm_provider": "openai", "mode": "chat", - "supports_system_messages": true + "supports_system_messages": true, + "supports_tool_choice": true }, 
"ft:gpt-3.5-turbo-0613": { "max_tokens": 4096, @@ -677,7 +792,8 @@ "output_cost_per_token": 0.000006, "litellm_provider": "openai", "mode": "chat", - "supports_system_messages": true + "supports_system_messages": true, + "supports_tool_choice": true }, "ft:gpt-4-0613": { "max_tokens": 4096, @@ -689,7 +805,8 @@ "mode": "chat", "supports_function_calling": true, "source": "OpenAI needs to add pricing for this ft model, will be updated when added by OpenAI. Defaulting to base model pricing", - "supports_system_messages": true + "supports_system_messages": true, + "supports_tool_choice": true }, "ft:gpt-4o-2024-08-06": { "max_tokens": 16384, @@ -705,7 +822,8 @@ "supports_parallel_function_calling": true, "supports_response_schema": true, "supports_vision": true, - "supports_system_messages": true + "supports_system_messages": true, + "supports_tool_choice": true }, "ft:gpt-4o-2024-11-20": { "max_tokens": 16384, @@ -721,7 +839,8 @@ "supports_response_schema": true, "supports_vision": true, "supports_prompt_caching": true, - "supports_system_messages": true + "supports_system_messages": true, + "supports_tool_choice": true }, "ft:gpt-4o-mini-2024-07-18": { "max_tokens": 16384, @@ -739,7 +858,8 @@ "supports_response_schema": true, "supports_vision": true, "supports_prompt_caching": true, - "supports_system_messages": true + "supports_system_messages": true, + "supports_tool_choice": true }, "ft:davinci-002": { "max_tokens": 16384, @@ -811,7 +931,7 @@ "input_cost_per_token": 0.000000, "output_cost_per_token": 0.000000, "litellm_provider": "openai", - "mode": "moderations" + "mode": "moderation" }, "text-moderation-007": { "max_tokens": 32768, @@ -820,7 +940,7 @@ "input_cost_per_token": 0.000000, "output_cost_per_token": 0.000000, "litellm_provider": "openai", - "mode": "moderations" + "mode": "moderation" }, "text-moderation-latest": { "max_tokens": 32768, @@ -829,7 +949,7 @@ "input_cost_per_token": 0.000000, "output_cost_per_token": 0.000000, "litellm_provider": "openai", 
- "mode": "moderations" + "mode": "moderation" }, "256-x-256/dall-e-2": { "mode": "image_generation", @@ -887,7 +1007,7 @@ }, "whisper-1": { "mode": "audio_transcription", - "input_cost_per_second": 0, + "input_cost_per_second": 0.0001, "output_cost_per_second": 0.0001, "litellm_provider": "openai" }, @@ -901,6 +1021,19 @@ "input_cost_per_character": 0.000030, "litellm_provider": "openai" }, + "azure/o3-mini-2025-01-31": { + "max_tokens": 100000, + "max_input_tokens": 200000, + "max_output_tokens": 100000, + "input_cost_per_token": 0.0000011, + "output_cost_per_token": 0.0000044, + "cache_read_input_token_cost": 0.00000055, + "litellm_provider": "azure", + "mode": "chat", + "supports_vision": false, + "supports_prompt_caching": true, + "supports_tool_choice": true + }, "azure/tts-1": { "mode": "audio_speech", "input_cost_per_character": 0.000015, @@ -913,17 +1046,31 @@ }, "azure/whisper-1": { "mode": "audio_transcription", - "input_cost_per_second": 0, + "input_cost_per_second": 0.0001, "output_cost_per_second": 0.0001, "litellm_provider": "azure" }, + "azure/o3-mini": { + "max_tokens": 100000, + "max_input_tokens": 200000, + "max_output_tokens": 100000, + "input_cost_per_token": 0.0000011, + "output_cost_per_token": 0.0000044, + "cache_read_input_token_cost": 0.00000055, + "litellm_provider": "azure", + "mode": "chat", + "supports_vision": false, + "supports_prompt_caching": true, + "supports_response_schema": true, + "supports_tool_choice": true + }, "azure/o1-mini": { "max_tokens": 65536, "max_input_tokens": 128000, "max_output_tokens": 65536, - "input_cost_per_token": 0.000003, - "output_cost_per_token": 0.000012, - "cache_read_input_token_cost": 0.0000015, + "input_cost_per_token": 0.00000121, + "output_cost_per_token": 0.00000484, + "cache_read_input_token_cost": 0.000000605, "litellm_provider": "azure", "mode": "chat", "supports_function_calling": true, @@ -935,9 +1082,9 @@ "max_tokens": 65536, "max_input_tokens": 128000, "max_output_tokens": 65536, - 
"input_cost_per_token": 0.000003, - "output_cost_per_token": 0.000012, - "cache_read_input_token_cost": 0.0000015, + "input_cost_per_token": 0.00000121, + "output_cost_per_token": 0.00000484, + "cache_read_input_token_cost": 0.000000605, "litellm_provider": "azure", "mode": "chat", "supports_function_calling": true, @@ -957,7 +1104,23 @@ "supports_function_calling": true, "supports_parallel_function_calling": true, "supports_vision": true, - "supports_prompt_caching": true + "supports_prompt_caching": true, + "supports_tool_choice": true + }, + "azure/o1-2024-12-17": { + "max_tokens": 100000, + "max_input_tokens": 200000, + "max_output_tokens": 100000, + "input_cost_per_token": 0.000015, + "output_cost_per_token": 0.000060, + "cache_read_input_token_cost": 0.0000075, + "litellm_provider": "azure", + "mode": "chat", + "supports_function_calling": true, + "supports_parallel_function_calling": true, + "supports_vision": true, + "supports_prompt_caching": true, + "supports_tool_choice": true }, "azure/o1-preview": { "max_tokens": 32768, @@ -999,7 +1162,8 @@ "supports_function_calling": true, "supports_parallel_function_calling": true, "supports_vision": true, - "supports_prompt_caching": true + "supports_prompt_caching": true, + "supports_tool_choice": true }, "azure/gpt-4o-2024-08-06": { "max_tokens": 16384, @@ -1007,13 +1171,15 @@ "max_output_tokens": 16384, "input_cost_per_token": 0.00000275, "output_cost_per_token": 0.000011, + "cache_read_input_token_cost": 0.00000125, "litellm_provider": "azure", "mode": "chat", "supports_function_calling": true, "supports_parallel_function_calling": true, "supports_response_schema": true, "supports_vision": true, - "supports_prompt_caching": true + "supports_prompt_caching": true, + "supports_tool_choice": true }, "azure/gpt-4o-2024-11-20": { "max_tokens": 16384, @@ -1026,7 +1192,8 @@ "supports_function_calling": true, "supports_parallel_function_calling": true, "supports_response_schema": true, - "supports_vision": true + 
"supports_vision": true, + "supports_tool_choice": true }, "azure/gpt-4o-2024-05-13": { "max_tokens": 4096, @@ -1039,7 +1206,8 @@ "supports_function_calling": true, "supports_parallel_function_calling": true, "supports_vision": true, - "supports_prompt_caching": true + "supports_prompt_caching": true, + "supports_tool_choice": true }, "azure/global-standard/gpt-4o-2024-08-06": { "max_tokens": 16384, @@ -1047,13 +1215,15 @@ "max_output_tokens": 16384, "input_cost_per_token": 0.0000025, "output_cost_per_token": 0.000010, + "cache_read_input_token_cost": 0.00000125, "litellm_provider": "azure", "mode": "chat", "supports_function_calling": true, "supports_parallel_function_calling": true, "supports_response_schema": true, "supports_vision": true, - "supports_prompt_caching": true + "supports_prompt_caching": true, + "supports_tool_choice": true }, "azure/global-standard/gpt-4o-2024-11-20": { "max_tokens": 16384, @@ -1066,7 +1236,8 @@ "supports_function_calling": true, "supports_parallel_function_calling": true, "supports_response_schema": true, - "supports_vision": true + "supports_vision": true, + "supports_tool_choice": true }, "azure/global-standard/gpt-4o-mini": { "max_tokens": 16384, @@ -1079,7 +1250,8 @@ "supports_function_calling": true, "supports_parallel_function_calling": true, "supports_response_schema": true, - "supports_vision": true + "supports_vision": true, + "supports_tool_choice": true }, "azure/gpt-4o-mini": { "max_tokens": 16384, @@ -1094,7 +1266,8 @@ "supports_parallel_function_calling": true, "supports_response_schema": true, "supports_vision": true, - "supports_prompt_caching": true + "supports_prompt_caching": true, + "supports_tool_choice": true }, "azure/gpt-4o-mini-2024-07-18": { "max_tokens": 16384, @@ -1109,7 +1282,8 @@ "supports_parallel_function_calling": true, "supports_response_schema": true, "supports_vision": true, - "supports_prompt_caching": true + "supports_prompt_caching": true, + "supports_tool_choice": true }, 
"azure/gpt-4-turbo-2024-04-09": { "max_tokens": 4096, @@ -1121,7 +1295,8 @@ "mode": "chat", "supports_function_calling": true, "supports_parallel_function_calling": true, - "supports_vision": true + "supports_vision": true, + "supports_tool_choice": true }, "azure/gpt-4-0125-preview": { "max_tokens": 4096, @@ -1132,7 +1307,8 @@ "litellm_provider": "azure", "mode": "chat", "supports_function_calling": true, - "supports_parallel_function_calling": true + "supports_parallel_function_calling": true, + "supports_tool_choice": true }, "azure/gpt-4-1106-preview": { "max_tokens": 4096, @@ -1143,7 +1319,8 @@ "litellm_provider": "azure", "mode": "chat", "supports_function_calling": true, - "supports_parallel_function_calling": true + "supports_parallel_function_calling": true, + "supports_tool_choice": true }, "azure/gpt-4-0613": { "max_tokens": 4096, @@ -1153,7 +1330,8 @@ "output_cost_per_token": 0.00006, "litellm_provider": "azure", "mode": "chat", - "supports_function_calling": true + "supports_function_calling": true, + "supports_tool_choice": true }, "azure/gpt-4-32k-0613": { "max_tokens": 4096, @@ -1162,7 +1340,8 @@ "input_cost_per_token": 0.00006, "output_cost_per_token": 0.00012, "litellm_provider": "azure", - "mode": "chat" + "mode": "chat", + "supports_tool_choice": true }, "azure/gpt-4-32k": { "max_tokens": 4096, @@ -1171,7 +1350,8 @@ "input_cost_per_token": 0.00006, "output_cost_per_token": 0.00012, "litellm_provider": "azure", - "mode": "chat" + "mode": "chat", + "supports_tool_choice": true }, "azure/gpt-4": { "max_tokens": 4096, @@ -1181,7 +1361,8 @@ "output_cost_per_token": 0.00006, "litellm_provider": "azure", "mode": "chat", - "supports_function_calling": true + "supports_function_calling": true, + "supports_tool_choice": true }, "azure/gpt-4-turbo": { "max_tokens": 4096, @@ -1192,7 +1373,8 @@ "litellm_provider": "azure", "mode": "chat", "supports_function_calling": true, - "supports_parallel_function_calling": true + "supports_parallel_function_calling": 
true, + "supports_tool_choice": true }, "azure/gpt-4-turbo-vision-preview": { "max_tokens": 4096, @@ -1202,7 +1384,8 @@ "output_cost_per_token": 0.00003, "litellm_provider": "azure", "mode": "chat", - "supports_vision": true + "supports_vision": true, + "supports_tool_choice": true }, "azure/gpt-35-turbo-16k-0613": { "max_tokens": 4096, @@ -1212,7 +1395,8 @@ "output_cost_per_token": 0.000004, "litellm_provider": "azure", "mode": "chat", - "supports_function_calling": true + "supports_function_calling": true, + "supports_tool_choice": true }, "azure/gpt-35-turbo-1106": { "max_tokens": 4096, @@ -1223,7 +1407,9 @@ "litellm_provider": "azure", "mode": "chat", "supports_function_calling": true, - "supports_parallel_function_calling": true + "supports_parallel_function_calling": true, + "deprecation_date": "2025-03-31", + "supports_tool_choice": true }, "azure/gpt-35-turbo-0613": { "max_tokens": 4097, @@ -1234,7 +1420,9 @@ "litellm_provider": "azure", "mode": "chat", "supports_function_calling": true, - "supports_parallel_function_calling": true + "supports_parallel_function_calling": true, + "deprecation_date": "2025-02-13", + "supports_tool_choice": true }, "azure/gpt-35-turbo-0301": { "max_tokens": 4097, @@ -1245,7 +1433,9 @@ "litellm_provider": "azure", "mode": "chat", "supports_function_calling": true, - "supports_parallel_function_calling": true + "supports_parallel_function_calling": true, + "deprecation_date": "2025-02-13", + "supports_tool_choice": true }, "azure/gpt-35-turbo-0125": { "max_tokens": 4096, @@ -1256,7 +1446,22 @@ "litellm_provider": "azure", "mode": "chat", "supports_function_calling": true, - "supports_parallel_function_calling": true + "supports_parallel_function_calling": true, + "deprecation_date": "2025-05-31", + "supports_tool_choice": true + }, + "azure/gpt-3.5-turbo-0125": { + "max_tokens": 4096, + "max_input_tokens": 16384, + "max_output_tokens": 4096, + "input_cost_per_token": 0.0000005, + "output_cost_per_token": 0.0000015, + 
"litellm_provider": "azure", + "mode": "chat", + "supports_function_calling": true, + "supports_parallel_function_calling": true, + "deprecation_date": "2025-03-31", + "supports_tool_choice": true }, "azure/gpt-35-turbo-16k": { "max_tokens": 4096, @@ -1265,7 +1470,8 @@ "input_cost_per_token": 0.000003, "output_cost_per_token": 0.000004, "litellm_provider": "azure", - "mode": "chat" + "mode": "chat", + "supports_tool_choice": true }, "azure/gpt-35-turbo": { "max_tokens": 4096, @@ -1275,7 +1481,19 @@ "output_cost_per_token": 0.0000015, "litellm_provider": "azure", "mode": "chat", - "supports_function_calling": true + "supports_function_calling": true, + "supports_tool_choice": true + }, + "azure/gpt-3.5-turbo": { + "max_tokens": 4096, + "max_input_tokens": 4097, + "max_output_tokens": 4096, + "input_cost_per_token": 0.0000005, + "output_cost_per_token": 0.0000015, + "litellm_provider": "azure", + "mode": "chat", + "supports_function_calling": true, + "supports_tool_choice": true }, "azure/gpt-3.5-turbo-instruct-0914": { "max_tokens": 4097, @@ -1403,6 +1621,18 @@ "litellm_provider": "azure", "mode": "image_generation" }, + "azure_ai/deepseek-r1": { + "max_tokens": 8192, + "max_input_tokens": 128000, + "max_output_tokens": 8192, + "input_cost_per_token": 0.0, + "input_cost_per_token_cache_hit": 0.0, + "output_cost_per_token": 0.0, + "litellm_provider": "azure_ai", + "mode": "chat", + "supports_prompt_caching": true, + "supports_tool_choice": true + }, "azure_ai/jamba-instruct": { "max_tokens": 4096, "max_input_tokens": 70000, @@ -1410,7 +1640,8 @@ "input_cost_per_token": 0.0000005, "output_cost_per_token": 0.0000007, "litellm_provider": "azure_ai", - "mode": "chat" + "mode": "chat", + "supports_tool_choice": true }, "azure_ai/mistral-large": { "max_tokens": 8191, @@ -1420,7 +1651,8 @@ "output_cost_per_token": 0.000012, "litellm_provider": "azure_ai", "mode": "chat", - "supports_function_calling": true + "supports_function_calling": true, + "supports_tool_choice": true 
}, "azure_ai/mistral-small": { "max_tokens": 8191, @@ -1430,7 +1662,8 @@ "output_cost_per_token": 0.000003, "litellm_provider": "azure_ai", "supports_function_calling": true, - "mode": "chat" + "mode": "chat", + "supports_tool_choice": true }, "azure_ai/mistral-large-2407": { "max_tokens": 4096, @@ -1441,7 +1674,8 @@ "litellm_provider": "azure_ai", "supports_function_calling": true, "mode": "chat", - "source": "https://azuremarketplace.microsoft.com/en/marketplace/apps/000-000.mistral-ai-large-2407-offer?tab=Overview" + "source": "https://azuremarketplace.microsoft.com/en/marketplace/apps/000-000.mistral-ai-large-2407-offer?tab=Overview", + "supports_tool_choice": true }, "azure_ai/ministral-3b": { "max_tokens": 4096, @@ -1452,7 +1686,8 @@ "litellm_provider": "azure_ai", "supports_function_calling": true, "mode": "chat", - "source": "https://azuremarketplace.microsoft.com/en/marketplace/apps/000-000.ministral-3b-2410-offer?tab=Overview" + "source": "https://azuremarketplace.microsoft.com/en/marketplace/apps/000-000.ministral-3b-2410-offer?tab=Overview", + "supports_tool_choice": true }, "azure_ai/Llama-3.2-11B-Vision-Instruct": { "max_tokens": 2048, @@ -1464,7 +1699,8 @@ "supports_function_calling": true, "supports_vision": true, "mode": "chat", - "source": "https://azuremarketplace.microsoft.com/en/marketplace/apps/metagenai.meta-llama-3-2-11b-vision-instruct-offer?tab=Overview" + "source": "https://azuremarketplace.microsoft.com/en/marketplace/apps/metagenai.meta-llama-3-2-11b-vision-instruct-offer?tab=Overview", + "supports_tool_choice": true }, "azure_ai/Llama-3.3-70B-Instruct": { "max_tokens": 2048, @@ -1475,7 +1711,8 @@ "litellm_provider": "azure_ai", "supports_function_calling": true, "mode": "chat", - "source": "https://azuremarketplace.microsoft.com/en/marketplace/apps/metagenai.llama-3-3-70b-instruct-offer?tab=Overview" + "source": "https://azuremarketplace.microsoft.com/en/marketplace/apps/metagenai.llama-3-3-70b-instruct-offer?tab=Overview", + 
"supports_tool_choice": true }, "azure_ai/Llama-3.2-90B-Vision-Instruct": { "max_tokens": 2048, @@ -1487,7 +1724,8 @@ "supports_function_calling": true, "supports_vision": true, "mode": "chat", - "source": "https://azuremarketplace.microsoft.com/en/marketplace/apps/metagenai.meta-llama-3-2-90b-vision-instruct-offer?tab=Overview" + "source": "https://azuremarketplace.microsoft.com/en/marketplace/apps/metagenai.meta-llama-3-2-90b-vision-instruct-offer?tab=Overview", + "supports_tool_choice": true }, "azure_ai/Meta-Llama-3-70B-Instruct": { "max_tokens": 2048, @@ -1496,7 +1734,8 @@ "input_cost_per_token": 0.0000011, "output_cost_per_token": 0.00000037, "litellm_provider": "azure_ai", - "mode": "chat" + "mode": "chat", + "supports_tool_choice": true }, "azure_ai/Meta-Llama-3.1-8B-Instruct": { "max_tokens": 2048, @@ -1506,7 +1745,8 @@ "output_cost_per_token": 0.00000061, "litellm_provider": "azure_ai", "mode": "chat", - "source":"https://azuremarketplace.microsoft.com/en-us/marketplace/apps/metagenai.meta-llama-3-1-8b-instruct-offer?tab=PlansAndPrice" + "source":"https://azuremarketplace.microsoft.com/en-us/marketplace/apps/metagenai.meta-llama-3-1-8b-instruct-offer?tab=PlansAndPrice", + "supports_tool_choice": true }, "azure_ai/Meta-Llama-3.1-70B-Instruct": { "max_tokens": 2048, @@ -1516,7 +1756,8 @@ "output_cost_per_token": 0.00000354, "litellm_provider": "azure_ai", "mode": "chat", - "source":"https://azuremarketplace.microsoft.com/en-us/marketplace/apps/metagenai.meta-llama-3-1-70b-instruct-offer?tab=PlansAndPrice" + "source":"https://azuremarketplace.microsoft.com/en-us/marketplace/apps/metagenai.meta-llama-3-1-70b-instruct-offer?tab=PlansAndPrice", + "supports_tool_choice": true }, "azure_ai/Meta-Llama-3.1-405B-Instruct": { "max_tokens": 2048, @@ -1526,7 +1767,21 @@ "output_cost_per_token": 0.000016, "litellm_provider": "azure_ai", "mode": "chat", - 
"source":"https://azuremarketplace.microsoft.com/en-us/marketplace/apps/metagenai.meta-llama-3-1-405b-instruct-offer?tab=PlansAndPrice" + "source":"https://azuremarketplace.microsoft.com/en-us/marketplace/apps/metagenai.meta-llama-3-1-405b-instruct-offer?tab=PlansAndPrice", + "supports_tool_choice": true + }, + "azure_ai/Phi-4": { + "max_tokens": 4096, + "max_input_tokens": 128000, + "max_output_tokens": 4096, + "input_cost_per_token": 0.000000125, + "output_cost_per_token": 0.0000005, + "litellm_provider": "azure_ai", + "mode": "chat", + "supports_vision": false, + "source": "https://techcommunity.microsoft.com/blog/machinelearningblog/affordable-innovation-unveiling-the-pricing-of-phi-3-slms-on-models-as-a-service/4156495", + "supports_function_calling": true, + "supports_tool_choice": true }, "azure_ai/Phi-3.5-mini-instruct": { "max_tokens": 4096, @@ -1537,7 +1792,8 @@ "litellm_provider": "azure_ai", "mode": "chat", "supports_vision": false, - "source": "https://azure.microsoft.com/en-us/pricing/details/phi-3/" + "source": "https://azure.microsoft.com/en-us/pricing/details/phi-3/", + "supports_tool_choice": true }, "azure_ai/Phi-3.5-vision-instruct": { "max_tokens": 4096, @@ -1548,7 +1804,8 @@ "litellm_provider": "azure_ai", "mode": "chat", "supports_vision": true, - "source": "https://azure.microsoft.com/en-us/pricing/details/phi-3/" + "source": "https://azure.microsoft.com/en-us/pricing/details/phi-3/", + "supports_tool_choice": true }, "azure_ai/Phi-3.5-MoE-instruct": { "max_tokens": 4096, @@ -1559,7 +1816,8 @@ "litellm_provider": "azure_ai", "mode": "chat", "supports_vision": false, - "source": "https://azure.microsoft.com/en-us/pricing/details/phi-3/" + "source": "https://azure.microsoft.com/en-us/pricing/details/phi-3/", + "supports_tool_choice": true }, "azure_ai/Phi-3-mini-4k-instruct": { "max_tokens": 4096, @@ -1570,7 +1828,8 @@ "litellm_provider": "azure_ai", "mode": "chat", "supports_vision": false, - "source": 
"https://azure.microsoft.com/en-us/pricing/details/phi-3/" + "source": "https://azure.microsoft.com/en-us/pricing/details/phi-3/", + "supports_tool_choice": true }, "azure_ai/Phi-3-mini-128k-instruct": { "max_tokens": 4096, @@ -1581,7 +1840,8 @@ "litellm_provider": "azure_ai", "mode": "chat", "supports_vision": false, - "source": "https://azure.microsoft.com/en-us/pricing/details/phi-3/" + "source": "https://azure.microsoft.com/en-us/pricing/details/phi-3/", + "supports_tool_choice": true }, "azure_ai/Phi-3-small-8k-instruct": { "max_tokens": 4096, @@ -1592,7 +1852,8 @@ "litellm_provider": "azure_ai", "mode": "chat", "supports_vision": false, - "source": "https://azure.microsoft.com/en-us/pricing/details/phi-3/" + "source": "https://azure.microsoft.com/en-us/pricing/details/phi-3/", + "supports_tool_choice": true }, "azure_ai/Phi-3-small-128k-instruct": { "max_tokens": 4096, @@ -1603,7 +1864,8 @@ "litellm_provider": "azure_ai", "mode": "chat", "supports_vision": false, - "source": "https://azure.microsoft.com/en-us/pricing/details/phi-3/" + "source": "https://azure.microsoft.com/en-us/pricing/details/phi-3/", + "supports_tool_choice": true }, "azure_ai/Phi-3-medium-4k-instruct": { "max_tokens": 4096, @@ -1614,7 +1876,8 @@ "litellm_provider": "azure_ai", "mode": "chat", "supports_vision": false, - "source": "https://azure.microsoft.com/en-us/pricing/details/phi-3/" + "source": "https://azure.microsoft.com/en-us/pricing/details/phi-3/", + "supports_tool_choice": true }, "azure_ai/Phi-3-medium-128k-instruct": { "max_tokens": 4096, @@ -1625,7 +1888,8 @@ "litellm_provider": "azure_ai", "mode": "chat", "supports_vision": false, - "source": "https://azure.microsoft.com/en-us/pricing/details/phi-3/" + "source": "https://azure.microsoft.com/en-us/pricing/details/phi-3/", + "supports_tool_choice": true }, "azure_ai/cohere-rerank-v3-multilingual": { "max_tokens": 4096, @@ -1723,29 +1987,32 @@ "output_cost_per_token": 0.00000025, "litellm_provider": "mistral", "mode": "chat", 
- "supports_assistant_prefill": true + "supports_assistant_prefill": true, + "supports_tool_choice": true }, "mistral/mistral-small": { "max_tokens": 8191, "max_input_tokens": 32000, "max_output_tokens": 8191, - "input_cost_per_token": 0.000001, - "output_cost_per_token": 0.000003, + "input_cost_per_token": 0.0000001, + "output_cost_per_token": 0.0000003, "litellm_provider": "mistral", "supports_function_calling": true, "mode": "chat", - "supports_assistant_prefill": true + "supports_assistant_prefill": true, + "supports_tool_choice": true }, "mistral/mistral-small-latest": { "max_tokens": 8191, "max_input_tokens": 32000, "max_output_tokens": 8191, - "input_cost_per_token": 0.000001, - "output_cost_per_token": 0.000003, + "input_cost_per_token": 0.0000001, + "output_cost_per_token": 0.0000003, "litellm_provider": "mistral", "supports_function_calling": true, "mode": "chat", - "supports_assistant_prefill": true + "supports_assistant_prefill": true, + "supports_tool_choice": true }, "mistral/mistral-medium": { "max_tokens": 8191, @@ -1755,7 +2022,8 @@ "output_cost_per_token": 0.0000081, "litellm_provider": "mistral", "mode": "chat", - "supports_assistant_prefill": true + "supports_assistant_prefill": true, + "supports_tool_choice": true }, "mistral/mistral-medium-latest": { "max_tokens": 8191, @@ -1765,7 +2033,8 @@ "output_cost_per_token": 0.0000081, "litellm_provider": "mistral", "mode": "chat", - "supports_assistant_prefill": true + "supports_assistant_prefill": true, + "supports_tool_choice": true }, "mistral/mistral-medium-2312": { "max_tokens": 8191, @@ -1775,7 +2044,8 @@ "output_cost_per_token": 0.0000081, "litellm_provider": "mistral", "mode": "chat", - "supports_assistant_prefill": true + "supports_assistant_prefill": true, + "supports_tool_choice": true }, "mistral/mistral-large-latest": { "max_tokens": 128000, @@ -1786,7 +2056,8 @@ "litellm_provider": "mistral", "mode": "chat", "supports_function_calling": true, - "supports_assistant_prefill": true + 
"supports_assistant_prefill": true, + "supports_tool_choice": true }, "mistral/mistral-large-2411": { "max_tokens": 128000, @@ -1797,7 +2068,8 @@ "litellm_provider": "mistral", "mode": "chat", "supports_function_calling": true, - "supports_assistant_prefill": true + "supports_assistant_prefill": true, + "supports_tool_choice": true }, "mistral/mistral-large-2402": { "max_tokens": 8191, @@ -1808,7 +2080,8 @@ "litellm_provider": "mistral", "mode": "chat", "supports_function_calling": true, - "supports_assistant_prefill": true + "supports_assistant_prefill": true, + "supports_tool_choice": true }, "mistral/mistral-large-2407": { "max_tokens": 128000, @@ -1819,7 +2092,8 @@ "litellm_provider": "mistral", "mode": "chat", "supports_function_calling": true, - "supports_assistant_prefill": true + "supports_assistant_prefill": true, + "supports_tool_choice": true }, "mistral/pixtral-large-latest": { "max_tokens": 128000, @@ -1831,7 +2105,8 @@ "mode": "chat", "supports_function_calling": true, "supports_assistant_prefill": true, - "supports_vision": true + "supports_vision": true, + "supports_tool_choice": true }, "mistral/pixtral-large-2411": { "max_tokens": 128000, @@ -1843,7 +2118,8 @@ "mode": "chat", "supports_function_calling": true, "supports_assistant_prefill": true, - "supports_vision": true + "supports_vision": true, + "supports_tool_choice": true }, "mistral/pixtral-12b-2409": { "max_tokens": 128000, @@ -1855,7 +2131,8 @@ "mode": "chat", "supports_function_calling": true, "supports_assistant_prefill": true, - "supports_vision": true + "supports_vision": true, + "supports_tool_choice": true }, "mistral/open-mistral-7b": { "max_tokens": 8191, @@ -1865,7 +2142,8 @@ "output_cost_per_token": 0.00000025, "litellm_provider": "mistral", "mode": "chat", - "supports_assistant_prefill": true + "supports_assistant_prefill": true, + "supports_tool_choice": true }, "mistral/open-mixtral-8x7b": { "max_tokens": 8191, @@ -1876,18 +2154,20 @@ "litellm_provider": "mistral", "mode": 
"chat", "supports_function_calling": true, - "supports_assistant_prefill": true + "supports_assistant_prefill": true, + "supports_tool_choice": true }, "mistral/open-mixtral-8x22b": { "max_tokens": 8191, - "max_input_tokens": 64000, + "max_input_tokens": 65336, "max_output_tokens": 8191, "input_cost_per_token": 0.000002, "output_cost_per_token": 0.000006, "litellm_provider": "mistral", "mode": "chat", "supports_function_calling": true, - "supports_assistant_prefill": true + "supports_assistant_prefill": true, + "supports_tool_choice": true }, "mistral/codestral-latest": { "max_tokens": 8191, @@ -1897,7 +2177,8 @@ "output_cost_per_token": 0.000003, "litellm_provider": "mistral", "mode": "chat", - "supports_assistant_prefill": true + "supports_assistant_prefill": true, + "supports_tool_choice": true }, "mistral/codestral-2405": { "max_tokens": 8191, @@ -1907,7 +2188,8 @@ "output_cost_per_token": 0.000003, "litellm_provider": "mistral", "mode": "chat", - "supports_assistant_prefill": true + "supports_assistant_prefill": true, + "supports_tool_choice": true }, "mistral/open-mistral-nemo": { "max_tokens": 128000, @@ -1918,7 +2200,8 @@ "litellm_provider": "mistral", "mode": "chat", "source": "https://mistral.ai/technology/", - "supports_assistant_prefill": true + "supports_assistant_prefill": true, + "supports_tool_choice": true }, "mistral/open-mistral-nemo-2407": { "max_tokens": 128000, @@ -1929,7 +2212,8 @@ "litellm_provider": "mistral", "mode": "chat", "source": "https://mistral.ai/technology/", - "supports_assistant_prefill": true + "supports_assistant_prefill": true, + "supports_tool_choice": true }, "mistral/open-codestral-mamba": { "max_tokens": 256000, @@ -1940,7 +2224,8 @@ "litellm_provider": "mistral", "mode": "chat", "source": "https://mistral.ai/technology/", - "supports_assistant_prefill": true + "supports_assistant_prefill": true, + "supports_tool_choice": true }, "mistral/codestral-mamba-latest": { "max_tokens": 256000, @@ -1951,7 +2236,8 @@ 
"litellm_provider": "mistral", "mode": "chat", "source": "https://mistral.ai/technology/", - "supports_assistant_prefill": true + "supports_assistant_prefill": true, + "supports_tool_choice": true }, "mistral/mistral-embed": { "max_tokens": 8192, @@ -1962,7 +2248,7 @@ }, "deepseek/deepseek-reasoner": { "max_tokens": 8192, - "max_input_tokens": 64000, + "max_input_tokens": 65536, "max_output_tokens": 8192, "input_cost_per_token": 0.00000055, "input_cost_per_token_cache_hit": 0.00000014, @@ -1975,14 +2261,14 @@ "supports_prompt_caching": true }, "deepseek/deepseek-chat": { - "max_tokens": 4096, - "max_input_tokens": 128000, - "max_output_tokens": 4096, - "input_cost_per_token": 0.00000014, - "input_cost_per_token_cache_hit": 0.000000014, - "cache_read_input_token_cost": 0.000000014, + "max_tokens": 8192, + "max_input_tokens": 65536, + "max_output_tokens": 8192, + "input_cost_per_token": 0.00000027, + "input_cost_per_token_cache_hit": 0.00000007, + "cache_read_input_token_cost": 0.00000007, "cache_creation_input_token_cost": 0.0, - "output_cost_per_token": 0.00000028, + "output_cost_per_token": 0.0000011, "litellm_provider": "deepseek", "mode": "chat", "supports_function_calling": true, @@ -1999,7 +2285,8 @@ "litellm_provider": "codestral", "mode": "chat", "source": "https://docs.mistral.ai/capabilities/code_generation/", - "supports_assistant_prefill": true + "supports_assistant_prefill": true, + "supports_tool_choice": true }, "codestral/codestral-2405": { "max_tokens": 8191, @@ -2010,7 +2297,8 @@ "litellm_provider": "codestral", "mode": "chat", "source": "https://docs.mistral.ai/capabilities/code_generation/", - "supports_assistant_prefill": true + "supports_assistant_prefill": true, + "supports_tool_choice": true }, "text-completion-codestral/codestral-latest": { "max_tokens": 8191, @@ -2041,7 +2329,93 @@ "litellm_provider": "xai", "mode": "chat", "supports_function_calling": true, - "supports_vision": true + "supports_vision": true, + "supports_tool_choice": true 
+ }, + "xai/grok-2-vision-1212": { + "max_tokens": 32768, + "max_input_tokens": 32768, + "max_output_tokens": 32768, + "input_cost_per_token": 0.000002, + "input_cost_per_image": 0.000002, + "output_cost_per_token": 0.00001, + "litellm_provider": "xai", + "mode": "chat", + "supports_function_calling": true, + "supports_vision": true, + "supports_tool_choice": true + }, + "xai/grok-2-vision-latest": { + "max_tokens": 32768, + "max_input_tokens": 32768, + "max_output_tokens": 32768, + "input_cost_per_token": 0.000002, + "input_cost_per_image": 0.000002, + "output_cost_per_token": 0.00001, + "litellm_provider": "xai", + "mode": "chat", + "supports_function_calling": true, + "supports_vision": true, + "supports_tool_choice": true + }, + "xai/grok-2-vision": { + "max_tokens": 32768, + "max_input_tokens": 32768, + "max_output_tokens": 32768, + "input_cost_per_token": 0.000002, + "input_cost_per_image": 0.000002, + "output_cost_per_token": 0.00001, + "litellm_provider": "xai", + "mode": "chat", + "supports_function_calling": true, + "supports_vision": true, + "supports_tool_choice": true + }, + "xai/grok-vision-beta": { + "max_tokens": 8192, + "max_input_tokens": 8192, + "max_output_tokens": 8192, + "input_cost_per_token": 0.000005, + "input_cost_per_image": 0.000005, + "output_cost_per_token": 0.000015, + "litellm_provider": "xai", + "mode": "chat", + "supports_function_calling": true, + "supports_vision": true, + "supports_tool_choice": true + }, + "xai/grok-2-1212": { + "max_tokens": 131072, + "max_input_tokens": 131072, + "max_output_tokens": 131072, + "input_cost_per_token": 0.000002, + "output_cost_per_token": 0.00001, + "litellm_provider": "xai", + "mode": "chat", + "supports_function_calling": true, + "supports_tool_choice": true + }, + "xai/grok-2": { + "max_tokens": 131072, + "max_input_tokens": 131072, + "max_output_tokens": 131072, + "input_cost_per_token": 0.000002, + "output_cost_per_token": 0.00001, + "litellm_provider": "xai", + "mode": "chat", + 
"supports_function_calling": true, + "supports_tool_choice": true + }, + "xai/grok-2-latest": { + "max_tokens": 131072, + "max_input_tokens": 131072, + "max_output_tokens": 131072, + "input_cost_per_token": 0.000002, + "output_cost_per_token": 0.00001, + "litellm_provider": "xai", + "mode": "chat", + "supports_function_calling": true, + "supports_tool_choice": true }, "deepseek/deepseek-coder": { "max_tokens": 4096, @@ -2057,6 +2431,19 @@ "supports_tool_choice": true, "supports_prompt_caching": true }, + "groq/deepseek-r1-distill-llama-70b": { + "max_tokens": 131072, + "max_input_tokens": 131072, + "max_output_tokens": 131072, + "input_cost_per_token": 0.00000075, + "output_cost_per_token": 0.00000099, + "litellm_provider": "groq", + "mode": "chat", + "supports_system_messages": false, + "supports_function_calling": false, + "supports_response_schema": false, + "supports_tool_choice": true + }, "groq/llama-3.3-70b-versatile": { "max_tokens": 8192, "max_input_tokens": 128000, @@ -2064,7 +2451,10 @@ "input_cost_per_token": 0.00000059, "output_cost_per_token": 0.00000079, "litellm_provider": "groq", - "mode": "chat" + "mode": "chat", + "supports_function_calling": true, + "supports_response_schema": true, + "supports_tool_choice": true }, "groq/llama-3.3-70b-specdec": { "max_tokens": 8192, @@ -2073,7 +2463,8 @@ "input_cost_per_token": 0.00000059, "output_cost_per_token": 0.00000099, "litellm_provider": "groq", - "mode": "chat" + "mode": "chat", + "supports_tool_choice": true }, "groq/llama2-70b-4096": { "max_tokens": 4096, @@ -2084,7 +2475,8 @@ "litellm_provider": "groq", "mode": "chat", "supports_function_calling": true, - "supports_response_schema": true + "supports_response_schema": true, + "supports_tool_choice": true }, "groq/llama3-8b-8192": { "max_tokens": 8192, @@ -2095,7 +2487,8 @@ "litellm_provider": "groq", "mode": "chat", "supports_function_calling": true, - "supports_response_schema": true + "supports_response_schema": true, + "supports_tool_choice": true 
}, "groq/llama-3.2-1b-preview": { "max_tokens": 8192, @@ -2106,7 +2499,8 @@ "litellm_provider": "groq", "mode": "chat", "supports_function_calling": true, - "supports_response_schema": true + "supports_response_schema": true, + "supports_tool_choice": true }, "groq/llama-3.2-3b-preview": { "max_tokens": 8192, @@ -2117,7 +2511,8 @@ "litellm_provider": "groq", "mode": "chat", "supports_function_calling": true, - "supports_response_schema": true + "supports_response_schema": true, + "supports_tool_choice": true }, "groq/llama-3.2-11b-text-preview": { "max_tokens": 8192, @@ -2128,7 +2523,8 @@ "litellm_provider": "groq", "mode": "chat", "supports_function_calling": true, - "supports_response_schema": true + "supports_response_schema": true, + "supports_tool_choice": true }, "groq/llama-3.2-11b-vision-preview": { "max_tokens": 8192, @@ -2140,7 +2536,8 @@ "mode": "chat", "supports_function_calling": true, "supports_response_schema": true, - "supports_vision": true + "supports_vision": true, + "supports_tool_choice": true }, "groq/llama-3.2-90b-text-preview": { "max_tokens": 8192, @@ -2151,7 +2548,8 @@ "litellm_provider": "groq", "mode": "chat", "supports_function_calling": true, - "supports_response_schema": true + "supports_response_schema": true, + "supports_tool_choice": true }, "groq/llama-3.2-90b-vision-preview": { "max_tokens": 8192, @@ -2163,7 +2561,8 @@ "mode": "chat", "supports_function_calling": true, "supports_response_schema": true, - "supports_vision": true + "supports_vision": true, + "supports_tool_choice": true }, "groq/llama3-70b-8192": { "max_tokens": 8192, @@ -2174,7 +2573,8 @@ "litellm_provider": "groq", "mode": "chat", "supports_function_calling": true, - "supports_response_schema": true + "supports_response_schema": true, + "supports_tool_choice": true }, "groq/llama-3.1-8b-instant": { "max_tokens": 8192, @@ -2185,7 +2585,8 @@ "litellm_provider": "groq", "mode": "chat", "supports_function_calling": true, - "supports_response_schema": true + 
"supports_response_schema": true, + "supports_tool_choice": true }, "groq/llama-3.1-70b-versatile": { "max_tokens": 8192, @@ -2196,7 +2597,8 @@ "litellm_provider": "groq", "mode": "chat", "supports_function_calling": true, - "supports_response_schema": true + "supports_response_schema": true, + "supports_tool_choice": true }, "groq/llama-3.1-405b-reasoning": { "max_tokens": 8192, @@ -2207,7 +2609,8 @@ "litellm_provider": "groq", "mode": "chat", "supports_function_calling": true, - "supports_response_schema": true + "supports_response_schema": true, + "supports_tool_choice": true }, "groq/mixtral-8x7b-32768": { "max_tokens": 32768, @@ -2218,7 +2621,8 @@ "litellm_provider": "groq", "mode": "chat", "supports_function_calling": true, - "supports_response_schema": true + "supports_response_schema": true, + "supports_tool_choice": true }, "groq/gemma-7b-it": { "max_tokens": 8192, @@ -2229,7 +2633,8 @@ "litellm_provider": "groq", "mode": "chat", "supports_function_calling": true, - "supports_response_schema": true + "supports_response_schema": true, + "supports_tool_choice": true }, "groq/gemma2-9b-it": { "max_tokens": 8192, @@ -2240,7 +2645,8 @@ "litellm_provider": "groq", "mode": "chat", "supports_function_calling": true, - "supports_response_schema": true + "supports_response_schema": true, + "supports_tool_choice": true }, "groq/llama3-groq-70b-8192-tool-use-preview": { "max_tokens": 8192, @@ -2251,7 +2657,8 @@ "litellm_provider": "groq", "mode": "chat", "supports_function_calling": true, - "supports_response_schema": true + "supports_response_schema": true, + "supports_tool_choice": true }, "groq/llama3-groq-8b-8192-tool-use-preview": { "max_tokens": 8192, @@ -2262,7 +2669,8 @@ "litellm_provider": "groq", "mode": "chat", "supports_function_calling": true, - "supports_response_schema": true + "supports_response_schema": true, + "supports_tool_choice": true }, "cerebras/llama3.1-8b": { "max_tokens": 128000, @@ -2272,7 +2680,8 @@ "output_cost_per_token": 0.0000001, 
"litellm_provider": "cerebras", "mode": "chat", - "supports_function_calling": true + "supports_function_calling": true, + "supports_tool_choice": true }, "cerebras/llama3.1-70b": { "max_tokens": 128000, @@ -2282,7 +2691,19 @@ "output_cost_per_token": 0.0000006, "litellm_provider": "cerebras", "mode": "chat", - "supports_function_calling": true + "supports_function_calling": true, + "supports_tool_choice": true + }, + "cerebras/llama3.3-70b": { + "max_tokens": 128000, + "max_input_tokens": 128000, + "max_output_tokens": 128000, + "input_cost_per_token": 0.00000085, + "output_cost_per_token": 0.0000012, + "litellm_provider": "cerebras", + "mode": "chat", + "supports_function_calling": true, + "supports_tool_choice": true }, "friendliai/meta-llama-3.1-8b-instruct": { "max_tokens": 8192, @@ -2295,7 +2716,8 @@ "supports_function_calling": true, "supports_parallel_function_calling": true, "supports_system_messages": true, - "supports_response_schema": true + "supports_response_schema": true, + "supports_tool_choice": true }, "friendliai/meta-llama-3.1-70b-instruct": { "max_tokens": 8192, @@ -2308,7 +2730,8 @@ "supports_function_calling": true, "supports_parallel_function_calling": true, "supports_system_messages": true, - "supports_response_schema": true + "supports_response_schema": true, + "supports_tool_choice": true }, "claude-instant-1.2": { "max_tokens": 8191, @@ -2317,7 +2740,8 @@ "input_cost_per_token": 0.000000163, "output_cost_per_token": 0.000000551, "litellm_provider": "anthropic", - "mode": "chat" + "mode": "chat", + "supports_tool_choice": true }, "claude-2": { "max_tokens": 8191, @@ -2335,7 +2759,8 @@ "input_cost_per_token": 0.000008, "output_cost_per_token": 0.000024, "litellm_provider": "anthropic", - "mode": "chat" + "mode": "chat", + "supports_tool_choice": true }, "claude-3-haiku-20240307": { "max_tokens": 4096, @@ -2352,9 +2777,31 @@ "tool_use_system_prompt_tokens": 264, "supports_assistant_prefill": true, "supports_prompt_caching": true, - 
"supports_response_schema": true + "supports_response_schema": true, + "deprecation_date": "2025-03-01", + "supports_tool_choice": true }, "claude-3-5-haiku-20241022": { + "max_tokens": 8192, + "max_input_tokens": 200000, + "max_output_tokens": 8192, + "input_cost_per_token": 0.0000008, + "output_cost_per_token": 0.000004, + "cache_creation_input_token_cost": 0.000001, + "cache_read_input_token_cost": 0.0000008, + "litellm_provider": "anthropic", + "mode": "chat", + "supports_function_calling": true, + "supports_vision": true, + "tool_use_system_prompt_tokens": 264, + "supports_assistant_prefill": true, + "supports_pdf_input": true, + "supports_prompt_caching": true, + "supports_response_schema": true, + "deprecation_date": "2025-10-01", + "supports_tool_choice": true + }, + "claude-3-5-haiku-latest": { "max_tokens": 8192, "max_input_tokens": 200000, "max_output_tokens": 8192, @@ -2365,10 +2812,33 @@ "litellm_provider": "anthropic", "mode": "chat", "supports_function_calling": true, + "supports_vision": true, "tool_use_system_prompt_tokens": 264, "supports_assistant_prefill": true, + "supports_pdf_input": true, "supports_prompt_caching": true, - "supports_response_schema": true + "supports_response_schema": true, + "deprecation_date": "2025-10-01", + "supports_tool_choice": true + }, + "claude-3-opus-latest": { + "max_tokens": 4096, + "max_input_tokens": 200000, + "max_output_tokens": 4096, + "input_cost_per_token": 0.000015, + "output_cost_per_token": 0.000075, + "cache_creation_input_token_cost": 0.00001875, + "cache_read_input_token_cost": 0.0000015, + "litellm_provider": "anthropic", + "mode": "chat", + "supports_function_calling": true, + "supports_vision": true, + "tool_use_system_prompt_tokens": 395, + "supports_assistant_prefill": true, + "supports_prompt_caching": true, + "supports_response_schema": true, + "deprecation_date": "2025-03-01", + "supports_tool_choice": true }, "claude-3-opus-20240229": { "max_tokens": 4096, @@ -2385,7 +2855,9 @@ 
"tool_use_system_prompt_tokens": 395, "supports_assistant_prefill": true, "supports_prompt_caching": true, - "supports_response_schema": true + "supports_response_schema": true, + "deprecation_date": "2025-03-01", + "supports_tool_choice": true }, "claude-3-sonnet-20240229": { "max_tokens": 4096, @@ -2400,7 +2872,29 @@ "tool_use_system_prompt_tokens": 159, "supports_assistant_prefill": true, "supports_prompt_caching": true, - "supports_response_schema": true + "supports_response_schema": true, + "deprecation_date": "2025-07-21", + "supports_tool_choice": true + }, + "claude-3-5-sonnet-latest": { + "max_tokens": 8192, + "max_input_tokens": 200000, + "max_output_tokens": 8192, + "input_cost_per_token": 0.000003, + "output_cost_per_token": 0.000015, + "cache_creation_input_token_cost": 0.00000375, + "cache_read_input_token_cost": 0.0000003, + "litellm_provider": "anthropic", + "mode": "chat", + "supports_function_calling": true, + "supports_vision": true, + "tool_use_system_prompt_tokens": 159, + "supports_assistant_prefill": true, + "supports_pdf_input": true, + "supports_prompt_caching": true, + "supports_response_schema": true, + "deprecation_date": "2025-06-01", + "supports_tool_choice": true }, "claude-3-5-sonnet-20240620": { "max_tokens": 8192, @@ -2416,8 +2910,51 @@ "supports_vision": true, "tool_use_system_prompt_tokens": 159, "supports_assistant_prefill": true, + "supports_pdf_input": true, "supports_prompt_caching": true, - "supports_response_schema": true + "supports_response_schema": true, + "deprecation_date": "2025-06-01", + "supports_tool_choice": true + }, + "claude-3-7-sonnet-latest": { + "max_tokens": 128000, + "max_input_tokens": 200000, + "max_output_tokens": 128000, + "input_cost_per_token": 0.000003, + "output_cost_per_token": 0.000015, + "cache_creation_input_token_cost": 0.00000375, + "cache_read_input_token_cost": 0.0000003, + "litellm_provider": "anthropic", + "mode": "chat", + "supports_function_calling": true, + "supports_vision": true, + 
"tool_use_system_prompt_tokens": 159, + "supports_assistant_prefill": true, + "supports_pdf_input": true, + "supports_prompt_caching": true, + "supports_response_schema": true, + "deprecation_date": "2025-06-01", + "supports_tool_choice": true + }, + "claude-3-7-sonnet-20250219": { + "max_tokens": 128000, + "max_input_tokens": 200000, + "max_output_tokens": 128000, + "input_cost_per_token": 0.000003, + "output_cost_per_token": 0.000015, + "cache_creation_input_token_cost": 0.00000375, + "cache_read_input_token_cost": 0.0000003, + "litellm_provider": "anthropic", + "mode": "chat", + "supports_function_calling": true, + "supports_vision": true, + "tool_use_system_prompt_tokens": 159, + "supports_assistant_prefill": true, + "supports_pdf_input": true, + "supports_prompt_caching": true, + "supports_response_schema": true, + "deprecation_date": "2026-02-01", + "supports_tool_choice": true }, "claude-3-5-sonnet-20241022": { "max_tokens": 8192, @@ -2435,7 +2972,9 @@ "supports_assistant_prefill": true, "supports_pdf_input": true, "supports_prompt_caching": true, - "supports_response_schema": true + "supports_response_schema": true, + "deprecation_date": "2025-10-01", + "supports_tool_choice": true }, "text-bison": { "max_tokens": 2048, @@ -2521,7 +3060,8 @@ "output_cost_per_character": 0.0000005, "litellm_provider": "vertex_ai-chat-models", "mode": "chat", - "source": "https://cloud.google.com/vertex-ai/generative-ai/docs/learn/models#foundation_models" + "source": "https://cloud.google.com/vertex-ai/generative-ai/docs/learn/models#foundation_models", + "supports_tool_choice": true }, "chat-bison@001": { "max_tokens": 4096, @@ -2533,7 +3073,8 @@ "output_cost_per_character": 0.0000005, "litellm_provider": "vertex_ai-chat-models", "mode": "chat", - "source": "https://cloud.google.com/vertex-ai/generative-ai/docs/learn/models#foundation_models" + "source": "https://cloud.google.com/vertex-ai/generative-ai/docs/learn/models#foundation_models", + "supports_tool_choice": true }, 
"chat-bison@002": { "max_tokens": 4096, @@ -2545,7 +3086,9 @@ "output_cost_per_character": 0.0000005, "litellm_provider": "vertex_ai-chat-models", "mode": "chat", - "source": "https://cloud.google.com/vertex-ai/generative-ai/docs/learn/models#foundation_models" + "source": "https://cloud.google.com/vertex-ai/generative-ai/docs/learn/models#foundation_models", + "deprecation_date": "2025-04-09", + "supports_tool_choice": true }, "chat-bison-32k": { "max_tokens": 8192, @@ -2557,7 +3100,8 @@ "output_cost_per_character": 0.0000005, "litellm_provider": "vertex_ai-chat-models", "mode": "chat", - "source": "https://cloud.google.com/vertex-ai/generative-ai/docs/learn/models#foundation_models" + "source": "https://cloud.google.com/vertex-ai/generative-ai/docs/learn/models#foundation_models", + "supports_tool_choice": true }, "chat-bison-32k@002": { "max_tokens": 8192, @@ -2569,7 +3113,8 @@ "output_cost_per_character": 0.0000005, "litellm_provider": "vertex_ai-chat-models", "mode": "chat", - "source": "https://cloud.google.com/vertex-ai/generative-ai/docs/learn/models#foundation_models" + "source": "https://cloud.google.com/vertex-ai/generative-ai/docs/learn/models#foundation_models", + "supports_tool_choice": true }, "code-bison": { "max_tokens": 1024, @@ -2581,7 +3126,8 @@ "output_cost_per_character": 0.0000005, "litellm_provider": "vertex_ai-code-text-models", "mode": "chat", - "source": "https://cloud.google.com/vertex-ai/generative-ai/docs/learn/models#foundation_models" + "source": "https://cloud.google.com/vertex-ai/generative-ai/docs/learn/models#foundation_models", + "supports_tool_choice": true }, "code-bison@001": { "max_tokens": 1024, @@ -2681,7 +3227,8 @@ "output_cost_per_character": 0.0000005, "litellm_provider": "vertex_ai-code-chat-models", "mode": "chat", - "source": "https://cloud.google.com/vertex-ai/generative-ai/docs/learn/models#foundation_models" + "source": "https://cloud.google.com/vertex-ai/generative-ai/docs/learn/models#foundation_models", + 
"supports_tool_choice": true }, "codechat-bison": { "max_tokens": 1024, @@ -2693,7 +3240,8 @@ "output_cost_per_character": 0.0000005, "litellm_provider": "vertex_ai-code-chat-models", "mode": "chat", - "source": "https://cloud.google.com/vertex-ai/generative-ai/docs/learn/models#foundation_models" + "source": "https://cloud.google.com/vertex-ai/generative-ai/docs/learn/models#foundation_models", + "supports_tool_choice": true }, "codechat-bison@001": { "max_tokens": 1024, @@ -2705,7 +3253,8 @@ "output_cost_per_character": 0.0000005, "litellm_provider": "vertex_ai-code-chat-models", "mode": "chat", - "source": "https://cloud.google.com/vertex-ai/generative-ai/docs/learn/models#foundation_models" + "source": "https://cloud.google.com/vertex-ai/generative-ai/docs/learn/models#foundation_models", + "supports_tool_choice": true }, "codechat-bison@002": { "max_tokens": 1024, @@ -2717,7 +3266,8 @@ "output_cost_per_character": 0.0000005, "litellm_provider": "vertex_ai-code-chat-models", "mode": "chat", - "source": "https://cloud.google.com/vertex-ai/generative-ai/docs/learn/models#foundation_models" + "source": "https://cloud.google.com/vertex-ai/generative-ai/docs/learn/models#foundation_models", + "supports_tool_choice": true }, "codechat-bison-32k": { "max_tokens": 8192, @@ -2729,7 +3279,8 @@ "output_cost_per_character": 0.0000005, "litellm_provider": "vertex_ai-code-chat-models", "mode": "chat", - "source": "https://cloud.google.com/vertex-ai/generative-ai/docs/learn/models#foundation_models" + "source": "https://cloud.google.com/vertex-ai/generative-ai/docs/learn/models#foundation_models", + "supports_tool_choice": true }, "codechat-bison-32k@002": { "max_tokens": 8192, @@ -2741,7 +3292,8 @@ "output_cost_per_character": 0.0000005, "litellm_provider": "vertex_ai-code-chat-models", "mode": "chat", - "source": "https://cloud.google.com/vertex-ai/generative-ai/docs/learn/models#foundation_models" + "source": 
"https://cloud.google.com/vertex-ai/generative-ai/docs/learn/models#foundation_models", + "supports_tool_choice": true }, "gemini-pro": { "max_tokens": 8192, @@ -2756,7 +3308,8 @@ "litellm_provider": "vertex_ai-language-models", "mode": "chat", "supports_function_calling": true, - "source": "https://cloud.google.com/vertex-ai/generative-ai/pricing" + "source": "https://cloud.google.com/vertex-ai/generative-ai/pricing", + "supports_tool_choice": true }, "gemini-1.0-pro": { "max_tokens": 8192, @@ -2771,7 +3324,8 @@ "litellm_provider": "vertex_ai-language-models", "mode": "chat", "supports_function_calling": true, - "source": "https://cloud.google.com/vertex-ai/generative-ai/pricing#google_models" + "source": "https://cloud.google.com/vertex-ai/generative-ai/pricing#google_models", + "supports_tool_choice": true }, "gemini-1.0-pro-001": { "max_tokens": 8192, @@ -2786,7 +3340,9 @@ "litellm_provider": "vertex_ai-language-models", "mode": "chat", "supports_function_calling": true, - "source": "https://cloud.google.com/vertex-ai/generative-ai/docs/learn/models#foundation_models" + "source": "https://cloud.google.com/vertex-ai/generative-ai/docs/learn/models#foundation_models", + "deprecation_date": "2025-04-09", + "supports_tool_choice": true }, "gemini-1.0-ultra": { "max_tokens": 8192, @@ -2801,7 +3357,8 @@ "litellm_provider": "vertex_ai-language-models", "mode": "chat", "supports_function_calling": true, - "source": "As of Jun, 2024. There is no available doc on vertex ai pricing gemini-1.0-ultra-001. Using gemini-1.0-pro pricing. Got max_tokens info here: https://cloud.google.com/vertex-ai/generative-ai/docs/learn/models#foundation_models" + "source": "As of Jun, 2024. There is no available doc on vertex ai pricing gemini-1.0-ultra-001. Using gemini-1.0-pro pricing. 
Got max_tokens info here: https://cloud.google.com/vertex-ai/generative-ai/docs/learn/models#foundation_models", + "supports_tool_choice": true }, "gemini-1.0-ultra-001": { "max_tokens": 8192, @@ -2816,7 +3373,8 @@ "litellm_provider": "vertex_ai-language-models", "mode": "chat", "supports_function_calling": true, - "source": "As of Jun, 2024. There is no available doc on vertex ai pricing gemini-1.0-ultra-001. Using gemini-1.0-pro pricing. Got max_tokens info here: https://cloud.google.com/vertex-ai/generative-ai/docs/learn/models#foundation_models" + "source": "As of Jun, 2024. There is no available doc on vertex ai pricing gemini-1.0-ultra-001. Using gemini-1.0-pro pricing. Got max_tokens info here: https://cloud.google.com/vertex-ai/generative-ai/docs/learn/models#foundation_models", + "supports_tool_choice": true }, "gemini-1.0-pro-002": { "max_tokens": 8192, @@ -2831,7 +3389,9 @@ "litellm_provider": "vertex_ai-language-models", "mode": "chat", "supports_function_calling": true, - "source": "https://cloud.google.com/vertex-ai/generative-ai/docs/learn/models#foundation_models" + "source": "https://cloud.google.com/vertex-ai/generative-ai/docs/learn/models#foundation_models", + "deprecation_date": "2025-04-09", + "supports_tool_choice": true }, "gemini-1.5-pro": { "max_tokens": 8192, @@ -2886,7 +3446,8 @@ "supports_function_calling": true, "supports_tool_choice": true, "supports_response_schema": true, - "source": "https://cloud.google.com/vertex-ai/generative-ai/docs/learn/models#gemini-1.5-pro" + "source": "https://cloud.google.com/vertex-ai/generative-ai/docs/learn/models#gemini-1.5-pro", + "deprecation_date": "2025-09-24" }, "gemini-1.5-pro-001": { "max_tokens": 8192, @@ -2913,7 +3474,8 @@ "supports_function_calling": true, "supports_tool_choice": true, "supports_response_schema": true, - "source": "https://cloud.google.com/vertex-ai/generative-ai/docs/learn/models#foundation_models" + "source": 
"https://cloud.google.com/vertex-ai/generative-ai/docs/learn/models#foundation_models", + "deprecation_date": "2025-05-24" }, "gemini-1.5-pro-preview-0514": { "max_tokens": 8192, @@ -3022,7 +3584,8 @@ "supports_function_calling": true, "supports_vision": true, "supports_response_schema": true, - "source": "https://cloud.google.com/vertex-ai/generative-ai/docs/learn/models#foundation_models" + "source": "https://cloud.google.com/vertex-ai/generative-ai/docs/learn/models#foundation_models", + "supports_tool_choice": true }, "gemini-1.5-flash-exp-0827": { "max_tokens": 8192, @@ -3054,7 +3617,8 @@ "supports_function_calling": true, "supports_vision": true, "supports_response_schema": true, - "source": "https://cloud.google.com/vertex-ai/generative-ai/docs/learn/models#foundation_models" + "source": "https://cloud.google.com/vertex-ai/generative-ai/docs/learn/models#foundation_models", + "supports_tool_choice": true }, "gemini-1.5-flash-002": { "max_tokens": 8192, @@ -3086,7 +3650,9 @@ "supports_function_calling": true, "supports_vision": true, "supports_response_schema": true, - "source": "https://cloud.google.com/vertex-ai/generative-ai/docs/learn/models#gemini-1.5-flash" + "source": "https://cloud.google.com/vertex-ai/generative-ai/docs/learn/models#gemini-1.5-flash", + "deprecation_date": "2025-09-24", + "supports_tool_choice": true }, "gemini-1.5-flash-001": { "max_tokens": 8192, @@ -3118,7 +3684,9 @@ "supports_function_calling": true, "supports_vision": true, "supports_response_schema": true, - "source": "https://cloud.google.com/vertex-ai/generative-ai/docs/learn/models#foundation_models" + "source": "https://cloud.google.com/vertex-ai/generative-ai/docs/learn/models#foundation_models", + "deprecation_date": "2025-05-24", + "supports_tool_choice": true }, "gemini-1.5-flash-preview-0514": { "max_tokens": 8192, @@ -3149,7 +3717,8 @@ "supports_system_messages": true, "supports_function_calling": true, "supports_vision": true, - "source": 
"https://cloud.google.com/vertex-ai/generative-ai/docs/learn/models#foundation_models" + "source": "https://cloud.google.com/vertex-ai/generative-ai/docs/learn/models#foundation_models", + "supports_tool_choice": true }, "gemini-pro-experimental": { "max_tokens": 8192, @@ -3186,13 +3755,15 @@ "max_images_per_prompt": 16, "max_videos_per_prompt": 1, "max_video_length": 2, - "input_cost_per_token": 0.00000025, - "output_cost_per_token": 0.0000005, + "input_cost_per_token": 0.0000005, + "output_cost_per_token": 0.0000015, + "input_cost_per_image": 0.0025, "litellm_provider": "vertex_ai-vision-models", "mode": "chat", "supports_function_calling": true, "supports_vision": true, - "source": "https://cloud.google.com/vertex-ai/generative-ai/docs/learn/models#foundation_models" + "source": "https://cloud.google.com/vertex-ai/generative-ai/docs/learn/models#foundation_models", + "supports_tool_choice": true }, "gemini-1.0-pro-vision": { "max_tokens": 2048, @@ -3201,13 +3772,15 @@ "max_images_per_prompt": 16, "max_videos_per_prompt": 1, "max_video_length": 2, - "input_cost_per_token": 0.00000025, - "output_cost_per_token": 0.0000005, + "input_cost_per_token": 0.0000005, + "output_cost_per_token": 0.0000015, + "input_cost_per_image": 0.0025, "litellm_provider": "vertex_ai-vision-models", "mode": "chat", "supports_function_calling": true, "supports_vision": true, - "source": "https://cloud.google.com/vertex-ai/generative-ai/docs/learn/models#foundation_models" + "source": "https://cloud.google.com/vertex-ai/generative-ai/docs/learn/models#foundation_models", + "supports_tool_choice": true }, "gemini-1.0-pro-vision-001": { "max_tokens": 2048, @@ -3216,13 +3789,16 @@ "max_images_per_prompt": 16, "max_videos_per_prompt": 1, "max_video_length": 2, - "input_cost_per_token": 0.00000025, - "output_cost_per_token": 0.0000005, + "input_cost_per_token": 0.0000005, + "output_cost_per_token": 0.0000015, + "input_cost_per_image": 0.0025, "litellm_provider": "vertex_ai-vision-models", 
"mode": "chat", "supports_function_calling": true, "supports_vision": true, - "source": "https://cloud.google.com/vertex-ai/generative-ai/docs/learn/models#foundation_models" + "source": "https://cloud.google.com/vertex-ai/generative-ai/docs/learn/models#foundation_models", + "deprecation_date": "2025-04-09", + "supports_tool_choice": true }, "medlm-medium": { "max_tokens": 8192, @@ -3232,7 +3808,8 @@ "output_cost_per_character": 0.000001, "litellm_provider": "vertex_ai-language-models", "mode": "chat", - "source": "https://cloud.google.com/vertex-ai/generative-ai/docs/learn/models#foundation_models" + "source": "https://cloud.google.com/vertex-ai/generative-ai/docs/learn/models#foundation_models", + "supports_tool_choice": true }, "medlm-large": { "max_tokens": 1024, @@ -3242,7 +3819,44 @@ "output_cost_per_character": 0.000015, "litellm_provider": "vertex_ai-language-models", "mode": "chat", - "source": "https://cloud.google.com/vertex-ai/generative-ai/docs/learn/models#foundation_models" + "source": "https://cloud.google.com/vertex-ai/generative-ai/docs/learn/models#foundation_models", + "supports_tool_choice": true + }, + "gemini-2.0-pro-exp-02-05": { + "max_tokens": 8192, + "max_input_tokens": 2097152, + "max_output_tokens": 8192, + "max_images_per_prompt": 3000, + "max_videos_per_prompt": 10, + "max_video_length": 1, + "max_audio_length_hours": 8.4, + "max_audio_per_prompt": 1, + "max_pdf_size_mb": 30, + "input_cost_per_image": 0, + "input_cost_per_video_per_second": 0, + "input_cost_per_audio_per_second": 0, + "input_cost_per_token": 0, + "input_cost_per_character": 0, + "input_cost_per_token_above_128k_tokens": 0, + "input_cost_per_character_above_128k_tokens": 0, + "input_cost_per_image_above_128k_tokens": 0, + "input_cost_per_video_per_second_above_128k_tokens": 0, + "input_cost_per_audio_per_second_above_128k_tokens": 0, + "output_cost_per_token": 0, + "output_cost_per_character": 0, + "output_cost_per_token_above_128k_tokens": 0, + 
"output_cost_per_character_above_128k_tokens": 0, + "litellm_provider": "vertex_ai-language-models", + "mode": "chat", + "supports_system_messages": true, + "supports_function_calling": true, + "supports_vision": true, + "supports_audio_input": true, + "supports_video_input": true, + "supports_pdf_input": true, + "supports_response_schema": true, + "supports_tool_choice": true, + "source": "https://cloud.google.com/vertex-ai/generative-ai/pricing" }, "gemini-2.0-flash-exp": { "max_tokens": 8192, @@ -3275,7 +3889,56 @@ "supports_vision": true, "supports_response_schema": true, "supports_audio_output": true, - "source": "https://cloud.google.com/vertex-ai/generative-ai/docs/learn/models#gemini-2.0-flash" + "source": "https://cloud.google.com/vertex-ai/generative-ai/pricing", + "supports_tool_choice": true + }, + "gemini/gemini-2.0-flash": { + "max_tokens": 8192, + "max_input_tokens": 1048576, + "max_output_tokens": 8192, + "max_images_per_prompt": 3000, + "max_videos_per_prompt": 10, + "max_video_length": 1, + "max_audio_length_hours": 8.4, + "max_audio_per_prompt": 1, + "max_pdf_size_mb": 30, + "input_cost_per_audio_token": 0.0000007, + "input_cost_per_token": 0.0000001, + "output_cost_per_token": 0.0000004, + "litellm_provider": "gemini", + "mode": "chat", + "rpm": 10000, + "tpm": 10000000, + "supports_system_messages": true, + "supports_function_calling": true, + "supports_vision": true, + "supports_response_schema": true, + "supports_audio_output": true, + "supports_tool_choice": true, + "source": "https://ai.google.dev/pricing#2_0flash" + }, + "gemini-2.0-flash-001": { + "max_tokens": 8192, + "max_input_tokens": 1048576, + "max_output_tokens": 8192, + "max_images_per_prompt": 3000, + "max_videos_per_prompt": 10, + "max_video_length": 1, + "max_audio_length_hours": 8.4, + "max_audio_per_prompt": 1, + "max_pdf_size_mb": 30, + "input_cost_per_audio_token": 0.000001, + "input_cost_per_token": 0.00000015, + "output_cost_per_token": 0.0000006, + "litellm_provider": 
"vertex_ai-language-models", + "mode": "chat", + "supports_system_messages": true, + "supports_function_calling": true, + "supports_vision": true, + "supports_response_schema": true, + "supports_audio_output": true, + "supports_tool_choice": true, + "source": "https://cloud.google.com/vertex-ai/generative-ai/pricing" }, "gemini-2.0-flash-thinking-exp": { "max_tokens": 8192, @@ -3308,7 +3971,67 @@ "supports_vision": true, "supports_response_schema": true, "supports_audio_output": true, - "source": "https://cloud.google.com/vertex-ai/generative-ai/docs/learn/models#gemini-2.0-flash" + "source": "https://cloud.google.com/vertex-ai/generative-ai/docs/learn/models#gemini-2.0-flash", + "supports_tool_choice": true + }, + "gemini-2.0-flash-thinking-exp-01-21": { + "max_tokens": 65536, + "max_input_tokens": 1048576, + "max_output_tokens": 65536, + "max_images_per_prompt": 3000, + "max_videos_per_prompt": 10, + "max_video_length": 1, + "max_audio_length_hours": 8.4, + "max_audio_per_prompt": 1, + "max_pdf_size_mb": 30, + "input_cost_per_image": 0, + "input_cost_per_video_per_second": 0, + "input_cost_per_audio_per_second": 0, + "input_cost_per_token": 0, + "input_cost_per_character": 0, + "input_cost_per_token_above_128k_tokens": 0, + "input_cost_per_character_above_128k_tokens": 0, + "input_cost_per_image_above_128k_tokens": 0, + "input_cost_per_video_per_second_above_128k_tokens": 0, + "input_cost_per_audio_per_second_above_128k_tokens": 0, + "output_cost_per_token": 0, + "output_cost_per_character": 0, + "output_cost_per_token_above_128k_tokens": 0, + "output_cost_per_character_above_128k_tokens": 0, + "litellm_provider": "vertex_ai-language-models", + "mode": "chat", + "supports_system_messages": true, + "supports_function_calling": false, + "supports_vision": true, + "supports_response_schema": false, + "supports_audio_output": false, + "source": "https://cloud.google.com/vertex-ai/generative-ai/docs/learn/models#gemini-2.0-flash", + "supports_tool_choice": true + }, + 
"gemini/gemini-2.0-flash-001": { + "max_tokens": 8192, + "max_input_tokens": 1048576, + "max_output_tokens": 8192, + "max_images_per_prompt": 3000, + "max_videos_per_prompt": 10, + "max_video_length": 1, + "max_audio_length_hours": 8.4, + "max_audio_per_prompt": 1, + "max_pdf_size_mb": 30, + "input_cost_per_audio_token": 0.0000007, + "input_cost_per_token": 0.0000001, + "output_cost_per_token": 0.0000004, + "litellm_provider": "gemini", + "mode": "chat", + "rpm": 10000, + "tpm": 10000000, + "supports_system_messages": true, + "supports_function_calling": true, + "supports_vision": true, + "supports_response_schema": true, + "supports_audio_output": false, + "supports_tool_choice": true, + "source": "https://ai.google.dev/pricing#2_0flash" }, "gemini/gemini-2.0-flash-exp": { "max_tokens": 8192, @@ -3343,7 +4066,33 @@ "supports_audio_output": true, "tpm": 4000000, "rpm": 10, - "source": "https://cloud.google.com/vertex-ai/generative-ai/docs/learn/models#gemini-2.0-flash" + "source": "https://cloud.google.com/vertex-ai/generative-ai/docs/learn/models#gemini-2.0-flash", + "supports_tool_choice": true + }, + "gemini/gemini-2.0-flash-lite-preview-02-05": { + "max_tokens": 8192, + "max_input_tokens": 1048576, + "max_output_tokens": 8192, + "max_images_per_prompt": 3000, + "max_videos_per_prompt": 10, + "max_video_length": 1, + "max_audio_length_hours": 8.4, + "max_audio_per_prompt": 1, + "max_pdf_size_mb": 30, + "input_cost_per_audio_token": 0.000000075, + "input_cost_per_token": 0.000000075, + "output_cost_per_token": 0.0000003, + "litellm_provider": "gemini", + "mode": "chat", + "rpm": 60000, + "tpm": 10000000, + "supports_system_messages": true, + "supports_function_calling": true, + "supports_vision": true, + "supports_response_schema": true, + "supports_audio_output": false, + "supports_tool_choice": true, + "source": "https://cloud.google.com/vertex-ai/generative-ai/docs/learn/models#gemini-2.0-flash-lite" }, "gemini/gemini-2.0-flash-thinking-exp": { "max_tokens": 
8192, @@ -3378,7 +4127,8 @@ "supports_audio_output": true, "tpm": 4000000, "rpm": 10, - "source": "https://cloud.google.com/vertex-ai/generative-ai/docs/learn/models#gemini-2.0-flash" + "source": "https://cloud.google.com/vertex-ai/generative-ai/docs/learn/models#gemini-2.0-flash", + "supports_tool_choice": true }, "vertex_ai/claude-3-sonnet": { "max_tokens": 4096, @@ -3390,7 +4140,8 @@ "mode": "chat", "supports_function_calling": true, "supports_vision": true, - "supports_assistant_prefill": true + "supports_assistant_prefill": true, + "supports_tool_choice": true }, "vertex_ai/claude-3-sonnet@20240229": { "max_tokens": 4096, @@ -3402,7 +4153,8 @@ "mode": "chat", "supports_function_calling": true, "supports_vision": true, - "supports_assistant_prefill": true + "supports_assistant_prefill": true, + "supports_tool_choice": true }, "vertex_ai/claude-3-5-sonnet": { "max_tokens": 8192, @@ -3413,8 +4165,10 @@ "litellm_provider": "vertex_ai-anthropic_models", "mode": "chat", "supports_function_calling": true, + "supports_pdf_input": true, "supports_vision": true, - "supports_assistant_prefill": true + "supports_assistant_prefill": true, + "supports_tool_choice": true }, "vertex_ai/claude-3-5-sonnet@20240620": { "max_tokens": 8192, @@ -3425,8 +4179,10 @@ "litellm_provider": "vertex_ai-anthropic_models", "mode": "chat", "supports_function_calling": true, + "supports_pdf_input": true, "supports_vision": true, - "supports_assistant_prefill": true + "supports_assistant_prefill": true, + "supports_tool_choice": true }, "vertex_ai/claude-3-5-sonnet-v2": { "max_tokens": 8192, @@ -3437,8 +4193,10 @@ "litellm_provider": "vertex_ai-anthropic_models", "mode": "chat", "supports_function_calling": true, + "supports_pdf_input": true, "supports_vision": true, - "supports_assistant_prefill": true + "supports_assistant_prefill": true, + "supports_tool_choice": true }, "vertex_ai/claude-3-5-sonnet-v2@20241022": { "max_tokens": 8192, @@ -3449,8 +4207,30 @@ "litellm_provider": 
"vertex_ai-anthropic_models", "mode": "chat", "supports_function_calling": true, + "supports_pdf_input": true, "supports_vision": true, - "supports_assistant_prefill": true + "supports_assistant_prefill": true, + "supports_tool_choice": true + }, + "vertex_ai/claude-3-7-sonnet@20250219": { + "max_tokens": 8192, + "max_input_tokens": 200000, + "max_output_tokens": 8192, + "input_cost_per_token": 0.000003, + "output_cost_per_token": 0.000015, + "cache_creation_input_token_cost": 0.00000375, + "cache_read_input_token_cost": 0.0000003, + "litellm_provider": "vertex_ai-anthropic_models", + "mode": "chat", + "supports_function_calling": true, + "supports_pdf_input": true, + "supports_vision": true, + "tool_use_system_prompt_tokens": 159, + "supports_assistant_prefill": true, + "supports_prompt_caching": true, + "supports_response_schema": true, + "deprecation_date": "2025-06-01", + "supports_tool_choice": true }, "vertex_ai/claude-3-haiku": { "max_tokens": 4096, @@ -3462,7 +4242,8 @@ "mode": "chat", "supports_function_calling": true, "supports_vision": true, - "supports_assistant_prefill": true + "supports_assistant_prefill": true, + "supports_tool_choice": true }, "vertex_ai/claude-3-haiku@20240307": { "max_tokens": 4096, @@ -3474,7 +4255,8 @@ "mode": "chat", "supports_function_calling": true, "supports_vision": true, - "supports_assistant_prefill": true + "supports_assistant_prefill": true, + "supports_tool_choice": true }, "vertex_ai/claude-3-5-haiku": { "max_tokens": 8192, @@ -3485,7 +4267,9 @@ "litellm_provider": "vertex_ai-anthropic_models", "mode": "chat", "supports_function_calling": true, - "supports_assistant_prefill": true + "supports_pdf_input": true, + "supports_assistant_prefill": true, + "supports_tool_choice": true }, "vertex_ai/claude-3-5-haiku@20241022": { "max_tokens": 8192, @@ -3496,7 +4280,9 @@ "litellm_provider": "vertex_ai-anthropic_models", "mode": "chat", "supports_function_calling": true, - "supports_assistant_prefill": true + 
"supports_pdf_input": true, + "supports_assistant_prefill": true, + "supports_tool_choice": true }, "vertex_ai/claude-3-opus": { "max_tokens": 4096, @@ -3508,7 +4294,8 @@ "mode": "chat", "supports_function_calling": true, "supports_vision": true, - "supports_assistant_prefill": true + "supports_assistant_prefill": true, + "supports_tool_choice": true }, "vertex_ai/claude-3-opus@20240229": { "max_tokens": 4096, @@ -3520,7 +4307,8 @@ "mode": "chat", "supports_function_calling": true, "supports_vision": true, - "supports_assistant_prefill": true + "supports_assistant_prefill": true, + "supports_tool_choice": true }, "vertex_ai/meta/llama3-405b-instruct-maas": { "max_tokens": 32000, @@ -3530,7 +4318,8 @@ "output_cost_per_token": 0.0, "litellm_provider": "vertex_ai-llama_models", "mode": "chat", - "source": "https://cloud.google.com/vertex-ai/generative-ai/pricing#partner-models" + "source": "https://cloud.google.com/vertex-ai/generative-ai/pricing#partner-models", + "supports_tool_choice": true }, "vertex_ai/meta/llama3-70b-instruct-maas": { "max_tokens": 32000, @@ -3540,7 +4329,8 @@ "output_cost_per_token": 0.0, "litellm_provider": "vertex_ai-llama_models", "mode": "chat", - "source": "https://cloud.google.com/vertex-ai/generative-ai/pricing#partner-models" + "source": "https://cloud.google.com/vertex-ai/generative-ai/pricing#partner-models", + "supports_tool_choice": true }, "vertex_ai/meta/llama3-8b-instruct-maas": { "max_tokens": 32000, @@ -3550,7 +4340,8 @@ "output_cost_per_token": 0.0, "litellm_provider": "vertex_ai-llama_models", "mode": "chat", - "source": "https://cloud.google.com/vertex-ai/generative-ai/pricing#partner-models" + "source": "https://cloud.google.com/vertex-ai/generative-ai/pricing#partner-models", + "supports_tool_choice": true }, "vertex_ai/meta/llama-3.2-90b-vision-instruct-maas": { "max_tokens": 128000, @@ -3562,7 +4353,8 @@ "mode": "chat", "supports_system_messages": true, "supports_vision": true, - "source": 
"https://console.cloud.google.com/vertex-ai/publishers/meta/model-garden/llama-3.2-90b-vision-instruct-maas" + "source": "https://console.cloud.google.com/vertex-ai/publishers/meta/model-garden/llama-3.2-90b-vision-instruct-maas", + "supports_tool_choice": true }, "vertex_ai/mistral-large@latest": { "max_tokens": 8191, @@ -3572,7 +4364,8 @@ "output_cost_per_token": 0.000006, "litellm_provider": "vertex_ai-mistral_models", "mode": "chat", - "supports_function_calling": true + "supports_function_calling": true, + "supports_tool_choice": true }, "vertex_ai/mistral-large@2411-001": { "max_tokens": 8191, @@ -3582,7 +4375,8 @@ "output_cost_per_token": 0.000006, "litellm_provider": "vertex_ai-mistral_models", "mode": "chat", - "supports_function_calling": true + "supports_function_calling": true, + "supports_tool_choice": true }, "vertex_ai/mistral-large-2411": { "max_tokens": 8191, @@ -3592,7 +4386,8 @@ "output_cost_per_token": 0.000006, "litellm_provider": "vertex_ai-mistral_models", "mode": "chat", - "supports_function_calling": true + "supports_function_calling": true, + "supports_tool_choice": true }, "vertex_ai/mistral-large@2407": { "max_tokens": 8191, @@ -3602,7 +4397,8 @@ "output_cost_per_token": 0.000006, "litellm_provider": "vertex_ai-mistral_models", "mode": "chat", - "supports_function_calling": true + "supports_function_calling": true, + "supports_tool_choice": true }, "vertex_ai/mistral-nemo@latest": { "max_tokens": 128000, @@ -3612,7 +4408,8 @@ "output_cost_per_token": 0.00000015, "litellm_provider": "vertex_ai-mistral_models", "mode": "chat", - "supports_function_calling": true + "supports_function_calling": true, + "supports_tool_choice": true }, "vertex_ai/jamba-1.5-mini@001": { "max_tokens": 256000, @@ -3621,7 +4418,8 @@ "input_cost_per_token": 0.0000002, "output_cost_per_token": 0.0000004, "litellm_provider": "vertex_ai-ai21_models", - "mode": "chat" + "mode": "chat", + "supports_tool_choice": true }, "vertex_ai/jamba-1.5-large@001": { "max_tokens": 
256000, @@ -3630,7 +4428,8 @@ "input_cost_per_token": 0.000002, "output_cost_per_token": 0.000008, "litellm_provider": "vertex_ai-ai21_models", - "mode": "chat" + "mode": "chat", + "supports_tool_choice": true }, "vertex_ai/jamba-1.5": { "max_tokens": 256000, @@ -3639,7 +4438,8 @@ "input_cost_per_token": 0.0000002, "output_cost_per_token": 0.0000004, "litellm_provider": "vertex_ai-ai21_models", - "mode": "chat" + "mode": "chat", + "supports_tool_choice": true }, "vertex_ai/jamba-1.5-mini": { "max_tokens": 256000, @@ -3648,7 +4448,8 @@ "input_cost_per_token": 0.0000002, "output_cost_per_token": 0.0000004, "litellm_provider": "vertex_ai-ai21_models", - "mode": "chat" + "mode": "chat", + "supports_tool_choice": true }, "vertex_ai/jamba-1.5-large": { "max_tokens": 256000, @@ -3657,7 +4458,8 @@ "input_cost_per_token": 0.000002, "output_cost_per_token": 0.000008, "litellm_provider": "vertex_ai-ai21_models", - "mode": "chat" + "mode": "chat", + "supports_tool_choice": true }, "vertex_ai/mistral-nemo@2407": { "max_tokens": 128000, @@ -3667,7 +4469,8 @@ "output_cost_per_token": 0.000003, "litellm_provider": "vertex_ai-mistral_models", "mode": "chat", - "supports_function_calling": true + "supports_function_calling": true, + "supports_tool_choice": true }, "vertex_ai/codestral@latest": { "max_tokens": 128000, @@ -3677,7 +4480,8 @@ "output_cost_per_token": 0.0000006, "litellm_provider": "vertex_ai-mistral_models", "mode": "chat", - "supports_function_calling": true + "supports_function_calling": true, + "supports_tool_choice": true }, "vertex_ai/codestral@2405": { "max_tokens": 128000, @@ -3687,7 +4491,19 @@ "output_cost_per_token": 0.0000006, "litellm_provider": "vertex_ai-mistral_models", "mode": "chat", - "supports_function_calling": true + "supports_function_calling": true, + "supports_tool_choice": true + }, + "vertex_ai/codestral-2501": { + "max_tokens": 128000, + "max_input_tokens": 128000, + "max_output_tokens": 128000, + "input_cost_per_token": 0.0000002, + 
"output_cost_per_token": 0.0000006, + "litellm_provider": "vertex_ai-mistral_models", + "mode": "chat", + "supports_function_calling": true, + "supports_tool_choice": true }, "vertex_ai/imagegeneration@006": { "output_cost_per_image": 0.020, @@ -3901,7 +4717,9 @@ "supports_prompt_caching": true, "tpm": 4000000, "rpm": 2000, - "source": "https://ai.google.dev/pricing" + "source": "https://ai.google.dev/pricing", + "deprecation_date": "2025-09-24", + "supports_tool_choice": true }, "gemini/gemini-1.5-flash-001": { "max_tokens": 8192, @@ -3928,7 +4746,9 @@ "supports_prompt_caching": true, "tpm": 4000000, "rpm": 2000, - "source": "https://ai.google.dev/pricing" + "source": "https://ai.google.dev/pricing", + "deprecation_date": "2025-05-24", + "supports_tool_choice": true }, "gemini/gemini-1.5-flash": { "max_tokens": 8192, @@ -3952,7 +4772,8 @@ "supports_response_schema": true, "tpm": 4000000, "rpm": 2000, - "source": "https://ai.google.dev/pricing" + "source": "https://ai.google.dev/pricing", + "supports_tool_choice": true }, "gemini/gemini-1.5-flash-latest": { "max_tokens": 8192, @@ -3977,7 +4798,8 @@ "supports_prompt_caching": true, "tpm": 4000000, "rpm": 2000, - "source": "https://ai.google.dev/pricing" + "source": "https://ai.google.dev/pricing", + "supports_tool_choice": true }, "gemini/gemini-1.5-flash-8b": { "max_tokens": 8192, @@ -4002,7 +4824,8 @@ "supports_prompt_caching": true, "tpm": 4000000, "rpm": 4000, - "source": "https://ai.google.dev/pricing" + "source": "https://ai.google.dev/pricing", + "supports_tool_choice": true }, "gemini/gemini-1.5-flash-8b-exp-0924": { "max_tokens": 8192, @@ -4027,7 +4850,8 @@ "supports_prompt_caching": true, "tpm": 4000000, "rpm": 4000, - "source": "https://ai.google.dev/pricing" + "source": "https://ai.google.dev/pricing", + "supports_tool_choice": true }, "gemini/gemini-exp-1114": { "max_tokens": 8192, @@ -4046,6 +4870,7 @@ "litellm_provider": "gemini", "mode": "chat", "supports_system_messages": true, + 
"supports_tool_choice": true, "supports_function_calling": true, "supports_vision": true, "supports_response_schema": true, @@ -4053,7 +4878,8 @@ "rpm": 1000, "source": "https://ai.google.dev/pricing", "metadata": { - "notes": "Rate limits not documented for gemini-exp-1114. Assuming same as gemini-1.5-pro." + "notes": "Rate limits not documented for gemini-exp-1114. Assuming same as gemini-1.5-pro.", + "supports_tool_choice": true } }, "gemini/gemini-exp-1206": { @@ -4074,13 +4900,15 @@ "mode": "chat", "supports_system_messages": true, "supports_function_calling": true, + "supports_tool_choice": true, "supports_vision": true, "supports_response_schema": true, "tpm": 4000000, "rpm": 1000, "source": "https://ai.google.dev/pricing", "metadata": { - "notes": "Rate limits not documented for gemini-exp-1206. Assuming same as gemini-1.5-pro." + "notes": "Rate limits not documented for gemini-exp-1206. Assuming same as gemini-1.5-pro.", + "supports_tool_choice": true } }, "gemini/gemini-1.5-flash-exp-0827": { @@ -4105,7 +4933,8 @@ "supports_response_schema": true, "tpm": 4000000, "rpm": 2000, - "source": "https://ai.google.dev/pricing" + "source": "https://ai.google.dev/pricing", + "supports_tool_choice": true }, "gemini/gemini-1.5-flash-8b-exp-0827": { "max_tokens": 8192, @@ -4129,7 +4958,8 @@ "supports_response_schema": true, "tpm": 4000000, "rpm": 4000, - "source": "https://ai.google.dev/pricing" + "source": "https://ai.google.dev/pricing", + "supports_tool_choice": true }, "gemini/gemini-pro": { "max_tokens": 8192, @@ -4145,7 +4975,8 @@ "rpd": 30000, "tpm": 120000, "rpm": 360, - "source": "https://ai.google.dev/gemini-api/docs/models/gemini" + "source": "https://ai.google.dev/gemini-api/docs/models/gemini", + "supports_tool_choice": true }, "gemini/gemini-1.5-pro": { "max_tokens": 8192, @@ -4184,7 +5015,8 @@ "supports_prompt_caching": true, "tpm": 4000000, "rpm": 1000, - "source": "https://ai.google.dev/pricing" + "source": "https://ai.google.dev/pricing", + 
"deprecation_date": "2025-09-24" }, "gemini/gemini-1.5-pro-001": { "max_tokens": 8192, @@ -4204,7 +5036,8 @@ "supports_prompt_caching": true, "tpm": 4000000, "rpm": 1000, - "source": "https://ai.google.dev/pricing" + "source": "https://ai.google.dev/pricing", + "deprecation_date": "2025-05-24" }, "gemini/gemini-1.5-pro-exp-0801": { "max_tokens": 8192, @@ -4278,7 +5111,8 @@ "rpd": 30000, "tpm": 120000, "rpm": 360, - "source": "https://cloud.google.com/vertex-ai/generative-ai/docs/learn/models#foundation_models" + "source": "https://cloud.google.com/vertex-ai/generative-ai/docs/learn/models#foundation_models", + "supports_tool_choice": true }, "gemini/gemini-gemma-2-27b-it": { "max_tokens": 8192, @@ -4289,7 +5123,8 @@ "mode": "chat", "supports_function_calling": true, "supports_vision": true, - "source": "https://cloud.google.com/vertex-ai/generative-ai/docs/learn/models#foundation_models" + "source": "https://cloud.google.com/vertex-ai/generative-ai/docs/learn/models#foundation_models", + "supports_tool_choice": true }, "gemini/gemini-gemma-2-9b-it": { "max_tokens": 8192, @@ -4300,7 +5135,8 @@ "mode": "chat", "supports_function_calling": true, "supports_vision": true, - "source": "https://cloud.google.com/vertex-ai/generative-ai/docs/learn/models#foundation_models" + "source": "https://cloud.google.com/vertex-ai/generative-ai/docs/learn/models#foundation_models", + "supports_tool_choice": true }, "command-r": { "max_tokens": 4096, @@ -4310,7 +5146,8 @@ "output_cost_per_token": 0.0000006, "litellm_provider": "cohere_chat", "mode": "chat", - "supports_function_calling": true + "supports_function_calling": true, + "supports_tool_choice": true }, "command-r-08-2024": { "max_tokens": 4096, @@ -4320,7 +5157,8 @@ "output_cost_per_token": 0.0000006, "litellm_provider": "cohere_chat", "mode": "chat", - "supports_function_calling": true + "supports_function_calling": true, + "supports_tool_choice": true }, "command-r7b-12-2024": { "max_tokens": 4096, @@ -4331,7 +5169,8 @@ 
"litellm_provider": "cohere_chat", "mode": "chat", "supports_function_calling": true, - "source": "https://docs.cohere.com/v2/docs/command-r7b" + "source": "https://docs.cohere.com/v2/docs/command-r7b", + "supports_tool_choice": true }, "command-light": { "max_tokens": 4096, @@ -4340,7 +5179,8 @@ "input_cost_per_token": 0.0000003, "output_cost_per_token": 0.0000006, "litellm_provider": "cohere_chat", - "mode": "chat" + "mode": "chat", + "supports_tool_choice": true }, "command-r-plus": { "max_tokens": 4096, @@ -4350,7 +5190,8 @@ "output_cost_per_token": 0.00001, "litellm_provider": "cohere_chat", "mode": "chat", - "supports_function_calling": true + "supports_function_calling": true, + "supports_tool_choice": true }, "command-r-plus-08-2024": { "max_tokens": 4096, @@ -4360,7 +5201,8 @@ "output_cost_per_token": 0.00001, "litellm_provider": "cohere_chat", "mode": "chat", - "supports_function_calling": true + "supports_function_calling": true, + "supports_tool_choice": true }, "command-nightly": { "max_tokens": 4096, @@ -4496,7 +5338,8 @@ "input_cost_per_token": 0.0000001, "output_cost_per_token": 0.0000005, "litellm_provider": "replicate", - "mode": "chat" + "mode": "chat", + "supports_tool_choice": true }, "replicate/meta/llama-2-13b-chat": { "max_tokens": 4096, @@ -4505,7 +5348,8 @@ "input_cost_per_token": 0.0000001, "output_cost_per_token": 0.0000005, "litellm_provider": "replicate", - "mode": "chat" + "mode": "chat", + "supports_tool_choice": true }, "replicate/meta/llama-2-70b": { "max_tokens": 4096, @@ -4514,7 +5358,8 @@ "input_cost_per_token": 0.00000065, "output_cost_per_token": 0.00000275, "litellm_provider": "replicate", - "mode": "chat" + "mode": "chat", + "supports_tool_choice": true }, "replicate/meta/llama-2-70b-chat": { "max_tokens": 4096, @@ -4523,7 +5368,8 @@ "input_cost_per_token": 0.00000065, "output_cost_per_token": 0.00000275, "litellm_provider": "replicate", - "mode": "chat" + "mode": "chat", + "supports_tool_choice": true }, 
"replicate/meta/llama-2-7b": { "max_tokens": 4096, @@ -4532,7 +5378,8 @@ "input_cost_per_token": 0.00000005, "output_cost_per_token": 0.00000025, "litellm_provider": "replicate", - "mode": "chat" + "mode": "chat", + "supports_tool_choice": true }, "replicate/meta/llama-2-7b-chat": { "max_tokens": 4096, @@ -4541,7 +5388,8 @@ "input_cost_per_token": 0.00000005, "output_cost_per_token": 0.00000025, "litellm_provider": "replicate", - "mode": "chat" + "mode": "chat", + "supports_tool_choice": true }, "replicate/meta/llama-3-70b": { "max_tokens": 8192, @@ -4550,7 +5398,8 @@ "input_cost_per_token": 0.00000065, "output_cost_per_token": 0.00000275, "litellm_provider": "replicate", - "mode": "chat" + "mode": "chat", + "supports_tool_choice": true }, "replicate/meta/llama-3-70b-instruct": { "max_tokens": 8192, @@ -4559,7 +5408,8 @@ "input_cost_per_token": 0.00000065, "output_cost_per_token": 0.00000275, "litellm_provider": "replicate", - "mode": "chat" + "mode": "chat", + "supports_tool_choice": true }, "replicate/meta/llama-3-8b": { "max_tokens": 8086, @@ -4568,7 +5418,8 @@ "input_cost_per_token": 0.00000005, "output_cost_per_token": 0.00000025, "litellm_provider": "replicate", - "mode": "chat" + "mode": "chat", + "supports_tool_choice": true }, "replicate/meta/llama-3-8b-instruct": { "max_tokens": 8086, @@ -4577,7 +5428,8 @@ "input_cost_per_token": 0.00000005, "output_cost_per_token": 0.00000025, "litellm_provider": "replicate", - "mode": "chat" + "mode": "chat", + "supports_tool_choice": true }, "replicate/mistralai/mistral-7b-v0.1": { "max_tokens": 4096, @@ -4586,7 +5438,8 @@ "input_cost_per_token": 0.00000005, "output_cost_per_token": 0.00000025, "litellm_provider": "replicate", - "mode": "chat" + "mode": "chat", + "supports_tool_choice": true }, "replicate/mistralai/mistral-7b-instruct-v0.2": { "max_tokens": 4096, @@ -4595,7 +5448,8 @@ "input_cost_per_token": 0.00000005, "output_cost_per_token": 0.00000025, "litellm_provider": "replicate", - "mode": "chat" + "mode": 
"chat", + "supports_tool_choice": true }, "replicate/mistralai/mixtral-8x7b-instruct-v0.1": { "max_tokens": 4096, @@ -4604,17 +5458,33 @@ "input_cost_per_token": 0.0000003, "output_cost_per_token": 0.000001, "litellm_provider": "replicate", - "mode": "chat" + "mode": "chat", + "supports_tool_choice": true + }, + "openrouter/deepseek/deepseek-r1": { + "max_tokens": 8192, + "max_input_tokens": 65536, + "max_output_tokens": 8192, + "input_cost_per_token": 0.00000055, + "input_cost_per_token_cache_hit": 0.00000014, + "output_cost_per_token": 0.00000219, + "litellm_provider": "openrouter", + "mode": "chat", + "supports_function_calling": true, + "supports_assistant_prefill": true, + "supports_tool_choice": true, + "supports_prompt_caching": true }, "openrouter/deepseek/deepseek-chat": { "max_tokens": 8192, - "max_input_tokens": 66000, - "max_output_tokens": 4096, + "max_input_tokens": 65536, + "max_output_tokens": 8192, "input_cost_per_token": 0.00000014, "output_cost_per_token": 0.00000028, "litellm_provider": "openrouter", "supports_prompt_caching": true, - "mode": "chat" + "mode": "chat", + "supports_tool_choice": true }, "openrouter/deepseek/deepseek-coder": { "max_tokens": 8192, @@ -4624,14 +5494,16 @@ "output_cost_per_token": 0.00000028, "litellm_provider": "openrouter", "supports_prompt_caching": true, - "mode": "chat" + "mode": "chat", + "supports_tool_choice": true }, "openrouter/microsoft/wizardlm-2-8x22b:nitro": { "max_tokens": 65536, "input_cost_per_token": 0.000001, "output_cost_per_token": 0.000001, "litellm_provider": "openrouter", - "mode": "chat" + "mode": "chat", + "supports_tool_choice": true }, "openrouter/google/gemini-pro-1.5": { "max_tokens": 8192, @@ -4643,28 +5515,54 @@ "litellm_provider": "openrouter", "mode": "chat", "supports_function_calling": true, - "supports_vision": true + "supports_vision": true, + "supports_tool_choice": true + }, + "openrouter/google/gemini-2.0-flash-001": { + "max_tokens": 8192, + "max_input_tokens": 1048576, +
"max_output_tokens": 8192, + "max_images_per_prompt": 3000, + "max_videos_per_prompt": 10, + "max_video_length": 1, + "max_audio_length_hours": 8.4, + "max_audio_per_prompt": 1, + "max_pdf_size_mb": 30, + "input_cost_per_audio_token": 0.0000007, + "input_cost_per_token": 0.0000001, + "output_cost_per_token": 0.0000004, + "litellm_provider": "openrouter", + "mode": "chat", + "supports_system_messages": true, + "supports_function_calling": true, + "supports_vision": true, + "supports_response_schema": true, + "supports_audio_output": true, + "supports_tool_choice": true }, "openrouter/mistralai/mixtral-8x22b-instruct": { "max_tokens": 65536, "input_cost_per_token": 0.00000065, "output_cost_per_token": 0.00000065, "litellm_provider": "openrouter", - "mode": "chat" + "mode": "chat", + "supports_tool_choice": true }, "openrouter/cohere/command-r-plus": { "max_tokens": 128000, "input_cost_per_token": 0.000003, "output_cost_per_token": 0.000015, "litellm_provider": "openrouter", - "mode": "chat" + "mode": "chat", + "supports_tool_choice": true }, "openrouter/databricks/dbrx-instruct": { "max_tokens": 32768, "input_cost_per_token": 0.0000006, "output_cost_per_token": 0.0000006, "litellm_provider": "openrouter", - "mode": "chat" + "mode": "chat", + "supports_tool_choice": true }, "openrouter/anthropic/claude-3-haiku": { "max_tokens": 200000, @@ -4674,7 +5572,8 @@ "litellm_provider": "openrouter", "mode": "chat", "supports_function_calling": true, - "supports_vision": true + "supports_vision": true, + "supports_tool_choice": true }, "openrouter/anthropic/claude-3-5-haiku": { "max_tokens": 200000, @@ -4682,7 +5581,8 @@ "output_cost_per_token": 0.000005, "litellm_provider": "openrouter", "mode": "chat", - "supports_function_calling": true + "supports_function_calling": true, + "supports_tool_choice": true }, "openrouter/anthropic/claude-3-haiku-20240307": { "max_tokens": 4096, @@ -4694,7 +5594,8 @@ "mode": "chat", "supports_function_calling": true, "supports_vision": true, - 
"tool_use_system_prompt_tokens": 264 + "tool_use_system_prompt_tokens": 264, + "supports_tool_choice": true }, "openrouter/anthropic/claude-3-5-haiku-20241022": { "max_tokens": 8192, @@ -4705,7 +5606,8 @@ "litellm_provider": "openrouter", "mode": "chat", "supports_function_calling": true, - "tool_use_system_prompt_tokens": 264 + "tool_use_system_prompt_tokens": 264, + "supports_tool_choice": true }, "openrouter/anthropic/claude-3.5-sonnet": { "max_tokens": 8192, @@ -4718,7 +5620,8 @@ "supports_function_calling": true, "supports_vision": true, "tool_use_system_prompt_tokens": 159, - "supports_assistant_prefill": true + "supports_assistant_prefill": true, + "supports_tool_choice": true }, "openrouter/anthropic/claude-3.5-sonnet:beta": { "max_tokens": 8192, @@ -4730,7 +5633,37 @@ "mode": "chat", "supports_function_calling": true, "supports_vision": true, - "tool_use_system_prompt_tokens": 159 + "tool_use_system_prompt_tokens": 159, + "supports_tool_choice": true + }, + "openrouter/anthropic/claude-3.7-sonnet": { + "max_tokens": 8192, + "max_input_tokens": 200000, + "max_output_tokens": 8192, + "input_cost_per_token": 0.000003, + "output_cost_per_token": 0.000015, + "input_cost_per_image": 0.0048, + "litellm_provider": "openrouter", + "mode": "chat", + "supports_function_calling": true, + "supports_vision": true, + "tool_use_system_prompt_tokens": 159, + "supports_assistant_prefill": true, + "supports_tool_choice": true + }, + "openrouter/anthropic/claude-3.7-sonnet:beta": { + "max_tokens": 8192, + "max_input_tokens": 200000, + "max_output_tokens": 8192, + "input_cost_per_token": 0.000003, + "output_cost_per_token": 0.000015, + "input_cost_per_image": 0.0048, + "litellm_provider": "openrouter", + "mode": "chat", + "supports_function_calling": true, + "supports_vision": true, + "tool_use_system_prompt_tokens": 159, + "supports_tool_choice": true }, "openrouter/anthropic/claude-3-sonnet": { "max_tokens": 200000, @@ -4740,21 +5673,24 @@ "litellm_provider": "openrouter", 
"mode": "chat", "supports_function_calling": true, - "supports_vision": true + "supports_vision": true, + "supports_tool_choice": true }, "openrouter/mistralai/mistral-large": { "max_tokens": 32000, "input_cost_per_token": 0.000008, "output_cost_per_token": 0.000024, "litellm_provider": "openrouter", - "mode": "chat" + "mode": "chat", + "supports_tool_choice": true }, "openrouter/cognitivecomputations/dolphin-mixtral-8x7b": { "max_tokens": 32769, "input_cost_per_token": 0.0000005, "output_cost_per_token": 0.0000005, "litellm_provider": "openrouter", - "mode": "chat" + "mode": "chat", + "supports_tool_choice": true }, "openrouter/google/gemini-pro-vision": { "max_tokens": 45875, @@ -4764,42 +5700,48 @@ "litellm_provider": "openrouter", "mode": "chat", "supports_function_calling": true, - "supports_vision": true + "supports_vision": true, + "supports_tool_choice": true }, "openrouter/fireworks/firellava-13b": { "max_tokens": 4096, "input_cost_per_token": 0.0000002, "output_cost_per_token": 0.0000002, "litellm_provider": "openrouter", - "mode": "chat" + "mode": "chat", + "supports_tool_choice": true }, "openrouter/meta-llama/llama-3-8b-instruct:free": { "max_tokens": 8192, "input_cost_per_token": 0.0, "output_cost_per_token": 0.0, "litellm_provider": "openrouter", - "mode": "chat" + "mode": "chat", + "supports_tool_choice": true }, "openrouter/meta-llama/llama-3-8b-instruct:extended": { "max_tokens": 16384, "input_cost_per_token": 0.000000225, "output_cost_per_token": 0.00000225, "litellm_provider": "openrouter", - "mode": "chat" + "mode": "chat", + "supports_tool_choice": true }, "openrouter/meta-llama/llama-3-70b-instruct:nitro": { "max_tokens": 8192, "input_cost_per_token": 0.0000009, "output_cost_per_token": 0.0000009, "litellm_provider": "openrouter", - "mode": "chat" + "mode": "chat", + "supports_tool_choice": true }, "openrouter/meta-llama/llama-3-70b-instruct": { "max_tokens": 8192, "input_cost_per_token": 0.00000059, "output_cost_per_token": 0.00000079, 
"litellm_provider": "openrouter", - "mode": "chat" + "mode": "chat", + "supports_tool_choice": true }, "openrouter/openai/o1": { "max_tokens": 100000, @@ -4815,7 +5757,8 @@ "supports_vision": true, "supports_prompt_caching": true, "supports_system_messages": true, - "supports_response_schema": true + "supports_response_schema": true, + "supports_tool_choice": true }, "openrouter/openai/o1-mini": { "max_tokens": 65536, @@ -4827,7 +5770,8 @@ "mode": "chat", "supports_function_calling": true, "supports_parallel_function_calling": true, - "supports_vision": false + "supports_vision": false, + "supports_tool_choice": true }, "openrouter/openai/o1-mini-2024-09-12": { "max_tokens": 65536, @@ -4839,7 +5783,8 @@ "mode": "chat", "supports_function_calling": true, "supports_parallel_function_calling": true, - "supports_vision": false + "supports_vision": false, + "supports_tool_choice": true }, "openrouter/openai/o1-preview": { "max_tokens": 32768, @@ -4851,7 +5796,8 @@ "mode": "chat", "supports_function_calling": true, "supports_parallel_function_calling": true, - "supports_vision": false + "supports_vision": false, + "supports_tool_choice": true }, "openrouter/openai/o1-preview-2024-09-12": { "max_tokens": 32768, @@ -4863,7 +5809,8 @@ "mode": "chat", "supports_function_calling": true, "supports_parallel_function_calling": true, - "supports_vision": false + "supports_vision": false, + "supports_tool_choice": true }, "openrouter/openai/gpt-4o": { "max_tokens": 4096, @@ -4875,7 +5822,8 @@ "mode": "chat", "supports_function_calling": true, "supports_parallel_function_calling": true, - "supports_vision": true + "supports_vision": true, + "supports_tool_choice": true }, "openrouter/openai/gpt-4o-2024-05-13": { "max_tokens": 4096, @@ -4887,7 +5835,8 @@ "mode": "chat", "supports_function_calling": true, "supports_parallel_function_calling": true, - "supports_vision": true + "supports_vision": true, + "supports_tool_choice": true }, "openrouter/openai/gpt-4-vision-preview": { 
"max_tokens": 130000, @@ -4897,28 +5846,32 @@ "litellm_provider": "openrouter", "mode": "chat", "supports_function_calling": true, - "supports_vision": true + "supports_vision": true, + "supports_tool_choice": true }, "openrouter/openai/gpt-3.5-turbo": { "max_tokens": 4095, "input_cost_per_token": 0.0000015, "output_cost_per_token": 0.000002, "litellm_provider": "openrouter", - "mode": "chat" + "mode": "chat", + "supports_tool_choice": true }, "openrouter/openai/gpt-3.5-turbo-16k": { "max_tokens": 16383, "input_cost_per_token": 0.000003, "output_cost_per_token": 0.000004, "litellm_provider": "openrouter", - "mode": "chat" + "mode": "chat", + "supports_tool_choice": true }, "openrouter/openai/gpt-4": { "max_tokens": 8192, "input_cost_per_token": 0.00003, "output_cost_per_token": 0.00006, "litellm_provider": "openrouter", - "mode": "chat" + "mode": "chat", + "supports_tool_choice": true }, "openrouter/anthropic/claude-instant-v1": { "max_tokens": 100000, @@ -4926,7 +5879,8 @@ "input_cost_per_token": 0.00000163, "output_cost_per_token": 0.00000551, "litellm_provider": "openrouter", - "mode": "chat" + "mode": "chat", + "supports_tool_choice": true }, "openrouter/anthropic/claude-2": { "max_tokens": 100000, @@ -4934,7 +5888,8 @@ "input_cost_per_token": 0.00001102, "output_cost_per_token": 0.00003268, "litellm_provider": "openrouter", - "mode": "chat" + "mode": "chat", + "supports_tool_choice": true }, "openrouter/anthropic/claude-3-opus": { "max_tokens": 4096, @@ -4946,98 +5901,112 @@ "mode": "chat", "supports_function_calling": true, "supports_vision": true, - "tool_use_system_prompt_tokens": 395 + "tool_use_system_prompt_tokens": 395, + "supports_tool_choice": true }, "openrouter/google/palm-2-chat-bison": { "max_tokens": 25804, "input_cost_per_token": 0.0000005, "output_cost_per_token": 0.0000005, "litellm_provider": "openrouter", - "mode": "chat" + "mode": "chat", + "supports_tool_choice": true }, "openrouter/google/palm-2-codechat-bison": { "max_tokens": 20070, 
"input_cost_per_token": 0.0000005, "output_cost_per_token": 0.0000005, "litellm_provider": "openrouter", - "mode": "chat" + "mode": "chat", + "supports_tool_choice": true }, "openrouter/meta-llama/llama-2-13b-chat": { "max_tokens": 4096, "input_cost_per_token": 0.0000002, "output_cost_per_token": 0.0000002, "litellm_provider": "openrouter", - "mode": "chat" + "mode": "chat", + "supports_tool_choice": true }, "openrouter/meta-llama/llama-2-70b-chat": { "max_tokens": 4096, "input_cost_per_token": 0.0000015, "output_cost_per_token": 0.0000015, "litellm_provider": "openrouter", - "mode": "chat" + "mode": "chat", + "supports_tool_choice": true }, "openrouter/meta-llama/codellama-34b-instruct": { "max_tokens": 8192, "input_cost_per_token": 0.0000005, "output_cost_per_token": 0.0000005, "litellm_provider": "openrouter", - "mode": "chat" + "mode": "chat", + "supports_tool_choice": true }, "openrouter/nousresearch/nous-hermes-llama2-13b": { "max_tokens": 4096, "input_cost_per_token": 0.0000002, "output_cost_per_token": 0.0000002, "litellm_provider": "openrouter", - "mode": "chat" + "mode": "chat", + "supports_tool_choice": true }, "openrouter/mancer/weaver": { "max_tokens": 8000, "input_cost_per_token": 0.000005625, "output_cost_per_token": 0.000005625, "litellm_provider": "openrouter", - "mode": "chat" + "mode": "chat", + "supports_tool_choice": true }, "openrouter/gryphe/mythomax-l2-13b": { "max_tokens": 8192, "input_cost_per_token": 0.000001875, "output_cost_per_token": 0.000001875, "litellm_provider": "openrouter", - "mode": "chat" + "mode": "chat", + "supports_tool_choice": true }, "openrouter/jondurbin/airoboros-l2-70b-2.1": { "max_tokens": 4096, "input_cost_per_token": 0.000013875, "output_cost_per_token": 0.000013875, "litellm_provider": "openrouter", - "mode": "chat" + "mode": "chat", + "supports_tool_choice": true }, "openrouter/undi95/remm-slerp-l2-13b": { "max_tokens": 6144, "input_cost_per_token": 0.000001875, "output_cost_per_token": 0.000001875, 
"litellm_provider": "openrouter", - "mode": "chat" + "mode": "chat", + "supports_tool_choice": true }, "openrouter/pygmalionai/mythalion-13b": { "max_tokens": 4096, "input_cost_per_token": 0.000001875, "output_cost_per_token": 0.000001875, "litellm_provider": "openrouter", - "mode": "chat" + "mode": "chat", + "supports_tool_choice": true }, "openrouter/mistralai/mistral-7b-instruct": { "max_tokens": 8192, "input_cost_per_token": 0.00000013, "output_cost_per_token": 0.00000013, "litellm_provider": "openrouter", - "mode": "chat" + "mode": "chat", + "supports_tool_choice": true }, "openrouter/mistralai/mistral-7b-instruct:free": { "max_tokens": 8192, "input_cost_per_token": 0.0, "output_cost_per_token": 0.0, "litellm_provider": "openrouter", - "mode": "chat" + "mode": "chat", + "supports_tool_choice": true }, "openrouter/qwen/qwen-2.5-coder-32b-instruct": { "max_tokens": 33792, @@ -5046,7 +6015,8 @@ "input_cost_per_token": 0.00000018, "output_cost_per_token": 0.00000018, "litellm_provider": "openrouter", - "mode": "chat" + "mode": "chat", + "supports_tool_choice": true }, "j2-ultra": { "max_tokens": 8192, @@ -5064,7 +6034,8 @@ "input_cost_per_token": 0.0000002, "output_cost_per_token": 0.0000004, "litellm_provider": "ai21", - "mode": "chat" + "mode": "chat", + "supports_tool_choice": true }, "jamba-1.5-large@001": { "max_tokens": 256000, @@ -5073,7 +6044,8 @@ "input_cost_per_token": 0.000002, "output_cost_per_token": 0.000008, "litellm_provider": "ai21", - "mode": "chat" + "mode": "chat", + "supports_tool_choice": true }, "jamba-1.5": { "max_tokens": 256000, @@ -5082,7 +6054,8 @@ "input_cost_per_token": 0.0000002, "output_cost_per_token": 0.0000004, "litellm_provider": "ai21", - "mode": "chat" + "mode": "chat", + "supports_tool_choice": true }, "jamba-1.5-mini": { "max_tokens": 256000, @@ -5091,7 +6064,8 @@ "input_cost_per_token": 0.0000002, "output_cost_per_token": 0.0000004, "litellm_provider": "ai21", - "mode": "chat" + "mode": "chat", + "supports_tool_choice": 
true }, "jamba-1.5-large": { "max_tokens": 256000, @@ -5100,7 +6074,28 @@ "input_cost_per_token": 0.000002, "output_cost_per_token": 0.000008, "litellm_provider": "ai21", - "mode": "chat" + "mode": "chat", + "supports_tool_choice": true + }, + "jamba-large-1.6": { + "max_tokens": 256000, + "max_input_tokens": 256000, + "max_output_tokens": 256000, + "input_cost_per_token": 0.000002, + "output_cost_per_token": 0.000008, + "litellm_provider": "ai21", + "mode": "chat", + "supports_tool_choice": true + }, + "jamba-mini-1.6": { + "max_tokens": 256000, + "max_input_tokens": 256000, + "max_output_tokens": 256000, + "input_cost_per_token": 0.0000002, + "output_cost_per_token": 0.0000004, + "litellm_provider": "ai21", + "mode": "chat", + "supports_tool_choice": true }, "j2-mid": { "max_tokens": 8192, @@ -5208,6 +6203,37 @@ "mode": "chat", "supports_system_messages": true }, + "ai21.jamba-1-5-large-v1:0": { + "max_tokens": 256000, + "max_input_tokens": 256000, + "max_output_tokens": 256000, + "input_cost_per_token": 0.000002, + "output_cost_per_token": 0.000008, + "litellm_provider": "bedrock", + "mode": "chat" + }, + "ai21.jamba-1-5-mini-v1:0": { + "max_tokens": 256000, + "max_input_tokens": 256000, + "max_output_tokens": 256000, + "input_cost_per_token": 0.0000002, + "output_cost_per_token": 0.0000004, + "litellm_provider": "bedrock", + "mode": "chat" + }, + "amazon.rerank-v1:0": { + "max_tokens": 32000, + "max_input_tokens": 32000, + "max_output_tokens": 32000, + "max_query_tokens": 32000, + "max_document_chunks_per_query": 100, + "max_tokens_per_document_chunk": 512, + "input_cost_per_token": 0.0, + "input_cost_per_query": 0.001, + "output_cost_per_token": 0.0, + "litellm_provider": "bedrock", + "mode": "rerank" + }, "amazon.titan-text-lite-v1": { "max_tokens": 4000, "max_input_tokens": 42000, @@ -5276,7 +6302,8 @@ "input_cost_per_token": 0.00000015, "output_cost_per_token": 0.0000002, "litellm_provider": "bedrock", - "mode": "chat" + "mode": "chat", + 
"supports_tool_choice": true }, "mistral.mixtral-8x7b-instruct-v0:1": { "max_tokens": 8191, @@ -5285,7 +6312,8 @@ "input_cost_per_token": 0.00000045, "output_cost_per_token": 0.0000007, "litellm_provider": "bedrock", - "mode": "chat" + "mode": "chat", + "supports_tool_choice": true }, "mistral.mistral-large-2402-v1:0": { "max_tokens": 8191, @@ -5295,7 +6323,8 @@ "output_cost_per_token": 0.000024, "litellm_provider": "bedrock", "mode": "chat", - "supports_function_calling": true + "supports_function_calling": true, + "supports_tool_choice": true }, "mistral.mistral-large-2407-v1:0": { "max_tokens": 8191, @@ -5305,7 +6334,8 @@ "output_cost_per_token": 0.000009, "litellm_provider": "bedrock", "mode": "chat", - "supports_function_calling": true + "supports_function_calling": true, + "supports_tool_choice": true }, "mistral.mistral-small-2402-v1:0": { "max_tokens": 8191, @@ -5315,7 +6345,8 @@ "output_cost_per_token": 0.000003, "litellm_provider": "bedrock", "mode": "chat", - "supports_function_calling": true + "supports_function_calling": true, + "supports_tool_choice": true }, "bedrock/us-west-2/mistral.mixtral-8x7b-instruct-v0:1": { "max_tokens": 8191, @@ -5324,7 +6355,8 @@ "input_cost_per_token": 0.00000045, "output_cost_per_token": 0.0000007, "litellm_provider": "bedrock", - "mode": "chat" + "mode": "chat", + "supports_tool_choice": true }, "bedrock/us-east-1/mistral.mixtral-8x7b-instruct-v0:1": { "max_tokens": 8191, @@ -5333,7 +6365,8 @@ "input_cost_per_token": 0.00000045, "output_cost_per_token": 0.0000007, "litellm_provider": "bedrock", - "mode": "chat" + "mode": "chat", + "supports_tool_choice": true }, "bedrock/eu-west-3/mistral.mixtral-8x7b-instruct-v0:1": { "max_tokens": 8191, @@ -5342,7 +6375,8 @@ "input_cost_per_token": 0.00000059, "output_cost_per_token": 0.00000091, "litellm_provider": "bedrock", - "mode": "chat" + "mode": "chat", + "supports_tool_choice": true }, "bedrock/us-west-2/mistral.mistral-7b-instruct-v0:2": { "max_tokens": 8191, @@ -5351,7 
+6385,8 @@ "input_cost_per_token": 0.00000015, "output_cost_per_token": 0.0000002, "litellm_provider": "bedrock", - "mode": "chat" + "mode": "chat", + "supports_tool_choice": true }, "bedrock/us-east-1/mistral.mistral-7b-instruct-v0:2": { "max_tokens": 8191, @@ -5360,7 +6395,8 @@ "input_cost_per_token": 0.00000015, "output_cost_per_token": 0.0000002, "litellm_provider": "bedrock", - "mode": "chat" + "mode": "chat", + "supports_tool_choice": true }, "bedrock/eu-west-3/mistral.mistral-7b-instruct-v0:2": { "max_tokens": 8191, @@ -5369,7 +6405,8 @@ "input_cost_per_token": 0.0000002, "output_cost_per_token": 0.00000026, "litellm_provider": "bedrock", - "mode": "chat" + "mode": "chat", + "supports_tool_choice": true }, "bedrock/us-east-1/mistral.mistral-large-2402-v1:0": { "max_tokens": 8191, @@ -5379,7 +6416,8 @@ "output_cost_per_token": 0.000024, "litellm_provider": "bedrock", "mode": "chat", - "supports_function_calling": true + "supports_function_calling": true, + "supports_tool_choice": true }, "bedrock/us-west-2/mistral.mistral-large-2402-v1:0": { "max_tokens": 8191, @@ -5389,7 +6427,8 @@ "output_cost_per_token": 0.000024, "litellm_provider": "bedrock", "mode": "chat", - "supports_function_calling": true + "supports_function_calling": true, + "supports_tool_choice": true }, "bedrock/eu-west-3/mistral.mistral-large-2402-v1:0": { "max_tokens": 8191, @@ -5399,7 +6438,8 @@ "output_cost_per_token": 0.0000312, "litellm_provider": "bedrock", "mode": "chat", - "supports_function_calling": true + "supports_function_calling": true, + "supports_tool_choice": true }, "amazon.nova-micro-v1:0": { "max_tokens": 4096, @@ -5410,7 +6450,8 @@ "litellm_provider": "bedrock_converse", "mode": "chat", "supports_function_calling": true, - "supports_prompt_caching": true + "supports_prompt_caching": true, + "supports_response_schema": true }, "us.amazon.nova-micro-v1:0": { "max_tokens": 4096, @@ -5421,7 +6462,20 @@ "litellm_provider": "bedrock_converse", "mode": "chat", 
"supports_function_calling": true, - "supports_prompt_caching": true + "supports_prompt_caching": true, + "supports_response_schema": true + }, + "eu.amazon.nova-micro-v1:0": { + "max_tokens": 4096, + "max_input_tokens": 300000, + "max_output_tokens": 4096, + "input_cost_per_token": 0.000000046, + "output_cost_per_token": 0.000000184, + "litellm_provider": "bedrock_converse", + "mode": "chat", + "supports_function_calling": true, + "supports_prompt_caching": true, + "supports_response_schema": true }, "amazon.nova-lite-v1:0": { "max_tokens": 4096, @@ -5434,7 +6488,8 @@ "supports_function_calling": true, "supports_vision": true, "supports_pdf_input": true, - "supports_prompt_caching": true + "supports_prompt_caching": true, + "supports_response_schema": true }, "us.amazon.nova-lite-v1:0": { "max_tokens": 4096, @@ -5447,7 +6502,22 @@ "supports_function_calling": true, "supports_vision": true, "supports_pdf_input": true, - "supports_prompt_caching": true + "supports_prompt_caching": true, + "supports_response_schema": true + }, + "eu.amazon.nova-lite-v1:0": { + "max_tokens": 4096, + "max_input_tokens": 128000, + "max_output_tokens": 4096, + "input_cost_per_token": 0.000000078, + "output_cost_per_token": 0.000000312, + "litellm_provider": "bedrock_converse", + "mode": "chat", + "supports_function_calling": true, + "supports_vision": true, + "supports_pdf_input": true, + "supports_prompt_caching": true, + "supports_response_schema": true }, "amazon.nova-pro-v1:0": { "max_tokens": 4096, @@ -5460,7 +6530,8 @@ "supports_function_calling": true, "supports_vision": true, "supports_pdf_input": true, - "supports_prompt_caching": true + "supports_prompt_caching": true, + "supports_response_schema": true }, "us.amazon.nova-pro-v1:0": { "max_tokens": 4096, @@ -5473,7 +6544,23 @@ "supports_function_calling": true, "supports_vision": true, "supports_pdf_input": true, - "supports_prompt_caching": true + "supports_prompt_caching": true, + "supports_response_schema": true + }, + 
"eu.amazon.nova-pro-v1:0": { + "max_tokens": 4096, + "max_input_tokens": 300000, + "max_output_tokens": 4096, + "input_cost_per_token": 0.00000105, + "output_cost_per_token": 0.0000042, + "litellm_provider": "bedrock_converse", + "mode": "chat", + "supports_function_calling": true, + "supports_vision": true, + "supports_pdf_input": true, + "supports_prompt_caching": true, + "supports_response_schema": true, + "source": "https://aws.amazon.com/bedrock/pricing/" }, "anthropic.claude-3-sonnet-20240229-v1:0": { "max_tokens": 4096, @@ -5484,7 +6571,26 @@ "litellm_provider": "bedrock", "mode": "chat", "supports_function_calling": true, - "supports_vision": true + "supports_response_schema": true, + "supports_vision": true, + "supports_pdf_input": true, + "supports_tool_choice": true + }, + "bedrock/invoke/anthropic.claude-3-5-sonnet-20240620-v1:0": { + "max_tokens": 4096, + "max_input_tokens": 200000, + "max_output_tokens": 4096, + "input_cost_per_token": 0.000003, + "output_cost_per_token": 0.000015, + "litellm_provider": "bedrock", + "mode": "chat", + "supports_function_calling": true, + "supports_response_schema": true, + "supports_vision": true, + "supports_tool_choice": true, + "metadata": { + "notes": "Anthropic via Invoke route does not currently support pdf input." 
+ } }, "anthropic.claude-3-5-sonnet-20240620-v1:0": { "max_tokens": 4096, @@ -5495,7 +6601,25 @@ "litellm_provider": "bedrock", "mode": "chat", "supports_function_calling": true, - "supports_vision": true + "supports_response_schema": true, + "supports_vision": true, + "supports_pdf_input": true, + "supports_tool_choice": true + }, + "anthropic.claude-3-7-sonnet-20250219-v1:0": { + "max_tokens": 8192, + "max_input_tokens": 200000, + "max_output_tokens": 8192, + "input_cost_per_token": 0.000003, + "output_cost_per_token": 0.000015, + "litellm_provider": "bedrock_converse", + "mode": "chat", + "supports_function_calling": true, + "supports_vision": true, + "supports_assistant_prefill": true, + "supports_prompt_caching": true, + "supports_response_schema": true, + "supports_tool_choice": true }, "anthropic.claude-3-5-sonnet-20241022-v2:0": { "max_tokens": 8192, @@ -5507,9 +6631,11 @@ "mode": "chat", "supports_function_calling": true, "supports_vision": true, + "supports_pdf_input": true, "supports_assistant_prefill": true, "supports_prompt_caching": true, - "supports_response_schema": true + "supports_response_schema": true, + "supports_tool_choice": true }, "anthropic.claude-3-haiku-20240307-v1:0": { "max_tokens": 4096, @@ -5520,19 +6646,25 @@ "litellm_provider": "bedrock", "mode": "chat", "supports_function_calling": true, - "supports_vision": true + "supports_response_schema": true, + "supports_vision": true, + "supports_pdf_input": true, + "supports_tool_choice": true }, "anthropic.claude-3-5-haiku-20241022-v1:0": { "max_tokens": 8192, "max_input_tokens": 200000, "max_output_tokens": 8192, - "input_cost_per_token": 0.000001, - "output_cost_per_token": 0.000005, + "input_cost_per_token": 0.0000008, + "output_cost_per_token": 0.000004, "litellm_provider": "bedrock", "mode": "chat", "supports_assistant_prefill": true, + "supports_pdf_input": true, "supports_function_calling": true, - "supports_prompt_caching": true + "supports_response_schema": true, + 
"supports_prompt_caching": true, + "supports_tool_choice": true }, "anthropic.claude-3-opus-20240229-v1:0": { "max_tokens": 4096, @@ -5543,7 +6675,9 @@ "litellm_provider": "bedrock", "mode": "chat", "supports_function_calling": true, - "supports_vision": true + "supports_response_schema": true, + "supports_vision": true, + "supports_tool_choice": true }, "us.anthropic.claude-3-sonnet-20240229-v1:0": { "max_tokens": 4096, @@ -5554,7 +6688,10 @@ "litellm_provider": "bedrock", "mode": "chat", "supports_function_calling": true, - "supports_vision": true + "supports_response_schema": true, + "supports_vision": true, + "supports_pdf_input": true, + "supports_tool_choice": true }, "us.anthropic.claude-3-5-sonnet-20240620-v1:0": { "max_tokens": 4096, @@ -5565,7 +6702,10 @@ "litellm_provider": "bedrock", "mode": "chat", "supports_function_calling": true, - "supports_vision": true + "supports_response_schema": true, + "supports_vision": true, + "supports_pdf_input": true, + "supports_tool_choice": true }, "us.anthropic.claude-3-5-sonnet-20241022-v2:0": { "max_tokens": 8192, @@ -5577,9 +6717,26 @@ "mode": "chat", "supports_function_calling": true, "supports_vision": true, + "supports_pdf_input": true, "supports_assistant_prefill": true, "supports_prompt_caching": true, - "supports_response_schema": true + "supports_response_schema": true, + "supports_tool_choice": true + }, + "us.anthropic.claude-3-7-sonnet-20250219-v1:0": { + "max_tokens": 8192, + "max_input_tokens": 200000, + "max_output_tokens": 8192, + "input_cost_per_token": 0.000003, + "output_cost_per_token": 0.000015, + "litellm_provider": "bedrock_converse", + "mode": "chat", + "supports_function_calling": true, + "supports_vision": true, + "supports_assistant_prefill": true, + "supports_prompt_caching": true, + "supports_response_schema": true, + "supports_tool_choice": true }, "us.anthropic.claude-3-haiku-20240307-v1:0": { "max_tokens": 4096, @@ -5590,19 +6747,25 @@ "litellm_provider": "bedrock", "mode": "chat", 
"supports_function_calling": true, - "supports_vision": true + "supports_response_schema": true, + "supports_vision": true, + "supports_pdf_input": true, + "supports_tool_choice": true }, "us.anthropic.claude-3-5-haiku-20241022-v1:0": { "max_tokens": 8192, "max_input_tokens": 200000, "max_output_tokens": 8192, - "input_cost_per_token": 0.000001, - "output_cost_per_token": 0.000005, + "input_cost_per_token": 0.0000008, + "output_cost_per_token": 0.000004, "litellm_provider": "bedrock", "mode": "chat", "supports_assistant_prefill": true, + "supports_pdf_input": true, "supports_function_calling": true, - "supports_prompt_caching": true + "supports_prompt_caching": true, + "supports_response_schema": true, + "supports_tool_choice": true }, "us.anthropic.claude-3-opus-20240229-v1:0": { "max_tokens": 4096, @@ -5613,7 +6776,9 @@ "litellm_provider": "bedrock", "mode": "chat", "supports_function_calling": true, - "supports_vision": true + "supports_response_schema": true, + "supports_vision": true, + "supports_tool_choice": true }, "eu.anthropic.claude-3-sonnet-20240229-v1:0": { "max_tokens": 4096, @@ -5624,7 +6789,10 @@ "litellm_provider": "bedrock", "mode": "chat", "supports_function_calling": true, - "supports_vision": true + "supports_response_schema": true, + "supports_vision": true, + "supports_pdf_input": true, + "supports_tool_choice": true }, "eu.anthropic.claude-3-5-sonnet-20240620-v1:0": { "max_tokens": 4096, @@ -5635,7 +6803,10 @@ "litellm_provider": "bedrock", "mode": "chat", "supports_function_calling": true, - "supports_vision": true + "supports_response_schema": true, + "supports_vision": true, + "supports_pdf_input": true, + "supports_tool_choice": true }, "eu.anthropic.claude-3-5-sonnet-20241022-v2:0": { "max_tokens": 8192, @@ -5647,9 +6818,11 @@ "mode": "chat", "supports_function_calling": true, "supports_vision": true, + "supports_pdf_input": true, "supports_assistant_prefill": true, "supports_prompt_caching": true, - "supports_response_schema": true + 
"supports_response_schema": true, + "supports_tool_choice": true }, "eu.anthropic.claude-3-haiku-20240307-v1:0": { "max_tokens": 4096, @@ -5660,20 +6833,25 @@ "litellm_provider": "bedrock", "mode": "chat", "supports_function_calling": true, - "supports_vision": true + "supports_response_schema": true, + "supports_vision": true, + "supports_pdf_input": true, + "supports_tool_choice": true }, "eu.anthropic.claude-3-5-haiku-20241022-v1:0": { "max_tokens": 8192, "max_input_tokens": 200000, "max_output_tokens": 8192, - "input_cost_per_token": 0.000001, - "output_cost_per_token": 0.000005, + "input_cost_per_token": 0.00000025, + "output_cost_per_token": 0.00000125, "litellm_provider": "bedrock", "mode": "chat", "supports_function_calling": true, "supports_assistant_prefill": true, + "supports_pdf_input": true, "supports_prompt_caching": true, - "supports_response_schema": true + "supports_response_schema": true, + "supports_tool_choice": true }, "eu.anthropic.claude-3-opus-20240229-v1:0": { "max_tokens": 4096, @@ -5684,7 +6862,9 @@ "litellm_provider": "bedrock", "mode": "chat", "supports_function_calling": true, - "supports_vision": true + "supports_response_schema": true, + "supports_vision": true, + "supports_tool_choice": true }, "anthropic.claude-v1": { "max_tokens": 8191, @@ -5702,7 +6882,8 @@ "input_cost_per_token": 0.000008, "output_cost_per_token": 0.000024, "litellm_provider": "bedrock", - "mode": "chat" + "mode": "chat", + "supports_tool_choice": true }, "bedrock/us-west-2/anthropic.claude-v1": { "max_tokens": 8191, @@ -5711,7 +6892,8 @@ "input_cost_per_token": 0.000008, "output_cost_per_token": 0.000024, "litellm_provider": "bedrock", - "mode": "chat" + "mode": "chat", + "supports_tool_choice": true }, "bedrock/ap-northeast-1/anthropic.claude-v1": { "max_tokens": 8191, @@ -5720,7 +6902,8 @@ "input_cost_per_token": 0.000008, "output_cost_per_token": 0.000024, "litellm_provider": "bedrock", - "mode": "chat" + "mode": "chat", + "supports_tool_choice": true }, 
"bedrock/ap-northeast-1/1-month-commitment/anthropic.claude-v1": { "max_tokens": 8191, @@ -5810,7 +6993,8 @@ "input_cost_per_token": 0.000008, "output_cost_per_token": 0.000024, "litellm_provider": "bedrock", - "mode": "chat" + "mode": "chat", + "supports_tool_choice": true }, "bedrock/us-east-1/anthropic.claude-v2": { "max_tokens": 8191, @@ -5819,7 +7003,8 @@ "input_cost_per_token": 0.000008, "output_cost_per_token": 0.000024, "litellm_provider": "bedrock", - "mode": "chat" + "mode": "chat", + "supports_tool_choice": true }, "bedrock/us-west-2/anthropic.claude-v2": { "max_tokens": 8191, @@ -5828,7 +7013,8 @@ "input_cost_per_token": 0.000008, "output_cost_per_token": 0.000024, "litellm_provider": "bedrock", - "mode": "chat" + "mode": "chat", + "supports_tool_choice": true }, "bedrock/ap-northeast-1/anthropic.claude-v2": { "max_tokens": 8191, @@ -5837,7 +7023,8 @@ "input_cost_per_token": 0.000008, "output_cost_per_token": 0.000024, "litellm_provider": "bedrock", - "mode": "chat" + "mode": "chat", + "supports_tool_choice": true }, "bedrock/ap-northeast-1/1-month-commitment/anthropic.claude-v2": { "max_tokens": 8191, @@ -5846,7 +7033,8 @@ "input_cost_per_second": 0.0455, "output_cost_per_second": 0.0455, "litellm_provider": "bedrock", - "mode": "chat" + "mode": "chat", + "supports_tool_choice": true }, "bedrock/ap-northeast-1/6-month-commitment/anthropic.claude-v2": { "max_tokens": 8191, @@ -5855,7 +7043,8 @@ "input_cost_per_second": 0.02527, "output_cost_per_second": 0.02527, "litellm_provider": "bedrock", - "mode": "chat" + "mode": "chat", + "supports_tool_choice": true }, "bedrock/eu-central-1/anthropic.claude-v2": { "max_tokens": 8191, @@ -5864,7 +7053,8 @@ "input_cost_per_token": 0.000008, "output_cost_per_token": 0.000024, "litellm_provider": "bedrock", - "mode": "chat" + "mode": "chat", + "supports_tool_choice": true }, "bedrock/eu-central-1/1-month-commitment/anthropic.claude-v2": { "max_tokens": 8191, @@ -5873,7 +7063,8 @@ "input_cost_per_second": 0.0415, 
"output_cost_per_second": 0.0415, "litellm_provider": "bedrock", - "mode": "chat" + "mode": "chat", + "supports_tool_choice": true }, "bedrock/eu-central-1/6-month-commitment/anthropic.claude-v2": { "max_tokens": 8191, @@ -5882,7 +7073,8 @@ "input_cost_per_second": 0.02305, "output_cost_per_second": 0.02305, "litellm_provider": "bedrock", - "mode": "chat" + "mode": "chat", + "supports_tool_choice": true }, "bedrock/us-east-1/1-month-commitment/anthropic.claude-v2": { "max_tokens": 8191, @@ -5891,7 +7083,8 @@ "input_cost_per_second": 0.0175, "output_cost_per_second": 0.0175, "litellm_provider": "bedrock", - "mode": "chat" + "mode": "chat", + "supports_tool_choice": true }, "bedrock/us-east-1/6-month-commitment/anthropic.claude-v2": { "max_tokens": 8191, @@ -5900,7 +7093,8 @@ "input_cost_per_second": 0.00972, "output_cost_per_second": 0.00972, "litellm_provider": "bedrock", - "mode": "chat" + "mode": "chat", + "supports_tool_choice": true }, "bedrock/us-west-2/1-month-commitment/anthropic.claude-v2": { "max_tokens": 8191, @@ -5909,7 +7103,8 @@ "input_cost_per_second": 0.0175, "output_cost_per_second": 0.0175, "litellm_provider": "bedrock", - "mode": "chat" + "mode": "chat", + "supports_tool_choice": true }, "bedrock/us-west-2/6-month-commitment/anthropic.claude-v2": { "max_tokens": 8191, @@ -5918,7 +7113,8 @@ "input_cost_per_second": 0.00972, "output_cost_per_second": 0.00972, "litellm_provider": "bedrock", - "mode": "chat" + "mode": "chat", + "supports_tool_choice": true }, "anthropic.claude-v2:1": { "max_tokens": 8191, @@ -5927,7 +7123,8 @@ "input_cost_per_token": 0.000008, "output_cost_per_token": 0.000024, "litellm_provider": "bedrock", - "mode": "chat" + "mode": "chat", + "supports_tool_choice": true }, "bedrock/us-east-1/anthropic.claude-v2:1": { "max_tokens": 8191, @@ -5936,7 +7133,8 @@ "input_cost_per_token": 0.000008, "output_cost_per_token": 0.000024, "litellm_provider": "bedrock", - "mode": "chat" + "mode": "chat", + "supports_tool_choice": true }, 
"bedrock/us-west-2/anthropic.claude-v2:1": { "max_tokens": 8191, @@ -5945,7 +7143,8 @@ "input_cost_per_token": 0.000008, "output_cost_per_token": 0.000024, "litellm_provider": "bedrock", - "mode": "chat" + "mode": "chat", + "supports_tool_choice": true }, "bedrock/ap-northeast-1/anthropic.claude-v2:1": { "max_tokens": 8191, @@ -5954,7 +7153,8 @@ "input_cost_per_token": 0.000008, "output_cost_per_token": 0.000024, "litellm_provider": "bedrock", - "mode": "chat" + "mode": "chat", + "supports_tool_choice": true }, "bedrock/ap-northeast-1/1-month-commitment/anthropic.claude-v2:1": { "max_tokens": 8191, @@ -5963,7 +7163,8 @@ "input_cost_per_second": 0.0455, "output_cost_per_second": 0.0455, "litellm_provider": "bedrock", - "mode": "chat" + "mode": "chat", + "supports_tool_choice": true }, "bedrock/ap-northeast-1/6-month-commitment/anthropic.claude-v2:1": { "max_tokens": 8191, @@ -5972,7 +7173,8 @@ "input_cost_per_second": 0.02527, "output_cost_per_second": 0.02527, "litellm_provider": "bedrock", - "mode": "chat" + "mode": "chat", + "supports_tool_choice": true }, "bedrock/eu-central-1/anthropic.claude-v2:1": { "max_tokens": 8191, @@ -5981,7 +7183,8 @@ "input_cost_per_token": 0.000008, "output_cost_per_token": 0.000024, "litellm_provider": "bedrock", - "mode": "chat" + "mode": "chat", + "supports_tool_choice": true }, "bedrock/eu-central-1/1-month-commitment/anthropic.claude-v2:1": { "max_tokens": 8191, @@ -5990,7 +7193,8 @@ "input_cost_per_second": 0.0415, "output_cost_per_second": 0.0415, "litellm_provider": "bedrock", - "mode": "chat" + "mode": "chat", + "supports_tool_choice": true }, "bedrock/eu-central-1/6-month-commitment/anthropic.claude-v2:1": { "max_tokens": 8191, @@ -5999,7 +7203,8 @@ "input_cost_per_second": 0.02305, "output_cost_per_second": 0.02305, "litellm_provider": "bedrock", - "mode": "chat" + "mode": "chat", + "supports_tool_choice": true }, "bedrock/us-east-1/1-month-commitment/anthropic.claude-v2:1": { "max_tokens": 8191, @@ -6008,7 +7213,8 @@ 
"input_cost_per_second": 0.0175, "output_cost_per_second": 0.0175, "litellm_provider": "bedrock", - "mode": "chat" + "mode": "chat", + "supports_tool_choice": true }, "bedrock/us-east-1/6-month-commitment/anthropic.claude-v2:1": { "max_tokens": 8191, @@ -6017,7 +7223,8 @@ "input_cost_per_second": 0.00972, "output_cost_per_second": 0.00972, "litellm_provider": "bedrock", - "mode": "chat" + "mode": "chat", + "supports_tool_choice": true }, "bedrock/us-west-2/1-month-commitment/anthropic.claude-v2:1": { "max_tokens": 8191, @@ -6026,7 +7233,8 @@ "input_cost_per_second": 0.0175, "output_cost_per_second": 0.0175, "litellm_provider": "bedrock", - "mode": "chat" + "mode": "chat", + "supports_tool_choice": true }, "bedrock/us-west-2/6-month-commitment/anthropic.claude-v2:1": { "max_tokens": 8191, @@ -6035,16 +7243,18 @@ "input_cost_per_second": 0.00972, "output_cost_per_second": 0.00972, "litellm_provider": "bedrock", - "mode": "chat" + "mode": "chat", + "supports_tool_choice": true }, "anthropic.claude-instant-v1": { "max_tokens": 8191, "max_input_tokens": 100000, "max_output_tokens": 8191, - "input_cost_per_token": 0.00000163, - "output_cost_per_token": 0.00000551, + "input_cost_per_token": 0.0000008, + "output_cost_per_token": 0.0000024, "litellm_provider": "bedrock", - "mode": "chat" + "mode": "chat", + "supports_tool_choice": true }, "bedrock/us-east-1/anthropic.claude-instant-v1": { "max_tokens": 8191, @@ -6053,7 +7263,8 @@ "input_cost_per_token": 0.0000008, "output_cost_per_token": 0.0000024, "litellm_provider": "bedrock", - "mode": "chat" + "mode": "chat", + "supports_tool_choice": true }, "bedrock/us-east-1/1-month-commitment/anthropic.claude-instant-v1": { "max_tokens": 8191, @@ -6062,7 +7273,8 @@ "input_cost_per_second": 0.011, "output_cost_per_second": 0.011, "litellm_provider": "bedrock", - "mode": "chat" + "mode": "chat", + "supports_tool_choice": true }, "bedrock/us-east-1/6-month-commitment/anthropic.claude-instant-v1": { "max_tokens": 8191, @@ -6071,7 
+7283,8 @@ "input_cost_per_second": 0.00611, "output_cost_per_second": 0.00611, "litellm_provider": "bedrock", - "mode": "chat" + "mode": "chat", + "supports_tool_choice": true }, "bedrock/us-west-2/1-month-commitment/anthropic.claude-instant-v1": { "max_tokens": 8191, @@ -6080,7 +7293,8 @@ "input_cost_per_second": 0.011, "output_cost_per_second": 0.011, "litellm_provider": "bedrock", - "mode": "chat" + "mode": "chat", + "supports_tool_choice": true }, "bedrock/us-west-2/6-month-commitment/anthropic.claude-instant-v1": { "max_tokens": 8191, @@ -6089,7 +7303,8 @@ "input_cost_per_second": 0.00611, "output_cost_per_second": 0.00611, "litellm_provider": "bedrock", - "mode": "chat" + "mode": "chat", + "supports_tool_choice": true }, "bedrock/us-west-2/anthropic.claude-instant-v1": { "max_tokens": 8191, @@ -6098,7 +7313,8 @@ "input_cost_per_token": 0.0000008, "output_cost_per_token": 0.0000024, "litellm_provider": "bedrock", - "mode": "chat" + "mode": "chat", + "supports_tool_choice": true }, "bedrock/ap-northeast-1/anthropic.claude-instant-v1": { "max_tokens": 8191, @@ -6107,7 +7323,8 @@ "input_cost_per_token": 0.00000223, "output_cost_per_token": 0.00000755, "litellm_provider": "bedrock", - "mode": "chat" + "mode": "chat", + "supports_tool_choice": true }, "bedrock/ap-northeast-1/1-month-commitment/anthropic.claude-instant-v1": { "max_tokens": 8191, @@ -6116,7 +7333,8 @@ "input_cost_per_second": 0.01475, "output_cost_per_second": 0.01475, "litellm_provider": "bedrock", - "mode": "chat" + "mode": "chat", + "supports_tool_choice": true }, "bedrock/ap-northeast-1/6-month-commitment/anthropic.claude-instant-v1": { "max_tokens": 8191, @@ -6125,7 +7343,8 @@ "input_cost_per_second": 0.008194, "output_cost_per_second": 0.008194, "litellm_provider": "bedrock", - "mode": "chat" + "mode": "chat", + "supports_tool_choice": true }, "bedrock/eu-central-1/anthropic.claude-instant-v1": { "max_tokens": 8191, @@ -6134,7 +7353,8 @@ "input_cost_per_token": 0.00000248, 
"output_cost_per_token": 0.00000838, "litellm_provider": "bedrock", - "mode": "chat" + "mode": "chat", + "supports_tool_choice": true }, "bedrock/eu-central-1/1-month-commitment/anthropic.claude-instant-v1": { "max_tokens": 8191, @@ -6143,7 +7363,8 @@ "input_cost_per_second": 0.01635, "output_cost_per_second": 0.01635, "litellm_provider": "bedrock", - "mode": "chat" + "mode": "chat", + "supports_tool_choice": true }, "bedrock/eu-central-1/6-month-commitment/anthropic.claude-instant-v1": { "max_tokens": 8191, @@ -6152,7 +7373,21 @@ "input_cost_per_second": 0.009083, "output_cost_per_second": 0.009083, "litellm_provider": "bedrock", - "mode": "chat" + "mode": "chat", + "supports_tool_choice": true + }, + "cohere.rerank-v3-5:0": { + "max_tokens": 32000, + "max_input_tokens": 32000, + "max_output_tokens": 32000, + "max_query_tokens": 32000, + "max_document_chunks_per_query": 100, + "max_tokens_per_document_chunk": 512, + "input_cost_per_token": 0.0, + "input_cost_per_query": 0.002, + "output_cost_per_token": 0.0, + "litellm_provider": "bedrock", + "mode": "rerank" }, "cohere.command-text-v14": { "max_tokens": 4096, @@ -6249,7 +7484,9 @@ "input_cost_per_token": 0.00000072, "output_cost_per_token": 0.00000072, "litellm_provider": "bedrock_converse", - "mode": "chat" + "mode": "chat", + "supports_function_calling": true, + "supports_tool_choice": false }, "meta.llama2-13b-chat-v1": { "max_tokens": 4096, @@ -6554,7 +7791,8 @@ "litellm_provider": "bedrock", "mode": "chat", "supports_function_calling": true, - "supports_tool_choice": false + "supports_tool_choice": false, + "supports_vision": true }, "us.meta.llama3-2-11b-instruct-v1:0": { "max_tokens": 128000, @@ -6565,7 +7803,8 @@ "litellm_provider": "bedrock", "mode": "chat", "supports_function_calling": true, - "supports_tool_choice": false + "supports_tool_choice": false, + "supports_vision": true }, "meta.llama3-2-90b-instruct-v1:0": { "max_tokens": 128000, @@ -6576,7 +7815,8 @@ "litellm_provider": "bedrock", "mode": 
"chat", "supports_function_calling": true, - "supports_tool_choice": false + "supports_tool_choice": false, + "supports_vision": true }, "us.meta.llama3-2-90b-instruct-v1:0": { "max_tokens": 128000, @@ -6587,6 +7827,18 @@ "litellm_provider": "bedrock", "mode": "chat", "supports_function_calling": true, + "supports_tool_choice": false, + "supports_vision": true + }, + "us.meta.llama3-3-70b-instruct-v1:0": { + "max_tokens": 4096, + "max_input_tokens": 128000, + "max_output_tokens": 4096, + "input_cost_per_token": 0.00000072, + "output_cost_per_token": 0.00000072, + "litellm_provider": "bedrock_converse", + "mode": "chat", + "supports_function_calling": true, "supports_tool_choice": false }, "512-x-512/50-steps/stability.stable-diffusion-xl-v0": { @@ -6783,7 +8035,8 @@ "supports_function_calling": true, "supports_parallel_function_calling": true, "supports_response_schema": true, - "mode": "chat" + "mode": "chat", + "supports_tool_choice": true }, "together_ai/meta-llama/Meta-Llama-3.1-70B-Instruct-Turbo": { "input_cost_per_token": 0.00000088, @@ -6792,7 +8045,8 @@ "supports_function_calling": true, "supports_parallel_function_calling": true, "supports_response_schema": true, - "mode": "chat" + "mode": "chat", + "supports_tool_choice": true }, "together_ai/meta-llama/Meta-Llama-3.1-405B-Instruct-Turbo": { "input_cost_per_token": 0.0000035, @@ -6800,7 +8054,8 @@ "litellm_provider": "together_ai", "supports_function_calling": true, "supports_parallel_function_calling": true, - "mode": "chat" + "mode": "chat", + "supports_tool_choice": true }, "together_ai/meta-llama/Llama-3.3-70B-Instruct-Turbo": { "input_cost_per_token": 0.00000088, @@ -6809,7 +8064,8 @@ "supports_function_calling": true, "supports_parallel_function_calling": true, "supports_response_schema": true, - "mode": "chat" + "mode": "chat", + "supports_tool_choice": true }, "together_ai/meta-llama/Llama-3.3-70B-Instruct-Turbo-Free": { "input_cost_per_token": 0, @@ -6818,7 +8074,8 @@ 
"supports_function_calling": true, "supports_parallel_function_calling": true, "supports_response_schema": true, - "mode": "chat" + "mode": "chat", + "supports_tool_choice": true }, "together_ai/mistralai/Mixtral-8x7B-Instruct-v0.1": { "input_cost_per_token": 0.0000006, @@ -6827,20 +8084,23 @@ "supports_function_calling": true, "supports_parallel_function_calling": true, "supports_response_schema": true, - "mode": "chat" + "mode": "chat", + "supports_tool_choice": true }, "together_ai/mistralai/Mistral-7B-Instruct-v0.1": { "litellm_provider": "together_ai", "supports_function_calling": true, "supports_parallel_function_calling": true, "supports_response_schema": true, - "mode": "chat" + "mode": "chat", + "supports_tool_choice": true }, "together_ai/togethercomputer/CodeLlama-34b-Instruct": { "litellm_provider": "together_ai", "supports_function_calling": true, "supports_parallel_function_calling": true, - "mode": "chat" + "mode": "chat", + "supports_tool_choice": true }, "ollama/codegemma": { "max_tokens": 8192, @@ -7081,7 +8341,8 @@ "input_cost_per_token": 0.00000070, "output_cost_per_token": 0.00000090, "litellm_provider": "deepinfra", - "mode": "chat" + "mode": "chat", + "supports_tool_choice": true }, "deepinfra/Gryphe/MythoMax-L2-13b": { "max_tokens": 4096, @@ -7090,7 +8351,8 @@ "input_cost_per_token": 0.00000022, "output_cost_per_token": 0.00000022, "litellm_provider": "deepinfra", - "mode": "chat" + "mode": "chat", + "supports_tool_choice": true }, "deepinfra/mistralai/Mistral-7B-Instruct-v0.1": { "max_tokens": 8191, @@ -7099,7 +8361,8 @@ "input_cost_per_token": 0.00000013, "output_cost_per_token": 0.00000013, "litellm_provider": "deepinfra", - "mode": "chat" + "mode": "chat", + "supports_tool_choice": true }, "deepinfra/meta-llama/Llama-2-70b-chat-hf": { "max_tokens": 4096, @@ -7108,7 +8371,8 @@ "input_cost_per_token": 0.00000070, "output_cost_per_token": 0.00000090, "litellm_provider": "deepinfra", - "mode": "chat" + "mode": "chat", + 
"supports_tool_choice": true }, "deepinfra/cognitivecomputations/dolphin-2.6-mixtral-8x7b": { "max_tokens": 8191, @@ -7117,7 +8381,8 @@ "input_cost_per_token": 0.00000027, "output_cost_per_token": 0.00000027, "litellm_provider": "deepinfra", - "mode": "chat" + "mode": "chat", + "supports_tool_choice": true }, "deepinfra/codellama/CodeLlama-34b-Instruct-hf": { "max_tokens": 4096, @@ -7126,7 +8391,8 @@ "input_cost_per_token": 0.00000060, "output_cost_per_token": 0.00000060, "litellm_provider": "deepinfra", - "mode": "chat" + "mode": "chat", + "supports_tool_choice": true }, "deepinfra/deepinfra/mixtral": { "max_tokens": 4096, @@ -7144,7 +8410,8 @@ "input_cost_per_token": 0.00000060, "output_cost_per_token": 0.00000060, "litellm_provider": "deepinfra", - "mode": "chat" + "mode": "chat", + "supports_tool_choice": true }, "deepinfra/mistralai/Mixtral-8x7B-Instruct-v0.1": { "max_tokens": 8191, @@ -7153,7 +8420,8 @@ "input_cost_per_token": 0.00000027, "output_cost_per_token": 0.00000027, "litellm_provider": "deepinfra", - "mode": "chat" + "mode": "chat", + "supports_tool_choice": true }, "deepinfra/deepinfra/airoboros-70b": { "max_tokens": 4096, @@ -7162,7 +8430,8 @@ "input_cost_per_token": 0.00000070, "output_cost_per_token": 0.00000090, "litellm_provider": "deepinfra", - "mode": "chat" + "mode": "chat", + "supports_tool_choice": true }, "deepinfra/01-ai/Yi-34B-Chat": { "max_tokens": 4096, @@ -7171,7 +8440,8 @@ "input_cost_per_token": 0.00000060, "output_cost_per_token": 0.00000060, "litellm_provider": "deepinfra", - "mode": "chat" + "mode": "chat", + "supports_tool_choice": true }, "deepinfra/01-ai/Yi-6B-200K": { "max_tokens": 4096, @@ -7189,7 +8459,8 @@ "input_cost_per_token": 0.00000070, "output_cost_per_token": 0.00000090, "litellm_provider": "deepinfra", - "mode": "chat" + "mode": "chat", + "supports_tool_choice": true }, "deepinfra/meta-llama/Llama-2-13b-chat-hf": { "max_tokens": 4096, @@ -7198,7 +8469,8 @@ "input_cost_per_token": 0.00000022, 
"output_cost_per_token": 0.00000022, "litellm_provider": "deepinfra", - "mode": "chat" + "mode": "chat", + "supports_tool_choice": true }, "deepinfra/amazon/MistralLite": { "max_tokens": 8191, @@ -7207,7 +8479,8 @@ "input_cost_per_token": 0.00000020, "output_cost_per_token": 0.00000020, "litellm_provider": "deepinfra", - "mode": "chat" + "mode": "chat", + "supports_tool_choice": true }, "deepinfra/meta-llama/Llama-2-7b-chat-hf": { "max_tokens": 4096, @@ -7216,7 +8489,8 @@ "input_cost_per_token": 0.00000013, "output_cost_per_token": 0.00000013, "litellm_provider": "deepinfra", - "mode": "chat" + "mode": "chat", + "supports_tool_choice": true }, "deepinfra/meta-llama/Meta-Llama-3-8B-Instruct": { "max_tokens": 8191, @@ -7225,7 +8499,8 @@ "input_cost_per_token": 0.00000008, "output_cost_per_token": 0.00000008, "litellm_provider": "deepinfra", - "mode": "chat" + "mode": "chat", + "supports_tool_choice": true }, "deepinfra/meta-llama/Meta-Llama-3-70B-Instruct": { "max_tokens": 8191, @@ -7234,7 +8509,8 @@ "input_cost_per_token": 0.00000059, "output_cost_per_token": 0.00000079, "litellm_provider": "deepinfra", - "mode": "chat" + "mode": "chat", + "supports_tool_choice": true }, "deepinfra/meta-llama/Meta-Llama-3.1-405B-Instruct": { "max_tokens": 32768, @@ -7245,7 +8521,8 @@ "litellm_provider": "deepinfra", "mode": "chat", "supports_function_calling": true, - "supports_parallel_function_calling": true + "supports_parallel_function_calling": true, + "supports_tool_choice": true }, "deepinfra/01-ai/Yi-34B-200K": { "max_tokens": 4096, @@ -7263,7 +8540,8 @@ "input_cost_per_token": 0.00000013, "output_cost_per_token": 0.00000013, "litellm_provider": "deepinfra", - "mode": "chat" + "mode": "chat", + "supports_tool_choice": true }, "perplexity/codellama-34b-instruct": { "max_tokens": 16384, @@ -7308,7 +8586,8 @@ "input_cost_per_token": 0.000005, "output_cost_per_token": 0.000005, "litellm_provider": "perplexity", - "mode": "chat" + "mode": "chat", + "deprecation_date": 
"2025-02-22" }, "perplexity/llama-3.1-sonar-large-128k-online": { "max_tokens": 127072, @@ -7317,7 +8596,8 @@ "input_cost_per_token": 0.000001, "output_cost_per_token": 0.000001, "litellm_provider": "perplexity", - "mode": "chat" + "mode": "chat", + "deprecation_date": "2025-02-22" }, "perplexity/llama-3.1-sonar-large-128k-chat": { "max_tokens": 131072, @@ -7326,7 +8606,8 @@ "input_cost_per_token": 0.000001, "output_cost_per_token": 0.000001, "litellm_provider": "perplexity", - "mode": "chat" + "mode": "chat", + "deprecation_date": "2025-02-22" }, "perplexity/llama-3.1-sonar-small-128k-chat": { "max_tokens": 131072, @@ -7335,7 +8616,8 @@ "input_cost_per_token": 0.0000002, "output_cost_per_token": 0.0000002, "litellm_provider": "perplexity", - "mode": "chat" + "mode": "chat", + "deprecation_date": "2025-02-22" }, "perplexity/llama-3.1-sonar-small-128k-online": { "max_tokens": 127072, @@ -7344,6 +8626,43 @@ "input_cost_per_token": 0.0000002, "output_cost_per_token": 0.0000002, "litellm_provider": "perplexity", + "mode": "chat" , + "deprecation_date": "2025-02-22" + }, + "perplexity/sonar": { + "max_tokens": 127072, + "max_input_tokens": 127072, + "max_output_tokens": 127072, + "input_cost_per_token": 0.000001, + "output_cost_per_token": 0.000001, + "litellm_provider": "perplexity", + "mode": "chat" + }, + "perplexity/sonar-pro": { + "max_tokens": 200000, + "max_input_tokens": 200000, + "max_output_tokens": 8096, + "input_cost_per_token": 0.000003, + "output_cost_per_token": 0.000015, + "litellm_provider": "perplexity", + "mode": "chat" + }, + "perplexity/sonar": { + "max_tokens": 127072, + "max_input_tokens": 127072, + "max_output_tokens": 127072, + "input_cost_per_token": 0.000001, + "output_cost_per_token": 0.000001, + "litellm_provider": "perplexity", + "mode": "chat" + }, + "perplexity/sonar-pro": { + "max_tokens": 200000, + "max_input_tokens": 200000, + "max_output_tokens": 8096, + "input_cost_per_token": 0.000003, + "output_cost_per_token": 0.000015, + 
"litellm_provider": "perplexity", "mode": "chat" }, "perplexity/pplx-7b-chat": { @@ -7459,7 +8778,8 @@ "mode": "chat", "supports_function_calling": true, "supports_response_schema": true, - "source": "https://fireworks.ai/pricing" + "source": "https://fireworks.ai/pricing", + "supports_tool_choice": true }, "fireworks_ai/accounts/fireworks/models/llama-v3p2-3b-instruct": { "max_tokens": 16384, @@ -7471,7 +8791,8 @@ "mode": "chat", "supports_function_calling": true, "supports_response_schema": true, - "source": "https://fireworks.ai/pricing" + "source": "https://fireworks.ai/pricing", + "supports_tool_choice": true }, "fireworks_ai/accounts/fireworks/models/llama-v3p1-8b-instruct": { "max_tokens": 16384, @@ -7483,7 +8804,8 @@ "mode": "chat", "supports_function_calling": true, "supports_response_schema": true, - "source": "https://fireworks.ai/pricing" + "source": "https://fireworks.ai/pricing", + "supports_tool_choice": true }, "fireworks_ai/accounts/fireworks/models/llama-v3p2-11b-vision-instruct": { "max_tokens": 16384, @@ -7496,7 +8818,8 @@ "supports_function_calling": true, "supports_vision": true, "supports_response_schema": true, - "source": "https://fireworks.ai/pricing" + "source": "https://fireworks.ai/pricing", + "supports_tool_choice": true }, "accounts/fireworks/models/llama-v3p2-90b-vision-instruct": { "max_tokens": 16384, @@ -7521,7 +8844,8 @@ "mode": "chat", "supports_function_calling": true, "supports_response_schema": true, - "source": "https://fireworks.ai/pricing" + "source": "https://fireworks.ai/pricing", + "supports_tool_choice": true }, "fireworks_ai/accounts/fireworks/models/mixtral-8x22b-instruct-hf": { "max_tokens": 65536, @@ -7533,7 +8857,8 @@ "mode": "chat", "supports_function_calling": true, "supports_response_schema": true, - "source": "https://fireworks.ai/pricing" + "source": "https://fireworks.ai/pricing", + "supports_tool_choice": true }, "fireworks_ai/accounts/fireworks/models/qwen2-72b-instruct": { "max_tokens": 32768, @@ -7545,7 
+8870,8 @@ "mode": "chat", "supports_function_calling": true, "supports_response_schema": true, - "source": "https://fireworks.ai/pricing" + "source": "https://fireworks.ai/pricing", + "supports_tool_choice": true }, "fireworks_ai/accounts/fireworks/models/qwen2p5-coder-32b-instruct": { "max_tokens": 4096, @@ -7557,7 +8883,8 @@ "mode": "chat", "supports_function_calling": true, "supports_response_schema": true, - "source": "https://fireworks.ai/pricing" + "source": "https://fireworks.ai/pricing", + "supports_tool_choice": true }, "fireworks_ai/accounts/fireworks/models/yi-large": { "max_tokens": 32768, @@ -7569,7 +8896,8 @@ "mode": "chat", "supports_function_calling": true, "supports_response_schema": true, - "source": "https://fireworks.ai/pricing" + "source": "https://fireworks.ai/pricing", + "supports_tool_choice": true }, "fireworks_ai/accounts/fireworks/models/deepseek-coder-v2-instruct": { "max_tokens": 65536, @@ -7581,7 +8909,8 @@ "mode": "chat", "supports_function_calling": true, "supports_response_schema": true, - "source": "https://fireworks.ai/pricing" + "source": "https://fireworks.ai/pricing", + "supports_tool_choice": true }, "fireworks_ai/accounts/fireworks/models/deepseek-v3": { "max_tokens": 8192, @@ -7592,7 +8921,8 @@ "litellm_provider": "fireworks_ai", "mode": "chat", "supports_response_schema": true, - "source": "https://fireworks.ai/pricing" + "source": "https://fireworks.ai/pricing", + "supports_tool_choice": true }, "fireworks_ai/nomic-ai/nomic-embed-text-v1.5": { @@ -7966,7 +9296,8 @@ "litellm_provider": "databricks", "mode": "chat", "source": "https://www.databricks.com/product/pricing/foundation-model-serving", - "metadata": {"notes": "Input/output cost per token is dbu cost * $0.070, based on databricks Llama 3.1 70B conversion. Number provided for reference, '*_dbu_cost_per_token' used in actual calculation."} + "metadata": {"notes": "Input/output cost per token is dbu cost * $0.070, based on databricks Llama 3.1 70B conversion. 
Number provided for reference, '*_dbu_cost_per_token' used in actual calculation."}, + "supports_tool_choice": true }, "databricks/databricks-meta-llama-3-1-70b-instruct": { "max_tokens": 128000, @@ -7979,7 +9310,8 @@ "litellm_provider": "databricks", "mode": "chat", "source": "https://www.databricks.com/product/pricing/foundation-model-serving", - "metadata": {"notes": "Input/output cost per token is dbu cost * $0.070, based on databricks Llama 3.1 70B conversion. Number provided for reference, '*_dbu_cost_per_token' used in actual calculation."} + "metadata": {"notes": "Input/output cost per token is dbu cost * $0.070, based on databricks Llama 3.1 70B conversion. Number provided for reference, '*_dbu_cost_per_token' used in actual calculation."}, + "supports_tool_choice": true }, "databricks/meta-llama-3.3-70b-instruct": { "max_tokens": 128000, @@ -7992,7 +9324,8 @@ "litellm_provider": "databricks", "mode": "chat", "source": "https://www.databricks.com/product/pricing/foundation-model-serving", - "metadata": {"notes": "Input/output cost per token is dbu cost * $0.070, based on databricks Llama 3.1 70B conversion. Number provided for reference, '*_dbu_cost_per_token' used in actual calculation."} + "metadata": {"notes": "Input/output cost per token is dbu cost * $0.070, based on databricks Llama 3.1 70B conversion. Number provided for reference, '*_dbu_cost_per_token' used in actual calculation."}, + "supports_tool_choice": true }, "databricks/databricks-dbrx-instruct": { "max_tokens": 32768, @@ -8005,7 +9338,8 @@ "litellm_provider": "databricks", "mode": "chat", "source": "https://www.databricks.com/product/pricing/foundation-model-serving", - "metadata": {"notes": "Input/output cost per token is dbu cost * $0.070, based on databricks Llama 3.1 70B conversion. Number provided for reference, '*_dbu_cost_per_token' used in actual calculation."} + "metadata": {"notes": "Input/output cost per token is dbu cost * $0.070, based on databricks Llama 3.1 70B conversion. 
Number provided for reference, '*_dbu_cost_per_token' used in actual calculation."}, + "supports_tool_choice": true }, "databricks/databricks-meta-llama-3-70b-instruct": { "max_tokens": 128000, @@ -8018,7 +9352,8 @@ "litellm_provider": "databricks", "mode": "chat", "source": "https://www.databricks.com/product/pricing/foundation-model-serving", - "metadata": {"notes": "Input/output cost per token is dbu cost * $0.070, based on databricks Llama 3.1 70B conversion. Number provided for reference, '*_dbu_cost_per_token' used in actual calculation."} + "metadata": {"notes": "Input/output cost per token is dbu cost * $0.070, based on databricks Llama 3.1 70B conversion. Number provided for reference, '*_dbu_cost_per_token' used in actual calculation."}, + "supports_tool_choice": true }, "databricks/databricks-llama-2-70b-chat": { "max_tokens": 4096, @@ -8031,7 +9366,8 @@ "litellm_provider": "databricks", "mode": "chat", "source": "https://www.databricks.com/product/pricing/foundation-model-serving", - "metadata": {"notes": "Input/output cost per token is dbu cost * $0.070, based on databricks Llama 3.1 70B conversion. Number provided for reference, '*_dbu_cost_per_token' used in actual calculation."} + "metadata": {"notes": "Input/output cost per token is dbu cost * $0.070, based on databricks Llama 3.1 70B conversion. Number provided for reference, '*_dbu_cost_per_token' used in actual calculation."}, + "supports_tool_choice": true }, "databricks/databricks-mixtral-8x7b-instruct": { "max_tokens": 4096, @@ -8044,7 +9380,8 @@ "litellm_provider": "databricks", "mode": "chat", "source": "https://www.databricks.com/product/pricing/foundation-model-serving", - "metadata": {"notes": "Input/output cost per token is dbu cost * $0.070, based on databricks Llama 3.1 70B conversion. 
Number provided for reference, '*_dbu_cost_per_token' used in actual calculation."} + "metadata": {"notes": "Input/output cost per token is dbu cost * $0.070, based on databricks Llama 3.1 70B conversion. Number provided for reference, '*_dbu_cost_per_token' used in actual calculation."}, + "supports_tool_choice": true }, "databricks/databricks-mpt-30b-instruct": { "max_tokens": 8192, @@ -8057,7 +9394,8 @@ "litellm_provider": "databricks", "mode": "chat", "source": "https://www.databricks.com/product/pricing/foundation-model-serving", - "metadata": {"notes": "Input/output cost per token is dbu cost * $0.070, based on databricks Llama 3.1 70B conversion. Number provided for reference, '*_dbu_cost_per_token' used in actual calculation."} + "metadata": {"notes": "Input/output cost per token is dbu cost * $0.070, based on databricks Llama 3.1 70B conversion. Number provided for reference, '*_dbu_cost_per_token' used in actual calculation."}, + "supports_tool_choice": true }, "databricks/databricks-mpt-7b-instruct": { "max_tokens": 8192, @@ -8070,7 +9408,8 @@ "litellm_provider": "databricks", "mode": "chat", "source": "https://www.databricks.com/product/pricing/foundation-model-serving", - "metadata": {"notes": "Input/output cost per token is dbu cost * $0.070, based on databricks Llama 3.1 70B conversion. Number provided for reference, '*_dbu_cost_per_token' used in actual calculation."} + "metadata": {"notes": "Input/output cost per token is dbu cost * $0.070, based on databricks Llama 3.1 70B conversion. 
Number provided for reference, '*_dbu_cost_per_token' used in actual calculation."}, + "supports_tool_choice": true }, "databricks/databricks-bge-large-en": { "max_tokens": 512, @@ -8106,7 +9445,8 @@ "output_cost_per_token": 0.0000002, "litellm_provider": "sambanova", "supports_function_calling": true, - "mode": "chat" + "mode": "chat", + "supports_tool_choice": true }, "sambanova/Meta-Llama-3.1-70B-Instruct": { "max_tokens": 128000, @@ -8116,7 +9456,8 @@ "output_cost_per_token": 0.0000012, "litellm_provider": "sambanova", "supports_function_calling": true, - "mode": "chat" + "mode": "chat", + "supports_tool_choice": true }, "sambanova/Meta-Llama-3.1-405B-Instruct": { "max_tokens": 16000, @@ -8126,7 +9467,8 @@ "output_cost_per_token": 0.000010, "litellm_provider": "sambanova", "supports_function_calling": true, - "mode": "chat" + "mode": "chat", + "supports_tool_choice": true }, "sambanova/Meta-Llama-3.2-1B-Instruct": { "max_tokens": 16000, @@ -8136,7 +9478,8 @@ "output_cost_per_token": 0.0000008, "litellm_provider": "sambanova", "supports_function_calling": true, - "mode": "chat" + "mode": "chat", + "supports_tool_choice": true }, "sambanova/Meta-Llama-3.2-3B-Instruct": { "max_tokens": 4000, @@ -8146,7 +9489,8 @@ "output_cost_per_token": 0.0000016, "litellm_provider": "sambanova", "supports_function_calling": true, - "mode": "chat" + "mode": "chat", + "supports_tool_choice": true }, "sambanova/Qwen2.5-Coder-32B-Instruct": { "max_tokens": 8000, @@ -8156,7 +9500,8 @@ "output_cost_per_token": 0.000003, "litellm_provider": "sambanova", "supports_function_calling": true, - "mode": "chat" + "mode": "chat", + "supports_tool_choice": true }, "sambanova/Qwen2.5-72B-Instruct": { "max_tokens": 8000, @@ -8166,6 +9511,29 @@ "output_cost_per_token": 0.000004, "litellm_provider": "sambanova", "supports_function_calling": true, - "mode": "chat" + "mode": "chat", + "supports_tool_choice": true + }, + "assemblyai/nano": { + "mode": "audio_transcription", + "input_cost_per_second": 
0.00010278, + "output_cost_per_second": 0.00, + "litellm_provider": "assemblyai" + }, + "assemblyai/best": { + "mode": "audio_transcription", + "input_cost_per_second": 0.00003333, + "output_cost_per_second": 0.00, + "litellm_provider": "assemblyai" + }, + "jina-reranker-v2-base-multilingual": { + "max_tokens": 1024, + "max_input_tokens": 1024, + "max_output_tokens": 1024, + "max_document_chunks_per_query": 2048, + "input_cost_per_token": 0.000000018, + "output_cost_per_token": 0.000000018, + "litellm_provider": "jina_ai", + "mode": "rerank" } } diff --git a/litellm/proxy/_experimental/out/404.html b/litellm/proxy/_experimental/out/404.html deleted file mode 100644 index fa46309825..0000000000 --- a/litellm/proxy/_experimental/out/404.html +++ /dev/null @@ -1 +0,0 @@ -404: This page could not be found.LiteLLM Dashboard404This page could not be found. \ No newline at end of file diff --git a/litellm/proxy/_experimental/out/_next/static/NklxcmMcgRgF-HsEoNQ7w/_buildManifest.js b/litellm/proxy/_experimental/out/_next/static/Z1erUy-o9upLJI4iG8OBo/_buildManifest.js similarity index 100% rename from litellm/proxy/_experimental/out/_next/static/NklxcmMcgRgF-HsEoNQ7w/_buildManifest.js rename to litellm/proxy/_experimental/out/_next/static/Z1erUy-o9upLJI4iG8OBo/_buildManifest.js diff --git a/litellm/proxy/_experimental/out/_next/static/NklxcmMcgRgF-HsEoNQ7w/_ssgManifest.js b/litellm/proxy/_experimental/out/_next/static/Z1erUy-o9upLJI4iG8OBo/_ssgManifest.js similarity index 100% rename from litellm/proxy/_experimental/out/_next/static/NklxcmMcgRgF-HsEoNQ7w/_ssgManifest.js rename to litellm/proxy/_experimental/out/_next/static/Z1erUy-o9upLJI4iG8OBo/_ssgManifest.js diff --git a/litellm/proxy/_experimental/out/_next/static/chunks/117-2d8e84979f319d39.js b/litellm/proxy/_experimental/out/_next/static/chunks/117-883150efc583d711.js similarity index 100% rename from litellm/proxy/_experimental/out/_next/static/chunks/117-2d8e84979f319d39.js rename to 
litellm/proxy/_experimental/out/_next/static/chunks/117-883150efc583d711.js diff --git a/litellm/proxy/_experimental/out/_next/static/chunks/157-cf7bc8b3ae1b80ba.js b/litellm/proxy/_experimental/out/_next/static/chunks/157-cf7bc8b3ae1b80ba.js new file mode 100644 index 0000000000..6a596c25d8 --- /dev/null +++ b/litellm/proxy/_experimental/out/_next/static/chunks/157-cf7bc8b3ae1b80ba.js @@ -0,0 +1,11 @@ +(self.webpackChunk_N_E=self.webpackChunk_N_E||[]).push([[157],{12660:function(e,t,n){"use strict";n.d(t,{Z:function(){return l}});var r=n(1119),o=n(2265),i={icon:{tag:"svg",attrs:{viewBox:"64 64 896 896",focusable:"false"},children:[{tag:"path",attrs:{d:"M917.7 148.8l-42.4-42.4c-1.6-1.6-3.6-2.3-5.7-2.3s-4.1.8-5.7 2.3l-76.1 76.1a199.27 199.27 0 00-112.1-34.3c-51.2 0-102.4 19.5-141.5 58.6L432.3 308.7a8.03 8.03 0 000 11.3L704 591.7c1.6 1.6 3.6 2.3 5.7 2.3 2 0 4.1-.8 5.7-2.3l101.9-101.9c68.9-69 77-175.7 24.3-253.5l76.1-76.1c3.1-3.2 3.1-8.3 0-11.4zM769.1 441.7l-59.4 59.4-186.8-186.8 59.4-59.4c24.9-24.9 58.1-38.7 93.4-38.7 35.3 0 68.4 13.7 93.4 38.7 24.9 24.9 38.7 58.1 38.7 93.4 0 35.3-13.8 68.4-38.7 93.4zm-190.2 105a8.03 8.03 0 00-11.3 0L501 613.3 410.7 523l66.7-66.7c3.1-3.1 3.1-8.2 0-11.3L441 408.6a8.03 8.03 0 00-11.3 0L363 475.3l-43-43a7.85 7.85 0 00-5.7-2.3c-2 0-4.1.8-5.7 2.3L206.8 534.2c-68.9 69-77 175.7-24.3 253.5l-76.1 76.1a8.03 8.03 0 000 11.3l42.4 42.4c1.6 1.6 3.6 2.3 5.7 2.3s4.1-.8 5.7-2.3l76.1-76.1c33.7 22.9 72.9 34.3 112.1 34.3 51.2 0 102.4-19.5 141.5-58.6l101.9-101.9c3.1-3.1 3.1-8.2 0-11.3l-43-43 66.7-66.7c3.1-3.1 3.1-8.2 0-11.3l-36.6-36.2zM441.7 769.1a131.32 131.32 0 01-93.4 38.7c-35.3 0-68.4-13.7-93.4-38.7a131.32 131.32 0 01-38.7-93.4c0-35.3 13.7-68.4 38.7-93.4l59.4-59.4 186.8 186.8-59.4 59.4z"}}]},name:"api",theme:"outlined"},a=n(55015),l=o.forwardRef(function(e,t){return o.createElement(a.Z,(0,r.Z)({},e,{ref:t,icon:i}))})},88009:function(e,t,n){"use strict";n.d(t,{Z:function(){return l}});var r=n(1119),o=n(2265),i={icon:{tag:"svg",attrs:{viewBox:"64 64 
896 896",focusable:"false"},children:[{tag:"path",attrs:{d:"M464 144H160c-8.8 0-16 7.2-16 16v304c0 8.8 7.2 16 16 16h304c8.8 0 16-7.2 16-16V160c0-8.8-7.2-16-16-16zm-52 268H212V212h200v200zm452-268H560c-8.8 0-16 7.2-16 16v304c0 8.8 7.2 16 16 16h304c8.8 0 16-7.2 16-16V160c0-8.8-7.2-16-16-16zm-52 268H612V212h200v200zM464 544H160c-8.8 0-16 7.2-16 16v304c0 8.8 7.2 16 16 16h304c8.8 0 16-7.2 16-16V560c0-8.8-7.2-16-16-16zm-52 268H212V612h200v200zm452-268H560c-8.8 0-16 7.2-16 16v304c0 8.8 7.2 16 16 16h304c8.8 0 16-7.2 16-16V560c0-8.8-7.2-16-16-16zm-52 268H612V612h200v200z"}}]},name:"appstore",theme:"outlined"},a=n(55015),l=o.forwardRef(function(e,t){return o.createElement(a.Z,(0,r.Z)({},e,{ref:t,icon:i}))})},37527:function(e,t,n){"use strict";n.d(t,{Z:function(){return l}});var r=n(1119),o=n(2265),i={icon:{tag:"svg",attrs:{viewBox:"64 64 896 896",focusable:"false"},children:[{tag:"path",attrs:{d:"M894 462c30.9 0 43.8-39.7 18.7-58L530.8 126.2a31.81 31.81 0 00-37.6 0L111.3 404c-25.1 18.2-12.2 58 18.8 58H192v374h-72c-4.4 0-8 3.6-8 8v52c0 4.4 3.6 8 8 8h784c4.4 0 8-3.6 8-8v-52c0-4.4-3.6-8-8-8h-72V462h62zM512 196.7l271.1 197.2H240.9L512 196.7zM264 462h117v374H264V462zm189 0h117v374H453V462zm307 374H642V462h118v374z"}}]},name:"bank",theme:"outlined"},a=n(55015),l=o.forwardRef(function(e,t){return o.createElement(a.Z,(0,r.Z)({},e,{ref:t,icon:i}))})},9775:function(e,t,n){"use strict";n.d(t,{Z:function(){return l}});var r=n(1119),o=n(2265),i={icon:{tag:"svg",attrs:{viewBox:"64 64 896 896",focusable:"false"},children:[{tag:"path",attrs:{d:"M888 792H200V168c0-4.4-3.6-8-8-8h-56c-4.4 0-8 3.6-8 8v688c0 4.4 3.6 8 8 8h752c4.4 0 8-3.6 8-8v-56c0-4.4-3.6-8-8-8zm-600-80h56c4.4 0 8-3.6 8-8V560c0-4.4-3.6-8-8-8h-56c-4.4 0-8 3.6-8 8v144c0 4.4 3.6 8 8 8zm152 0h56c4.4 0 8-3.6 8-8V384c0-4.4-3.6-8-8-8h-56c-4.4 0-8 3.6-8 8v320c0 4.4 3.6 8 8 8zm152 0h56c4.4 0 8-3.6 8-8V462c0-4.4-3.6-8-8-8h-56c-4.4 0-8 3.6-8 8v242c0 4.4 3.6 8 8 8zm152 0h56c4.4 0 8-3.6 8-8V304c0-4.4-3.6-8-8-8h-56c-4.4 0-8 3.6-8 8v400c0 4.4 
3.6 8 8 8z"}}]},name:"bar-chart",theme:"outlined"},a=n(55015),l=o.forwardRef(function(e,t){return o.createElement(a.Z,(0,r.Z)({},e,{ref:t,icon:i}))})},68208:function(e,t,n){"use strict";n.d(t,{Z:function(){return l}});var r=n(1119),o=n(2265),i={icon:{tag:"svg",attrs:{viewBox:"64 64 896 896",focusable:"false"},children:[{tag:"path",attrs:{d:"M856 376H648V168c0-8.8-7.2-16-16-16H168c-8.8 0-16 7.2-16 16v464c0 8.8 7.2 16 16 16h208v208c0 8.8 7.2 16 16 16h464c8.8 0 16-7.2 16-16V392c0-8.8-7.2-16-16-16zm-480 16v188H220V220h360v156H392c-8.8 0-16 7.2-16 16zm204 52v136H444V444h136zm224 360H444V648h188c8.8 0 16-7.2 16-16V444h156v360z"}}]},name:"block",theme:"outlined"},a=n(55015),l=o.forwardRef(function(e,t){return o.createElement(a.Z,(0,r.Z)({},e,{ref:t,icon:i}))})},9738:function(e,t,n){"use strict";n.d(t,{Z:function(){return l}});var r=n(1119),o=n(2265),i={icon:{tag:"svg",attrs:{viewBox:"64 64 896 896",focusable:"false"},children:[{tag:"path",attrs:{d:"M912 190h-69.9c-9.8 0-19.1 4.5-25.1 12.2L404.7 724.5 207 474a32 32 0 00-25.1-12.2H112c-6.7 0-10.4 7.7-6.3 12.9l273.9 347c12.8 16.2 37.4 16.2 50.3 0l488.4-618.9c4.1-5.1.4-12.8-6.3-12.8z"}}]},name:"check",theme:"outlined"},a=n(55015),l=o.forwardRef(function(e,t){return o.createElement(a.Z,(0,r.Z)({},e,{ref:t,icon:i}))})},44625:function(e,t,n){"use strict";n.d(t,{Z:function(){return l}});var r=n(1119),o=n(2265),i={icon:{tag:"svg",attrs:{viewBox:"64 64 896 896",focusable:"false"},children:[{tag:"path",attrs:{d:"M832 64H192c-17.7 0-32 14.3-32 32v832c0 17.7 14.3 32 32 32h640c17.7 0 32-14.3 32-32V96c0-17.7-14.3-32-32-32zm-600 72h560v208H232V136zm560 480H232V408h560v208zm0 272H232V680h560v208zM304 240a40 40 0 1080 0 40 40 0 10-80 0zm0 272a40 40 0 1080 0 40 40 0 10-80 0zm0 272a40 40 0 1080 0 40 40 0 10-80 0z"}}]},name:"database",theme:"outlined"},a=n(55015),l=o.forwardRef(function(e,t){return o.createElement(a.Z,(0,r.Z)({},e,{ref:t,icon:i}))})},70464:function(e,t,n){"use strict";n.d(t,{Z:function(){return l}});var 
r=n(1119),o=n(2265),i={icon:{tag:"svg",attrs:{viewBox:"64 64 896 896",focusable:"false"},children:[{tag:"path",attrs:{d:"M884 256h-75c-5.1 0-9.9 2.5-12.9 6.6L512 654.2 227.9 262.6c-3-4.1-7.8-6.6-12.9-6.6h-75c-6.5 0-10.3 7.4-6.5 12.7l352.6 486.1c12.8 17.6 39 17.6 51.7 0l352.6-486.1c3.9-5.3.1-12.7-6.4-12.7z"}}]},name:"down",theme:"outlined"},a=n(55015),l=o.forwardRef(function(e,t){return o.createElement(a.Z,(0,r.Z)({},e,{ref:t,icon:i}))})},73879:function(e,t,n){"use strict";n.d(t,{Z:function(){return l}});var r=n(1119),o=n(2265),i={icon:{tag:"svg",attrs:{viewBox:"64 64 896 896",focusable:"false"},children:[{tag:"path",attrs:{d:"M505.7 661a8 8 0 0012.6 0l112-141.7c4.1-5.2.4-12.9-6.3-12.9h-74.1V168c0-4.4-3.6-8-8-8h-60c-4.4 0-8 3.6-8 8v338.3H400c-6.7 0-10.4 7.7-6.3 12.9l112 141.8zM878 626h-60c-4.4 0-8 3.6-8 8v154H214V634c0-4.4-3.6-8-8-8h-60c-4.4 0-8 3.6-8 8v198c0 17.7 14.3 32 32 32h684c17.7 0 32-14.3 32-32V634c0-4.4-3.6-8-8-8z"}}]},name:"download",theme:"outlined"},a=n(55015),l=o.forwardRef(function(e,t){return o.createElement(a.Z,(0,r.Z)({},e,{ref:t,icon:i}))})},39760:function(e,t,n){"use strict";n.d(t,{Z:function(){return l}});var r=n(1119),o=n(2265),i={icon:{tag:"svg",attrs:{viewBox:"64 64 896 896",focusable:"false"},children:[{tag:"path",attrs:{d:"M176 511a56 56 0 10112 0 56 56 0 10-112 0zm280 0a56 56 0 10112 0 56 56 0 10-112 0zm280 0a56 56 0 10112 0 56 56 0 10-112 0z"}}]},name:"ellipsis",theme:"outlined"},a=n(55015),l=o.forwardRef(function(e,t){return o.createElement(a.Z,(0,r.Z)({},e,{ref:t,icon:i}))})},41169:function(e,t,n){"use strict";n.d(t,{Z:function(){return l}});var r=n(1119),o=n(2265),i={icon:{tag:"svg",attrs:{viewBox:"64 64 896 896",focusable:"false"},children:[{tag:"path",attrs:{d:"M512 472a40 40 0 1080 0 40 40 0 10-80 0zm367 352.9L696.3 352V178H768v-68H256v68h71.7v174L145 824.9c-2.8 7.4-4.3 15.2-4.3 23.1 0 35.3 28.7 64 64 64h614.6c7.9 0 15.7-1.5 23.1-4.3 33-12.7 49.4-49.8 36.6-82.8zM395.7 364.7V180h232.6v184.7L719.2 600c-20.7-5.3-42.1-8-63.9-8-61.2 
0-119.2 21.5-165.3 60a188.78 188.78 0 01-121.3 43.9c-32.7 0-64.1-8.3-91.8-23.7l118.8-307.5zM210.5 844l41.7-107.8c35.7 18.1 75.4 27.8 116.6 27.8 61.2 0 119.2-21.5 165.3-60 33.9-28.2 76.3-43.9 121.3-43.9 35 0 68.4 9.5 97.6 27.1L813.5 844h-603z"}}]},name:"experiment",theme:"outlined"},a=n(55015),l=o.forwardRef(function(e,t){return o.createElement(a.Z,(0,r.Z)({},e,{ref:t,icon:i}))})},6520:function(e,t,n){"use strict";n.d(t,{Z:function(){return l}});var r=n(1119),o=n(2265),i={icon:{tag:"svg",attrs:{viewBox:"64 64 896 896",focusable:"false"},children:[{tag:"path",attrs:{d:"M942.2 486.2C847.4 286.5 704.1 186 512 186c-192.2 0-335.4 100.5-430.2 300.3a60.3 60.3 0 000 51.5C176.6 737.5 319.9 838 512 838c192.2 0 335.4-100.5 430.2-300.3 7.7-16.2 7.7-35 0-51.5zM512 766c-161.3 0-279.4-81.8-362.7-254C232.6 339.8 350.7 258 512 258c161.3 0 279.4 81.8 362.7 254C791.5 684.2 673.4 766 512 766zm-4-430c-97.2 0-176 78.8-176 176s78.8 176 176 176 176-78.8 176-176-78.8-176-176-176zm0 288c-61.9 0-112-50.1-112-112s50.1-112 112-112 112 50.1 112 112-50.1 112-112 112z"}}]},name:"eye",theme:"outlined"},a=n(55015),l=o.forwardRef(function(e,t){return o.createElement(a.Z,(0,r.Z)({},e,{ref:t,icon:i}))})},15424:function(e,t,n){"use strict";n.d(t,{Z:function(){return l}});var r=n(1119),o=n(2265),i={icon:{tag:"svg",attrs:{viewBox:"64 64 896 896",focusable:"false"},children:[{tag:"path",attrs:{d:"M512 64C264.6 64 64 264.6 64 512s200.6 448 448 448 448-200.6 448-448S759.4 64 512 64zm0 820c-205.4 0-372-166.6-372-372s166.6-372 372-372 372 166.6 372 372-166.6 372-372 372z"}},{tag:"path",attrs:{d:"M464 336a48 48 0 1096 0 48 48 0 10-96 0zm72 112h-48c-4.4 0-8 3.6-8 8v272c0 4.4 3.6 8 8 8h48c4.4 0 8-3.6 8-8V456c0-4.4-3.6-8-8-8z"}}]},name:"info-circle",theme:"outlined"},a=n(55015),l=o.forwardRef(function(e,t){return o.createElement(a.Z,(0,r.Z)({},e,{ref:t,icon:i}))})},92403:function(e,t,n){"use strict";n.d(t,{Z:function(){return l}});var r=n(1119),o=n(2265),i={icon:{tag:"svg",attrs:{viewBox:"64 64 896 
896",focusable:"false"},children:[{tag:"path",attrs:{d:"M608 112c-167.9 0-304 136.1-304 304 0 70.3 23.9 135 63.9 186.5l-41.1 41.1-62.3-62.3a8.15 8.15 0 00-11.4 0l-39.8 39.8a8.15 8.15 0 000 11.4l62.3 62.3-44.9 44.9-62.3-62.3a8.15 8.15 0 00-11.4 0l-39.8 39.8a8.15 8.15 0 000 11.4l62.3 62.3-65.3 65.3a8.03 8.03 0 000 11.3l42.3 42.3c3.1 3.1 8.2 3.1 11.3 0l253.6-253.6A304.06 304.06 0 00608 720c167.9 0 304-136.1 304-304S775.9 112 608 112zm161.2 465.2C726.2 620.3 668.9 644 608 644c-60.9 0-118.2-23.7-161.2-66.8-43.1-43-66.8-100.3-66.8-161.2 0-60.9 23.7-118.2 66.8-161.2 43-43.1 100.3-66.8 161.2-66.8 60.9 0 118.2 23.7 161.2 66.8 43.1 43 66.8 100.3 66.8 161.2 0 60.9-23.7 118.2-66.8 161.2z"}}]},name:"key",theme:"outlined"},a=n(55015),l=o.forwardRef(function(e,t){return o.createElement(a.Z,(0,r.Z)({},e,{ref:t,icon:i}))})},15327:function(e,t,n){"use strict";n.d(t,{Z:function(){return l}});var r=n(1119),o=n(2265),i={icon:{tag:"svg",attrs:{viewBox:"64 64 896 896",focusable:"false"},children:[{tag:"path",attrs:{d:"M724 218.3V141c0-6.7-7.7-10.4-12.9-6.3L260.3 486.8a31.86 31.86 0 000 50.3l450.8 352.1c5.3 4.1 12.9.4 12.9-6.3v-77.3c0-4.9-2.3-9.6-6.1-12.6l-360-281 360-281.1c3.8-3 6.1-7.7 6.1-12.6z"}}]},name:"left",theme:"outlined"},a=n(55015),l=o.forwardRef(function(e,t){return o.createElement(a.Z,(0,r.Z)({},e,{ref:t,icon:i}))})},48231:function(e,t,n){"use strict";n.d(t,{Z:function(){return l}});var r=n(1119),o=n(2265),i={icon:{tag:"svg",attrs:{viewBox:"64 64 896 896",focusable:"false"},children:[{tag:"path",attrs:{d:"M888 792H200V168c0-4.4-3.6-8-8-8h-56c-4.4 0-8 3.6-8 8v688c0 4.4 3.6 8 8 8h752c4.4 0 8-3.6 8-8v-56c0-4.4-3.6-8-8-8zM305.8 637.7c3.1 3.1 8.1 3.1 11.3 0l138.3-137.6L583 628.5c3.1 3.1 8.2 3.1 11.3 0l275.4-275.3c3.1-3.1 3.1-8.2 0-11.3l-39.6-39.6a8.03 8.03 0 00-11.3 0l-230 229.9L461.4 404a8.03 8.03 0 00-11.3 0L266.3 586.7a8.03 8.03 0 000 11.3l39.5 39.7z"}}]},name:"line-chart",theme:"outlined"},a=n(55015),l=o.forwardRef(function(e,t){return 
o.createElement(a.Z,(0,r.Z)({},e,{ref:t,icon:i}))})},40428:function(e,t,n){"use strict";n.d(t,{Z:function(){return l}});var r=n(1119),o=n(2265),i={icon:{tag:"svg",attrs:{viewBox:"64 64 896 896",focusable:"false"},children:[{tag:"path",attrs:{d:"M868 732h-70.3c-4.8 0-9.3 2.1-12.3 5.8-7 8.5-14.5 16.7-22.4 24.5a353.84 353.84 0 01-112.7 75.9A352.8 352.8 0 01512.4 866c-47.9 0-94.3-9.4-137.9-27.8a353.84 353.84 0 01-112.7-75.9 353.28 353.28 0 01-76-112.5C167.3 606.2 158 559.9 158 512s9.4-94.2 27.8-137.8c17.8-42.1 43.4-80 76-112.5s70.5-58.1 112.7-75.9c43.6-18.4 90-27.8 137.9-27.8 47.9 0 94.3 9.3 137.9 27.8 42.2 17.8 80.1 43.4 112.7 75.9 7.9 7.9 15.3 16.1 22.4 24.5 3 3.7 7.6 5.8 12.3 5.8H868c6.3 0 10.2-7 6.7-12.3C798 160.5 663.8 81.6 511.3 82 271.7 82.6 79.6 277.1 82 516.4 84.4 751.9 276.2 942 512.4 942c152.1 0 285.7-78.8 362.3-197.7 3.4-5.3-.4-12.3-6.7-12.3zm88.9-226.3L815 393.7c-5.3-4.2-13-.4-13 6.3v76H488c-4.4 0-8 3.6-8 8v56c0 4.4 3.6 8 8 8h314v76c0 6.7 7.8 10.5 13 6.3l141.9-112a8 8 0 000-12.6z"}}]},name:"logout",theme:"outlined"},a=n(55015),l=o.forwardRef(function(e,t){return o.createElement(a.Z,(0,r.Z)({},e,{ref:t,icon:i}))})},45246:function(e,t,n){"use strict";n.d(t,{Z:function(){return l}});var r=n(1119),o=n(2265),i={icon:{tag:"svg",attrs:{viewBox:"64 64 896 896",focusable:"false"},children:[{tag:"path",attrs:{d:"M696 480H328c-4.4 0-8 3.6-8 8v48c0 4.4 3.6 8 8 8h368c4.4 0 8-3.6 8-8v-48c0-4.4-3.6-8-8-8z"}},{tag:"path",attrs:{d:"M512 64C264.6 64 64 264.6 64 512s200.6 448 448 448 448-200.6 448-448S759.4 64 512 64zm0 820c-205.4 0-372-166.6-372-372s166.6-372 372-372 372 166.6 372 372-166.6 372-372 372z"}}]},name:"minus-circle",theme:"outlined"},a=n(55015),l=o.forwardRef(function(e,t){return o.createElement(a.Z,(0,r.Z)({},e,{ref:t,icon:i}))})},28595:function(e,t,n){"use strict";n.d(t,{Z:function(){return l}});var r=n(1119),o=n(2265),i={icon:{tag:"svg",attrs:{viewBox:"64 64 896 896",focusable:"false"},children:[{tag:"path",attrs:{d:"M512 64C264.6 64 64 264.6 64 512s200.6 448 
448 448 448-200.6 448-448S759.4 64 512 64zm0 820c-205.4 0-372-166.6-372-372s166.6-372 372-372 372 166.6 372 372-166.6 372-372 372z"}},{tag:"path",attrs:{d:"M719.4 499.1l-296.1-215A15.9 15.9 0 00398 297v430c0 13.1 14.8 20.5 25.3 12.9l296.1-215a15.9 15.9 0 000-25.8zm-257.6 134V390.9L628.5 512 461.8 633.1z"}}]},name:"play-circle",theme:"outlined"},a=n(55015),l=o.forwardRef(function(e,t){return o.createElement(a.Z,(0,r.Z)({},e,{ref:t,icon:i}))})},96473:function(e,t,n){"use strict";n.d(t,{Z:function(){return l}});var r=n(1119),o=n(2265),i={icon:{tag:"svg",attrs:{viewBox:"64 64 896 896",focusable:"false"},children:[{tag:"path",attrs:{d:"M482 152h60q8 0 8 8v704q0 8-8 8h-60q-8 0-8-8V160q0-8 8-8z"}},{tag:"path",attrs:{d:"M192 474h672q8 0 8 8v60q0 8-8 8H160q-8 0-8-8v-60q0-8 8-8z"}}]},name:"plus",theme:"outlined"},a=n(55015),l=o.forwardRef(function(e,t){return o.createElement(a.Z,(0,r.Z)({},e,{ref:t,icon:i}))})},57400:function(e,t,n){"use strict";n.d(t,{Z:function(){return l}});var r=n(1119),o=n(2265),i={icon:{tag:"svg",attrs:{viewBox:"0 0 1024 1024",focusable:"false"},children:[{tag:"path",attrs:{d:"M512 64L128 192v384c0 212.1 171.9 384 384 384s384-171.9 384-384V192L512 64zm312 512c0 172.3-139.7 312-312 312S200 748.3 200 576V246l312-110 312 110v330z"}},{tag:"path",attrs:{d:"M378.4 475.1a35.91 35.91 0 00-50.9 0 35.91 35.91 0 000 50.9l129.4 129.4 2.1 2.1a33.98 33.98 0 0048.1 0L730.6 434a33.98 33.98 0 000-48.1l-2.8-2.8a33.98 33.98 0 00-48.1 0L483 579.7 378.4 475.1z"}}]},name:"safety",theme:"outlined"},a=n(55015),l=o.forwardRef(function(e,t){return o.createElement(a.Z,(0,r.Z)({},e,{ref:t,icon:i}))})},29436:function(e,t,n){"use strict";n.d(t,{Z:function(){return l}});var r=n(1119),o=n(2265),i={icon:{tag:"svg",attrs:{viewBox:"64 64 896 896",focusable:"false"},children:[{tag:"path",attrs:{d:"M909.6 854.5L649.9 594.8C690.2 542.7 712 479 712 412c0-80.2-31.3-155.4-87.9-212.1-56.6-56.7-132-87.9-212.1-87.9s-155.5 31.3-212.1 87.9C143.2 256.5 112 331.8 112 412c0 80.1 31.3 155.5 87.9 
212.1C256.5 680.8 331.8 712 412 712c67 0 130.6-21.8 182.7-62l259.7 259.6a8.2 8.2 0 0011.6 0l43.6-43.5a8.2 8.2 0 000-11.6zM570.4 570.4C528 612.7 471.8 636 412 636s-116-23.3-158.4-65.6C211.3 528 188 471.8 188 412s23.3-116.1 65.6-158.4C296 211.3 352.2 188 412 188s116.1 23.2 158.4 65.6S636 352.2 636 412s-23.3 116.1-65.6 158.4z"}}]},name:"search",theme:"outlined"},a=n(55015),l=o.forwardRef(function(e,t){return o.createElement(a.Z,(0,r.Z)({},e,{ref:t,icon:i}))})},55322:function(e,t,n){"use strict";n.d(t,{Z:function(){return l}});var r=n(1119),o=n(2265),i={icon:{tag:"svg",attrs:{viewBox:"64 64 896 896",focusable:"false"},children:[{tag:"path",attrs:{d:"M924.8 625.7l-65.5-56c3.1-19 4.7-38.4 4.7-57.8s-1.6-38.8-4.7-57.8l65.5-56a32.03 32.03 0 009.3-35.2l-.9-2.6a443.74 443.74 0 00-79.7-137.9l-1.8-2.1a32.12 32.12 0 00-35.1-9.5l-81.3 28.9c-30-24.6-63.5-44-99.7-57.6l-15.7-85a32.05 32.05 0 00-25.8-25.7l-2.7-.5c-52.1-9.4-106.9-9.4-159 0l-2.7.5a32.05 32.05 0 00-25.8 25.7l-15.8 85.4a351.86 351.86 0 00-99 57.4l-81.9-29.1a32 32 0 00-35.1 9.5l-1.8 2.1a446.02 446.02 0 00-79.7 137.9l-.9 2.6c-4.5 12.5-.8 26.5 9.3 35.2l66.3 56.6c-3.1 18.8-4.6 38-4.6 57.1 0 19.2 1.5 38.4 4.6 57.1L99 625.5a32.03 32.03 0 00-9.3 35.2l.9 2.6c18.1 50.4 44.9 96.9 79.7 137.9l1.8 2.1a32.12 32.12 0 0035.1 9.5l81.9-29.1c29.8 24.5 63.1 43.9 99 57.4l15.8 85.4a32.05 32.05 0 0025.8 25.7l2.7.5a449.4 449.4 0 00159 0l2.7-.5a32.05 32.05 0 0025.8-25.7l15.7-85a350 350 0 0099.7-57.6l81.3 28.9a32 32 0 0035.1-9.5l1.8-2.1c34.8-41.1 61.6-87.5 79.7-137.9l.9-2.6c4.5-12.3.8-26.3-9.3-35zM788.3 465.9c2.5 15.1 3.8 30.6 3.8 46.1s-1.3 31-3.8 46.1l-6.6 40.1 74.7 63.9a370.03 370.03 0 01-42.6 73.6L721 702.8l-31.4 25.8c-23.9 19.6-50.5 35-79.3 45.8l-38.1 14.3-17.9 97a377.5 377.5 0 01-85 0l-17.9-97.2-37.8-14.5c-28.5-10.8-55-26.2-78.7-45.7l-31.4-25.9-93.4 33.2c-17-22.9-31.2-47.6-42.6-73.6l75.5-64.5-6.5-40c-2.4-14.9-3.7-30.3-3.7-45.5 0-15.3 1.2-30.6 3.7-45.5l6.5-40-75.5-64.5c11.3-26.1 25.6-50.7 42.6-73.6l93.4 33.2 31.4-25.9c23.7-19.5 50.2-34.9 
78.7-45.7l37.9-14.3 17.9-97.2c28.1-3.2 56.8-3.2 85 0l17.9 97 38.1 14.3c28.7 10.8 55.4 26.2 79.3 45.8l31.4 25.8 92.8-32.9c17 22.9 31.2 47.6 42.6 73.6L781.8 426l6.5 39.9zM512 326c-97.2 0-176 78.8-176 176s78.8 176 176 176 176-78.8 176-176-78.8-176-176-176zm79.2 255.2A111.6 111.6 0 01512 614c-29.9 0-58-11.7-79.2-32.8A111.6 111.6 0 01400 502c0-29.9 11.7-58 32.8-79.2C454 401.6 482.1 390 512 390c29.9 0 58 11.6 79.2 32.8A111.6 111.6 0 01624 502c0 29.9-11.7 58-32.8 79.2z"}}]},name:"setting",theme:"outlined"},a=n(55015),l=o.forwardRef(function(e,t){return o.createElement(a.Z,(0,r.Z)({},e,{ref:t,icon:i}))})},41361:function(e,t,n){"use strict";n.d(t,{Z:function(){return l}});var r=n(1119),o=n(2265),i={icon:{tag:"svg",attrs:{viewBox:"64 64 896 896",focusable:"false"},children:[{tag:"path",attrs:{d:"M824.2 699.9a301.55 301.55 0 00-86.4-60.4C783.1 602.8 812 546.8 812 484c0-110.8-92.4-201.7-203.2-200-109.1 1.7-197 90.6-197 200 0 62.8 29 118.8 74.2 155.5a300.95 300.95 0 00-86.4 60.4C345 754.6 314 826.8 312 903.8a8 8 0 008 8.2h56c4.3 0 7.9-3.4 8-7.7 1.9-58 25.4-112.3 66.7-153.5A226.62 226.62 0 01612 684c60.9 0 118.2 23.7 161.3 66.8C814.5 792 838 846.3 840 904.3c.1 4.3 3.7 7.7 8 7.7h56a8 8 0 008-8.2c-2-77-33-149.2-87.8-203.9zM612 612c-34.2 0-66.4-13.3-90.5-37.5a126.86 126.86 0 01-37.5-91.8c.3-32.8 13.4-64.5 36.3-88 24-24.6 56.1-38.3 90.4-38.7 33.9-.3 66.8 12.9 91 36.6 24.8 24.3 38.4 56.8 38.4 91.4 0 34.2-13.3 66.3-37.5 90.5A127.3 127.3 0 01612 612zM361.5 510.4c-.9-8.7-1.4-17.5-1.4-26.4 0-15.9 1.5-31.4 4.3-46.5.7-3.6-1.2-7.3-4.5-8.8-13.6-6.1-26.1-14.5-36.9-25.1a127.54 127.54 0 01-38.7-95.4c.9-32.1 13.8-62.6 36.3-85.6 24.7-25.3 57.9-39.1 93.2-38.7 31.9.3 62.7 12.6 86 34.4 7.9 7.4 14.7 15.6 20.4 24.4 2 3.1 5.9 4.4 9.3 3.2 17.6-6.1 36.2-10.4 55.3-12.4 5.6-.6 8.8-6.6 6.3-11.6-32.5-64.3-98.9-108.7-175.7-109.9-110.9-1.7-203.3 89.2-203.3 199.9 0 62.8 28.9 118.8 74.2 155.5-31.8 14.7-61.1 35-86.5 60.4-54.8 54.7-85.8 126.9-87.8 204a8 8 0 008 8.2h56.1c4.3 0 7.9-3.4 8-7.7 1.9-58 25.4-112.3 
66.7-153.5 29.4-29.4 65.4-49.8 104.7-59.7 3.9-1 6.5-4.7 6-8.7z"}}]},name:"team",theme:"outlined"},a=n(55015),l=o.forwardRef(function(e,t){return o.createElement(a.Z,(0,r.Z)({},e,{ref:t,icon:i}))})},19574:function(e,t,n){"use strict";n.d(t,{Z:function(){return l}});var r=n(1119),o=n(2265),i={icon:{tag:"svg",attrs:{viewBox:"64 64 896 896",focusable:"false"},children:[{tag:"path",attrs:{d:"M848 359.3H627.7L825.8 109c4.1-5.3.4-13-6.3-13H436c-2.8 0-5.5 1.5-6.9 4L170 547.5c-3.1 5.3.7 12 6.9 12h174.4l-89.4 357.6c-1.9 7.8 7.5 13.3 13.3 7.7L853.5 373c5.2-4.9 1.7-13.7-5.5-13.7zM378.2 732.5l60.3-241H281.1l189.6-327.4h224.6L487 427.4h211L378.2 732.5z"}}]},name:"thunderbolt",theme:"outlined"},a=n(55015),l=o.forwardRef(function(e,t){return o.createElement(a.Z,(0,r.Z)({},e,{ref:t,icon:i}))})},3632:function(e,t,n){"use strict";n.d(t,{Z:function(){return l}});var r=n(1119),o=n(2265),i={icon:{tag:"svg",attrs:{viewBox:"64 64 896 896",focusable:"false"},children:[{tag:"path",attrs:{d:"M400 317.7h73.9V656c0 4.4 3.6 8 8 8h60c4.4 0 8-3.6 8-8V317.7H624c6.7 0 10.4-7.7 6.3-12.9L518.3 163a8 8 0 00-12.6 0l-112 141.7c-4.1 5.3-.4 13 6.3 13zM878 626h-60c-4.4 0-8 3.6-8 8v154H214V634c0-4.4-3.6-8-8-8h-60c-4.4 0-8 3.6-8 8v198c0 17.7 14.3 32 32 32h684c17.7 0 32-14.3 32-32V634c0-4.4-3.6-8-8-8z"}}]},name:"upload",theme:"outlined"},a=n(55015),l=o.forwardRef(function(e,t){return o.createElement(a.Z,(0,r.Z)({},e,{ref:t,icon:i}))})},15883:function(e,t,n){"use strict";n.d(t,{Z:function(){return l}});var r=n(1119),o=n(2265),i={icon:{tag:"svg",attrs:{viewBox:"64 64 896 896",focusable:"false"},children:[{tag:"path",attrs:{d:"M858.5 763.6a374 374 0 00-80.6-119.5 375.63 375.63 0 00-119.5-80.6c-.4-.2-.8-.3-1.2-.5C719.5 518 760 444.7 760 362c0-137-111-248-248-248S264 225 264 362c0 82.7 40.5 156 102.8 201.1-.4.2-.8.3-1.2.5-44.8 18.9-85 46-119.5 80.6a375.63 375.63 0 00-80.6 119.5A371.7 371.7 0 00136 901.8a8 8 0 008 8.2h60c4.4 0 7.9-3.5 8-7.8 2-77.2 33-149.5 87.8-204.3 56.7-56.7 132-87.9 212.2-87.9s155.5 31.2 212.2 
87.9C779 752.7 810 825 812 902.2c.1 4.4 3.6 7.8 8 7.8h60a8 8 0 008-8.2c-1-47.8-10.9-94.3-29.5-138.2zM512 534c-45.9 0-89.1-17.9-121.6-50.4S340 407.9 340 362c0-45.9 17.9-89.1 50.4-121.6S466.1 190 512 190s89.1 17.9 121.6 50.4S684 316.1 684 362c0 45.9-17.9 89.1-50.4 121.6S557.9 534 512 534z"}}]},name:"user",theme:"outlined"},a=n(55015),l=o.forwardRef(function(e,t){return o.createElement(a.Z,(0,r.Z)({},e,{ref:t,icon:i}))})},58747:function(e,t,n){"use strict";n.d(t,{Z:function(){return i}});var r=n(5853),o=n(2265);let i=e=>{var t=(0,r._T)(e,[]);return o.createElement("svg",Object.assign({xmlns:"http://www.w3.org/2000/svg",viewBox:"0 0 24 24",fill:"currentColor"},t),o.createElement("path",{d:"M11.9999 13.1714L16.9497 8.22168L18.3639 9.63589L11.9999 15.9999L5.63599 9.63589L7.0502 8.22168L11.9999 13.1714Z"}))}},4537:function(e,t,n){"use strict";n.d(t,{Z:function(){return i}});var r=n(5853),o=n(2265);let i=e=>{var t=(0,r._T)(e,[]);return o.createElement("svg",Object.assign({xmlns:"http://www.w3.org/2000/svg",viewBox:"0 0 24 24",fill:"currentColor"},t),o.createElement("path",{d:"M12 22C6.47715 22 2 17.5228 2 12C2 6.47715 6.47715 2 12 2C17.5228 2 22 6.47715 22 12C22 17.5228 17.5228 22 12 22ZM12 10.5858L9.17157 7.75736L7.75736 9.17157L10.5858 12L7.75736 14.8284L9.17157 16.2426L12 13.4142L14.8284 16.2426L16.2426 14.8284L13.4142 12L16.2426 9.17157L14.8284 7.75736L12 10.5858Z"}))}},69907:function(e,t,n){"use strict";n.d(t,{Z:function(){return em}});var r=n(5853),o=n(2265),i=n(47625),a=n(93765),l=n(61994),c=n(59221),s=n(86757),u=n.n(s),d=n(95645),f=n.n(d),p=n(77571),h=n.n(p),m=n(82559),g=n.n(m),v=n(21652),y=n.n(v),b=n(57165),x=n(81889),w=n(9841),S=n(58772),k=n(34067),E=n(16630),C=n(85355),O=n(82944),j=["layout","type","stroke","connectNulls","isRange","ref"];function P(e){return(P="function"==typeof Symbol&&"symbol"==typeof Symbol.iterator?function(e){return typeof e}:function(e){return e&&"function"==typeof Symbol&&e.constructor===Symbol&&e!==Symbol.prototype?"symbol":typeof 
e})(e)}function M(){return(M=Object.assign?Object.assign.bind():function(e){for(var t=1;t=0||(o[n]=e[n]);return o}(e,t);if(Object.getOwnPropertySymbols){var i=Object.getOwnPropertySymbols(e);for(r=0;r=0)&&Object.prototype.propertyIsEnumerable.call(e,n)&&(o[n]=e[n])}return o}(i,j));return o.createElement(w.m,{clipPath:n?"url(#clipPath-".concat(r,")"):null},o.createElement(b.H,M({},(0,O.L6)(d,!0),{points:e,connectNulls:s,type:l,baseLine:t,layout:a,stroke:"none",className:"recharts-area-area"})),"none"!==c&&o.createElement(b.H,M({},(0,O.L6)(this.props,!1),{className:"recharts-area-curve",layout:a,type:l,connectNulls:s,fill:"none",points:e})),"none"!==c&&u&&o.createElement(b.H,M({},(0,O.L6)(this.props,!1),{className:"recharts-area-curve",layout:a,type:l,connectNulls:s,fill:"none",points:t})))}},{key:"renderAreaWithAnimation",value:function(e,t){var n=this,r=this.props,i=r.points,a=r.baseLine,l=r.isAnimationActive,s=r.animationBegin,u=r.animationDuration,d=r.animationEasing,f=r.animationId,p=this.state,m=p.prevPoints,v=p.prevBaseLine;return o.createElement(c.ZP,{begin:s,duration:u,isActive:l,easing:d,from:{t:0},to:{t:1},key:"area-".concat(f),onAnimationEnd:this.handleAnimationEnd,onAnimationStart:this.handleAnimationStart},function(r){var l=r.t;if(m){var c,s=m.length/i.length,u=i.map(function(e,t){var n=Math.floor(t*s);if(m[n]){var r=m[n],o=(0,E.k4)(r.x,e.x),i=(0,E.k4)(r.y,e.y);return I(I({},e),{},{x:o(l),y:i(l)})}return e});return c=(0,E.hj)(a)&&"number"==typeof a?(0,E.k4)(v,a)(l):h()(a)||g()(a)?(0,E.k4)(v,0)(l):a.map(function(e,t){var n=Math.floor(t*s);if(v[n]){var r=v[n],o=(0,E.k4)(r.x,e.x),i=(0,E.k4)(r.y,e.y);return I(I({},e),{},{x:o(l),y:i(l)})}return e}),n.renderAreaStatically(u,c,e,t)}return 
o.createElement(w.m,null,o.createElement("defs",null,o.createElement("clipPath",{id:"animationClipPath-".concat(t)},n.renderClipRect(l))),o.createElement(w.m,{clipPath:"url(#animationClipPath-".concat(t,")")},n.renderAreaStatically(i,a,e,t)))})}},{key:"renderArea",value:function(e,t){var n=this.props,r=n.points,o=n.baseLine,i=n.isAnimationActive,a=this.state,l=a.prevPoints,c=a.prevBaseLine,s=a.totalLength;return i&&r&&r.length&&(!l&&s>0||!y()(l,r)||!y()(c,o))?this.renderAreaWithAnimation(e,t):this.renderAreaStatically(r,o,e,t)}},{key:"render",value:function(){var e,t=this.props,n=t.hide,r=t.dot,i=t.points,a=t.className,c=t.top,s=t.left,u=t.xAxis,d=t.yAxis,f=t.width,p=t.height,m=t.isAnimationActive,g=t.id;if(n||!i||!i.length)return null;var v=this.state.isAnimationFinished,y=1===i.length,b=(0,l.Z)("recharts-area",a),x=u&&u.allowDataOverflow,k=d&&d.allowDataOverflow,E=x||k,C=h()(g)?this.id:g,j=null!==(e=(0,O.L6)(r,!1))&&void 0!==e?e:{r:3,strokeWidth:2},P=j.r,M=j.strokeWidth,N=((0,O.$k)(r)?r:{}).clipDot,I=void 0===N||N,R=2*(void 0===P?3:P)+(void 0===M?2:M);return o.createElement(w.m,{className:b},x||k?o.createElement("defs",null,o.createElement("clipPath",{id:"clipPath-".concat(C)},o.createElement("rect",{x:x?s:s-f/2,y:k?c:c-p/2,width:x?f:2*f,height:k?p:2*p})),!I&&o.createElement("clipPath",{id:"clipPath-dots-".concat(C)},o.createElement("rect",{x:s-R/2,y:c-R/2,width:f+R,height:p+R}))):null,y?null:this.renderArea(E,C),(r||y)&&this.renderDots(E,I,C),(!m||v)&&S.e.renderCallByParent(this.props,i))}}],r=[{key:"getDerivedStateFromProps",value:function(e,t){return 
e.animationId!==t.prevAnimationId?{prevAnimationId:e.animationId,curPoints:e.points,curBaseLine:e.baseLine,prevPoints:t.curPoints,prevBaseLine:t.curBaseLine}:e.points!==t.curPoints||e.baseLine!==t.curBaseLine?{curPoints:e.points,curBaseLine:e.baseLine}:null}}],n&&R(a.prototype,n),r&&R(a,r),Object.defineProperty(a,"prototype",{writable:!1}),a}(o.PureComponent);D(L,"displayName","Area"),D(L,"defaultProps",{stroke:"#3182bd",fill:"#3182bd",fillOpacity:.6,xAxisId:0,yAxisId:0,legendType:"line",connectNulls:!1,points:[],dot:!1,activeDot:!0,hide:!1,isAnimationActive:!k.x.isSsr,animationBegin:0,animationDuration:1500,animationEasing:"ease"}),D(L,"getBaseValue",function(e,t,n,r){var o=e.layout,i=e.baseValue,a=t.props.baseValue,l=null!=a?a:i;if((0,E.hj)(l)&&"number"==typeof l)return l;var c="horizontal"===o?r:n,s=c.scale.domain();if("number"===c.type){var u=Math.max(s[0],s[1]),d=Math.min(s[0],s[1]);return"dataMin"===l?d:"dataMax"===l?u:u<0?u:Math.max(Math.min(s[0],s[1]),0)}return"dataMin"===l?s[0]:"dataMax"===l?s[1]:s[0]}),D(L,"getComposedData",function(e){var t,n=e.props,r=e.item,o=e.xAxis,i=e.yAxis,a=e.xAxisTicks,l=e.yAxisTicks,c=e.bandSize,s=e.dataKey,u=e.stackedData,d=e.dataStartIndex,f=e.displayedData,p=e.offset,h=n.layout,m=u&&u.length,g=L.getBaseValue(n,r,o,i),v="horizontal"===h,y=!1,b=f.map(function(e,t){m?n=u[d+t]:Array.isArray(n=(0,C.F$)(e,s))?y=!0:n=[g,n];var n,r=null==n[1]||m&&null==(0,C.F$)(e,s);return v?{x:(0,C.Hv)({axis:o,ticks:a,bandSize:c,entry:e,index:t}),y:r?null:i.scale(n[1]),value:n,payload:e}:{x:r?null:o.scale(n[1]),y:(0,C.Hv)({axis:i,ticks:l,bandSize:c,entry:e,index:t}),value:n,payload:e}});return t=m||y?b.map(function(e){var t=Array.isArray(e.value)?e.value[0]:null;return v?{x:e.x,y:null!=t&&null!=e.y?i.scale(t):null}:{x:null!=t?o.scale(t):null,y:e.y}}):v?i.scale(g):o.scale(g),I({points:b,baseLine:t,layout:h,isRange:y},p)}),D(L,"renderDotItem",function(e,t){return 
o.isValidElement(e)?o.cloneElement(e,t):u()(e)?e(t):o.createElement(x.o,M({},t,{className:"recharts-area-dot"}))});var z=n(97059),B=n(62994),F=n(25311),H=(0,a.z)({chartName:"AreaChart",GraphicalChild:L,axisComponents:[{axisType:"xAxis",AxisComp:z.K},{axisType:"yAxis",AxisComp:B.B}],formatAxisMap:F.t9}),q=n(56940),W=n(8147),K=n(22190),U=n(13137),V=["type","layout","connectNulls","ref"];function G(e){return(G="function"==typeof Symbol&&"symbol"==typeof Symbol.iterator?function(e){return typeof e}:function(e){return e&&"function"==typeof Symbol&&e.constructor===Symbol&&e!==Symbol.prototype?"symbol":typeof e})(e)}function X(){return(X=Object.assign?Object.assign.bind():function(e){for(var t=1;te.length)&&(t=e.length);for(var n=0,r=Array(t);ni){c=[].concat(Q(r.slice(0,s)),[i-u]);break}var d=c.length%2==0?[0,l]:[l];return[].concat(Q(a.repeat(r,Math.floor(t/o))),Q(c),d).map(function(e){return"".concat(e,"px")}).join(", ")}),eo(en(e),"id",(0,E.EL)("recharts-line-")),eo(en(e),"pathRef",function(t){e.mainCurve=t}),eo(en(e),"handleAnimationEnd",function(){e.setState({isAnimationFinished:!0}),e.props.onAnimationEnd&&e.props.onAnimationEnd()}),eo(en(e),"handleAnimationStart",function(){e.setState({isAnimationFinished:!1}),e.props.onAnimationStart&&e.props.onAnimationStart()}),e}return n=[{key:"componentDidMount",value:function(){if(this.props.isAnimationActive){var e=this.getTotalLength();this.setState({totalLength:e})}}},{key:"componentDidUpdate",value:function(){if(this.props.isAnimationActive){var e=this.getTotalLength();e!==this.state.totalLength&&this.setState({totalLength:e})}}},{key:"getTotalLength",value:function(){var e=this.mainCurve;try{return e&&e.getTotalLength&&e.getTotalLength()||0}catch(e){return 0}}},{key:"renderErrorBar",value:function(e,t){if(this.props.isAnimationActive&&!this.state.isAnimationFinished)return null;var n=this.props,r=n.points,i=n.xAxis,a=n.yAxis,l=n.layout,c=n.children,s=(0,O.NN)(c,U.W);if(!s)return null;var 
u=function(e,t){return{x:e.x,y:e.y,value:e.value,errorVal:(0,C.F$)(e.payload,t)}};return o.createElement(w.m,{clipPath:e?"url(#clipPath-".concat(t,")"):null},s.map(function(e){return o.cloneElement(e,{key:"bar-".concat(e.props.dataKey),data:r,xAxis:i,yAxis:a,layout:l,dataPointFormatter:u})}))}},{key:"renderDots",value:function(e,t,n){if(this.props.isAnimationActive&&!this.state.isAnimationFinished)return null;var r=this.props,i=r.dot,l=r.points,c=r.dataKey,s=(0,O.L6)(this.props,!1),u=(0,O.L6)(i,!0),d=l.map(function(e,t){var n=Y(Y(Y({key:"dot-".concat(t),r:3},s),u),{},{value:e.value,dataKey:c,cx:e.x,cy:e.y,index:t,payload:e.payload});return a.renderDotItem(i,n)}),f={clipPath:e?"url(#clipPath-".concat(t?"":"dots-").concat(n,")"):null};return o.createElement(w.m,X({className:"recharts-line-dots",key:"dots"},f),d)}},{key:"renderCurveStatically",value:function(e,t,n,r){var i=this.props,a=i.type,l=i.layout,c=i.connectNulls,s=(i.ref,function(e,t){if(null==e)return{};var n,r,o=function(e,t){if(null==e)return{};var n,r,o={},i=Object.keys(e);for(r=0;r=0||(o[n]=e[n]);return o}(e,t);if(Object.getOwnPropertySymbols){var i=Object.getOwnPropertySymbols(e);for(r=0;r=0)&&Object.prototype.propertyIsEnumerable.call(e,n)&&(o[n]=e[n])}return o}(i,V)),u=Y(Y(Y({},(0,O.L6)(s,!0)),{},{fill:"none",className:"recharts-line-curve",clipPath:t?"url(#clipPath-".concat(n,")"):null,points:e},r),{},{type:a,layout:l,connectNulls:c});return o.createElement(b.H,X({},u,{pathRef:this.pathRef}))}},{key:"renderCurveWithAnimation",value:function(e,t){var n=this,r=this.props,i=r.points,a=r.strokeDasharray,l=r.isAnimationActive,s=r.animationBegin,u=r.animationDuration,d=r.animationEasing,f=r.animationId,p=r.animateNewValues,h=r.width,m=r.height,g=this.state,v=g.prevPoints,y=g.totalLength;return o.createElement(c.ZP,{begin:s,duration:u,isActive:l,easing:d,from:{t:0},to:{t:1},key:"line-".concat(f),onAnimationEnd:this.handleAnimationEnd,onAnimationStart:this.handleAnimationStart},function(r){var 
o,l=r.t;if(v){var c=v.length/i.length,s=i.map(function(e,t){var n=Math.floor(t*c);if(v[n]){var r=v[n],o=(0,E.k4)(r.x,e.x),i=(0,E.k4)(r.y,e.y);return Y(Y({},e),{},{x:o(l),y:i(l)})}if(p){var a=(0,E.k4)(2*h,e.x),s=(0,E.k4)(m/2,e.y);return Y(Y({},e),{},{x:a(l),y:s(l)})}return Y(Y({},e),{},{x:e.x,y:e.y})});return n.renderCurveStatically(s,e,t)}var u=(0,E.k4)(0,y)(l);if(a){var d="".concat(a).split(/[,\s]+/gim).map(function(e){return parseFloat(e)});o=n.getStrokeDasharray(u,y,d)}else o=n.generateSimpleStrokeDasharray(y,u);return n.renderCurveStatically(i,e,t,{strokeDasharray:o})})}},{key:"renderCurve",value:function(e,t){var n=this.props,r=n.points,o=n.isAnimationActive,i=this.state,a=i.prevPoints,l=i.totalLength;return o&&r&&r.length&&(!a&&l>0||!y()(a,r))?this.renderCurveWithAnimation(e,t):this.renderCurveStatically(r,e,t)}},{key:"render",value:function(){var e,t=this.props,n=t.hide,r=t.dot,i=t.points,a=t.className,c=t.xAxis,s=t.yAxis,u=t.top,d=t.left,f=t.width,p=t.height,m=t.isAnimationActive,g=t.id;if(n||!i||!i.length)return null;var v=this.state.isAnimationFinished,y=1===i.length,b=(0,l.Z)("recharts-line",a),x=c&&c.allowDataOverflow,k=s&&s.allowDataOverflow,E=x||k,C=h()(g)?this.id:g,j=null!==(e=(0,O.L6)(r,!1))&&void 0!==e?e:{r:3,strokeWidth:2},P=j.r,M=j.strokeWidth,N=((0,O.$k)(r)?r:{}).clipDot,I=void 0===N||N,R=2*(void 0===P?3:P)+(void 0===M?2:M);return o.createElement(w.m,{className:b},x||k?o.createElement("defs",null,o.createElement("clipPath",{id:"clipPath-".concat(C)},o.createElement("rect",{x:x?d:d-f/2,y:k?u:u-p/2,width:x?f:2*f,height:k?p:2*p})),!I&&o.createElement("clipPath",{id:"clipPath-dots-".concat(C)},o.createElement("rect",{x:d-R/2,y:u-R/2,width:f+R,height:p+R}))):null,!y&&this.renderCurve(E,C),this.renderErrorBar(E,C),(y||r)&&this.renderDots(E,I,C),(!m||v)&&S.e.renderCallByParent(this.props,i))}}],r=[{key:"getDerivedStateFromProps",value:function(e,t){return 
e.animationId!==t.prevAnimationId?{prevAnimationId:e.animationId,curPoints:e.points,prevPoints:t.curPoints}:e.points!==t.curPoints?{curPoints:e.points}:null}},{key:"repeat",value:function(e,t){for(var n=e.length%2!=0?[].concat(Q(e),[0]):e,r=[],o=0;o{let{data:n=[],categories:a=[],index:l,stack:c=!1,colors:s=ef.s,valueFormatter:u=eh.Cj,startEndOnly:d=!1,showXAxis:f=!0,showYAxis:p=!0,yAxisWidth:h=56,intervalType:m="equidistantPreserveStart",showAnimation:g=!1,animationDuration:v=900,showTooltip:y=!0,showLegend:b=!0,showGridLines:w=!0,showGradient:S=!0,autoMinValue:k=!1,curveType:E="linear",minValue:C,maxValue:O,connectNulls:j=!1,allowDecimals:P=!0,noDataText:M,className:N,onValueChange:I,enableLegendSlider:R=!1,customTooltip:T,rotateLabelX:A,tickGap:_=5}=e,D=(0,r._T)(e,["data","categories","index","stack","colors","valueFormatter","startEndOnly","showXAxis","showYAxis","yAxisWidth","intervalType","showAnimation","animationDuration","showTooltip","showLegend","showGridLines","showGradient","autoMinValue","curveType","minValue","maxValue","connectNulls","allowDecimals","noDataText","className","onValueChange","enableLegendSlider","customTooltip","rotateLabelX","tickGap"]),Z=(f||p)&&(!d||p)?20:0,[F,U]=(0,o.useState)(60),[V,G]=(0,o.useState)(void 0),[X,$]=(0,o.useState)(void 0),Y=(0,eu.me)(a,s),Q=(0,eu.i4)(k,C,O),J=!!I;function ee(e){J&&(e===X&&!V||(0,eu.FB)(n,e)&&V&&V.dataKey===e?($(void 0),null==I||I(null)):($(e),null==I||I({eventType:"category",categoryClicked:e})),G(void 0))}return o.createElement("div",Object.assign({ref:t,className:(0,ep.q)("w-full h-80",N)},D),o.createElement(i.h,{className:"h-full w-full"},(null==n?void 0:n.length)?o.createElement(H,{data:n,onClick:J&&(X||V)?()=>{G(void 0),$(void 0),null==I||I(null)}:void 0},w?o.createElement(q.q,{className:(0,ep.q)("stroke-1","stroke-tremor-border","dark:stroke-dark-tremor-border"),horizontal:!0,vertical:!1}):null,o.createElement(z.K,{padding:{left:Z,right:Z},hide:!f,dataKey:l,tick:{transform:"translate(0, 
6)"},ticks:d?[n[0][l],n[n.length-1][l]]:void 0,fill:"",stroke:"",className:(0,ep.q)("text-tremor-label","fill-tremor-content","dark:fill-dark-tremor-content"),interval:d?"preserveStartEnd":m,tickLine:!1,axisLine:!1,minTickGap:_,angle:null==A?void 0:A.angle,dy:null==A?void 0:A.verticalShift,height:null==A?void 0:A.xAxisHeight}),o.createElement(B.B,{width:h,hide:!p,axisLine:!1,tickLine:!1,type:"number",domain:Q,tick:{transform:"translate(-3, 0)"},fill:"",stroke:"",className:(0,ep.q)("text-tremor-label","fill-tremor-content","dark:fill-dark-tremor-content"),tickFormatter:u,allowDecimals:P}),o.createElement(W.u,{wrapperStyle:{outline:"none"},isAnimationActive:!1,cursor:{stroke:"#d1d5db",strokeWidth:1},content:y?e=>{let{active:t,payload:n,label:r}=e;return T?o.createElement(T,{payload:null==n?void 0:n.map(e=>{var t;return Object.assign(Object.assign({},e),{color:null!==(t=Y.get(e.dataKey))&&void 0!==t?t:ed.fr.Gray})}),active:t,label:r}):o.createElement(ec.ZP,{active:t,payload:n,label:r,valueFormatter:u,categoryColors:Y})}:o.createElement(o.Fragment,null),position:{y:0}}),b?o.createElement(K.D,{verticalAlign:"top",height:F,content:e=>{let{payload:t}=e;return(0,el.Z)({payload:t},Y,U,X,J?e=>ee(e):void 0,R)}}):null,a.map(e=>{var t,n;return o.createElement("defs",{key:e},S?o.createElement("linearGradient",{className:(0,eh.bM)(null!==(t=Y.get(e))&&void 0!==t?t:ed.fr.Gray,ef.K.text).textColor,id:Y.get(e),x1:"0",y1:"0",x2:"0",y2:"1"},o.createElement("stop",{offset:"5%",stopColor:"currentColor",stopOpacity:V||X&&X!==e?.15:.4}),o.createElement("stop",{offset:"95%",stopColor:"currentColor",stopOpacity:0})):o.createElement("linearGradient",{className:(0,eh.bM)(null!==(n=Y.get(e))&&void 0!==n?n:ed.fr.Gray,ef.K.text).textColor,id:Y.get(e),x1:"0",y1:"0",x2:"0",y2:"1"},o.createElement("stop",{stopColor:"currentColor",stopOpacity:V||X&&X!==e?.1:.3})))}),a.map(e=>{var t;return o.createElement(L,{className:(0,eh.bM)(null!==(t=Y.get(e))&&void 
0!==t?t:ed.fr.Gray,ef.K.text).strokeColor,strokeOpacity:V||X&&X!==e?.3:1,activeDot:e=>{var t;let{cx:r,cy:i,stroke:a,strokeLinecap:l,strokeLinejoin:c,strokeWidth:s,dataKey:u}=e;return o.createElement(x.o,{className:(0,ep.q)("stroke-tremor-background dark:stroke-dark-tremor-background",I?"cursor-pointer":"",(0,eh.bM)(null!==(t=Y.get(u))&&void 0!==t?t:ed.fr.Gray,ef.K.text).fillColor),cx:r,cy:i,r:5,fill:"",stroke:a,strokeLinecap:l,strokeLinejoin:c,strokeWidth:s,onClick:(t,r)=>{r.stopPropagation(),J&&(e.index===(null==V?void 0:V.index)&&e.dataKey===(null==V?void 0:V.dataKey)||(0,eu.FB)(n,e.dataKey)&&X&&X===e.dataKey?($(void 0),G(void 0),null==I||I(null)):($(e.dataKey),G({index:e.index,dataKey:e.dataKey}),null==I||I(Object.assign({eventType:"dot",categoryClicked:e.dataKey},e.payload))))}})},dot:t=>{var r;let{stroke:i,strokeLinecap:a,strokeLinejoin:l,strokeWidth:c,cx:s,cy:u,dataKey:d,index:f}=t;return(0,eu.FB)(n,e)&&!(V||X&&X!==e)||(null==V?void 0:V.index)===f&&(null==V?void 0:V.dataKey)===e?o.createElement(x.o,{key:f,cx:s,cy:u,r:5,stroke:i,fill:"",strokeLinecap:a,strokeLinejoin:l,strokeWidth:c,className:(0,ep.q)("stroke-tremor-background dark:stroke-dark-tremor-background",I?"cursor-pointer":"",(0,eh.bM)(null!==(r=Y.get(d))&&void 0!==r?r:ed.fr.Gray,ef.K.text).fillColor)}):o.createElement(o.Fragment,{key:f})},key:e,name:e,type:E,dataKey:e,stroke:"",fill:"url(#".concat(Y.get(e),")"),strokeWidth:2,strokeLinejoin:"round",strokeLinecap:"round",isAnimationActive:g,animationDuration:v,stackId:c?"a":void 0,connectNulls:j})}),I?a.map(e=>o.createElement(ea,{className:(0,ep.q)("cursor-pointer"),strokeOpacity:0,key:e,name:e,type:E,dataKey:e,stroke:"transparent",fill:"transparent",legendType:"none",tooltipType:"none",strokeWidth:12,connectNulls:j,onClick:(e,t)=>{t.stopPropagation();let{name:n}=e;ee(n)}})):null):o.createElement(es.Z,{noDataText:M})))});em.displayName="AreaChart"},40278:function(e,t,n){"use strict";n.d(t,{Z:function(){return k}});var 
r=n(5853),o=n(7084),i=n(26898),a=n(65954),l=n(1153),c=n(2265),s=n(47625),u=n(93765),d=n(31699),f=n(97059),p=n(62994),h=n(25311),m=(0,u.z)({chartName:"BarChart",GraphicalChild:d.$,defaultTooltipEventType:"axis",validateTooltipEventTypes:["axis","item"],axisComponents:[{axisType:"xAxis",AxisComp:f.K},{axisType:"yAxis",AxisComp:p.B}],formatAxisMap:h.t9}),g=n(56940),v=n(8147),y=n(22190),b=n(65278),x=n(98593),w=n(69448),S=n(32644);let k=c.forwardRef((e,t)=>{let{data:n=[],categories:u=[],index:h,colors:k=i.s,valueFormatter:E=l.Cj,layout:C="horizontal",stack:O=!1,relative:j=!1,startEndOnly:P=!1,animationDuration:M=900,showAnimation:N=!1,showXAxis:I=!0,showYAxis:R=!0,yAxisWidth:T=56,intervalType:A="equidistantPreserveStart",showTooltip:_=!0,showLegend:D=!0,showGridLines:Z=!0,autoMinValue:L=!1,minValue:z,maxValue:B,allowDecimals:F=!0,noDataText:H,onValueChange:q,enableLegendSlider:W=!1,customTooltip:K,rotateLabelX:U,tickGap:V=5,className:G}=e,X=(0,r._T)(e,["data","categories","index","colors","valueFormatter","layout","stack","relative","startEndOnly","animationDuration","showAnimation","showXAxis","showYAxis","yAxisWidth","intervalType","showTooltip","showLegend","showGridLines","autoMinValue","minValue","maxValue","allowDecimals","noDataText","onValueChange","enableLegendSlider","customTooltip","rotateLabelX","tickGap","className"]),$=I||R?20:0,[Y,Q]=(0,c.useState)(60),J=(0,S.me)(u,k),[ee,et]=c.useState(void 0),[en,er]=(0,c.useState)(void 0),eo=!!q;function ei(e,t,n){var r,o,i,a;n.stopPropagation(),q&&((0,S.vZ)(ee,Object.assign(Object.assign({},e.payload),{value:e.value}))?(er(void 0),et(void 0),null==q||q(null)):(er(null===(o=null===(r=e.tooltipPayload)||void 0===r?void 0:r[0])||void 0===o?void 0:o.dataKey),et(Object.assign(Object.assign({},e.payload),{value:e.value})),null==q||q(Object.assign({eventType:"bar",categoryClicked:null===(a=null===(i=e.tooltipPayload)||void 0===i?void 0:i[0])||void 0===a?void 0:a.dataKey},e.payload))))}let ea=(0,S.i4)(L,z,B);return 
c.createElement("div",Object.assign({ref:t,className:(0,a.q)("w-full h-80",G)},X),c.createElement(s.h,{className:"h-full w-full"},(null==n?void 0:n.length)?c.createElement(m,{data:n,stackOffset:O?"sign":j?"expand":"none",layout:"vertical"===C?"vertical":"horizontal",onClick:eo&&(en||ee)?()=>{et(void 0),er(void 0),null==q||q(null)}:void 0},Z?c.createElement(g.q,{className:(0,a.q)("stroke-1","stroke-tremor-border","dark:stroke-dark-tremor-border"),horizontal:"vertical"!==C,vertical:"vertical"===C}):null,"vertical"!==C?c.createElement(f.K,{padding:{left:$,right:$},hide:!I,dataKey:h,interval:P?"preserveStartEnd":A,tick:{transform:"translate(0, 6)"},ticks:P?[n[0][h],n[n.length-1][h]]:void 0,fill:"",stroke:"",className:(0,a.q)("mt-4 text-tremor-label","fill-tremor-content","dark:fill-dark-tremor-content"),tickLine:!1,axisLine:!1,angle:null==U?void 0:U.angle,dy:null==U?void 0:U.verticalShift,height:null==U?void 0:U.xAxisHeight,minTickGap:V}):c.createElement(f.K,{hide:!I,type:"number",tick:{transform:"translate(-3, 0)"},domain:ea,fill:"",stroke:"",className:(0,a.q)("text-tremor-label","fill-tremor-content","dark:fill-dark-tremor-content"),tickLine:!1,axisLine:!1,tickFormatter:E,minTickGap:V,allowDecimals:F,angle:null==U?void 0:U.angle,dy:null==U?void 0:U.verticalShift,height:null==U?void 0:U.xAxisHeight}),"vertical"!==C?c.createElement(p.B,{width:T,hide:!R,axisLine:!1,tickLine:!1,type:"number",domain:ea,tick:{transform:"translate(-3, 0)"},fill:"",stroke:"",className:(0,a.q)("text-tremor-label","fill-tremor-content","dark:fill-dark-tremor-content"),tickFormatter:j?e=>"".concat((100*e).toString()," %"):E,allowDecimals:F}):c.createElement(p.B,{width:T,hide:!R,dataKey:h,axisLine:!1,tickLine:!1,ticks:P?[n[0][h],n[n.length-1][h]]:void 0,type:"category",interval:"preserveStartEnd",tick:{transform:"translate(0, 
6)"},fill:"",stroke:"",className:(0,a.q)("text-tremor-label","fill-tremor-content","dark:fill-dark-tremor-content")}),c.createElement(v.u,{wrapperStyle:{outline:"none"},isAnimationActive:!1,cursor:{fill:"#d1d5db",opacity:"0.15"},content:_?e=>{let{active:t,payload:n,label:r}=e;return K?c.createElement(K,{payload:null==n?void 0:n.map(e=>{var t;return Object.assign(Object.assign({},e),{color:null!==(t=J.get(e.dataKey))&&void 0!==t?t:o.fr.Gray})}),active:t,label:r}):c.createElement(x.ZP,{active:t,payload:n,label:r,valueFormatter:E,categoryColors:J})}:c.createElement(c.Fragment,null),position:{y:0}}),D?c.createElement(y.D,{verticalAlign:"top",height:Y,content:e=>{let{payload:t}=e;return(0,b.Z)({payload:t},J,Q,en,eo?e=>{eo&&(e!==en||ee?(er(e),null==q||q({eventType:"category",categoryClicked:e})):(er(void 0),null==q||q(null)),et(void 0))}:void 0,W)}}):null,u.map(e=>{var t;return c.createElement(d.$,{className:(0,a.q)((0,l.bM)(null!==(t=J.get(e))&&void 0!==t?t:o.fr.Gray,i.K.background).fillColor,q?"cursor-pointer":""),key:e,name:e,type:"linear",stackId:O||j?"a":void 0,dataKey:e,fill:"",isAnimationActive:N,animationDuration:M,shape:e=>((e,t,n,r)=>{let{fillOpacity:o,name:i,payload:a,value:l}=e,{x:s,width:u,y:d,height:f}=e;return"horizontal"===r&&f<0?(d+=f,f=Math.abs(f)):"vertical"===r&&u<0&&(s+=u,u=Math.abs(u)),c.createElement("rect",{x:s,y:d,width:u,height:f,opacity:t||n&&n!==i?(0,S.vZ)(t,Object.assign(Object.assign({},a),{value:l}))?o:.3:o})})(e,ee,en,C),onClick:ei})})):c.createElement(w.Z,{noDataText:H})))});k.displayName="BarChart"},14042:function(e,t,n){"use strict";n.d(t,{Z:function(){return ez}});var r=n(5853),o=n(7084),i=n(26898),a=n(65954),l=n(1153),c=n(2265),s=n(60474),u=n(47625),d=n(93765),f=n(86757),p=n.n(f),h=n(9841),m=n(81889),g=n(61994),v=n(82944),y=["points","className","baseLinePoints","connectNulls"];function b(){return(b=Object.assign?Object.assign.bind():function(e){for(var t=1;te.length)&&(t=e.length);for(var n=0,r=Array(t);n0&&void 
0!==arguments[0]?arguments[0]:[],t=[[]];return e.forEach(function(e){S(e)?t[t.length-1].push(e):t[t.length-1].length>0&&t.push([])}),S(e[0])&&t[t.length-1].push(e[0]),t[t.length-1].length<=0&&(t=t.slice(0,-1)),t},E=function(e,t){var n=k(e);t&&(n=[n.reduce(function(e,t){return[].concat(x(e),x(t))},[])]);var r=n.map(function(e){return e.reduce(function(e,t,n){return"".concat(e).concat(0===n?"M":"L").concat(t.x,",").concat(t.y)},"")}).join("");return 1===n.length?"".concat(r,"Z"):r},C=function(e,t,n){var r=E(e,n);return"".concat("Z"===r.slice(-1)?r.slice(0,-1):r,"L").concat(E(t.reverse(),n).slice(1))},O=function(e){var t=e.points,n=e.className,r=e.baseLinePoints,o=e.connectNulls,i=function(e,t){if(null==e)return{};var n,r,o=function(e,t){if(null==e)return{};var n,r,o={},i=Object.keys(e);for(r=0;r=0||(o[n]=e[n]);return o}(e,t);if(Object.getOwnPropertySymbols){var i=Object.getOwnPropertySymbols(e);for(r=0;r=0)&&Object.prototype.propertyIsEnumerable.call(e,n)&&(o[n]=e[n])}return o}(e,y);if(!t||!t.length)return null;var a=(0,g.Z)("recharts-polygon",n);if(r&&r.length){var l=i.stroke&&"none"!==i.stroke,s=C(t,r,o);return c.createElement("g",{className:a},c.createElement("path",b({},(0,v.L6)(i,!0),{fill:"Z"===s.slice(-1)?i.fill:"none",stroke:"none",d:s})),l?c.createElement("path",b({},(0,v.L6)(i,!0),{fill:"none",d:E(t,o)})):null,l?c.createElement("path",b({},(0,v.L6)(i,!0),{fill:"none",d:E(r,o)})):null)}var u=E(t,o);return c.createElement("path",b({},(0,v.L6)(i,!0),{fill:"Z"===u.slice(-1)?i.fill:"none",className:a,d:u}))},j=n(58811),P=n(41637),M=n(39206);function N(e){return(N="function"==typeof Symbol&&"symbol"==typeof Symbol.iterator?function(e){return typeof e}:function(e){return e&&"function"==typeof Symbol&&e.constructor===Symbol&&e!==Symbol.prototype?"symbol":typeof e})(e)}function I(){return(I=Object.assign?Object.assign.bind():function(e){for(var 
t=1;t1e-5?"outer"===t?"start":"end":n<-.00001?"outer"===t?"end":"start":"middle"}},{key:"renderAxisLine",value:function(){var e=this.props,t=e.cx,n=e.cy,r=e.radius,o=e.axisLine,i=e.axisLineType,a=T(T({},(0,v.L6)(this.props,!1)),{},{fill:"none"},(0,v.L6)(o,!1));if("circle"===i)return c.createElement(m.o,I({className:"recharts-polar-angle-axis-line"},a,{cx:t,cy:n,r:r}));var l=this.props.ticks.map(function(e){return(0,M.op)(t,n,r,e.coordinate)});return c.createElement(O,I({className:"recharts-polar-angle-axis-line"},a,{points:l}))}},{key:"renderTicks",value:function(){var e=this,t=this.props,n=t.ticks,r=t.tick,o=t.tickLine,a=t.tickFormatter,l=t.stroke,s=(0,v.L6)(this.props,!1),u=(0,v.L6)(r,!1),d=T(T({},s),{},{fill:"none"},(0,v.L6)(o,!1)),f=n.map(function(t,n){var f=e.getTickLineCoord(t),p=T(T(T({textAnchor:e.getTickTextAnchor(t)},s),{},{stroke:"none",fill:l},u),{},{index:n,payload:t,x:f.x2,y:f.y2});return c.createElement(h.m,I({className:"recharts-polar-angle-axis-tick",key:"tick-".concat(t.coordinate)},(0,P.bw)(e.props,t,n)),o&&c.createElement("line",I({className:"recharts-polar-angle-axis-tick-line"},d,f)),r&&i.renderTickItem(r,p,a?a(t.value,n):t.value))});return c.createElement(h.m,{className:"recharts-polar-angle-axis-ticks"},f)}},{key:"render",value:function(){var e=this.props,t=e.ticks,n=e.radius,r=e.axisLine;return!(n<=0)&&t&&t.length?c.createElement(h.m,{className:"recharts-polar-angle-axis"},r&&this.renderAxisLine(),this.renderTicks()):null}}],r=[{key:"renderTickItem",value:function(e,t,n){return 
c.isValidElement(e)?c.cloneElement(e,t):p()(e)?e(t):c.createElement(j.x,I({},t,{className:"recharts-polar-angle-axis-tick-value"}),n)}}],n&&A(i.prototype,n),r&&A(i,r),Object.defineProperty(i,"prototype",{writable:!1}),i}(c.PureComponent);Z(B,"displayName","PolarAngleAxis"),Z(B,"axisType","angleAxis"),Z(B,"defaultProps",{type:"category",angleAxisId:0,scale:"auto",cx:0,cy:0,orientation:"outer",axisLine:!0,tickLine:!0,tickSize:8,tick:!0,hide:!1,allowDuplicatedCategory:!0});var F=n(35802),H=n.n(F),q=n(37891),W=n.n(q),K=n(26680),U=["cx","cy","angle","ticks","axisLine"],V=["ticks","tick","angle","tickFormatter","stroke"];function G(e){return(G="function"==typeof Symbol&&"symbol"==typeof Symbol.iterator?function(e){return typeof e}:function(e){return e&&"function"==typeof Symbol&&e.constructor===Symbol&&e!==Symbol.prototype?"symbol":typeof e})(e)}function X(){return(X=Object.assign?Object.assign.bind():function(e){for(var t=1;t=0||(o[n]=e[n]);return o}(e,t);if(Object.getOwnPropertySymbols){var i=Object.getOwnPropertySymbols(e);for(r=0;r=0)&&Object.prototype.propertyIsEnumerable.call(e,n)&&(o[n]=e[n])}return o}function J(e,t){for(var n=0;n0?el()(e,"paddingAngle",0):0;if(n){var l=(0,eg.k4)(n.endAngle-n.startAngle,e.endAngle-e.startAngle),c=ek(ek({},e),{},{startAngle:i+a,endAngle:i+l(r)+a});o.push(c),i=c.endAngle}else{var s=e.endAngle,d=e.startAngle,f=(0,eg.k4)(0,s-d)(r),p=ek(ek({},e),{},{startAngle:i+a,endAngle:i+f+a});o.push(p),i=p.endAngle}}),c.createElement(h.m,null,e.renderSectorsStatically(o))})}},{key:"attachKeyboardHandlers",value:function(e){var t=this;e.onkeydown=function(e){if(!e.altKey)switch(e.key){case"ArrowLeft":var n=++t.state.sectorToFocus%t.sectorRefs.length;t.sectorRefs[n].focus(),t.setState({sectorToFocus:n});break;case"ArrowRight":var 
r=--t.state.sectorToFocus<0?t.sectorRefs.length-1:t.state.sectorToFocus%t.sectorRefs.length;t.sectorRefs[r].focus(),t.setState({sectorToFocus:r});break;case"Escape":t.sectorRefs[t.state.sectorToFocus].blur(),t.setState({sectorToFocus:0})}}}},{key:"renderSectors",value:function(){var e=this.props,t=e.sectors,n=e.isAnimationActive,r=this.state.prevSectors;return n&&t&&t.length&&(!r||!es()(r,t))?this.renderSectorsWithAnimation():this.renderSectorsStatically(t)}},{key:"componentDidMount",value:function(){this.pieRef&&this.attachKeyboardHandlers(this.pieRef)}},{key:"render",value:function(){var e=this,t=this.props,n=t.hide,r=t.sectors,o=t.className,i=t.label,a=t.cx,l=t.cy,s=t.innerRadius,u=t.outerRadius,d=t.isAnimationActive,f=this.state.isAnimationFinished;if(n||!r||!r.length||!(0,eg.hj)(a)||!(0,eg.hj)(l)||!(0,eg.hj)(s)||!(0,eg.hj)(u))return null;var p=(0,g.Z)("recharts-pie",o);return c.createElement(h.m,{tabIndex:this.props.rootTabIndex,className:p,ref:function(t){e.pieRef=t}},this.renderSectors(),i&&this.renderLabels(r),K._.renderCallByParent(this.props,null,!1),(!d||f)&&ep.e.renderCallByParent(this.props,r,!1))}}],r=[{key:"getDerivedStateFromProps",value:function(e,t){return t.prevIsAnimationActive!==e.isAnimationActive?{prevIsAnimationActive:e.isAnimationActive,prevAnimationId:e.animationId,curSectors:e.sectors,prevSectors:[],isAnimationFinished:!0}:e.isAnimationActive&&e.animationId!==t.prevAnimationId?{prevAnimationId:e.animationId,curSectors:e.sectors,prevSectors:t.curSectors,isAnimationFinished:!0}:e.sectors!==t.curSectors?{curSectors:e.sectors,isAnimationFinished:!0}:null}},{key:"getTextAnchor",value:function(e,t){return e>t?"start":e=360?x:x-1)*u,S=i.reduce(function(e,t){var n=(0,ev.F$)(t,b,0);return e+((0,eg.hj)(n)?n:0)},0);return S>0&&(t=i.map(function(e,t){var 
r,o=(0,ev.F$)(e,b,0),i=(0,ev.F$)(e,f,t),a=((0,eg.hj)(o)?o:0)/S,s=(r=t?n.endAngle+(0,eg.uY)(v)*u*(0!==o?1:0):c)+(0,eg.uY)(v)*((0!==o?m:0)+a*w),d=(r+s)/2,p=(g.innerRadius+g.outerRadius)/2,y=[{name:i,value:o,payload:e,dataKey:b,type:h}],x=(0,M.op)(g.cx,g.cy,p,d);return n=ek(ek(ek({percent:a,cornerRadius:l,name:i,tooltipPayload:y,midAngle:d,middleRadius:p,tooltipPosition:x},e),g),{},{value:(0,ev.F$)(e,b),startAngle:r,endAngle:s,payload:e,paddingAngle:(0,eg.uY)(v)*u})})),ek(ek({},g),{},{sectors:t,data:i})});var eI=(0,d.z)({chartName:"PieChart",GraphicalChild:eN,validateTooltipEventTypes:["item"],defaultTooltipEventType:"item",legendContent:"children",axisComponents:[{axisType:"angleAxis",AxisComp:B},{axisType:"radiusAxis",AxisComp:eo}],formatAxisMap:M.t9,defaultProps:{layout:"centric",startAngle:0,endAngle:360,cx:"50%",cy:"50%",innerRadius:0,outerRadius:"80%"}}),eR=n(8147),eT=n(69448),eA=n(98593);let e_=e=>{let{active:t,payload:n,valueFormatter:r}=e;if(t&&(null==n?void 0:n[0])){let e=null==n?void 0:n[0];return c.createElement(eA.$B,null,c.createElement("div",{className:(0,a.q)("px-4 py-2")},c.createElement(eA.zX,{value:r(e.value),name:e.name,color:e.payload.color})))}return null},eD=(e,t)=>e.map((e,n)=>{let r=ne||t((0,l.vP)(n.map(e=>e[r]))),eL=e=>{let{cx:t,cy:n,innerRadius:r,outerRadius:o,startAngle:i,endAngle:a,className:l}=e;return 
c.createElement("g",null,c.createElement(s.L,{cx:t,cy:n,innerRadius:r,outerRadius:o,startAngle:i,endAngle:a,className:l,fill:"",opacity:.3,style:{outline:"none"}}))},ez=c.forwardRef((e,t)=>{let{data:n=[],category:s="value",index:d="name",colors:f=i.s,variant:p="donut",valueFormatter:h=l.Cj,label:m,showLabel:g=!0,animationDuration:v=900,showAnimation:y=!1,showTooltip:b=!0,noDataText:x,onValueChange:w,customTooltip:S,className:k}=e,E=(0,r._T)(e,["data","category","index","colors","variant","valueFormatter","label","showLabel","animationDuration","showAnimation","showTooltip","noDataText","onValueChange","customTooltip","className"]),C="donut"==p,O=eZ(m,h,n,s),[j,P]=c.useState(void 0),M=!!w;return(0,c.useEffect)(()=>{let e=document.querySelectorAll(".recharts-pie-sector");e&&e.forEach(e=>{e.setAttribute("style","outline: none")})},[j]),c.createElement("div",Object.assign({ref:t,className:(0,a.q)("w-full h-40",k)},E),c.createElement(u.h,{className:"h-full w-full"},(null==n?void 0:n.length)?c.createElement(eI,{onClick:M&&j?()=>{P(void 0),null==w||w(null)}:void 0,margin:{top:0,left:0,right:0,bottom:0}},g&&C?c.createElement("text",{className:(0,a.q)("fill-tremor-content-emphasis","dark:fill-dark-tremor-content-emphasis"),x:"50%",y:"50%",textAnchor:"middle",dominantBaseline:"middle"},O):null,c.createElement(eN,{className:(0,a.q)("stroke-tremor-background dark:stroke-dark-tremor-background",w?"cursor-pointer":"cursor-default"),data:eD(n,f),cx:"50%",cy:"50%",startAngle:90,endAngle:-270,innerRadius:C?"75%":"0%",outerRadius:"100%",stroke:"",strokeLinejoin:"round",dataKey:s,nameKey:d,isAnimationActive:y,animationDuration:v,onClick:function(e,t,n){n.stopPropagation(),M&&(j===t?(P(void 0),null==w||w(null)):(P(t),null==w||w(Object.assign({eventType:"slice"},e.payload.payload))))},activeIndex:j,inactiveShape:eL,style:{outline:"none"}}),c.createElement(eR.u,{wrapperStyle:{outline:"none"},isAnimationActive:!1,content:b?e=>{var t;let{active:n,payload:r}=e;return 
S?c.createElement(S,{payload:null==r?void 0:r.map(e=>{var t,n,i;return Object.assign(Object.assign({},e),{color:null!==(i=null===(n=null===(t=null==r?void 0:r[0])||void 0===t?void 0:t.payload)||void 0===n?void 0:n.color)&&void 0!==i?i:o.fr.Gray})}),active:n,label:null===(t=null==r?void 0:r[0])||void 0===t?void 0:t.name}):c.createElement(e_,{active:n,payload:r,valueFormatter:h})}:c.createElement(c.Fragment,null)})):c.createElement(eT.Z,{noDataText:x})))});ez.displayName="DonutChart"},65278:function(e,t,n){"use strict";n.d(t,{Z:function(){return m}});var r=n(2265);let o=(e,t)=>{let[n,o]=(0,r.useState)(t);(0,r.useEffect)(()=>{let t=()=>{o(window.innerWidth),e()};return t(),window.addEventListener("resize",t),()=>window.removeEventListener("resize",t)},[e,n])};var i=n(5853),a=n(26898),l=n(65954),c=n(1153);let s=e=>{var t=(0,i._T)(e,[]);return r.createElement("svg",Object.assign({},t,{xmlns:"http://www.w3.org/2000/svg",viewBox:"0 0 24 24",fill:"currentColor"}),r.createElement("path",{d:"M8 12L14 6V18L8 12Z"}))},u=e=>{var t=(0,i._T)(e,[]);return r.createElement("svg",Object.assign({},t,{xmlns:"http://www.w3.org/2000/svg",viewBox:"0 0 24 24",fill:"currentColor"}),r.createElement("path",{d:"M16 12L10 18V6L16 12Z"}))},d=(0,c.fn)("Legend"),f=e=>{let{name:t,color:n,onClick:o,activeLegend:i}=e,s=!!o;return r.createElement("li",{className:(0,l.q)(d("legendItem"),"group inline-flex items-center px-2 py-0.5 rounded-tremor-small transition whitespace-nowrap",s?"cursor-pointer":"cursor-default","text-tremor-content",s?"hover:bg-tremor-background-subtle":"","dark:text-dark-tremor-content",s?"dark:hover:bg-dark-tremor-background-subtle":""),onClick:e=>{e.stopPropagation(),null==o||o(t,n)}},r.createElement("svg",{className:(0,l.q)("flex-none h-2 w-2 mr-1.5",(0,c.bM)(n,a.K.text).textColor,i&&i!==t?"opacity-40":"opacity-100"),fill:"currentColor",viewBox:"0 0 8 8"},r.createElement("circle",{cx:4,cy:4,r:4})),r.createElement("p",{className:(0,l.q)("whitespace-nowrap truncate 
text-tremor-default","text-tremor-content",s?"group-hover:text-tremor-content-emphasis":"","dark:text-dark-tremor-content",i&&i!==t?"opacity-40":"opacity-100",s?"dark:group-hover:text-dark-tremor-content-emphasis":"")},t))},p=e=>{let{icon:t,onClick:n,disabled:o}=e,[i,a]=r.useState(!1),c=r.useRef(null);return r.useEffect(()=>(i?c.current=setInterval(()=>{null==n||n()},300):clearInterval(c.current),()=>clearInterval(c.current)),[i,n]),(0,r.useEffect)(()=>{o&&(clearInterval(c.current),a(!1))},[o]),r.createElement("button",{type:"button",className:(0,l.q)(d("legendSliderButton"),"w-5 group inline-flex items-center truncate rounded-tremor-small transition",o?"cursor-not-allowed":"cursor-pointer",o?"text-tremor-content-subtle":"text-tremor-content hover:text-tremor-content-emphasis hover:bg-tremor-background-subtle",o?"dark:text-dark-tremor-subtle":"dark:text-dark-tremor dark:hover:text-tremor-content-emphasis dark:hover:bg-dark-tremor-background-subtle"),disabled:o,onClick:e=>{e.stopPropagation(),null==n||n()},onMouseDown:e=>{e.stopPropagation(),a(!0)},onMouseUp:e=>{e.stopPropagation(),a(!1)}},r.createElement(t,{className:"w-full"}))},h=r.forwardRef((e,t)=>{var n,o;let{categories:c,colors:h=a.s,className:m,onClickLegendItem:g,activeLegend:v,enableLegendSlider:y=!1}=e,b=(0,i._T)(e,["categories","colors","className","onClickLegendItem","activeLegend","enableLegendSlider"]),x=r.useRef(null),[w,S]=r.useState(null),[k,E]=r.useState(null),C=r.useRef(null),O=(0,r.useCallback)(()=>{let e=null==x?void 0:x.current;e&&S({left:e.scrollLeft>0,right:e.scrollWidth-e.clientWidth>e.scrollLeft})},[S]),j=(0,r.useCallback)(e=>{var t;let n=null==x?void 0:x.current,r=null!==(t=null==n?void 0:n.clientWidth)&&void 0!==t?t:0;n&&y&&(n.scrollTo({left:"left"===e?n.scrollLeft-r:n.scrollLeft+r,behavior:"smooth"}),setTimeout(()=>{O()},400))},[y,O]);r.useEffect(()=>{let e=e=>{"ArrowLeft"===e?j("left"):"ArrowRight"===e&&j("right")};return 
k?(e(k),C.current=setInterval(()=>{e(k)},300)):clearInterval(C.current),()=>clearInterval(C.current)},[k,j]);let P=e=>{e.stopPropagation(),"ArrowLeft"!==e.key&&"ArrowRight"!==e.key||(e.preventDefault(),E(e.key))},M=e=>{e.stopPropagation(),E(null)};return r.useEffect(()=>{let e=null==x?void 0:x.current;return y&&(O(),null==e||e.addEventListener("keydown",P),null==e||e.addEventListener("keyup",M)),()=>{null==e||e.removeEventListener("keydown",P),null==e||e.removeEventListener("keyup",M)}},[O,y]),r.createElement("ol",Object.assign({ref:t,className:(0,l.q)(d("root"),"relative overflow-hidden",m)},b),r.createElement("div",{ref:x,tabIndex:0,className:(0,l.q)("h-full flex",y?(null==w?void 0:w.right)||(null==w?void 0:w.left)?"pl-4 pr-12 items-center overflow-auto snap-mandatory [&::-webkit-scrollbar]:hidden [scrollbar-width:none]":"":"flex-wrap")},c.map((e,t)=>r.createElement(f,{key:"item-".concat(t),name:e,color:h[t],onClick:g,activeLegend:v}))),y&&((null==w?void 0:w.right)||(null==w?void 0:w.left))?r.createElement(r.Fragment,null,r.createElement("div",{className:(0,l.q)("from-tremor-background","dark:from-dark-tremor-background","absolute top-0 bottom-0 left-0 w-4 bg-gradient-to-r to-transparent pointer-events-none")}),r.createElement("div",{className:(0,l.q)("to-tremor-background","dark:to-dark-tremor-background","absolute top-0 bottom-0 right-10 w-4 bg-gradient-to-r from-transparent pointer-events-none")}),r.createElement("div",{className:(0,l.q)("bg-tremor-background","dark:bg-dark-tremor-background","absolute flex top-0 pr-1 bottom-0 right-0 items-center justify-center h-full")},r.createElement(p,{icon:s,onClick:()=>{E(null),j("left")},disabled:!(null==w?void 0:w.left)}),r.createElement(p,{icon:u,onClick:()=>{E(null),j("right")},disabled:!(null==w?void 0:w.right)}))):null)});h.displayName="Legend";let m=(e,t,n,i,a,l)=>{let{payload:c}=e,s=(0,r.useRef)(null);o(()=>{var e,t;n((t=null===(e=s.current)||void 0===e?void 0:e.clientHeight)?Number(t)+20:60)});let 
u=c.filter(e=>"none"!==e.type);return r.createElement("div",{ref:s,className:"flex items-center justify-end"},r.createElement(h,{categories:u.map(e=>e.value),colors:u.map(e=>t.get(e.value)),onClickLegendItem:a,activeLegend:i,enableLegendSlider:l}))}},98593:function(e,t,n){"use strict";n.d(t,{$B:function(){return c},ZP:function(){return u},zX:function(){return s}});var r=n(2265),o=n(7084),i=n(26898),a=n(65954),l=n(1153);let c=e=>{let{children:t}=e;return r.createElement("div",{className:(0,a.q)("rounded-tremor-default text-tremor-default border","bg-tremor-background shadow-tremor-dropdown border-tremor-border","dark:bg-dark-tremor-background dark:shadow-dark-tremor-dropdown dark:border-dark-tremor-border")},t)},s=e=>{let{value:t,name:n,color:o}=e;return r.createElement("div",{className:"flex items-center justify-between space-x-8"},r.createElement("div",{className:"flex items-center space-x-2"},r.createElement("span",{className:(0,a.q)("shrink-0 rounded-tremor-full border-2 h-3 w-3","border-tremor-background shadow-tremor-card","dark:border-dark-tremor-background dark:shadow-dark-tremor-card",(0,l.bM)(o,i.K.background).bgColor)}),r.createElement("p",{className:(0,a.q)("text-right whitespace-nowrap","text-tremor-content","dark:text-dark-tremor-content")},n)),r.createElement("p",{className:(0,a.q)("font-medium tabular-nums text-right whitespace-nowrap","text-tremor-content-emphasis","dark:text-dark-tremor-content-emphasis")},t))},u=e=>{let{active:t,payload:n,label:i,categoryColors:l,valueFormatter:u}=e;if(t&&n){let e=n.filter(e=>"none"!==e.type);return r.createElement(c,null,r.createElement("div",{className:(0,a.q)("border-tremor-border border-b px-4 py-2","dark:border-dark-tremor-border")},r.createElement("p",{className:(0,a.q)("font-medium","text-tremor-content-emphasis","dark:text-dark-tremor-content-emphasis")},i)),r.createElement("div",{className:(0,a.q)("px-4 py-2 space-y-1")},e.map((e,t)=>{var n;let{value:i,name:a}=e;return 
r.createElement(s,{key:"id-".concat(t),value:u(i),name:a,color:null!==(n=l.get(a))&&void 0!==n?n:o.fr.Blue})})))}return null}},69448:function(e,t,n){"use strict";n.d(t,{Z:function(){return f}});var r=n(65954),o=n(2265),i=n(5853);let a=(0,n(1153).fn)("Flex"),l={start:"justify-start",end:"justify-end",center:"justify-center",between:"justify-between",around:"justify-around",evenly:"justify-evenly"},c={start:"items-start",end:"items-end",center:"items-center",baseline:"items-baseline",stretch:"items-stretch"},s={row:"flex-row",col:"flex-col","row-reverse":"flex-row-reverse","col-reverse":"flex-col-reverse"},u=o.forwardRef((e,t)=>{let{flexDirection:n="row",justifyContent:u="between",alignItems:d="center",children:f,className:p}=e,h=(0,i._T)(e,["flexDirection","justifyContent","alignItems","children","className"]);return o.createElement("div",Object.assign({ref:t,className:(0,r.q)(a("root"),"flex w-full",s[n],l[u],c[d],p)},h),f)});u.displayName="Flex";var d=n(84264);let f=e=>{let{noDataText:t="No data"}=e;return o.createElement(u,{alignItems:"center",justifyContent:"center",className:(0,r.q)("w-full h-full border border-dashed rounded-tremor-default","border-tremor-border","dark:border-dark-tremor-border")},o.createElement(d.Z,{className:(0,r.q)("text-tremor-content","dark:text-dark-tremor-content")},t))}},32644:function(e,t,n){"use strict";n.d(t,{FB:function(){return i},i4:function(){return o},me:function(){return r},vZ:function(){return function e(t,n){if(t===n)return!0;if("object"!=typeof t||"object"!=typeof n||null===t||null===n)return!1;let r=Object.keys(t),o=Object.keys(n);if(r.length!==o.length)return!1;for(let i of r)if(!o.includes(i)||!e(t[i],n[i]))return!1;return!0}}});let r=(e,t)=>{let n=new Map;return e.forEach((e,r)=>{n.set(e,t[r])}),n},o=(e,t,n)=>[e?"auto":null!=t?t:0,null!=n?n:"auto"];function i(e,t){let n=[];for(let r of e)if(Object.prototype.hasOwnProperty.call(r,t)&&(n.push(r[t]),n.length>1))return!1;return!0}},41649:function(e,t,n){"use 
strict";n.d(t,{Z:function(){return p}});var r=n(5853),o=n(2265),i=n(1526),a=n(7084),l=n(26898),c=n(65954),s=n(1153);let u={xs:{paddingX:"px-2",paddingY:"py-0.5",fontSize:"text-xs"},sm:{paddingX:"px-2.5",paddingY:"py-0.5",fontSize:"text-sm"},md:{paddingX:"px-3",paddingY:"py-0.5",fontSize:"text-md"},lg:{paddingX:"px-3.5",paddingY:"py-0.5",fontSize:"text-lg"},xl:{paddingX:"px-4",paddingY:"py-1",fontSize:"text-xl"}},d={xs:{height:"h-4",width:"w-4"},sm:{height:"h-4",width:"w-4"},md:{height:"h-4",width:"w-4"},lg:{height:"h-5",width:"w-5"},xl:{height:"h-6",width:"w-6"}},f=(0,s.fn)("Badge"),p=o.forwardRef((e,t)=>{let{color:n,icon:p,size:h=a.u8.SM,tooltip:m,className:g,children:v}=e,y=(0,r._T)(e,["color","icon","size","tooltip","className","children"]),b=p||null,{tooltipProps:x,getReferenceProps:w}=(0,i.l)();return o.createElement("span",Object.assign({ref:(0,s.lq)([t,x.refs.setReference]),className:(0,c.q)(f("root"),"w-max flex-shrink-0 inline-flex justify-center items-center cursor-default rounded-tremor-full",n?(0,c.q)((0,s.bM)(n,l.K.background).bgColor,(0,s.bM)(n,l.K.text).textColor,"bg-opacity-20 dark:bg-opacity-25"):(0,c.q)("bg-tremor-brand-muted text-tremor-brand-emphasis","dark:bg-dark-tremor-brand-muted dark:text-dark-tremor-brand-emphasis"),u[h].paddingX,u[h].paddingY,u[h].fontSize,g)},w,y),o.createElement(i.Z,Object.assign({text:m},x)),b?o.createElement(b,{className:(0,c.q)(f("icon"),"shrink-0 -ml-1 mr-1.5",d[h].height,d[h].width)}):null,o.createElement("p",{className:(0,c.q)(f("text"),"text-sm whitespace-nowrap")},v))});p.displayName="Badge"},47323:function(e,t,n){"use strict";n.d(t,{Z:function(){return m}});var r=n(5853),o=n(2265),i=n(1526),a=n(7084),l=n(65954),c=n(1153),s=n(26898);let 
u={xs:{paddingX:"px-1.5",paddingY:"py-1.5"},sm:{paddingX:"px-1.5",paddingY:"py-1.5"},md:{paddingX:"px-2",paddingY:"py-2"},lg:{paddingX:"px-2",paddingY:"py-2"},xl:{paddingX:"px-2.5",paddingY:"py-2.5"}},d={xs:{height:"h-3",width:"w-3"},sm:{height:"h-5",width:"w-5"},md:{height:"h-5",width:"w-5"},lg:{height:"h-7",width:"w-7"},xl:{height:"h-9",width:"w-9"}},f={simple:{rounded:"",border:"",ring:"",shadow:""},light:{rounded:"rounded-tremor-default",border:"",ring:"",shadow:""},shadow:{rounded:"rounded-tremor-default",border:"border",ring:"",shadow:"shadow-tremor-card dark:shadow-dark-tremor-card"},solid:{rounded:"rounded-tremor-default",border:"border-2",ring:"ring-1",shadow:""},outlined:{rounded:"rounded-tremor-default",border:"border",ring:"ring-2",shadow:""}},p=(e,t)=>{switch(e){case"simple":return{textColor:t?(0,c.bM)(t,s.K.text).textColor:"text-tremor-brand dark:text-dark-tremor-brand",bgColor:"",borderColor:"",ringColor:""};case"light":return{textColor:t?(0,c.bM)(t,s.K.text).textColor:"text-tremor-brand dark:text-dark-tremor-brand",bgColor:t?(0,l.q)((0,c.bM)(t,s.K.background).bgColor,"bg-opacity-20"):"bg-tremor-brand-muted dark:bg-dark-tremor-brand-muted",borderColor:"",ringColor:""};case"shadow":return{textColor:t?(0,c.bM)(t,s.K.text).textColor:"text-tremor-brand dark:text-dark-tremor-brand",bgColor:t?(0,l.q)((0,c.bM)(t,s.K.background).bgColor,"bg-opacity-20"):"bg-tremor-background dark:bg-dark-tremor-background",borderColor:"border-tremor-border dark:border-dark-tremor-border",ringColor:""};case"solid":return{textColor:t?(0,c.bM)(t,s.K.text).textColor:"text-tremor-brand-inverted dark:text-dark-tremor-brand-inverted",bgColor:t?(0,l.q)((0,c.bM)(t,s.K.background).bgColor,"bg-opacity-20"):"bg-tremor-brand dark:bg-dark-tremor-brand",borderColor:"border-tremor-brand-inverted dark:border-dark-tremor-brand-inverted",ringColor:"ring-tremor-ring dark:ring-dark-tremor-ring"};case"outlined":return{textColor:t?(0,c.bM)(t,s.K.text).textColor:"text-tremor-brand 
dark:text-dark-tremor-brand",bgColor:t?(0,l.q)((0,c.bM)(t,s.K.background).bgColor,"bg-opacity-20"):"bg-tremor-background dark:bg-dark-tremor-background",borderColor:t?(0,c.bM)(t,s.K.ring).borderColor:"border-tremor-brand-subtle dark:border-dark-tremor-brand-subtle",ringColor:t?(0,l.q)((0,c.bM)(t,s.K.ring).ringColor,"ring-opacity-40"):"ring-tremor-brand-muted dark:ring-dark-tremor-brand-muted"}}},h=(0,c.fn)("Icon"),m=o.forwardRef((e,t)=>{let{icon:n,variant:s="simple",tooltip:m,size:g=a.u8.SM,color:v,className:y}=e,b=(0,r._T)(e,["icon","variant","tooltip","size","color","className"]),x=p(s,v),{tooltipProps:w,getReferenceProps:S}=(0,i.l)();return o.createElement("span",Object.assign({ref:(0,c.lq)([t,w.refs.setReference]),className:(0,l.q)(h("root"),"inline-flex flex-shrink-0 items-center",x.bgColor,x.textColor,x.borderColor,x.ringColor,f[s].rounded,f[s].border,f[s].shadow,f[s].ring,u[g].paddingX,u[g].paddingY,y)},S,b),o.createElement(i.Z,Object.assign({text:m},w)),o.createElement(n,{className:(0,l.q)(h("icon"),"shrink-0",d[g].height,d[g].width)}))});m.displayName="Icon"},53003:function(e,t,n){"use strict";let r,o,i;n.d(t,{Z:function(){return nF}});var a,l,c,s,u=n(5853),d=n(2265),f=n(54887),p=n(13323),h=n(64518),m=n(96822),g=n(40293);function v(){for(var e=arguments.length,t=Array(e),n=0;n(0,g.r)(...t),[...t])}var y=n(72238),b=n(93689);let x=(0,d.createContext)(!1);var w=n(61424),S=n(27847);let k=d.Fragment,E=d.Fragment,C=(0,d.createContext)(null),O=(0,d.createContext)(null);Object.assign((0,S.yV)(function(e,t){var n;let r,o,i=(0,d.useRef)(null),a=(0,b.T)((0,b.h)(e=>{i.current=e}),t),l=v(i),c=function(e){let t=(0,d.useContext)(x),n=(0,d.useContext)(C),r=v(e),[o,i]=(0,d.useState)(()=>{if(!t&&null!==n||w.O.isServer)return null;let e=null==r?void 0:r.getElementById("headlessui-portal-root");if(e)return e;if(null===r)return null;let o=r.createElement("div");return 
o.setAttribute("id","headlessui-portal-root"),r.body.appendChild(o)});return(0,d.useEffect)(()=>{null!==o&&(null!=r&&r.body.contains(o)||null==r||r.body.appendChild(o))},[o,r]),(0,d.useEffect)(()=>{t||null!==n&&i(n.current)},[n,i,t]),o}(i),[s]=(0,d.useState)(()=>{var e;return w.O.isServer?null:null!=(e=null==l?void 0:l.createElement("div"))?e:null}),u=(0,d.useContext)(O),g=(0,y.H)();return(0,h.e)(()=>{!c||!s||c.contains(s)||(s.setAttribute("data-headlessui-portal",""),c.appendChild(s))},[c,s]),(0,h.e)(()=>{if(s&&u)return u.register(s)},[u,s]),n=()=>{var e;c&&s&&(s instanceof Node&&c.contains(s)&&c.removeChild(s),c.childNodes.length<=0&&(null==(e=c.parentElement)||e.removeChild(c)))},r=(0,p.z)(n),o=(0,d.useRef)(!1),(0,d.useEffect)(()=>(o.current=!1,()=>{o.current=!0,(0,m.Y)(()=>{o.current&&r()})}),[r]),g&&c&&s?(0,f.createPortal)((0,S.sY)({ourProps:{ref:a},theirProps:e,defaultTag:k,name:"Portal"}),s):null}),{Group:(0,S.yV)(function(e,t){let{target:n,...r}=e,o={ref:(0,b.T)(t)};return d.createElement(C.Provider,{value:n},(0,S.sY)({ourProps:o,theirProps:r,defaultTag:E,name:"Popover.Group"}))})});var j=n(31948),P=n(17684),M=n(98505),N=n(80004),I=n(38198),R=n(3141),T=((r=T||{})[r.Forwards=0]="Forwards",r[r.Backwards=1]="Backwards",r);function A(){let e=(0,d.useRef)(0);return(0,R.s)("keydown",t=>{"Tab"===t.key&&(e.current=t.shiftKey?1:0)},!0),e}var _=n(37863),D=n(47634),Z=n(37105),L=n(24536),z=n(37388),B=((o=B||{})[o.Open=0]="Open",o[o.Closed=1]="Closed",o),F=((i=F||{})[i.TogglePopover=0]="TogglePopover",i[i.ClosePopover=1]="ClosePopover",i[i.SetButton=2]="SetButton",i[i.SetButtonId=3]="SetButtonId",i[i.SetPanel=4]="SetPanel",i[i.SetPanelId=5]="SetPanelId",i);let H={0:e=>{let t={...e,popoverState:(0,L.E)(e.popoverState,{0:1,1:0})};return 
0===t.popoverState&&(t.__demoMode=!1),t},1:e=>1===e.popoverState?e:{...e,popoverState:1},2:(e,t)=>e.button===t.button?e:{...e,button:t.button},3:(e,t)=>e.buttonId===t.buttonId?e:{...e,buttonId:t.buttonId},4:(e,t)=>e.panel===t.panel?e:{...e,panel:t.panel},5:(e,t)=>e.panelId===t.panelId?e:{...e,panelId:t.panelId}},q=(0,d.createContext)(null);function W(e){let t=(0,d.useContext)(q);if(null===t){let t=Error("<".concat(e," /> is missing a parent component."));throw Error.captureStackTrace&&Error.captureStackTrace(t,W),t}return t}q.displayName="PopoverContext";let K=(0,d.createContext)(null);function U(e){let t=(0,d.useContext)(K);if(null===t){let t=Error("<".concat(e," /> is missing a parent component."));throw Error.captureStackTrace&&Error.captureStackTrace(t,U),t}return t}K.displayName="PopoverAPIContext";let V=(0,d.createContext)(null);function G(){return(0,d.useContext)(V)}V.displayName="PopoverGroupContext";let X=(0,d.createContext)(null);function $(e,t){return(0,L.E)(t.type,H,e,t)}X.displayName="PopoverPanelContext";let Y=S.AN.RenderStrategy|S.AN.Static,Q=S.AN.RenderStrategy|S.AN.Static,J=Object.assign((0,S.yV)(function(e,t){var n,r,o,i;let a,l,c,s,u,f;let{__demoMode:h=!1,...m}=e,g=(0,d.useRef)(null),y=(0,b.T)(t,(0,b.h)(e=>{g.current=e})),x=(0,d.useRef)([]),w=(0,d.useReducer)($,{__demoMode:h,popoverState:h?0:1,buttons:x,button:null,buttonId:null,panel:null,panelId:null,beforePanelSentinel:(0,d.createRef)(),afterPanelSentinel:(0,d.createRef)()}),[{popoverState:k,button:E,buttonId:C,panel:P,panelId:N,beforePanelSentinel:R,afterPanelSentinel:T},A]=w,D=v(null!=(n=g.current)?n:E),z=(0,d.useMemo)(()=>{if(!E||!P)return!1;for(let e of document.querySelectorAll("body > *"))if(Number(null==e?void 0:e.contains(E))^Number(null==e?void 0:e.contains(P)))return!0;let 
e=(0,Z.GO)(),t=e.indexOf(E),n=(t+e.length-1)%e.length,r=(t+1)%e.length,o=e[n],i=e[r];return!P.contains(o)&&!P.contains(i)},[E,P]),B=(0,j.E)(C),F=(0,j.E)(N),H=(0,d.useMemo)(()=>({buttonId:B,panelId:F,close:()=>A({type:1})}),[B,F,A]),W=G(),U=null==W?void 0:W.registerPopover,V=(0,p.z)(()=>{var e;return null!=(e=null==W?void 0:W.isFocusWithinPopoverGroup())?e:(null==D?void 0:D.activeElement)&&((null==E?void 0:E.contains(D.activeElement))||(null==P?void 0:P.contains(D.activeElement)))});(0,d.useEffect)(()=>null==U?void 0:U(H),[U,H]);let[Y,Q]=(a=(0,d.useContext)(O),l=(0,d.useRef)([]),c=(0,p.z)(e=>(l.current.push(e),a&&a.register(e),()=>s(e))),s=(0,p.z)(e=>{let t=l.current.indexOf(e);-1!==t&&l.current.splice(t,1),a&&a.unregister(e)}),u=(0,d.useMemo)(()=>({register:c,unregister:s,portals:l}),[c,s,l]),[l,(0,d.useMemo)(()=>function(e){let{children:t}=e;return d.createElement(O.Provider,{value:u},t)},[u])]),J=function(){var e;let{defaultContainers:t=[],portals:n,mainTreeNodeRef:r}=arguments.length>0&&void 0!==arguments[0]?arguments[0]:{},o=(0,d.useRef)(null!=(e=null==r?void 0:r.current)?e:null),i=v(o),a=(0,p.z)(()=>{var e,r,a;let l=[];for(let e of t)null!==e&&(e instanceof HTMLElement?l.push(e):"current"in e&&e.current instanceof HTMLElement&&l.push(e.current));if(null!=n&&n.current)for(let e of n.current)l.push(e);for(let t of null!=(e=null==i?void 0:i.querySelectorAll("html > *, body > *"))?e:[])t!==document.body&&t!==document.head&&t instanceof HTMLElement&&"headlessui-portal-root"!==t.id&&(t.contains(o.current)||t.contains(null==(a=null==(r=o.current)?void 0:r.getRootNode())?void 0:a.host)||l.some(e=>t.contains(e))||l.push(t));return l});return{resolveContainers:a,contains:(0,p.z)(e=>a().some(t=>t.contains(e))),mainTreeNodeRef:o,MainTreeNode:(0,d.useMemo)(()=>function(){return null!=r?null:d.createElement(I._,{features:I.A.Hidden,ref:o})},[o,r])}}({mainTreeNodeRef:null==W?void 0:W.mainTreeNodeRef,portals:Y,defaultContainers:[E,P]});r=null==D?void 
0:D.defaultView,o="focus",i=e=>{var t,n,r,o;e.target!==window&&e.target instanceof HTMLElement&&0===k&&(V()||E&&P&&(J.contains(e.target)||null!=(n=null==(t=R.current)?void 0:t.contains)&&n.call(t,e.target)||null!=(o=null==(r=T.current)?void 0:r.contains)&&o.call(r,e.target)||A({type:1})))},f=(0,j.E)(i),(0,d.useEffect)(()=>{function e(e){f.current(e)}return(r=null!=r?r:window).addEventListener(o,e,!0),()=>r.removeEventListener(o,e,!0)},[r,o,!0]),(0,M.O)(J.resolveContainers,(e,t)=>{A({type:1}),(0,Z.sP)(t,Z.tJ.Loose)||(e.preventDefault(),null==E||E.focus())},0===k);let ee=(0,p.z)(e=>{A({type:1});let t=e?e instanceof HTMLElement?e:"current"in e&&e.current instanceof HTMLElement?e.current:E:E;null==t||t.focus()}),et=(0,d.useMemo)(()=>({close:ee,isPortalled:z}),[ee,z]),en=(0,d.useMemo)(()=>({open:0===k,close:ee}),[k,ee]);return d.createElement(X.Provider,{value:null},d.createElement(q.Provider,{value:w},d.createElement(K.Provider,{value:et},d.createElement(_.up,{value:(0,L.E)(k,{0:_.ZM.Open,1:_.ZM.Closed})},d.createElement(Q,null,(0,S.sY)({ourProps:{ref:y},theirProps:m,slot:en,defaultTag:"div",name:"Popover"}),d.createElement(J.MainTreeNode,null))))))}),{Button:(0,S.yV)(function(e,t){let n=(0,P.M)(),{id:r="headlessui-popover-button-".concat(n),...o}=e,[i,a]=W("Popover.Button"),{isPortalled:l}=U("Popover.Button"),c=(0,d.useRef)(null),s="headlessui-focus-sentinel-".concat((0,P.M)()),u=G(),f=null==u?void 0:u.closeOthers,h=null!==(0,d.useContext)(X);(0,d.useEffect)(()=>{if(!h)return a({type:3,buttonId:r}),()=>{a({type:3,buttonId:null})}},[h,r,a]);let[m]=(0,d.useState)(()=>Symbol()),g=(0,b.T)(c,t,h?null:e=>{if(e)i.buttons.current.push(m);else{let e=i.buttons.current.indexOf(m);-1!==e&&i.buttons.current.splice(e,1)}i.buttons.current.length>1&&console.warn("You are already using a but only 1 is supported."),e&&a({type:2,button:e})}),y=(0,b.T)(c,t),x=v(c),w=(0,p.z)(e=>{var t,n,r;if(h){if(1===i.popoverState)return;switch(e.key){case z.R.Space:case 
z.R.Enter:e.preventDefault(),null==(n=(t=e.target).click)||n.call(t),a({type:1}),null==(r=i.button)||r.focus()}}else switch(e.key){case z.R.Space:case z.R.Enter:e.preventDefault(),e.stopPropagation(),1===i.popoverState&&(null==f||f(i.buttonId)),a({type:0});break;case z.R.Escape:if(0!==i.popoverState)return null==f?void 0:f(i.buttonId);if(!c.current||null!=x&&x.activeElement&&!c.current.contains(x.activeElement))return;e.preventDefault(),e.stopPropagation(),a({type:1})}}),k=(0,p.z)(e=>{h||e.key===z.R.Space&&e.preventDefault()}),E=(0,p.z)(t=>{var n,r;(0,D.P)(t.currentTarget)||e.disabled||(h?(a({type:1}),null==(n=i.button)||n.focus()):(t.preventDefault(),t.stopPropagation(),1===i.popoverState&&(null==f||f(i.buttonId)),a({type:0}),null==(r=i.button)||r.focus()))}),C=(0,p.z)(e=>{e.preventDefault(),e.stopPropagation()}),O=0===i.popoverState,j=(0,d.useMemo)(()=>({open:O}),[O]),M=(0,N.f)(e,c),R=h?{ref:y,type:M,onKeyDown:w,onClick:E}:{ref:g,id:i.buttonId,type:M,"aria-expanded":0===i.popoverState,"aria-controls":i.panel?i.panelId:void 0,onKeyDown:w,onKeyUp:k,onClick:E,onMouseDown:C},_=A(),B=(0,p.z)(()=>{let e=i.panel;e&&(0,L.E)(_.current,{[T.Forwards]:()=>(0,Z.jA)(e,Z.TO.First),[T.Backwards]:()=>(0,Z.jA)(e,Z.TO.Last)})===Z.fE.Error&&(0,Z.jA)((0,Z.GO)().filter(e=>"true"!==e.dataset.headlessuiFocusGuard),(0,L.E)(_.current,{[T.Forwards]:Z.TO.Next,[T.Backwards]:Z.TO.Previous}),{relativeTo:i.button})});return d.createElement(d.Fragment,null,(0,S.sY)({ourProps:R,theirProps:o,slot:j,defaultTag:"button",name:"Popover.Button"}),O&&!h&&l&&d.createElement(I._,{id:s,features:I.A.Focusable,"data-headlessui-focus-guard":!0,as:"button",type:"button",onFocus:B}))}),Overlay:(0,S.yV)(function(e,t){let n=(0,P.M)(),{id:r="headlessui-popover-overlay-".concat(n),...o}=e,[{popoverState:i},a]=W("Popover.Overlay"),l=(0,b.T)(t),c=(0,_.oJ)(),s=null!==c?(c&_.ZM.Open)===_.ZM.Open:0===i,u=(0,p.z)(e=>{if((0,D.P)(e.currentTarget))return 
e.preventDefault();a({type:1})}),f=(0,d.useMemo)(()=>({open:0===i}),[i]);return(0,S.sY)({ourProps:{ref:l,id:r,"aria-hidden":!0,onClick:u},theirProps:o,slot:f,defaultTag:"div",features:Y,visible:s,name:"Popover.Overlay"})}),Panel:(0,S.yV)(function(e,t){let n=(0,P.M)(),{id:r="headlessui-popover-panel-".concat(n),focus:o=!1,...i}=e,[a,l]=W("Popover.Panel"),{close:c,isPortalled:s}=U("Popover.Panel"),u="headlessui-focus-sentinel-before-".concat((0,P.M)()),f="headlessui-focus-sentinel-after-".concat((0,P.M)()),m=(0,d.useRef)(null),g=(0,b.T)(m,t,e=>{l({type:4,panel:e})}),y=v(m),x=(0,S.Y2)();(0,h.e)(()=>(l({type:5,panelId:r}),()=>{l({type:5,panelId:null})}),[r,l]);let w=(0,_.oJ)(),k=null!==w?(w&_.ZM.Open)===_.ZM.Open:0===a.popoverState,E=(0,p.z)(e=>{var t;if(e.key===z.R.Escape){if(0!==a.popoverState||!m.current||null!=y&&y.activeElement&&!m.current.contains(y.activeElement))return;e.preventDefault(),e.stopPropagation(),l({type:1}),null==(t=a.button)||t.focus()}});(0,d.useEffect)(()=>{var t;e.static||1===a.popoverState&&(null==(t=e.unmount)||t)&&l({type:4,panel:null})},[a.popoverState,e.unmount,e.static,l]),(0,d.useEffect)(()=>{if(a.__demoMode||!o||0!==a.popoverState||!m.current)return;let e=null==y?void 0:y.activeElement;m.current.contains(e)||(0,Z.jA)(m.current,Z.TO.First)},[a.__demoMode,o,m,a.popoverState]);let C=(0,d.useMemo)(()=>({open:0===a.popoverState,close:c}),[a,c]),O={ref:g,id:r,onKeyDown:E,onBlur:o&&0===a.popoverState?e=>{var t,n,r,o,i;let c=e.relatedTarget;c&&m.current&&(null!=(t=m.current)&&t.contains(c)||(l({type:1}),(null!=(r=null==(n=a.beforePanelSentinel.current)?void 0:n.contains)&&r.call(n,c)||null!=(i=null==(o=a.afterPanelSentinel.current)?void 0:o.contains)&&i.call(o,c))&&c.focus({preventScroll:!0})))}:void 0,tabIndex:-1},j=A(),M=(0,p.z)(()=>{let e=m.current;e&&(0,L.E)(j.current,{[T.Forwards]:()=>{var t;(0,Z.jA)(e,Z.TO.First)===Z.fE.Error&&(null==(t=a.afterPanelSentinel.current)||t.focus())},[T.Backwards]:()=>{var 
e;null==(e=a.button)||e.focus({preventScroll:!0})}})}),N=(0,p.z)(()=>{let e=m.current;e&&(0,L.E)(j.current,{[T.Forwards]:()=>{var e;if(!a.button)return;let t=(0,Z.GO)(),n=t.indexOf(a.button),r=t.slice(0,n+1),o=[...t.slice(n+1),...r];for(let t of o.slice())if("true"===t.dataset.headlessuiFocusGuard||null!=(e=a.panel)&&e.contains(t)){let e=o.indexOf(t);-1!==e&&o.splice(e,1)}(0,Z.jA)(o,Z.TO.First,{sorted:!1})},[T.Backwards]:()=>{var t;(0,Z.jA)(e,Z.TO.Previous)===Z.fE.Error&&(null==(t=a.button)||t.focus())}})});return d.createElement(X.Provider,{value:r},k&&s&&d.createElement(I._,{id:u,ref:a.beforePanelSentinel,features:I.A.Focusable,"data-headlessui-focus-guard":!0,as:"button",type:"button",onFocus:M}),(0,S.sY)({mergeRefs:x,ourProps:O,theirProps:i,slot:C,defaultTag:"div",features:Q,visible:k,name:"Popover.Panel"}),k&&s&&d.createElement(I._,{id:f,ref:a.afterPanelSentinel,features:I.A.Focusable,"data-headlessui-focus-guard":!0,as:"button",type:"button",onFocus:N}))}),Group:(0,S.yV)(function(e,t){let n;let r=(0,d.useRef)(null),o=(0,b.T)(r,t),[i,a]=(0,d.useState)([]),l={mainTreeNodeRef:n=(0,d.useRef)(null),MainTreeNode:(0,d.useMemo)(()=>function(){return d.createElement(I._,{features:I.A.Hidden,ref:n})},[n])},c=(0,p.z)(e=>{a(t=>{let n=t.indexOf(e);if(-1!==n){let e=t.slice();return e.splice(n,1),e}return t})}),s=(0,p.z)(e=>(a(t=>[...t,e]),()=>c(e))),u=(0,p.z)(()=>{var e;let t=(0,g.r)(r);if(!t)return!1;let n=t.activeElement;return!!(null!=(e=r.current)&&e.contains(n))||i.some(e=>{var r,o;return(null==(r=t.getElementById(e.buttonId.current))?void 0:r.contains(n))||(null==(o=t.getElementById(e.panelId.current))?void 0:o.contains(n))})}),f=(0,p.z)(e=>{for(let t of i)t.buttonId.current!==e&&t.close()}),h=(0,d.useMemo)(()=>({registerPopover:s,unregisterPopover:c,isFocusWithinPopoverGroup:u,closeOthers:f,mainTreeNodeRef:l.mainTreeNodeRef}),[s,c,u,f,l.mainTreeNodeRef]),m=(0,d.useMemo)(()=>({}),[]);return 
d.createElement(V.Provider,{value:h},(0,S.sY)({ourProps:{ref:o},theirProps:e,slot:m,defaultTag:"div",name:"Popover.Group"}),d.createElement(l.MainTreeNode,null))})});var ee=n(33044),et=n(28517);let en=e=>{var t=(0,u._T)(e,[]);return d.createElement("svg",Object.assign({},t,{xmlns:"http://www.w3.org/2000/svg",viewBox:"0 0 20 20",fill:"currentColor"}),d.createElement("path",{fillRule:"evenodd",d:"M6 2a1 1 0 00-1 1v1H4a2 2 0 00-2 2v10a2 2 0 002 2h12a2 2 0 002-2V6a2 2 0 00-2-2h-1V3a1 1 0 10-2 0v1H7V3a1 1 0 00-1-1zm0 5a1 1 0 000 2h8a1 1 0 100-2H6z",clipRule:"evenodd"}))};var er=n(4537),eo=n(99735),ei=n(7656);function ea(e){(0,ei.Z)(1,arguments);var t=(0,eo.Z)(e);return t.setHours(0,0,0,0),t}function el(){return ea(Date.now())}function ec(e){(0,ei.Z)(1,arguments);var t=(0,eo.Z)(e);return t.setDate(1),t.setHours(0,0,0,0),t}var es=n(65954),eu=n(96398),ed=n(41154);function ef(e){var t,n;if((0,ei.Z)(1,arguments),e&&"function"==typeof e.forEach)t=e;else{if("object"!==(0,ed.Z)(e)||null===e)return new Date(NaN);t=Array.prototype.slice.call(e)}return t.forEach(function(e){var t=(0,eo.Z)(e);(void 0===n||nt||isNaN(t.getDate()))&&(n=t)}),n||new Date(NaN)}var eh=n(25721),em=n(47869);function eg(e,t){(0,ei.Z)(2,arguments);var n=(0,em.Z)(t);return(0,eh.Z)(e,-n)}var ev=n(55463);function ey(e,t){if((0,ei.Z)(2,arguments),!t||"object"!==(0,ed.Z)(t))return new Date(NaN);var n=t.years?(0,em.Z)(t.years):0,r=t.months?(0,em.Z)(t.months):0,o=t.weeks?(0,em.Z)(t.weeks):0,i=t.days?(0,em.Z)(t.days):0,a=t.hours?(0,em.Z)(t.hours):0,l=t.minutes?(0,em.Z)(t.minutes):0,c=t.seconds?(0,em.Z)(t.seconds):0;return new Date(eg(function(e,t){(0,ei.Z)(2,arguments);var n=(0,em.Z)(t);return(0,ev.Z)(e,-n)}(e,r+12*n),i+7*o).getTime()-1e3*(c+60*(l+60*a)))}function eb(e){(0,ei.Z)(1,arguments);var t=(0,eo.Z)(e),n=new Date(0);return n.setFullYear(t.getFullYear(),0,1),n.setHours(0,0,0,0),n}function ex(e){return(0,ei.Z)(1,arguments),e instanceof Date||"object"===(0,ed.Z)(e)&&"[object 
Date]"===Object.prototype.toString.call(e)}function ew(e){(0,ei.Z)(1,arguments);var t=(0,eo.Z)(e),n=t.getUTCDay();return t.setUTCDate(t.getUTCDate()-((n<1?7:0)+n-1)),t.setUTCHours(0,0,0,0),t}function eS(e){(0,ei.Z)(1,arguments);var t=(0,eo.Z)(e),n=t.getUTCFullYear(),r=new Date(0);r.setUTCFullYear(n+1,0,4),r.setUTCHours(0,0,0,0);var o=ew(r),i=new Date(0);i.setUTCFullYear(n,0,4),i.setUTCHours(0,0,0,0);var a=ew(i);return t.getTime()>=o.getTime()?n+1:t.getTime()>=a.getTime()?n:n-1}var ek={};function eE(e,t){(0,ei.Z)(1,arguments);var n,r,o,i,a,l,c,s,u=(0,em.Z)(null!==(n=null!==(r=null!==(o=null!==(i=null==t?void 0:t.weekStartsOn)&&void 0!==i?i:null==t?void 0:null===(a=t.locale)||void 0===a?void 0:null===(l=a.options)||void 0===l?void 0:l.weekStartsOn)&&void 0!==o?o:ek.weekStartsOn)&&void 0!==r?r:null===(c=ek.locale)||void 0===c?void 0:null===(s=c.options)||void 0===s?void 0:s.weekStartsOn)&&void 0!==n?n:0);if(!(u>=0&&u<=6))throw RangeError("weekStartsOn must be between 0 and 6 inclusively");var d=(0,eo.Z)(e),f=d.getUTCDay();return d.setUTCDate(d.getUTCDate()-((f=1&&f<=7))throw RangeError("firstWeekContainsDate must be between 1 and 7 inclusively");var p=new Date(0);p.setUTCFullYear(d+1,0,f),p.setUTCHours(0,0,0,0);var h=eE(p,t),m=new Date(0);m.setUTCFullYear(d,0,f),m.setUTCHours(0,0,0,0);var g=eE(m,t);return u.getTime()>=h.getTime()?d+1:u.getTime()>=g.getTime()?d:d-1}function eO(e,t){for(var n=Math.abs(e).toString();n.length0?n:1-n;return eO("yy"===t?r%100:r,t.length)},M:function(e,t){var n=e.getUTCMonth();return"M"===t?String(n+1):eO(n+1,2)},d:function(e,t){return eO(e.getUTCDate(),t.length)},h:function(e,t){return eO(e.getUTCHours()%12||12,t.length)},H:function(e,t){return eO(e.getUTCHours(),t.length)},m:function(e,t){return eO(e.getUTCMinutes(),t.length)},s:function(e,t){return eO(e.getUTCSeconds(),t.length)},S:function(e,t){var n=t.length;return 
eO(Math.floor(e.getUTCMilliseconds()*Math.pow(10,n-3)),t.length)}},eP={midnight:"midnight",noon:"noon",morning:"morning",afternoon:"afternoon",evening:"evening",night:"night"};function eM(e,t){var n=e>0?"-":"+",r=Math.abs(e),o=Math.floor(r/60),i=r%60;return 0===i?n+String(o):n+String(o)+(t||"")+eO(i,2)}function eN(e,t){return e%60==0?(e>0?"-":"+")+eO(Math.abs(e)/60,2):eI(e,t)}function eI(e,t){var n=Math.abs(e);return(e>0?"-":"+")+eO(Math.floor(n/60),2)+(t||"")+eO(n%60,2)}var eR={G:function(e,t,n){var r=e.getUTCFullYear()>0?1:0;switch(t){case"G":case"GG":case"GGG":return n.era(r,{width:"abbreviated"});case"GGGGG":return n.era(r,{width:"narrow"});default:return n.era(r,{width:"wide"})}},y:function(e,t,n){if("yo"===t){var r=e.getUTCFullYear();return n.ordinalNumber(r>0?r:1-r,{unit:"year"})}return ej.y(e,t)},Y:function(e,t,n,r){var o=eC(e,r),i=o>0?o:1-o;return"YY"===t?eO(i%100,2):"Yo"===t?n.ordinalNumber(i,{unit:"year"}):eO(i,t.length)},R:function(e,t){return eO(eS(e),t.length)},u:function(e,t){return eO(e.getUTCFullYear(),t.length)},Q:function(e,t,n){var r=Math.ceil((e.getUTCMonth()+1)/3);switch(t){case"Q":return String(r);case"QQ":return eO(r,2);case"Qo":return n.ordinalNumber(r,{unit:"quarter"});case"QQQ":return n.quarter(r,{width:"abbreviated",context:"formatting"});case"QQQQQ":return n.quarter(r,{width:"narrow",context:"formatting"});default:return n.quarter(r,{width:"wide",context:"formatting"})}},q:function(e,t,n){var r=Math.ceil((e.getUTCMonth()+1)/3);switch(t){case"q":return String(r);case"qq":return eO(r,2);case"qo":return n.ordinalNumber(r,{unit:"quarter"});case"qqq":return n.quarter(r,{width:"abbreviated",context:"standalone"});case"qqqqq":return n.quarter(r,{width:"narrow",context:"standalone"});default:return n.quarter(r,{width:"wide",context:"standalone"})}},M:function(e,t,n){var r=e.getUTCMonth();switch(t){case"M":case"MM":return ej.M(e,t);case"Mo":return n.ordinalNumber(r+1,{unit:"month"});case"MMM":return 
n.month(r,{width:"abbreviated",context:"formatting"});case"MMMMM":return n.month(r,{width:"narrow",context:"formatting"});default:return n.month(r,{width:"wide",context:"formatting"})}},L:function(e,t,n){var r=e.getUTCMonth();switch(t){case"L":return String(r+1);case"LL":return eO(r+1,2);case"Lo":return n.ordinalNumber(r+1,{unit:"month"});case"LLL":return n.month(r,{width:"abbreviated",context:"standalone"});case"LLLLL":return n.month(r,{width:"narrow",context:"standalone"});default:return n.month(r,{width:"wide",context:"standalone"})}},w:function(e,t,n,r){var o=function(e,t){(0,ei.Z)(1,arguments);var n=(0,eo.Z)(e);return Math.round((eE(n,t).getTime()-(function(e,t){(0,ei.Z)(1,arguments);var n,r,o,i,a,l,c,s,u=(0,em.Z)(null!==(n=null!==(r=null!==(o=null!==(i=null==t?void 0:t.firstWeekContainsDate)&&void 0!==i?i:null==t?void 0:null===(a=t.locale)||void 0===a?void 0:null===(l=a.options)||void 0===l?void 0:l.firstWeekContainsDate)&&void 0!==o?o:ek.firstWeekContainsDate)&&void 0!==r?r:null===(c=ek.locale)||void 0===c?void 0:null===(s=c.options)||void 0===s?void 0:s.firstWeekContainsDate)&&void 0!==n?n:1),d=eC(e,t),f=new Date(0);return f.setUTCFullYear(d,0,u),f.setUTCHours(0,0,0,0),eE(f,t)})(n,t).getTime())/6048e5)+1}(e,r);return"wo"===t?n.ordinalNumber(o,{unit:"week"}):eO(o,t.length)},I:function(e,t,n){var r=function(e){(0,ei.Z)(1,arguments);var t=(0,eo.Z)(e);return Math.round((ew(t).getTime()-(function(e){(0,ei.Z)(1,arguments);var t=eS(e),n=new Date(0);return n.setUTCFullYear(t,0,4),n.setUTCHours(0,0,0,0),ew(n)})(t).getTime())/6048e5)+1}(e);return"Io"===t?n.ordinalNumber(r,{unit:"week"}):eO(r,t.length)},d:function(e,t,n){return"do"===t?n.ordinalNumber(e.getUTCDate(),{unit:"date"}):ej.d(e,t)},D:function(e,t,n){var r=function(e){(0,ei.Z)(1,arguments);var t=(0,eo.Z)(e),n=t.getTime();return t.setUTCMonth(0,1),t.setUTCHours(0,0,0,0),Math.floor((n-t.getTime())/864e5)+1}(e);return"Do"===t?n.ordinalNumber(r,{unit:"dayOfYear"}):eO(r,t.length)},E:function(e,t,n){var 
r=e.getUTCDay();switch(t){case"E":case"EE":case"EEE":return n.day(r,{width:"abbreviated",context:"formatting"});case"EEEEE":return n.day(r,{width:"narrow",context:"formatting"});case"EEEEEE":return n.day(r,{width:"short",context:"formatting"});default:return n.day(r,{width:"wide",context:"formatting"})}},e:function(e,t,n,r){var o=e.getUTCDay(),i=(o-r.weekStartsOn+8)%7||7;switch(t){case"e":return String(i);case"ee":return eO(i,2);case"eo":return n.ordinalNumber(i,{unit:"day"});case"eee":return n.day(o,{width:"abbreviated",context:"formatting"});case"eeeee":return n.day(o,{width:"narrow",context:"formatting"});case"eeeeee":return n.day(o,{width:"short",context:"formatting"});default:return n.day(o,{width:"wide",context:"formatting"})}},c:function(e,t,n,r){var o=e.getUTCDay(),i=(o-r.weekStartsOn+8)%7||7;switch(t){case"c":return String(i);case"cc":return eO(i,t.length);case"co":return n.ordinalNumber(i,{unit:"day"});case"ccc":return n.day(o,{width:"abbreviated",context:"standalone"});case"ccccc":return n.day(o,{width:"narrow",context:"standalone"});case"cccccc":return n.day(o,{width:"short",context:"standalone"});default:return n.day(o,{width:"wide",context:"standalone"})}},i:function(e,t,n){var r=e.getUTCDay(),o=0===r?7:r;switch(t){case"i":return String(o);case"ii":return eO(o,t.length);case"io":return n.ordinalNumber(o,{unit:"day"});case"iii":return n.day(r,{width:"abbreviated",context:"formatting"});case"iiiii":return n.day(r,{width:"narrow",context:"formatting"});case"iiiiii":return n.day(r,{width:"short",context:"formatting"});default:return n.day(r,{width:"wide",context:"formatting"})}},a:function(e,t,n){var r=e.getUTCHours()/12>=1?"pm":"am";switch(t){case"a":case"aa":return n.dayPeriod(r,{width:"abbreviated",context:"formatting"});case"aaa":return n.dayPeriod(r,{width:"abbreviated",context:"formatting"}).toLowerCase();case"aaaaa":return n.dayPeriod(r,{width:"narrow",context:"formatting"});default:return 
n.dayPeriod(r,{width:"wide",context:"formatting"})}},b:function(e,t,n){var r,o=e.getUTCHours();switch(r=12===o?eP.noon:0===o?eP.midnight:o/12>=1?"pm":"am",t){case"b":case"bb":return n.dayPeriod(r,{width:"abbreviated",context:"formatting"});case"bbb":return n.dayPeriod(r,{width:"abbreviated",context:"formatting"}).toLowerCase();case"bbbbb":return n.dayPeriod(r,{width:"narrow",context:"formatting"});default:return n.dayPeriod(r,{width:"wide",context:"formatting"})}},B:function(e,t,n){var r,o=e.getUTCHours();switch(r=o>=17?eP.evening:o>=12?eP.afternoon:o>=4?eP.morning:eP.night,t){case"B":case"BB":case"BBB":return n.dayPeriod(r,{width:"abbreviated",context:"formatting"});case"BBBBB":return n.dayPeriod(r,{width:"narrow",context:"formatting"});default:return n.dayPeriod(r,{width:"wide",context:"formatting"})}},h:function(e,t,n){if("ho"===t){var r=e.getUTCHours()%12;return 0===r&&(r=12),n.ordinalNumber(r,{unit:"hour"})}return ej.h(e,t)},H:function(e,t,n){return"Ho"===t?n.ordinalNumber(e.getUTCHours(),{unit:"hour"}):ej.H(e,t)},K:function(e,t,n){var r=e.getUTCHours()%12;return"Ko"===t?n.ordinalNumber(r,{unit:"hour"}):eO(r,t.length)},k:function(e,t,n){var r=e.getUTCHours();return(0===r&&(r=24),"ko"===t)?n.ordinalNumber(r,{unit:"hour"}):eO(r,t.length)},m:function(e,t,n){return"mo"===t?n.ordinalNumber(e.getUTCMinutes(),{unit:"minute"}):ej.m(e,t)},s:function(e,t,n){return"so"===t?n.ordinalNumber(e.getUTCSeconds(),{unit:"second"}):ej.s(e,t)},S:function(e,t){return ej.S(e,t)},X:function(e,t,n,r){var o=(r._originalDate||e).getTimezoneOffset();if(0===o)return"Z";switch(t){case"X":return eN(o);case"XXXX":case"XX":return eI(o);default:return eI(o,":")}},x:function(e,t,n,r){var o=(r._originalDate||e).getTimezoneOffset();switch(t){case"x":return eN(o);case"xxxx":case"xx":return eI(o);default:return eI(o,":")}},O:function(e,t,n,r){var 
o=(r._originalDate||e).getTimezoneOffset();switch(t){case"O":case"OO":case"OOO":return"GMT"+eM(o,":");default:return"GMT"+eI(o,":")}},z:function(e,t,n,r){var o=(r._originalDate||e).getTimezoneOffset();switch(t){case"z":case"zz":case"zzz":return"GMT"+eM(o,":");default:return"GMT"+eI(o,":")}},t:function(e,t,n,r){return eO(Math.floor((r._originalDate||e).getTime()/1e3),t.length)},T:function(e,t,n,r){return eO((r._originalDate||e).getTime(),t.length)}},eT=function(e,t){switch(e){case"P":return t.date({width:"short"});case"PP":return t.date({width:"medium"});case"PPP":return t.date({width:"long"});default:return t.date({width:"full"})}},eA=function(e,t){switch(e){case"p":return t.time({width:"short"});case"pp":return t.time({width:"medium"});case"ppp":return t.time({width:"long"});default:return t.time({width:"full"})}},e_={p:eA,P:function(e,t){var n,r=e.match(/(P+)(p+)?/)||[],o=r[1],i=r[2];if(!i)return eT(e,t);switch(o){case"P":n=t.dateTime({width:"short"});break;case"PP":n=t.dateTime({width:"medium"});break;case"PPP":n=t.dateTime({width:"long"});break;default:n=t.dateTime({width:"full"})}return n.replace("{{date}}",eT(o,t)).replace("{{time}}",eA(i,t))}};function eD(e){var t=new Date(Date.UTC(e.getFullYear(),e.getMonth(),e.getDate(),e.getHours(),e.getMinutes(),e.getSeconds(),e.getMilliseconds()));return t.setUTCFullYear(e.getFullYear()),e.getTime()-t.getTime()}var eZ=["D","DD"],eL=["YY","YYYY"];function ez(e,t,n){if("YYYY"===e)throw RangeError("Use `yyyy` instead of `YYYY` (in `".concat(t,"`) for formatting years to the input `").concat(n,"`; see: https://github.com/date-fns/date-fns/blob/master/docs/unicodeTokens.md"));if("YY"===e)throw RangeError("Use `yy` instead of `YY` (in `".concat(t,"`) for formatting years to the input `").concat(n,"`; see: https://github.com/date-fns/date-fns/blob/master/docs/unicodeTokens.md"));if("D"===e)throw RangeError("Use `d` instead of `D` (in `".concat(t,"`) for formatting days of the month to the input `").concat(n,"`; see: 
https://github.com/date-fns/date-fns/blob/master/docs/unicodeTokens.md"));if("DD"===e)throw RangeError("Use `dd` instead of `DD` (in `".concat(t,"`) for formatting days of the month to the input `").concat(n,"`; see: https://github.com/date-fns/date-fns/blob/master/docs/unicodeTokens.md"))}var eB={lessThanXSeconds:{one:"less than a second",other:"less than {{count}} seconds"},xSeconds:{one:"1 second",other:"{{count}} seconds"},halfAMinute:"half a minute",lessThanXMinutes:{one:"less than a minute",other:"less than {{count}} minutes"},xMinutes:{one:"1 minute",other:"{{count}} minutes"},aboutXHours:{one:"about 1 hour",other:"about {{count}} hours"},xHours:{one:"1 hour",other:"{{count}} hours"},xDays:{one:"1 day",other:"{{count}} days"},aboutXWeeks:{one:"about 1 week",other:"about {{count}} weeks"},xWeeks:{one:"1 week",other:"{{count}} weeks"},aboutXMonths:{one:"about 1 month",other:"about {{count}} months"},xMonths:{one:"1 month",other:"{{count}} months"},aboutXYears:{one:"about 1 year",other:"about {{count}} years"},xYears:{one:"1 year",other:"{{count}} years"},overXYears:{one:"over 1 year",other:"over {{count}} years"},almostXYears:{one:"almost 1 year",other:"almost {{count}} years"}};function eF(e){return function(){var t=arguments.length>0&&void 0!==arguments[0]?arguments[0]:{},n=t.width?String(t.width):e.defaultWidth;return e.formats[n]||e.formats[e.defaultWidth]}}var eH={date:eF({formats:{full:"EEEE, MMMM do, y",long:"MMMM do, y",medium:"MMM d, y",short:"MM/dd/yyyy"},defaultWidth:"full"}),time:eF({formats:{full:"h:mm:ss a zzzz",long:"h:mm:ss a z",medium:"h:mm:ss a",short:"h:mm a"},defaultWidth:"full"}),dateTime:eF({formats:{full:"{{date}} 'at' {{time}}",long:"{{date}} 'at' {{time}}",medium:"{{date}}, {{time}}",short:"{{date}}, {{time}}"},defaultWidth:"full"})},eq={lastWeek:"'last' eeee 'at' p",yesterday:"'yesterday at' p",today:"'today at' p",tomorrow:"'tomorrow at' p",nextWeek:"eeee 'at' p",other:"P"};function eW(e){return function(t,n){var 
r;if("formatting"===(null!=n&&n.context?String(n.context):"standalone")&&e.formattingValues){var o=e.defaultFormattingWidth||e.defaultWidth,i=null!=n&&n.width?String(n.width):o;r=e.formattingValues[i]||e.formattingValues[o]}else{var a=e.defaultWidth,l=null!=n&&n.width?String(n.width):e.defaultWidth;r=e.values[l]||e.values[a]}return r[e.argumentCallback?e.argumentCallback(t):t]}}function eK(e){return function(t){var n,r=arguments.length>1&&void 0!==arguments[1]?arguments[1]:{},o=r.width,i=o&&e.matchPatterns[o]||e.matchPatterns[e.defaultMatchWidth],a=t.match(i);if(!a)return null;var l=a[0],c=o&&e.parsePatterns[o]||e.parsePatterns[e.defaultParseWidth],s=Array.isArray(c)?function(e,t){for(var n=0;n0?"in "+r:r+" ago":r},formatLong:eH,formatRelative:function(e,t,n,r){return eq[e]},localize:{ordinalNumber:function(e,t){var n=Number(e),r=n%100;if(r>20||r<10)switch(r%10){case 1:return n+"st";case 2:return n+"nd";case 3:return n+"rd"}return n+"th"},era:eW({values:{narrow:["B","A"],abbreviated:["BC","AD"],wide:["Before Christ","Anno Domini"]},defaultWidth:"wide"}),quarter:eW({values:{narrow:["1","2","3","4"],abbreviated:["Q1","Q2","Q3","Q4"],wide:["1st quarter","2nd quarter","3rd quarter","4th quarter"]},defaultWidth:"wide",argumentCallback:function(e){return 
e-1}}),month:eW({values:{narrow:["J","F","M","A","M","J","J","A","S","O","N","D"],abbreviated:["Jan","Feb","Mar","Apr","May","Jun","Jul","Aug","Sep","Oct","Nov","Dec"],wide:["January","February","March","April","May","June","July","August","September","October","November","December"]},defaultWidth:"wide"}),day:eW({values:{narrow:["S","M","T","W","T","F","S"],short:["Su","Mo","Tu","We","Th","Fr","Sa"],abbreviated:["Sun","Mon","Tue","Wed","Thu","Fri","Sat"],wide:["Sunday","Monday","Tuesday","Wednesday","Thursday","Friday","Saturday"]},defaultWidth:"wide"}),dayPeriod:eW({values:{narrow:{am:"a",pm:"p",midnight:"mi",noon:"n",morning:"morning",afternoon:"afternoon",evening:"evening",night:"night"},abbreviated:{am:"AM",pm:"PM",midnight:"midnight",noon:"noon",morning:"morning",afternoon:"afternoon",evening:"evening",night:"night"},wide:{am:"a.m.",pm:"p.m.",midnight:"midnight",noon:"noon",morning:"morning",afternoon:"afternoon",evening:"evening",night:"night"}},defaultWidth:"wide",formattingValues:{narrow:{am:"a",pm:"p",midnight:"mi",noon:"n",morning:"in the morning",afternoon:"in the afternoon",evening:"in the evening",night:"at night"},abbreviated:{am:"AM",pm:"PM",midnight:"midnight",noon:"noon",morning:"in the morning",afternoon:"in the afternoon",evening:"in the evening",night:"at night"},wide:{am:"a.m.",pm:"p.m.",midnight:"midnight",noon:"noon",morning:"in the morning",afternoon:"in the afternoon",evening:"in the evening",night:"at night"}},defaultFormattingWidth:"wide"})},match:{ordinalNumber:(a={matchPattern:/^(\d+)(th|st|nd|rd)?/i,parsePattern:/\d+/i,valueCallback:function(e){return parseInt(e,10)}},function(e){var t=arguments.length>1&&void 0!==arguments[1]?arguments[1]:{},n=e.match(a.matchPattern);if(!n)return null;var r=n[0],o=e.match(a.parsePattern);if(!o)return null;var 
i=a.valueCallback?a.valueCallback(o[0]):o[0];return{value:i=t.valueCallback?t.valueCallback(i):i,rest:e.slice(r.length)}}),era:eK({matchPatterns:{narrow:/^(b|a)/i,abbreviated:/^(b\.?\s?c\.?|b\.?\s?c\.?\s?e\.?|a\.?\s?d\.?|c\.?\s?e\.?)/i,wide:/^(before christ|before common era|anno domini|common era)/i},defaultMatchWidth:"wide",parsePatterns:{any:[/^b/i,/^(a|c)/i]},defaultParseWidth:"any"}),quarter:eK({matchPatterns:{narrow:/^[1234]/i,abbreviated:/^q[1234]/i,wide:/^[1234](th|st|nd|rd)? quarter/i},defaultMatchWidth:"wide",parsePatterns:{any:[/1/i,/2/i,/3/i,/4/i]},defaultParseWidth:"any",valueCallback:function(e){return e+1}}),month:eK({matchPatterns:{narrow:/^[jfmasond]/i,abbreviated:/^(jan|feb|mar|apr|may|jun|jul|aug|sep|oct|nov|dec)/i,wide:/^(january|february|march|april|may|june|july|august|september|october|november|december)/i},defaultMatchWidth:"wide",parsePatterns:{narrow:[/^j/i,/^f/i,/^m/i,/^a/i,/^m/i,/^j/i,/^j/i,/^a/i,/^s/i,/^o/i,/^n/i,/^d/i],any:[/^ja/i,/^f/i,/^mar/i,/^ap/i,/^may/i,/^jun/i,/^jul/i,/^au/i,/^s/i,/^o/i,/^n/i,/^d/i]},defaultParseWidth:"any"}),day:eK({matchPatterns:{narrow:/^[smtwf]/i,short:/^(su|mo|tu|we|th|fr|sa)/i,abbreviated:/^(sun|mon|tue|wed|thu|fri|sat)/i,wide:/^(sunday|monday|tuesday|wednesday|thursday|friday|saturday)/i},defaultMatchWidth:"wide",parsePatterns:{narrow:[/^s/i,/^m/i,/^t/i,/^w/i,/^t/i,/^f/i,/^s/i],any:[/^su/i,/^m/i,/^tu/i,/^w/i,/^th/i,/^f/i,/^sa/i]},defaultParseWidth:"any"}),dayPeriod:eK({matchPatterns:{narrow:/^(a|p|mi|n|(in the|at) (morning|afternoon|evening|night))/i,any:/^([ap]\.?\s?m\.?|midnight|noon|(in the|at) 
(morning|afternoon|evening|night))/i},defaultMatchWidth:"any",parsePatterns:{any:{am:/^a/i,pm:/^p/i,midnight:/^mi/i,noon:/^no/i,morning:/morning/i,afternoon:/afternoon/i,evening:/evening/i,night:/night/i}},defaultParseWidth:"any"})},options:{weekStartsOn:0,firstWeekContainsDate:1}},eV=/[yYQqMLwIdDecihHKkms]o|(\w)\1*|''|'(''|[^'])+('|$)|./g,eG=/P+p+|P+|p+|''|'(''|[^'])+('|$)|./g,eX=/^'([^]*?)'?$/,e$=/''/g,eY=/[a-zA-Z]/;function eQ(e,t,n){(0,ei.Z)(2,arguments);var r,o,i,a,l,c,s,u,d,f,p,h,m,g,v,y,b,x,w=String(t),S=null!==(r=null!==(o=null==n?void 0:n.locale)&&void 0!==o?o:ek.locale)&&void 0!==r?r:eU,k=(0,em.Z)(null!==(i=null!==(a=null!==(l=null!==(c=null==n?void 0:n.firstWeekContainsDate)&&void 0!==c?c:null==n?void 0:null===(s=n.locale)||void 0===s?void 0:null===(u=s.options)||void 0===u?void 0:u.firstWeekContainsDate)&&void 0!==l?l:ek.firstWeekContainsDate)&&void 0!==a?a:null===(d=ek.locale)||void 0===d?void 0:null===(f=d.options)||void 0===f?void 0:f.firstWeekContainsDate)&&void 0!==i?i:1);if(!(k>=1&&k<=7))throw RangeError("firstWeekContainsDate must be between 1 and 7 inclusively");var E=(0,em.Z)(null!==(p=null!==(h=null!==(m=null!==(g=null==n?void 0:n.weekStartsOn)&&void 0!==g?g:null==n?void 0:null===(v=n.locale)||void 0===v?void 0:null===(y=v.options)||void 0===y?void 0:y.weekStartsOn)&&void 0!==m?m:ek.weekStartsOn)&&void 0!==h?h:null===(b=ek.locale)||void 0===b?void 0:null===(x=b.options)||void 0===x?void 0:x.weekStartsOn)&&void 0!==p?p:0);if(!(E>=0&&E<=6))throw RangeError("weekStartsOn must be between 0 and 6 inclusively");if(!S.localize)throw RangeError("locale must contain localize property");if(!S.formatLong)throw RangeError("locale must contain formatLong property");var C=(0,eo.Z)(e);if(!function(e){return(0,ei.Z)(1,arguments),(!!ex(e)||"number"==typeof e)&&!isNaN(Number((0,eo.Z)(e)))}(C))throw RangeError("Invalid time value");var O=eD(C),j=function(e,t){return(0,ei.Z)(2,arguments),function(e,t){return(0,ei.Z)(2,arguments),new 
Date((0,eo.Z)(e).getTime()+(0,em.Z)(t))}(e,-(0,em.Z)(t))}(C,O),P={firstWeekContainsDate:k,weekStartsOn:E,locale:S,_originalDate:C};return w.match(eG).map(function(e){var t=e[0];return"p"===t||"P"===t?(0,e_[t])(e,S.formatLong):e}).join("").match(eV).map(function(r){if("''"===r)return"'";var o,i=r[0];if("'"===i)return(o=r.match(eX))?o[1].replace(e$,"'"):r;var a=eR[i];if(a)return null!=n&&n.useAdditionalWeekYearTokens||-1===eL.indexOf(r)||ez(r,t,String(e)),null!=n&&n.useAdditionalDayOfYearTokens||-1===eZ.indexOf(r)||ez(r,t,String(e)),a(j,r,S.localize,P);if(i.match(eY))throw RangeError("Format string contains an unescaped latin alphabet character `"+i+"`");return r}).join("")}var eJ=n(1153);let e0=(0,eJ.fn)("DateRangePicker"),e1=(e,t,n,r)=>{var o;if(n&&(e=null===(o=r.get(n))||void 0===o?void 0:o.from),e)return ea(e&&!t?e:ef([e,t]))},e2=(e,t,n,r)=>{var o,i;if(n&&(e=ea(null!==(i=null===(o=r.get(n))||void 0===o?void 0:o.to)&&void 0!==i?i:el())),e)return ea(e&&!t?e:ep([e,t]))},e6=[{value:"tdy",text:"Today",from:el()},{value:"w",text:"Last 7 days",from:ey(el(),{days:7})},{value:"t",text:"Last 30 days",from:ey(el(),{days:30})},{value:"m",text:"Month to Date",from:ec(el())},{value:"y",text:"Year to Date",from:eb(el())}],e3=(e,t,n,r)=>{let o=(null==n?void 0:n.code)||"en-US";if(!e&&!t)return"";if(e&&!t)return r?eQ(e,r):e.toLocaleDateString(o,{year:"numeric",month:"short",day:"numeric"});if(e&&t){if(function(e,t){(0,ei.Z)(2,arguments);var n=(0,eo.Z)(e),r=(0,eo.Z)(t);return n.getTime()===r.getTime()}(e,t))return r?eQ(e,r):e.toLocaleDateString(o,{year:"numeric",month:"short",day:"numeric"});if(e.getMonth()===t.getMonth()&&e.getFullYear()===t.getFullYear())return r?"".concat(eQ(e,r)," - ").concat(eQ(t,r)):"".concat(e.toLocaleDateString(o,{month:"short",day:"numeric"})," - \n ").concat(t.getDate(),", ").concat(t.getFullYear());{if(r)return"".concat(eQ(e,r)," - ").concat(eQ(t,r));let n={year:"numeric",month:"short",day:"numeric"};return"".concat(e.toLocaleDateString(o,n)," - \n 
").concat(t.toLocaleDateString(o,n))}}return""};function e4(e){(0,ei.Z)(1,arguments);var t=(0,eo.Z)(e),n=t.getMonth();return t.setFullYear(t.getFullYear(),n+1,0),t.setHours(23,59,59,999),t}function e5(e,t){(0,ei.Z)(2,arguments);var n=(0,eo.Z)(e),r=(0,em.Z)(t),o=n.getFullYear(),i=n.getDate(),a=new Date(0);a.setFullYear(o,r,15),a.setHours(0,0,0,0);var l=function(e){(0,ei.Z)(1,arguments);var t=(0,eo.Z)(e),n=t.getFullYear(),r=t.getMonth(),o=new Date(0);return o.setFullYear(n,r+1,0),o.setHours(0,0,0,0),o.getDate()}(a);return n.setMonth(r,Math.min(i,l)),n}function e8(e,t){(0,ei.Z)(2,arguments);var n=(0,eo.Z)(e),r=(0,em.Z)(t);return isNaN(n.getTime())?new Date(NaN):(n.setFullYear(r),n)}function e7(e,t){(0,ei.Z)(2,arguments);var n=(0,eo.Z)(e),r=(0,eo.Z)(t);return 12*(n.getFullYear()-r.getFullYear())+(n.getMonth()-r.getMonth())}function e9(e,t){(0,ei.Z)(2,arguments);var n=(0,eo.Z)(e),r=(0,eo.Z)(t);return n.getFullYear()===r.getFullYear()&&n.getMonth()===r.getMonth()}function te(e,t){(0,ei.Z)(2,arguments);var n=(0,eo.Z)(e),r=(0,eo.Z)(t);return n.getTime()=0&&u<=6))throw RangeError("weekStartsOn must be between 0 and 6 inclusively");var d=(0,eo.Z)(e),f=d.getDay();return d.setDate(d.getDate()-((fr.getTime()}function ti(e,t){(0,ei.Z)(2,arguments);var n=ea(e),r=ea(t);return Math.round((n.getTime()-eD(n)-(r.getTime()-eD(r)))/864e5)}function ta(e,t){(0,ei.Z)(2,arguments);var n=(0,em.Z)(t);return(0,eh.Z)(e,7*n)}function tl(e,t){(0,ei.Z)(2,arguments);var n=(0,em.Z)(t);return(0,ev.Z)(e,12*n)}function tc(e,t){(0,ei.Z)(1,arguments);var n,r,o,i,a,l,c,s,u=(0,em.Z)(null!==(n=null!==(r=null!==(o=null!==(i=null==t?void 0:t.weekStartsOn)&&void 0!==i?i:null==t?void 0:null===(a=t.locale)||void 0===a?void 0:null===(l=a.options)||void 0===l?void 0:l.weekStartsOn)&&void 0!==o?o:ek.weekStartsOn)&&void 0!==r?r:null===(c=ek.locale)||void 0===c?void 0:null===(s=c.options)||void 0===s?void 0:s.weekStartsOn)&&void 0!==n?n:0);if(!(u>=0&&u<=6))throw RangeError("weekStartsOn must be between 0 and 6 
inclusively");var d=(0,eo.Z)(e),f=d.getDay();return d.setDate(d.getDate()+((fe7(l,a)&&(a=(0,ev.Z)(l,-1*((void 0===s?1:s)-1))),c&&0>e7(a,c)&&(a=c),u=ec(a),f=t.month,h=(p=(0,d.useState)(u))[0],m=[void 0===f?h:f,p[1]])[0],v=m[1],[g,function(e){if(!t.disableNavigation){var n,r=ec(e);v(r),null===(n=t.onMonthChange)||void 0===n||n.call(t,r)}}]),x=b[0],w=b[1],S=function(e,t){for(var n=t.reverseMonths,r=t.numberOfMonths,o=ec(e),i=e7(ec((0,ev.Z)(o,r)),o),a=[],l=0;l=e7(i,n)))return(0,ev.Z)(i,-(r?void 0===o?1:o:1))}}(x,y),C=function(e){return S.some(function(t){return e9(e,t)})};return th.jsx(tM.Provider,{value:{currentMonth:x,displayMonths:S,goToMonth:w,goToDate:function(e,t){C(e)||(t&&te(e,t)?w((0,ev.Z)(e,1+-1*y.numberOfMonths)):w(e))},previousMonth:E,nextMonth:k,isDateDisplayed:C},children:e.children})}function tI(){var e=(0,d.useContext)(tM);if(!e)throw Error("useNavigation must be used within a NavigationProvider");return e}function tR(e){var t,n=tk(),r=n.classNames,o=n.styles,i=n.components,a=tI().goToMonth,l=function(t){a((0,ev.Z)(t,e.displayIndex?-e.displayIndex:0))},c=null!==(t=null==i?void 0:i.CaptionLabel)&&void 0!==t?t:tE,s=th.jsx(c,{id:e.id,displayMonth:e.displayMonth});return th.jsxs("div",{className:r.caption_dropdowns,style:o.caption_dropdowns,children:[th.jsx("div",{className:r.vhidden,children:s}),th.jsx(tj,{onChange:l,displayMonth:e.displayMonth}),th.jsx(tP,{onChange:l,displayMonth:e.displayMonth})]})}function tT(e){return th.jsx("svg",tu({width:"16px",height:"16px",viewBox:"0 0 120 120"},e,{children:th.jsx("path",{d:"M69.490332,3.34314575 C72.6145263,0.218951416 77.6798462,0.218951416 80.8040405,3.34314575 C83.8617626,6.40086786 83.9268205,11.3179931 80.9992143,14.4548388 L80.8040405,14.6568542 L35.461,60 L80.8040405,105.343146 C83.8617626,108.400868 83.9268205,113.317993 80.9992143,116.454839 L80.8040405,116.656854 C77.7463184,119.714576 72.8291931,119.779634 69.6923475,116.852028 L69.490332,116.656854 L18.490332,65.6568542 C15.4326099,62.5991321 
15.367552,57.6820069 18.2951583,54.5451612 L18.490332,54.3431458 L69.490332,3.34314575 Z",fill:"currentColor",fillRule:"nonzero"})}))}function tA(e){return th.jsx("svg",tu({width:"16px",height:"16px",viewBox:"0 0 120 120"},e,{children:th.jsx("path",{d:"M49.8040405,3.34314575 C46.6798462,0.218951416 41.6145263,0.218951416 38.490332,3.34314575 C35.4326099,6.40086786 35.367552,11.3179931 38.2951583,14.4548388 L38.490332,14.6568542 L83.8333725,60 L38.490332,105.343146 C35.4326099,108.400868 35.367552,113.317993 38.2951583,116.454839 L38.490332,116.656854 C41.5480541,119.714576 46.4651794,119.779634 49.602025,116.852028 L49.8040405,116.656854 L100.804041,65.6568542 C103.861763,62.5991321 103.926821,57.6820069 100.999214,54.5451612 L100.804041,54.3431458 L49.8040405,3.34314575 Z",fill:"currentColor"})}))}var t_=(0,d.forwardRef)(function(e,t){var n=tk(),r=n.classNames,o=n.styles,i=[r.button_reset,r.button];e.className&&i.push(e.className);var a=i.join(" "),l=tu(tu({},o.button_reset),o.button);return e.style&&Object.assign(l,e.style),th.jsx("button",tu({},e,{ref:t,type:"button",className:a,style:l}))});function tD(e){var t,n,r=tk(),o=r.dir,i=r.locale,a=r.classNames,l=r.styles,c=r.labels,s=c.labelPrevious,u=c.labelNext,d=r.components;if(!e.nextMonth&&!e.previousMonth)return th.jsx(th.Fragment,{});var f=s(e.previousMonth,{locale:i}),p=[a.nav_button,a.nav_button_previous].join(" "),h=u(e.nextMonth,{locale:i}),m=[a.nav_button,a.nav_button_next].join(" "),g=null!==(t=null==d?void 0:d.IconRight)&&void 0!==t?t:tA,v=null!==(n=null==d?void 0:d.IconLeft)&&void 0!==n?n:tT;return 
th.jsxs("div",{className:a.nav,style:l.nav,children:[!e.hidePrevious&&th.jsx(t_,{name:"previous-month","aria-label":f,className:p,style:l.nav_button_previous,disabled:!e.previousMonth,onClick:e.onPreviousClick,children:"rtl"===o?th.jsx(g,{className:a.nav_icon,style:l.nav_icon}):th.jsx(v,{className:a.nav_icon,style:l.nav_icon})}),!e.hideNext&&th.jsx(t_,{name:"next-month","aria-label":h,className:m,style:l.nav_button_next,disabled:!e.nextMonth,onClick:e.onNextClick,children:"rtl"===o?th.jsx(v,{className:a.nav_icon,style:l.nav_icon}):th.jsx(g,{className:a.nav_icon,style:l.nav_icon})})]})}function tZ(e){var t=tk().numberOfMonths,n=tI(),r=n.previousMonth,o=n.nextMonth,i=n.goToMonth,a=n.displayMonths,l=a.findIndex(function(t){return e9(e.displayMonth,t)}),c=0===l,s=l===a.length-1;return th.jsx(tD,{displayMonth:e.displayMonth,hideNext:t>1&&(c||!s),hidePrevious:t>1&&(s||!c),nextMonth:o,previousMonth:r,onPreviousClick:function(){r&&i(r)},onNextClick:function(){o&&i(o)}})}function tL(e){var t,n,r=tk(),o=r.classNames,i=r.disableNavigation,a=r.styles,l=r.captionLayout,c=r.components,s=null!==(t=null==c?void 0:c.CaptionLabel)&&void 0!==t?t:tE;return n=i?th.jsx(s,{id:e.id,displayMonth:e.displayMonth}):"dropdown"===l?th.jsx(tR,{displayMonth:e.displayMonth,id:e.id}):"dropdown-buttons"===l?th.jsxs(th.Fragment,{children:[th.jsx(tR,{displayMonth:e.displayMonth,displayIndex:e.displayIndex,id:e.id}),th.jsx(tZ,{displayMonth:e.displayMonth,displayIndex:e.displayIndex,id:e.id})]}):th.jsxs(th.Fragment,{children:[th.jsx(s,{id:e.id,displayMonth:e.displayMonth,displayIndex:e.displayIndex}),th.jsx(tZ,{displayMonth:e.displayMonth,id:e.id})]}),th.jsx("div",{className:o.caption,style:a.caption,children:n})}function tz(e){var t=tk(),n=t.footer,r=t.styles,o=t.classNames.tfoot;return n?th.jsx("tfoot",{className:o,style:r.tfoot,children:th.jsx("tr",{children:th.jsx("td",{colSpan:8,children:n})})}):th.jsx(th.Fragment,{})}function tB(){var 
e=tk(),t=e.classNames,n=e.styles,r=e.showWeekNumber,o=e.locale,i=e.weekStartsOn,a=e.ISOWeek,l=e.formatters.formatWeekdayName,c=e.labels.labelWeekday,s=function(e,t,n){for(var r=n?tn(new Date):tt(new Date,{locale:e,weekStartsOn:t}),o=[],i=0;i<7;i++){var a=(0,eh.Z)(r,i);o.push(a)}return o}(o,i,a);return th.jsxs("tr",{style:n.head_row,className:t.head_row,children:[r&&th.jsx("td",{style:n.head_cell,className:t.head_cell}),s.map(function(e,r){return th.jsx("th",{scope:"col",className:t.head_cell,style:n.head_cell,"aria-label":c(e,{locale:o}),children:l(e,{locale:o})},r)})]})}function tF(){var e,t=tk(),n=t.classNames,r=t.styles,o=t.components,i=null!==(e=null==o?void 0:o.HeadRow)&&void 0!==e?e:tB;return th.jsx("thead",{style:r.head,className:n.head,children:th.jsx(i,{})})}function tH(e){var t=tk(),n=t.locale,r=t.formatters.formatDay;return th.jsx(th.Fragment,{children:r(e.date,{locale:n})})}var tq=(0,d.createContext)(void 0);function tW(e){return tm(e.initialProps)?th.jsx(tK,{initialProps:e.initialProps,children:e.children}):th.jsx(tq.Provider,{value:{selected:void 0,modifiers:{disabled:[]}},children:e.children})}function tK(e){var t=e.initialProps,n=e.children,r=t.selected,o=t.min,i=t.max,a={disabled:[]};return r&&a.disabled.push(function(e){var t=i&&r.length>i-1,n=r.some(function(t){return tr(t,e)});return!!(t&&!n)}),th.jsx(tq.Provider,{value:{selected:r,onDayClick:function(e,n,a){if(null===(l=t.onDayClick)||void 0===l||l.call(t,e,n,a),(!n.selected||!o||(null==r?void 0:r.length)!==o)&&(n.selected||!i||(null==r?void 0:r.length)!==i)){var l,c,s=r?td([],r,!0):[];if(n.selected){var u=s.findIndex(function(t){return tr(e,t)});s.splice(u,1)}else s.push(e);null===(c=t.onSelect)||void 0===c||c.call(t,s,e,n,a)}},modifiers:a},children:n})}function tU(){var e=(0,d.useContext)(tq);if(!e)throw Error("useSelectMultiple must be used within a SelectMultipleProvider");return e}var tV=(0,d.createContext)(void 0);function tG(e){return 
tg(e.initialProps)?th.jsx(tX,{initialProps:e.initialProps,children:e.children}):th.jsx(tV.Provider,{value:{selected:void 0,modifiers:{range_start:[],range_end:[],range_middle:[],disabled:[]}},children:e.children})}function tX(e){var t=e.initialProps,n=e.children,r=t.selected,o=r||{},i=o.from,a=o.to,l=t.min,c=t.max,s={range_start:[],range_end:[],range_middle:[],disabled:[]};if(i?(s.range_start=[i],a?(s.range_end=[a],tr(i,a)||(s.range_middle=[{after:i,before:a}])):s.range_end=[i]):a&&(s.range_start=[a],s.range_end=[a]),l&&(i&&!a&&s.disabled.push({after:eg(i,l-1),before:(0,eh.Z)(i,l-1)}),i&&a&&s.disabled.push({after:i,before:(0,eh.Z)(i,l-1)}),!i&&a&&s.disabled.push({after:eg(a,l-1),before:(0,eh.Z)(a,l-1)})),c){if(i&&!a&&(s.disabled.push({before:(0,eh.Z)(i,-c+1)}),s.disabled.push({after:(0,eh.Z)(i,c-1)})),i&&a){var u=c-(ti(a,i)+1);s.disabled.push({before:eg(i,u)}),s.disabled.push({after:(0,eh.Z)(a,u)})}!i&&a&&(s.disabled.push({before:(0,eh.Z)(a,-c+1)}),s.disabled.push({after:(0,eh.Z)(a,c-1)}))}return th.jsx(tV.Provider,{value:{selected:r,onDayClick:function(e,n,o){null===(c=t.onDayClick)||void 0===c||c.call(t,e,n,o);var i,a,l,c,s,u=(a=(i=r||{}).from,l=i.to,a&&l?tr(l,e)&&tr(a,e)?void 0:tr(l,e)?{from:l,to:void 0}:tr(a,e)?void 0:to(a,e)?{from:e,to:l}:{from:a,to:e}:l?to(e,l)?{from:l,to:e}:{from:e,to:l}:a?te(e,a)?{from:e,to:a}:{from:a,to:e}:{from:e,to:void 0});null===(s=t.onSelect)||void 0===s||s.call(t,u,e,n,o)},modifiers:s},children:n})}function t$(){var e=(0,d.useContext)(tV);if(!e)throw Error("useSelectRange must be used within a SelectRangeProvider");return e}function tY(e){return Array.isArray(e)?td([],e,!0):void 0!==e?[e]:[]}(l=s||(s={})).Outside="outside",l.Disabled="disabled",l.Selected="selected",l.Hidden="hidden",l.Today="today",l.RangeStart="range_start",l.RangeEnd="range_end",l.RangeMiddle="range_middle";var tQ=s.Selected,tJ=s.Disabled,t0=s.Hidden,t1=s.Today,t2=s.RangeEnd,t6=s.RangeMiddle,t3=s.RangeStart,t4=s.Outside,t5=(0,d.createContext)(void 0);function 
t8(e){var t,n,r,o=tk(),i=tU(),a=t$(),l=((t={})[tQ]=tY(o.selected),t[tJ]=tY(o.disabled),t[t0]=tY(o.hidden),t[t1]=[o.today],t[t2]=[],t[t6]=[],t[t3]=[],t[t4]=[],o.fromDate&&t[tJ].push({before:o.fromDate}),o.toDate&&t[tJ].push({after:o.toDate}),tm(o)?t[tJ]=t[tJ].concat(i.modifiers[tJ]):tg(o)&&(t[tJ]=t[tJ].concat(a.modifiers[tJ]),t[t3]=a.modifiers[t3],t[t6]=a.modifiers[t6],t[t2]=a.modifiers[t2]),t),c=(n=o.modifiers,r={},Object.entries(n).forEach(function(e){var t=e[0],n=e[1];r[t]=tY(n)}),r),s=tu(tu({},l),c);return th.jsx(t5.Provider,{value:s,children:e.children})}function t7(){var e=(0,d.useContext)(t5);if(!e)throw Error("useModifiers must be used within a ModifiersProvider");return e}function t9(e,t,n){var r=Object.keys(t).reduce(function(n,r){return t[r].some(function(t){if("boolean"==typeof t)return t;if(ex(t))return tr(e,t);if(Array.isArray(t)&&t.every(ex))return t.includes(e);if(t&&"object"==typeof t&&"from"in t)return r=t.from,o=t.to,r&&o?(0>ti(o,r)&&(r=(n=[o,r])[0],o=n[1]),ti(e,r)>=0&&ti(o,e)>=0):o?tr(o,e):!!r&&tr(r,e);if(t&&"object"==typeof t&&"dayOfWeek"in t)return t.dayOfWeek.includes(e.getDay());if(t&&"object"==typeof t&&"before"in t&&"after"in t){var n,r,o,i=ti(t.before,e),a=ti(t.after,e),l=i>0,c=a<0;return to(t.before,t.after)?c&&l:l||c}return t&&"object"==typeof t&&"after"in t?ti(e,t.after)>0:t&&"object"==typeof t&&"before"in t?ti(t.before,e)>0:"function"==typeof t&&t(e)})&&n.push(r),n},[]),o={};return r.forEach(function(e){return o[e]=!0}),n&&!e9(e,n)&&(o.outside=!0),o}var ne=(0,d.createContext)(void 0);function nt(e){var t=tI(),n=t7(),r=(0,d.useState)(),o=r[0],i=r[1],a=(0,d.useState)(),l=a[0],c=a[1],s=function(e,t){for(var n,r,o=ec(e[0]),i=e4(e[e.length-1]),a=o;a<=i;){var l=t9(a,t);if(!(!l.disabled&&!l.hidden)){a=(0,eh.Z)(a,1);continue}if(l.selected)return a;l.today&&!r&&(r=a),n||(n=a),a=(0,eh.Z)(a,1)}return r||n}(t.displayMonths,n),u=(null!=o?o:l&&t.isDateDisplayed(l))?l:s,f=function(e){i(e)},p=tk(),h=function(e,r){if(o){var i=function e(t,n){var 
r=n.moveBy,o=n.direction,i=n.context,a=n.modifiers,l=n.retry,c=void 0===l?{count:0,lastFocused:t}:l,s=i.weekStartsOn,u=i.fromDate,d=i.toDate,f=i.locale,p=({day:eh.Z,week:ta,month:ev.Z,year:tl,startOfWeek:function(e){return i.ISOWeek?tn(e):tt(e,{locale:f,weekStartsOn:s})},endOfWeek:function(e){return i.ISOWeek?ts(e):tc(e,{locale:f,weekStartsOn:s})}})[r](t,"after"===o?1:-1);"before"===o&&u?p=ef([u,p]):"after"===o&&d&&(p=ep([d,p]));var h=!0;if(a){var m=t9(p,a);h=!m.disabled&&!m.hidden}return h?p:c.count>365?c.lastFocused:e(p,{moveBy:r,direction:o,context:i,modifiers:a,retry:tu(tu({},c),{count:c.count+1})})}(o,{moveBy:e,direction:r,context:p,modifiers:n});tr(o,i)||(t.goToDate(i,o),f(i))}};return th.jsx(ne.Provider,{value:{focusedDay:o,focusTarget:u,blur:function(){c(o),i(void 0)},focus:f,focusDayAfter:function(){return h("day","after")},focusDayBefore:function(){return h("day","before")},focusWeekAfter:function(){return h("week","after")},focusWeekBefore:function(){return h("week","before")},focusMonthBefore:function(){return h("month","before")},focusMonthAfter:function(){return h("month","after")},focusYearBefore:function(){return h("year","before")},focusYearAfter:function(){return h("year","after")},focusStartOfWeek:function(){return h("startOfWeek","before")},focusEndOfWeek:function(){return h("endOfWeek","after")}},children:e.children})}function nn(){var e=(0,d.useContext)(ne);if(!e)throw Error("useFocusContext must be used within a FocusProvider");return e}var nr=(0,d.createContext)(void 0);function no(e){return tv(e.initialProps)?th.jsx(ni,{initialProps:e.initialProps,children:e.children}):th.jsx(nr.Provider,{value:{selected:void 0},children:e.children})}function ni(e){var t=e.initialProps,n=e.children,r={selected:t.selected,onDayClick:function(e,n,r){var o,i,a;if(null===(o=t.onDayClick)||void 0===o||o.call(t,e,n,r),n.selected&&!t.required){null===(i=t.onSelect)||void 0===i||i.call(t,void 0,e,n,r);return}null===(a=t.onSelect)||void 
0===a||a.call(t,e,e,n,r)}};return th.jsx(nr.Provider,{value:r,children:n})}function na(){var e=(0,d.useContext)(nr);if(!e)throw Error("useSelectSingle must be used within a SelectSingleProvider");return e}function nl(e){var t,n,r,o,i,a,l,c,u,f,p,h,m,g,v,y,b,x,w,S,k,E,C,O,j,P,M,N,I,R,T,A,_,D,Z,L,z,B,F,H,q,W,K=(0,d.useRef)(null),U=(t=e.date,n=e.displayMonth,a=tk(),l=nn(),c=t9(t,t7(),n),u=tk(),f=na(),p=tU(),h=t$(),g=(m=nn()).focusDayAfter,v=m.focusDayBefore,y=m.focusWeekAfter,b=m.focusWeekBefore,x=m.blur,w=m.focus,S=m.focusMonthBefore,k=m.focusMonthAfter,E=m.focusYearBefore,C=m.focusYearAfter,O=m.focusStartOfWeek,j=m.focusEndOfWeek,P={onClick:function(e){var n,r,o,i;tv(u)?null===(n=f.onDayClick)||void 0===n||n.call(f,t,c,e):tm(u)?null===(r=p.onDayClick)||void 0===r||r.call(p,t,c,e):tg(u)?null===(o=h.onDayClick)||void 0===o||o.call(h,t,c,e):null===(i=u.onDayClick)||void 0===i||i.call(u,t,c,e)},onFocus:function(e){var n;w(t),null===(n=u.onDayFocus)||void 0===n||n.call(u,t,c,e)},onBlur:function(e){var n;x(),null===(n=u.onDayBlur)||void 0===n||n.call(u,t,c,e)},onKeyDown:function(e){var n;switch(e.key){case"ArrowLeft":e.preventDefault(),e.stopPropagation(),"rtl"===u.dir?g():v();break;case"ArrowRight":e.preventDefault(),e.stopPropagation(),"rtl"===u.dir?v():g();break;case"ArrowDown":e.preventDefault(),e.stopPropagation(),y();break;case"ArrowUp":e.preventDefault(),e.stopPropagation(),b();break;case"PageUp":e.preventDefault(),e.stopPropagation(),e.shiftKey?E():S();break;case"PageDown":e.preventDefault(),e.stopPropagation(),e.shiftKey?C():k();break;case"Home":e.preventDefault(),e.stopPropagation(),O();break;case"End":e.preventDefault(),e.stopPropagation(),j()}null===(n=u.onDayKeyDown)||void 0===n||n.call(u,t,c,e)},onKeyUp:function(e){var n;null===(n=u.onDayKeyUp)||void 0===n||n.call(u,t,c,e)},onMouseEnter:function(e){var n;null===(n=u.onDayMouseEnter)||void 0===n||n.call(u,t,c,e)},onMouseLeave:function(e){var n;null===(n=u.onDayMouseLeave)||void 
0===n||n.call(u,t,c,e)},onPointerEnter:function(e){var n;null===(n=u.onDayPointerEnter)||void 0===n||n.call(u,t,c,e)},onPointerLeave:function(e){var n;null===(n=u.onDayPointerLeave)||void 0===n||n.call(u,t,c,e)},onTouchCancel:function(e){var n;null===(n=u.onDayTouchCancel)||void 0===n||n.call(u,t,c,e)},onTouchEnd:function(e){var n;null===(n=u.onDayTouchEnd)||void 0===n||n.call(u,t,c,e)},onTouchMove:function(e){var n;null===(n=u.onDayTouchMove)||void 0===n||n.call(u,t,c,e)},onTouchStart:function(e){var n;null===(n=u.onDayTouchStart)||void 0===n||n.call(u,t,c,e)}},M=tk(),N=na(),I=tU(),R=t$(),T=tv(M)?N.selected:tm(M)?I.selected:tg(M)?R.selected:void 0,A=!!(a.onDayClick||"default"!==a.mode),(0,d.useEffect)(function(){var e;!c.outside&&l.focusedDay&&A&&tr(l.focusedDay,t)&&(null===(e=K.current)||void 0===e||e.focus())},[l.focusedDay,t,K,A,c.outside]),D=(_=[a.classNames.day],Object.keys(c).forEach(function(e){var t=a.modifiersClassNames[e];if(t)_.push(t);else if(Object.values(s).includes(e)){var n=a.classNames["day_".concat(e)];n&&_.push(n)}}),_).join(" "),Z=tu({},a.styles.day),Object.keys(c).forEach(function(e){var t;Z=tu(tu({},Z),null===(t=a.modifiersStyles)||void 0===t?void 0:t[e])}),L=Z,z=!!(c.outside&&!a.showOutsideDays||c.hidden),B=null!==(i=null===(o=a.components)||void 0===o?void 0:o.DayContent)&&void 0!==i?i:tH,F={style:L,className:D,children:th.jsx(B,{date:t,displayMonth:n,activeModifiers:c}),role:"gridcell"},H=l.focusTarget&&tr(l.focusTarget,t)&&!c.outside,q=l.focusedDay&&tr(l.focusedDay,t),W=tu(tu(tu({},F),((r={disabled:c.disabled,role:"gridcell"})["aria-selected"]=c.selected,r.tabIndex=q||H?0:-1,r)),P),{isButton:A,isHidden:z,activeModifiers:c,selectedDays:T,buttonProps:W,divProps:F});return U.isHidden?th.jsx("div",{role:"gridcell"}):U.isButton?th.jsx(t_,tu({name:"day",ref:K},U.buttonProps)):th.jsx("div",tu({},U.divProps))}function nc(e){var 
t=e.number,n=e.dates,r=tk(),o=r.onWeekNumberClick,i=r.styles,a=r.classNames,l=r.locale,c=r.labels.labelWeekNumber,s=(0,r.formatters.formatWeekNumber)(Number(t),{locale:l});if(!o)return th.jsx("span",{className:a.weeknumber,style:i.weeknumber,children:s});var u=c(Number(t),{locale:l});return th.jsx(t_,{name:"week-number","aria-label":u,className:a.weeknumber,style:i.weeknumber,onClick:function(e){o(t,n,e)},children:s})}function ns(e){var t,n,r,o=tk(),i=o.styles,a=o.classNames,l=o.showWeekNumber,c=o.components,s=null!==(t=null==c?void 0:c.Day)&&void 0!==t?t:nl,u=null!==(n=null==c?void 0:c.WeekNumber)&&void 0!==n?n:nc;return l&&(r=th.jsx("td",{className:a.cell,style:i.cell,children:th.jsx(u,{number:e.weekNumber,dates:e.dates})})),th.jsxs("tr",{className:a.row,style:i.row,children:[r,e.dates.map(function(t){return th.jsx("td",{className:a.cell,style:i.cell,role:"presentation",children:th.jsx(s,{displayMonth:e.displayMonth,date:t})},function(e){return(0,ei.Z)(1,arguments),Math.floor(function(e){return(0,ei.Z)(1,arguments),(0,eo.Z)(e).getTime()}(e)/1e3)}(t))})]})}function nu(e,t,n){for(var r=(null==n?void 0:n.ISOWeek)?ts(t):tc(t,n),o=(null==n?void 0:n.ISOWeek)?tn(e):tt(e,n),i=ti(r,o),a=[],l=0;l<=i;l++)a.push((0,eh.Z)(o,l));return a.reduce(function(e,t){var r=(null==n?void 0:n.ISOWeek)?function(e){(0,ei.Z)(1,arguments);var t=(0,eo.Z)(e);return Math.round((tn(t).getTime()-(function(e){(0,ei.Z)(1,arguments);var t=function(e){(0,ei.Z)(1,arguments);var t=(0,eo.Z)(e),n=t.getFullYear(),r=new Date(0);r.setFullYear(n+1,0,4),r.setHours(0,0,0,0);var o=tn(r),i=new Date(0);i.setFullYear(n,0,4),i.setHours(0,0,0,0);var a=tn(i);return t.getTime()>=o.getTime()?n+1:t.getTime()>=a.getTime()?n:n-1}(e),n=new Date(0);return n.setFullYear(t,0,4),n.setHours(0,0,0,0),tn(n)})(t).getTime())/6048e5)+1}(t):function(e,t){(0,ei.Z)(1,arguments);var n=(0,eo.Z)(e);return Math.round((tt(n,t).getTime()-(function(e,t){(0,ei.Z)(1,arguments);var 
n,r,o,i,a,l,c,s,u=(0,em.Z)(null!==(n=null!==(r=null!==(o=null!==(i=null==t?void 0:t.firstWeekContainsDate)&&void 0!==i?i:null==t?void 0:null===(a=t.locale)||void 0===a?void 0:null===(l=a.options)||void 0===l?void 0:l.firstWeekContainsDate)&&void 0!==o?o:ek.firstWeekContainsDate)&&void 0!==r?r:null===(c=ek.locale)||void 0===c?void 0:null===(s=c.options)||void 0===s?void 0:s.firstWeekContainsDate)&&void 0!==n?n:1),d=function(e,t){(0,ei.Z)(1,arguments);var n,r,o,i,a,l,c,s,u=(0,eo.Z)(e),d=u.getFullYear(),f=(0,em.Z)(null!==(n=null!==(r=null!==(o=null!==(i=null==t?void 0:t.firstWeekContainsDate)&&void 0!==i?i:null==t?void 0:null===(a=t.locale)||void 0===a?void 0:null===(l=a.options)||void 0===l?void 0:l.firstWeekContainsDate)&&void 0!==o?o:ek.firstWeekContainsDate)&&void 0!==r?r:null===(c=ek.locale)||void 0===c?void 0:null===(s=c.options)||void 0===s?void 0:s.firstWeekContainsDate)&&void 0!==n?n:1);if(!(f>=1&&f<=7))throw RangeError("firstWeekContainsDate must be between 1 and 7 inclusively");var p=new Date(0);p.setFullYear(d+1,0,f),p.setHours(0,0,0,0);var h=tt(p,t),m=new Date(0);m.setFullYear(d,0,f),m.setHours(0,0,0,0);var g=tt(m,t);return u.getTime()>=h.getTime()?d+1:u.getTime()>=g.getTime()?d:d-1}(e,t),f=new Date(0);return f.setFullYear(d,0,u),f.setHours(0,0,0,0),tt(f,t)})(n,t).getTime())/6048e5)+1}(t,n),o=e.find(function(e){return e.weekNumber===r});return o?o.dates.push(t):e.push({weekNumber:r,dates:[t]}),e},[])}function nd(e){var t,n,r,o=tk(),i=o.locale,a=o.classNames,l=o.styles,c=o.hideHead,s=o.fixedWeeks,u=o.components,d=o.weekStartsOn,f=o.firstWeekContainsDate,p=o.ISOWeek,h=function(e,t){var n=nu(ec(e),e4(e),t);if(null==t?void 0:t.useFixedWeeks){var r=function(e,t){return(0,ei.Z)(1,arguments),function(e,t,n){(0,ei.Z)(2,arguments);var r=tt(e,n),o=tt(t,n);return Math.round((r.getTime()-eD(r)-(o.getTime()-eD(o)))/6048e5)}(function(e){(0,ei.Z)(1,arguments);var t=(0,eo.Z)(e),n=t.getMonth();return 
t.setFullYear(t.getFullYear(),n+1,0),t.setHours(0,0,0,0),t}(e),ec(e),t)+1}(e,t);if(r<6){var o=n[n.length-1],i=o.dates[o.dates.length-1],a=ta(i,6-r),l=nu(ta(i,1),a,t);n.push.apply(n,l)}}return n}(e.displayMonth,{useFixedWeeks:!!s,ISOWeek:p,locale:i,weekStartsOn:d,firstWeekContainsDate:f}),m=null!==(t=null==u?void 0:u.Head)&&void 0!==t?t:tF,g=null!==(n=null==u?void 0:u.Row)&&void 0!==n?n:ns,v=null!==(r=null==u?void 0:u.Footer)&&void 0!==r?r:tz;return th.jsxs("table",{id:e.id,className:a.table,style:l.table,role:"grid","aria-labelledby":e["aria-labelledby"],children:[!c&&th.jsx(m,{}),th.jsx("tbody",{className:a.tbody,style:l.tbody,children:h.map(function(t){return th.jsx(g,{displayMonth:e.displayMonth,dates:t.dates,weekNumber:t.weekNumber},t.weekNumber)})}),th.jsx(v,{displayMonth:e.displayMonth})]})}var nf="undefined"!=typeof window&&window.document&&window.document.createElement?d.useLayoutEffect:d.useEffect,np=!1,nh=0;function nm(){return"react-day-picker-".concat(++nh)}function ng(e){var t,n,r,o,i,a,l,c,s=tk(),u=s.dir,f=s.classNames,p=s.styles,h=s.components,m=tI().displayMonths,g=(r=null!=(t=s.id?"".concat(s.id,"-").concat(e.displayIndex):void 0)?t:np?nm():null,i=(o=(0,d.useState)(r))[0],a=o[1],nf(function(){null===i&&a(nm())},[]),(0,d.useEffect)(function(){!1===np&&(np=!0)},[]),null!==(n=null!=t?t:i)&&void 0!==n?n:void 0),v=s.id?"".concat(s.id,"-grid-").concat(e.displayIndex):void 0,y=[f.month],b=p.month,x=0===e.displayIndex,w=e.displayIndex===m.length-1,S=!x&&!w;"rtl"===u&&(w=(l=[x,w])[0],x=l[1]),x&&(y.push(f.caption_start),b=tu(tu({},b),p.caption_start)),w&&(y.push(f.caption_end),b=tu(tu({},b),p.caption_end)),S&&(y.push(f.caption_between),b=tu(tu({},b),p.caption_between));var k=null!==(c=null==h?void 0:h.Caption)&&void 0!==c?c:tL;return th.jsxs("div",{className:y.join(" "),style:b,children:[th.jsx(k,{id:g,displayMonth:e.displayMonth,displayIndex:e.displayIndex}),th.jsx(nd,{id:v,"aria-labelledby":g,displayMonth:e.displayMonth})]},e.displayIndex)}function 
nv(e){var t=tk(),n=t.classNames,r=t.styles;return th.jsx("div",{className:n.months,style:r.months,children:e.children})}function ny(e){var t,n,r=e.initialProps,o=tk(),i=nn(),a=tI(),l=(0,d.useState)(!1),c=l[0],s=l[1];(0,d.useEffect)(function(){o.initialFocus&&i.focusTarget&&(c||(i.focus(i.focusTarget),s(!0)))},[o.initialFocus,c,i.focus,i.focusTarget,i]);var u=[o.classNames.root,o.className];o.numberOfMonths>1&&u.push(o.classNames.multiple_months),o.showWeekNumber&&u.push(o.classNames.with_weeknumber);var f=tu(tu({},o.styles.root),o.style),p=Object.keys(r).filter(function(e){return e.startsWith("data-")}).reduce(function(e,t){var n;return tu(tu({},e),((n={})[t]=r[t],n))},{}),h=null!==(n=null===(t=r.components)||void 0===t?void 0:t.Months)&&void 0!==n?n:nv;return th.jsx("div",tu({className:u.join(" "),style:f,dir:o.dir,id:o.id,nonce:r.nonce,title:r.title,lang:r.lang},p,{children:th.jsx(h,{children:a.displayMonths.map(function(e,t){return th.jsx(ng,{displayIndex:t,displayMonth:e},t)})})}))}function nb(e){var t=e.children,n=function(e,t){var n={};for(var r in e)Object.prototype.hasOwnProperty.call(e,r)&&0>t.indexOf(r)&&(n[r]=e[r]);if(null!=e&&"function"==typeof Object.getOwnPropertySymbols)for(var o=0,r=Object.getOwnPropertySymbols(e);ot.indexOf(r[o])&&Object.prototype.propertyIsEnumerable.call(e,r[o])&&(n[r[o]]=e[r[o]]);return n}(e,["children"]);return th.jsx(tS,{initialProps:n,children:th.jsx(tN,{children:th.jsx(no,{initialProps:n,children:th.jsx(tW,{initialProps:n,children:th.jsx(tG,{initialProps:n,children:th.jsx(t8,{children:th.jsx(nt,{children:t})})})})})})})}function nx(e){return th.jsx(nb,tu({},e,{children:th.jsx(ny,{initialProps:e})}))}let nw=e=>{var t=(0,u._T)(e,[]);return d.createElement("svg",Object.assign({xmlns:"http://www.w3.org/2000/svg",viewBox:"0 0 24 24",fill:"currentColor"},t),d.createElement("path",{d:"M10.8284 12.0007L15.7782 16.9504L14.364 18.3646L8 12.0007L14.364 5.63672L15.7782 7.05093L10.8284 12.0007Z"}))},nS=e=>{var t=(0,u._T)(e,[]);return 
d.createElement("svg",Object.assign({xmlns:"http://www.w3.org/2000/svg",viewBox:"0 0 24 24",fill:"currentColor"},t),d.createElement("path",{d:"M13.1717 12.0007L8.22192 7.05093L9.63614 5.63672L16.0001 12.0007L9.63614 18.3646L8.22192 16.9504L13.1717 12.0007Z"}))},nk=e=>{var t=(0,u._T)(e,[]);return d.createElement("svg",Object.assign({xmlns:"http://www.w3.org/2000/svg",viewBox:"0 0 24 24",fill:"currentColor"},t),d.createElement("path",{d:"M4.83582 12L11.0429 18.2071L12.4571 16.7929L7.66424 12L12.4571 7.20712L11.0429 5.79291L4.83582 12ZM10.4857 12L16.6928 18.2071L18.107 16.7929L13.3141 12L18.107 7.20712L16.6928 5.79291L10.4857 12Z"}))},nE=e=>{var t=(0,u._T)(e,[]);return d.createElement("svg",Object.assign({xmlns:"http://www.w3.org/2000/svg",viewBox:"0 0 24 24",fill:"currentColor"},t),d.createElement("path",{d:"M19.1642 12L12.9571 5.79291L11.5429 7.20712L16.3358 12L11.5429 16.7929L12.9571 18.2071L19.1642 12ZM13.5143 12L7.30722 5.79291L5.89301 7.20712L10.6859 12L5.89301 16.7929L7.30722 18.2071L13.5143 12Z"}))};var nC=n(84264);n(41649);var nO=n(1526),nj=n(7084),nP=n(26898);let 
nM={xs:{paddingX:"px-2",paddingY:"py-0.5",fontSize:"text-xs"},sm:{paddingX:"px-2.5",paddingY:"py-1",fontSize:"text-sm"},md:{paddingX:"px-3",paddingY:"py-1.5",fontSize:"text-md"},lg:{paddingX:"px-3.5",paddingY:"py-1.5",fontSize:"text-lg"},xl:{paddingX:"px-3.5",paddingY:"py-1.5",fontSize:"text-xl"}},nN={xs:{paddingX:"px-2",paddingY:"py-0.5",fontSize:"text-xs"},sm:{paddingX:"px-2.5",paddingY:"py-0.5",fontSize:"text-sm"},md:{paddingX:"px-3",paddingY:"py-0.5",fontSize:"text-md"},lg:{paddingX:"px-3.5",paddingY:"py-0.5",fontSize:"text-lg"},xl:{paddingX:"px-4",paddingY:"py-1",fontSize:"text-xl"}},nI={xs:{height:"h-4",width:"w-4"},sm:{height:"h-4",width:"w-4"},md:{height:"h-4",width:"w-4"},lg:{height:"h-5",width:"w-5"},xl:{height:"h-6",width:"w-6"}},nR={[nj.wu.Increase]:{bgColor:(0,eJ.bM)(nj.fr.Emerald,nP.K.background).bgColor,textColor:(0,eJ.bM)(nj.fr.Emerald,nP.K.text).textColor},[nj.wu.ModerateIncrease]:{bgColor:(0,eJ.bM)(nj.fr.Emerald,nP.K.background).bgColor,textColor:(0,eJ.bM)(nj.fr.Emerald,nP.K.text).textColor},[nj.wu.Decrease]:{bgColor:(0,eJ.bM)(nj.fr.Rose,nP.K.background).bgColor,textColor:(0,eJ.bM)(nj.fr.Rose,nP.K.text).textColor},[nj.wu.ModerateDecrease]:{bgColor:(0,eJ.bM)(nj.fr.Rose,nP.K.background).bgColor,textColor:(0,eJ.bM)(nj.fr.Rose,nP.K.text).textColor},[nj.wu.Unchanged]:{bgColor:(0,eJ.bM)(nj.fr.Orange,nP.K.background).bgColor,textColor:(0,eJ.bM)(nj.fr.Orange,nP.K.text).textColor}},nT={[nj.wu.Increase]:e=>{var t=(0,u._T)(e,[]);return d.createElement("svg",Object.assign({xmlns:"http://www.w3.org/2000/svg",viewBox:"0 0 24 24",fill:"currentColor"},t),d.createElement("path",{d:"M13.0001 7.82843V20H11.0001V7.82843L5.63614 13.1924L4.22192 11.7782L12.0001 4L19.7783 11.7782L18.3641 13.1924L13.0001 7.82843Z"}))},[nj.wu.ModerateIncrease]:e=>{var t=(0,u._T)(e,[]);return d.createElement("svg",Object.assign({xmlns:"http://www.w3.org/2000/svg",viewBox:"0 0 24 24",fill:"currentColor"},t),d.createElement("path",{d:"M16.0037 9.41421L7.39712 18.0208L5.98291 16.6066L14.5895 
8H7.00373V6H18.0037V17H16.0037V9.41421Z"}))},[nj.wu.Decrease]:e=>{var t=(0,u._T)(e,[]);return d.createElement("svg",Object.assign({xmlns:"http://www.w3.org/2000/svg",viewBox:"0 0 24 24",fill:"currentColor"},t),d.createElement("path",{d:"M13.0001 16.1716L18.3641 10.8076L19.7783 12.2218L12.0001 20L4.22192 12.2218L5.63614 10.8076L11.0001 16.1716V4H13.0001V16.1716Z"}))},[nj.wu.ModerateDecrease]:e=>{var t=(0,u._T)(e,[]);return d.createElement("svg",Object.assign({xmlns:"http://www.w3.org/2000/svg",viewBox:"0 0 24 24",fill:"currentColor"},t),d.createElement("path",{d:"M14.5895 16.0032L5.98291 7.39664L7.39712 5.98242L16.0037 14.589V7.00324H18.0037V18.0032H7.00373V16.0032H14.5895Z"}))},[nj.wu.Unchanged]:e=>{var t=(0,u._T)(e,[]);return d.createElement("svg",Object.assign({xmlns:"http://www.w3.org/2000/svg",viewBox:"0 0 24 24",fill:"currentColor"},t),d.createElement("path",{d:"M16.1716 10.9999L10.8076 5.63589L12.2218 4.22168L20 11.9999L12.2218 19.778L10.8076 18.3638L16.1716 12.9999H4V10.9999H16.1716Z"}))}},nA=(0,eJ.fn)("BadgeDelta");d.forwardRef((e,t)=>{let{deltaType:n=nj.wu.Increase,isIncreasePositive:r=!0,size:o=nj.u8.SM,tooltip:i,children:a,className:l}=e,c=(0,u._T)(e,["deltaType","isIncreasePositive","size","tooltip","children","className"]),s=nT[n],f=(0,eJ.Fo)(n,r),p=a?nN:nM,{tooltipProps:h,getReferenceProps:m}=(0,nO.l)();return d.createElement("span",Object.assign({ref:(0,eJ.lq)([t,h.refs.setReference]),className:(0,es.q)(nA("root"),"w-max flex-shrink-0 inline-flex justify-center items-center cursor-default rounded-tremor-full bg-opacity-20 dark:bg-opacity-25",nR[f].bgColor,nR[f].textColor,p[o].paddingX,p[o].paddingY,p[o].fontSize,l)},m,c),d.createElement(nO.Z,Object.assign({text:i},h)),d.createElement(s,{className:(0,es.q)(nA("icon"),"shrink-0",a?(0,es.q)("-ml-1 mr-1.5"):nI[o].height,nI[o].width)}),a?d.createElement("p",{className:(0,es.q)(nA("text"),"text-sm whitespace-nowrap")},a):null)}).displayName="BadgeDelta";var n_=n(47323);let 
nD=e=>{var{onClick:t,icon:n}=e,r=(0,u._T)(e,["onClick","icon"]);return d.createElement("button",Object.assign({type:"button",className:(0,es.q)("flex items-center justify-center p-1 h-7 w-7 outline-none focus:ring-2 transition duration-100 border border-tremor-border dark:border-dark-tremor-border hover:bg-tremor-background-muted dark:hover:bg-dark-tremor-background-muted rounded-tremor-small focus:border-tremor-brand-subtle select-none dark:focus:border-dark-tremor-brand-subtle focus:ring-tremor-brand-muted dark:focus:ring-dark-tremor-brand-muted text-tremor-content-subtle dark:text-dark-tremor-content-subtle hover:text-tremor-content dark:hover:text-dark-tremor-content")},r),d.createElement(n_.Z,{onClick:t,icon:n,variant:"simple",color:"slate",size:"sm"}))};function nZ(e){var{mode:t,defaultMonth:n,selected:r,onSelect:o,locale:i,disabled:a,enableYearNavigation:l,classNames:c,weekStartsOn:s=0}=e,f=(0,u._T)(e,["mode","defaultMonth","selected","onSelect","locale","disabled","enableYearNavigation","classNames","weekStartsOn"]);return d.createElement(nx,Object.assign({showOutsideDays:!0,mode:t,defaultMonth:n,selected:r,onSelect:o,locale:i,disabled:a,weekStartsOn:s,classNames:Object.assign({months:"flex flex-col sm:flex-row space-y-4 sm:space-x-4 sm:space-y-0",month:"space-y-4",caption:"flex justify-center pt-2 relative items-center",caption_label:"text-tremor-default text-tremor-content-emphasis dark:text-dark-tremor-content-emphasis font-medium",nav:"space-x-1 flex items-center",nav_button:"flex items-center justify-center p-1 h-7 w-7 outline-none focus:ring-2 transition duration-100 border border-tremor-border dark:border-dark-tremor-border hover:bg-tremor-background-muted dark:hover:bg-dark-tremor-background-muted rounded-tremor-small focus:border-tremor-brand-subtle dark:focus:border-dark-tremor-brand-subtle focus:ring-tremor-brand-muted dark:focus:ring-dark-tremor-brand-muted text-tremor-content-subtle dark:text-dark-tremor-content-subtle hover:text-tremor-content 
dark:hover:text-dark-tremor-content",nav_button_previous:"absolute left-1",nav_button_next:"absolute right-1",table:"w-full border-collapse space-y-1",head_row:"flex",head_cell:"w-9 font-normal text-center text-tremor-content-subtle dark:text-dark-tremor-content-subtle",row:"flex w-full mt-0.5",cell:"text-center p-0 relative focus-within:relative text-tremor-default text-tremor-content-emphasis dark:text-dark-tremor-content-emphasis",day:"h-9 w-9 p-0 hover:bg-tremor-background-subtle dark:hover:bg-dark-tremor-background-subtle outline-tremor-brand dark:outline-dark-tremor-brand rounded-tremor-default",day_today:"font-bold",day_selected:"aria-selected:bg-tremor-background-emphasis aria-selected:text-tremor-content-inverted dark:aria-selected:bg-dark-tremor-background-emphasis dark:aria-selected:text-dark-tremor-content-inverted ",day_disabled:"text-tremor-content-subtle dark:text-dark-tremor-content-subtle disabled:hover:bg-transparent",day_outside:"text-tremor-content-subtle dark:text-dark-tremor-content-subtle"},c),components:{IconLeft:e=>{var t=(0,u._T)(e,[]);return d.createElement(nw,Object.assign({className:"h-4 w-4"},t))},IconRight:e=>{var t=(0,u._T)(e,[]);return d.createElement(nS,Object.assign({className:"h-4 w-4"},t))},Caption:e=>{var t=(0,u._T)(e,[]);let{goToMonth:n,nextMonth:r,previousMonth:o,currentMonth:a}=tI();return d.createElement("div",{className:"flex justify-between items-center"},d.createElement("div",{className:"flex items-center space-x-1"},l&&d.createElement(nD,{onClick:()=>a&&n(tl(a,-1)),icon:nk}),d.createElement(nD,{onClick:()=>o&&n(o),icon:nw})),d.createElement(nC.Z,{className:"text-tremor-default tabular-nums capitalize text-tremor-content-emphasis dark:text-dark-tremor-content-emphasis font-medium"},eQ(t.displayMonth,"LLLL yyy",{locale:i})),d.createElement("div",{className:"flex items-center 
space-x-1"},d.createElement(nD,{onClick:()=>r&&n(r),icon:nS}),l&&d.createElement(nD,{onClick:()=>a&&n(tl(a,1)),icon:nE})))}}},f))}nZ.displayName="DateRangePicker",n(27281);var nL=n(57365),nz=n(44140);let nB=el(),nF=d.forwardRef((e,t)=>{var n,r;let{value:o,defaultValue:i,onValueChange:a,enableSelect:l=!0,minDate:c,maxDate:s,placeholder:f="Select range",selectPlaceholder:p="Select range",disabled:h=!1,locale:m=eU,enableClear:g=!0,displayFormat:v,children:y,className:b,enableYearNavigation:x=!1,weekStartsOn:w=0,disabledDates:S}=e,k=(0,u._T)(e,["value","defaultValue","onValueChange","enableSelect","minDate","maxDate","placeholder","selectPlaceholder","disabled","locale","enableClear","displayFormat","children","className","enableYearNavigation","weekStartsOn","disabledDates"]),[E,C]=(0,nz.Z)(i,o),[O,j]=(0,d.useState)(!1),[P,M]=(0,d.useState)(!1),N=(0,d.useMemo)(()=>{let e=[];return c&&e.push({before:c}),s&&e.push({after:s}),[...e,...null!=S?S:[]]},[c,s,S]),I=(0,d.useMemo)(()=>{let e=new Map;return y?d.Children.forEach(y,t=>{var n;e.set(t.props.value,{text:null!==(n=(0,eu.qg)(t))&&void 0!==n?n:t.props.value,from:t.props.from,to:t.props.to})}):e6.forEach(t=>{e.set(t.value,{text:t.text,from:t.from,to:nB})}),e},[y]),R=(0,d.useMemo)(()=>{if(y)return(0,eu.sl)(y);let e=new Map;return e6.forEach(t=>e.set(t.value,t.text)),e},[y]),T=(null==E?void 0:E.selectValue)||"",A=e1(null==E?void 0:E.from,c,T,I),_=e2(null==E?void 0:E.to,s,T,I),D=A||_?e3(A,_,m,v):f,Z=ec(null!==(r=null!==(n=null!=_?_:A)&&void 0!==n?n:s)&&void 0!==r?r:nB),L=g&&!h;return d.createElement("div",Object.assign({ref:t,className:(0,es.q)("w-full min-w-[10rem] relative flex justify-between text-tremor-default max-w-sm shadow-tremor-input dark:shadow-dark-tremor-input rounded-tremor-default",b)},k),d.createElement(J,{as:"div",className:(0,es.q)("w-full",l?"rounded-l-tremor-default":"rounded-tremor-default",O&&"ring-2 ring-tremor-brand-muted dark:ring-dark-tremor-brand-muted 
z-10")},d.createElement("div",{className:"relative w-full"},d.createElement(J.Button,{onFocus:()=>j(!0),onBlur:()=>j(!1),disabled:h,className:(0,es.q)("w-full outline-none text-left whitespace-nowrap truncate focus:ring-2 transition duration-100 rounded-l-tremor-default flex flex-nowrap border pl-3 py-2","rounded-l-tremor-default border-tremor-border text-tremor-content-emphasis focus:border-tremor-brand-subtle focus:ring-tremor-brand-muted","dark:border-dark-tremor-border dark:text-dark-tremor-content-emphasis dark:focus:border-dark-tremor-brand-subtle dark:focus:ring-dark-tremor-brand-muted",l?"rounded-l-tremor-default":"rounded-tremor-default",L?"pr-8":"pr-4",(0,eu.um)((0,eu.Uh)(A||_),h))},d.createElement(en,{className:(0,es.q)(e0("calendarIcon"),"flex-none shrink-0 h-5 w-5 -ml-0.5 mr-2","text-tremor-content-subtle","dark:text-dark-tremor-content-subtle"),"aria-hidden":"true"}),d.createElement("p",{className:"truncate"},D)),L&&A?d.createElement("button",{type:"button",className:(0,es.q)("absolute outline-none inset-y-0 right-0 flex items-center transition duration-100 mr-4"),onClick:e=>{e.preventDefault(),null==a||a({}),C({})}},d.createElement(er.Z,{className:(0,es.q)(e0("clearIcon"),"flex-none h-4 w-4","text-tremor-content-subtle","dark:text-dark-tremor-content-subtle")})):null),d.createElement(ee.u,{className:"absolute z-10 min-w-min left-0",enter:"transition ease duration-100 transform",enterFrom:"opacity-0 -translate-y-4",enterTo:"opacity-100 translate-y-0",leave:"transition ease duration-100 transform",leaveFrom:"opacity-100 translate-y-0",leaveTo:"opacity-0 -translate-y-4"},d.createElement(J.Panel,{focus:!0,className:(0,es.q)("divide-y overflow-y-auto outline-none rounded-tremor-default p-3 border my-1","bg-tremor-background border-tremor-border divide-tremor-border shadow-tremor-dropdown","dark:bg-dark-tremor-background dark:border-dark-tremor-border dark:divide-dark-tremor-border 
dark:shadow-dark-tremor-dropdown")},d.createElement(nZ,Object.assign({mode:"range",showOutsideDays:!0,defaultMonth:Z,selected:{from:A,to:_},onSelect:e=>{null==a||a({from:null==e?void 0:e.from,to:null==e?void 0:e.to}),C({from:null==e?void 0:e.from,to:null==e?void 0:e.to})},locale:m,disabled:N,enableYearNavigation:x,classNames:{day_range_middle:(0,es.q)("!rounded-none aria-selected:!bg-tremor-background-subtle aria-selected:dark:!bg-dark-tremor-background-subtle aria-selected:!text-tremor-content aria-selected:dark:!bg-dark-tremor-background-subtle"),day_range_start:"rounded-r-none rounded-l-tremor-small aria-selected:text-tremor-brand-inverted dark:aria-selected:text-dark-tremor-brand-inverted",day_range_end:"rounded-l-none rounded-r-tremor-small aria-selected:text-tremor-brand-inverted dark:aria-selected:text-dark-tremor-brand-inverted"},weekStartsOn:w},e))))),l&&d.createElement(et.R,{as:"div",className:(0,es.q)("w-48 -ml-px rounded-r-tremor-default",P&&"ring-2 ring-tremor-brand-muted dark:ring-dark-tremor-brand-muted z-10"),value:T,onChange:e=>{let{from:t,to:n}=I.get(e),r=null!=n?n:nB;null==a||a({from:t,to:r,selectValue:e}),C({from:t,to:r,selectValue:e})},disabled:h},e=>{var t;let{value:n}=e;return d.createElement(d.Fragment,null,d.createElement(et.R.Button,{onFocus:()=>M(!0),onBlur:()=>M(!1),className:(0,es.q)("w-full outline-none text-left whitespace-nowrap truncate rounded-r-tremor-default transition duration-100 border px-4 py-2","border-tremor-border shadow-tremor-input text-tremor-content-emphasis focus:border-tremor-brand-subtle","dark:border-dark-tremor-border dark:shadow-dark-tremor-input dark:text-dark-tremor-content-emphasis dark:focus:border-dark-tremor-brand-subtle",(0,eu.um)((0,eu.Uh)(n),h))},n&&null!==(t=R.get(n))&&void 0!==t?t:p),d.createElement(ee.u,{className:"absolute z-10 w-full inset-x-0 right-0",enter:"transition ease duration-100 transform",enterFrom:"opacity-0 -translate-y-4",enterTo:"opacity-100 translate-y-0",leave:"transition ease 
duration-100 transform",leaveFrom:"opacity-100 translate-y-0",leaveTo:"opacity-0 -translate-y-4"},d.createElement(et.R.Options,{className:(0,es.q)("divide-y overflow-y-auto outline-none border my-1","shadow-tremor-dropdown bg-tremor-background border-tremor-border divide-tremor-border rounded-tremor-default","dark:shadow-dark-tremor-dropdown dark:bg-dark-tremor-background dark:border-dark-tremor-border dark:divide-dark-tremor-border")},null!=y?y:e6.map(e=>d.createElement(nL.Z,{key:e.value,value:e.value},e.text)))))}))});nF.displayName="DateRangePicker"},92414:function(e,t,n){"use strict";n.d(t,{Z:function(){return v}});var r=n(5853),o=n(2265);n(42698),n(64016),n(8710);var i=n(33232),a=n(44140),l=n(58747);let c=e=>{var t=(0,r._T)(e,[]);return o.createElement("svg",Object.assign({xmlns:"http://www.w3.org/2000/svg",viewBox:"0 0 24 24",fill:"currentColor"},t),o.createElement("path",{d:"M18.031 16.6168L22.3137 20.8995L20.8995 22.3137L16.6168 18.031C15.0769 19.263 13.124 20 11 20C6.032 20 2 15.968 2 11C2 6.032 6.032 2 11 2C15.968 2 20 6.032 20 11C20 13.124 19.263 15.0769 18.031 16.6168ZM16.0247 15.8748C17.2475 14.6146 18 12.8956 18 11C18 7.1325 14.8675 4 11 4C7.1325 4 4 7.1325 4 11C4 14.8675 7.1325 18 11 18C12.8956 18 14.6146 17.2475 15.8748 16.0247L16.0247 15.8748Z"}))};var s=n(4537),u=n(28517),d=n(33044);let f=e=>{var t=(0,r._T)(e,[]);return o.createElement("svg",Object.assign({xmlns:"http://www.w3.org/2000/svg",width:"100%",height:"100%",fill:"none",viewBox:"0 0 24 24",stroke:"currentColor",strokeWidth:"2",strokeLinecap:"round",strokeLinejoin:"round"},t),o.createElement("line",{x1:"18",y1:"6",x2:"6",y2:"18"}),o.createElement("line",{x1:"6",y1:"6",x2:"18",y2:"18"}))};var p=n(65954),h=n(1153),m=n(96398);let 
g=(0,h.fn)("MultiSelect"),v=o.forwardRef((e,t)=>{let{defaultValue:n,value:h,onValueChange:v,placeholder:y="Select...",placeholderSearch:b="Search",disabled:x=!1,icon:w,children:S,className:k}=e,E=(0,r._T)(e,["defaultValue","value","onValueChange","placeholder","placeholderSearch","disabled","icon","children","className"]),[C,O]=(0,a.Z)(n,h),{reactElementChildren:j,optionsAvailable:P}=(0,o.useMemo)(()=>{let e=o.Children.toArray(S).filter(o.isValidElement);return{reactElementChildren:e,optionsAvailable:(0,m.n0)("",e)}},[S]),[M,N]=(0,o.useState)(""),I=(null!=C?C:[]).length>0,R=(0,o.useMemo)(()=>M?(0,m.n0)(M,j):P,[M,j,P]),T=()=>{N("")};return o.createElement(u.R,Object.assign({as:"div",ref:t,defaultValue:C,value:C,onChange:e=>{null==v||v(e),O(e)},disabled:x,className:(0,p.q)("w-full min-w-[10rem] relative text-tremor-default",k)},E,{multiple:!0}),e=>{let{value:t}=e;return o.createElement(o.Fragment,null,o.createElement(u.R.Button,{className:(0,p.q)("w-full outline-none text-left whitespace-nowrap truncate rounded-tremor-default focus:ring-2 transition duration-100 border pr-8 py-1.5","border-tremor-border shadow-tremor-input focus:border-tremor-brand-subtle focus:ring-tremor-brand-muted","dark:border-dark-tremor-border dark:shadow-dark-tremor-input dark:focus:border-dark-tremor-brand-subtle dark:focus:ring-dark-tremor-brand-muted",w?"pl-11 -ml-0.5":"pl-3",(0,m.um)(t.length>0,x))},w&&o.createElement("span",{className:(0,p.q)("absolute inset-y-0 left-0 flex items-center ml-px pl-2.5")},o.createElement(w,{className:(0,p.q)(g("Icon"),"flex-none h-5 w-5","text-tremor-content-subtle","dark:text-dark-tremor-content-subtle")})),o.createElement("div",{className:"h-6 flex items-center"},t.length>0?o.createElement("div",{className:"flex flex-nowrap overflow-x-scroll [&::-webkit-scrollbar]:hidden [scrollbar-width:none] gap-x-1 mr-5 -ml-1.5 relative"},P.filter(e=>t.includes(e.props.value)).map((e,n)=>{var r;return o.createElement("div",{key:n,className:(0,p.q)("max-w-[100px] 
lg:max-w-[200px] flex justify-center items-center pl-2 pr-1.5 py-1 font-medium","rounded-tremor-small","bg-tremor-background-muted dark:bg-dark-tremor-background-muted","bg-tremor-background-subtle dark:bg-dark-tremor-background-subtle","text-tremor-content-default dark:text-dark-tremor-content-default","text-tremor-content-emphasis dark:text-dark-tremor-content-emphasis")},o.createElement("div",{className:"text-xs truncate "},null!==(r=e.props.children)&&void 0!==r?r:e.props.value),o.createElement("div",{onClick:n=>{n.preventDefault();let r=t.filter(t=>t!==e.props.value);null==v||v(r),O(r)}},o.createElement(f,{className:(0,p.q)(g("clearIconItem"),"cursor-pointer rounded-tremor-full w-3.5 h-3.5 ml-2","text-tremor-content-subtle hover:text-tremor-content","dark:text-dark-tremor-content-subtle dark:hover:text-tremor-content")})))})):o.createElement("span",null,y)),o.createElement("span",{className:(0,p.q)("absolute inset-y-0 right-0 flex items-center mr-2.5")},o.createElement(l.Z,{className:(0,p.q)(g("arrowDownIcon"),"flex-none h-5 w-5","text-tremor-content-subtle","dark:text-dark-tremor-content-subtle")}))),I&&!x?o.createElement("button",{type:"button",className:(0,p.q)("absolute inset-y-0 right-0 flex items-center mr-8"),onClick:e=>{e.preventDefault(),O([]),null==v||v([])}},o.createElement(s.Z,{className:(0,p.q)(g("clearIconAllItems"),"flex-none h-4 w-4","text-tremor-content-subtle","dark:text-dark-tremor-content-subtle")})):null,o.createElement(d.u,{className:"absolute z-10 w-full",enter:"transition ease duration-100 transform",enterFrom:"opacity-0 -translate-y-4",enterTo:"opacity-100 translate-y-0",leave:"transition ease duration-100 transform",leaveFrom:"opacity-100 translate-y-0",leaveTo:"opacity-0 -translate-y-4"},o.createElement(u.R.Options,{className:(0,p.q)("divide-y overflow-y-auto outline-none rounded-tremor-default max-h-[228px] left-0 border my-1","bg-tremor-background border-tremor-border divide-tremor-border 
shadow-tremor-dropdown","dark:bg-dark-tremor-background dark:border-dark-tremor-border dark:divide-dark-tremor-border dark:shadow-dark-tremor-dropdown")},o.createElement("div",{className:(0,p.q)("flex items-center w-full px-2.5","bg-tremor-background-muted","dark:bg-dark-tremor-background-muted")},o.createElement("span",null,o.createElement(c,{className:(0,p.q)("flex-none w-4 h-4 mr-2","text-tremor-content-subtle","dark:text-dark-tremor-content-subtle")})),o.createElement("input",{name:"search",type:"input",autoComplete:"off",placeholder:b,className:(0,p.q)("w-full focus:outline-none focus:ring-none bg-transparent text-tremor-default py-2","text-tremor-content-emphasis","dark:text-dark-tremor-content-emphasis"),onKeyDown:e=>{"Space"===e.code&&""!==e.target.value&&e.stopPropagation()},onChange:e=>N(e.target.value),value:M})),o.createElement(i.Z.Provider,Object.assign({},{onBlur:{handleResetSearch:T}},{value:{selectedValue:t}}),R))))})});v.displayName="MultiSelect"},46030:function(e,t,n){"use strict";n.d(t,{Z:function(){return u}});var r=n(5853);n(42698),n(64016),n(8710);var o=n(33232),i=n(2265),a=n(65954),l=n(1153),c=n(28517);let s=(0,l.fn)("MultiSelectItem"),u=i.forwardRef((e,t)=>{let{value:n,className:u,children:d}=e,f=(0,r._T)(e,["value","className","children"]),{selectedValue:p}=(0,i.useContext)(o.Z),h=(0,l.NZ)(n,p);return i.createElement(c.R.Option,Object.assign({className:(0,a.q)(s("root"),"flex justify-start items-center cursor-default text-tremor-default p-2.5","ui-active:bg-tremor-background-muted ui-active:text-tremor-content-strong ui-selected:text-tremor-content-strong text-tremor-content-emphasis","dark:ui-active:bg-dark-tremor-background-muted dark:ui-active:text-dark-tremor-content-strong dark:ui-selected:text-dark-tremor-content-strong dark:ui-selected:bg-dark-tremor-background-muted dark:text-dark-tremor-content-emphasis",u),ref:t,key:n,value:n},f),i.createElement("input",{type:"checkbox",className:(0,a.q)(s("checkbox"),"flex-none focus:ring-none 
focus:outline-none cursor-pointer mr-2.5","accent-tremor-brand","dark:accent-dark-tremor-brand"),checked:h,readOnly:!0}),i.createElement("span",{className:"whitespace-nowrap truncate"},null!=d?d:n))});u.displayName="MultiSelectItem"},30150:function(e,t,n){"use strict";n.d(t,{Z:function(){return f}});var r=n(5853),o=n(2265);let i=e=>{var t=(0,r._T)(e,[]);return o.createElement("svg",Object.assign({},t,{xmlns:"http://www.w3.org/2000/svg",fill:"none",viewBox:"0 0 24 24",stroke:"currentColor",strokeWidth:"2.5"}),o.createElement("path",{d:"M12 4v16m8-8H4"}))},a=e=>{var t=(0,r._T)(e,[]);return o.createElement("svg",Object.assign({},t,{xmlns:"http://www.w3.org/2000/svg",fill:"none",viewBox:"0 0 24 24",stroke:"currentColor",strokeWidth:"2.5"}),o.createElement("path",{d:"M20 12H4"}))};var l=n(65954),c=n(1153),s=n(69262);let u="flex mx-auto text-tremor-content-subtle dark:text-dark-tremor-content-subtle",d="cursor-pointer hover:text-tremor-content dark:hover:text-dark-tremor-content",f=o.forwardRef((e,t)=>{let{onSubmit:n,enableStepper:f=!0,disabled:p,onValueChange:h,onChange:m}=e,g=(0,r._T)(e,["onSubmit","enableStepper","disabled","onValueChange","onChange"]),v=(0,o.useRef)(null),[y,b]=o.useState(!1),x=o.useCallback(()=>{b(!0)},[]),w=o.useCallback(()=>{b(!1)},[]),[S,k]=o.useState(!1),E=o.useCallback(()=>{k(!0)},[]),C=o.useCallback(()=>{k(!1)},[]);return o.createElement(s.Z,Object.assign({type:"number",ref:(0,c.lq)([v,t]),disabled:p,makeInputClassName:(0,c.fn)("NumberInput"),onKeyDown:e=>{var t;if("Enter"===e.key&&!e.ctrlKey&&!e.altKey&&!e.shiftKey){let e=null===(t=v.current)||void 0===t?void 0:t.value;null==n||n(parseFloat(null!=e?e:""))}"ArrowDown"===e.key&&x(),"ArrowUp"===e.key&&E()},onKeyUp:e=>{"ArrowDown"===e.key&&w(),"ArrowUp"===e.key&&C()},onChange:e=>{p||(null==h||h(parseFloat(e.target.value)),null==m||m(e))},stepper:f?o.createElement("div",{className:(0,l.q)("flex justify-center 
align-middle")},o.createElement("div",{tabIndex:-1,onClick:e=>e.preventDefault(),onMouseDown:e=>e.preventDefault(),onTouchStart:e=>{e.cancelable&&e.preventDefault()},onMouseUp:()=>{var e,t;p||(null===(e=v.current)||void 0===e||e.stepDown(),null===(t=v.current)||void 0===t||t.dispatchEvent(new Event("input",{bubbles:!0})))},className:(0,l.q)(!p&&d,u,"group py-[10px] px-2.5 border-l border-tremor-border dark:border-dark-tremor-border")},o.createElement(a,{"data-testid":"step-down",className:(y?"scale-95":"")+" h-4 w-4 duration-75 transition group-active:scale-95"})),o.createElement("div",{tabIndex:-1,onClick:e=>e.preventDefault(),onMouseDown:e=>e.preventDefault(),onTouchStart:e=>{e.cancelable&&e.preventDefault()},onMouseUp:()=>{var e,t;p||(null===(e=v.current)||void 0===e||e.stepUp(),null===(t=v.current)||void 0===t||t.dispatchEvent(new Event("input",{bubbles:!0})))},className:(0,l.q)(!p&&d,u,"group py-[10px] px-2.5 border-l border-tremor-border dark:border-dark-tremor-border")},o.createElement(i,{"data-testid":"step-up",className:(S?"scale-95":"")+" h-4 w-4 duration-75 transition group-active:scale-95"}))):null},g))});f.displayName="NumberInput"},27281:function(e,t,n){"use strict";n.d(t,{Z:function(){return h}});var r=n(5853),o=n(2265),i=n(58747),a=n(4537),l=n(65954),c=n(1153),s=n(96398),u=n(28517),d=n(33044),f=n(44140);let p=(0,c.fn)("Select"),h=o.forwardRef((e,t)=>{let{defaultValue:n,value:c,onValueChange:h,placeholder:m="Select...",disabled:g=!1,icon:v,enableClear:y=!0,children:b,className:x}=e,w=(0,r._T)(e,["defaultValue","value","onValueChange","placeholder","disabled","icon","enableClear","children","className"]),[S,k]=(0,f.Z)(n,c),E=(0,o.useMemo)(()=>{let e=o.Children.toArray(b).filter(o.isValidElement);return(0,s.sl)(e)},[b]);return o.createElement(u.R,Object.assign({as:"div",ref:t,defaultValue:S,value:S,onChange:e=>{null==h||h(e),k(e)},disabled:g,className:(0,l.q)("w-full min-w-[10rem] relative text-tremor-default",x)},w),e=>{var t;let{value:n}=e;return 
o.createElement(o.Fragment,null,o.createElement(u.R.Button,{className:(0,l.q)("w-full outline-none text-left whitespace-nowrap truncate rounded-tremor-default focus:ring-2 transition duration-100 border pr-8 py-2","border-tremor-border shadow-tremor-input focus:border-tremor-brand-subtle focus:ring-tremor-brand-muted","dark:border-dark-tremor-border dark:shadow-dark-tremor-input dark:focus:border-dark-tremor-brand-subtle dark:focus:ring-dark-tremor-brand-muted",v?"pl-10":"pl-3",(0,s.um)((0,s.Uh)(n),g))},v&&o.createElement("span",{className:(0,l.q)("absolute inset-y-0 left-0 flex items-center ml-px pl-2.5")},o.createElement(v,{className:(0,l.q)(p("Icon"),"flex-none h-5 w-5","text-tremor-content-subtle","dark:text-dark-tremor-content-subtle")})),o.createElement("span",{className:"w-[90%] block truncate"},n&&null!==(t=E.get(n))&&void 0!==t?t:m),o.createElement("span",{className:(0,l.q)("absolute inset-y-0 right-0 flex items-center mr-3")},o.createElement(i.Z,{className:(0,l.q)(p("arrowDownIcon"),"flex-none h-5 w-5","text-tremor-content-subtle","dark:text-dark-tremor-content-subtle")}))),y&&S?o.createElement("button",{type:"button",className:(0,l.q)("absolute inset-y-0 right-0 flex items-center mr-8"),onClick:e=>{e.preventDefault(),k(""),null==h||h("")}},o.createElement(a.Z,{className:(0,l.q)(p("clearIcon"),"flex-none h-4 w-4","text-tremor-content-subtle","dark:text-dark-tremor-content-subtle")})):null,o.createElement(d.u,{className:"absolute z-10 w-full",enter:"transition ease duration-100 transform",enterFrom:"opacity-0 -translate-y-4",enterTo:"opacity-100 translate-y-0",leave:"transition ease duration-100 transform",leaveFrom:"opacity-100 translate-y-0",leaveTo:"opacity-0 -translate-y-4"},o.createElement(u.R.Options,{className:(0,l.q)("divide-y overflow-y-auto outline-none rounded-tremor-default max-h-[228px] left-0 border my-1","bg-tremor-background border-tremor-border divide-tremor-border shadow-tremor-dropdown","dark:bg-dark-tremor-background 
dark:border-dark-tremor-border dark:divide-dark-tremor-border dark:shadow-dark-tremor-dropdown")},b)))})});h.displayName="Select"},57365:function(e,t,n){"use strict";n.d(t,{Z:function(){return c}});var r=n(5853),o=n(2265),i=n(28517),a=n(65954);let l=(0,n(1153).fn)("SelectItem"),c=o.forwardRef((e,t)=>{let{value:n,icon:c,className:s,children:u}=e,d=(0,r._T)(e,["value","icon","className","children"]);return o.createElement(i.R.Option,Object.assign({className:(0,a.q)(l("root"),"flex justify-start items-center cursor-default text-tremor-default px-2.5 py-2.5","ui-active:bg-tremor-background-muted ui-active:text-tremor-content-strong ui-selected:text-tremor-content-strong ui-selected:bg-tremor-background-muted text-tremor-content-emphasis","dark:ui-active:bg-dark-tremor-background-muted dark:ui-active:text-dark-tremor-content-strong dark:ui-selected:text-dark-tremor-content-strong dark:ui-selected:bg-dark-tremor-background-muted dark:text-dark-tremor-content-emphasis",s),ref:t,key:n,value:n},d),c&&o.createElement(c,{className:(0,a.q)(l("icon"),"flex-none w-5 h-5 mr-1.5","text-tremor-content-subtle","dark:text-dark-tremor-content-subtle")}),o.createElement("span",{className:"whitespace-nowrap truncate"},null!=u?u:n))});c.displayName="SelectItem"},92858:function(e,t,n){"use strict";n.d(t,{Z:function(){return N}});var r=n(5853),o=n(2265),i=n(62963),a=n(90945),l=n(13323),c=n(17684),s=n(80004),u=n(93689),d=n(38198),f=n(47634),p=n(56314),h=n(27847),m=n(64518);let g=(0,o.createContext)(null),v=Object.assign((0,h.yV)(function(e,t){let n=(0,c.M)(),{id:r="headlessui-description-".concat(n),...i}=e,a=function e(){let t=(0,o.useContext)(g);if(null===t){let t=Error("You used a component, but it is not inside a relevant parent.");throw Error.captureStackTrace&&Error.captureStackTrace(t,e),t}return t}(),l=(0,u.T)(t);(0,m.e)(()=>a.register(r),[r,a.register]);let 
s={ref:l,...a.props,id:r};return(0,h.sY)({ourProps:s,theirProps:i,slot:a.slot||{},defaultTag:"p",name:a.name||"Description"})}),{});var y=n(37388);let b=(0,o.createContext)(null),x=Object.assign((0,h.yV)(function(e,t){let n=(0,c.M)(),{id:r="headlessui-label-".concat(n),passive:i=!1,...a}=e,l=function e(){let t=(0,o.useContext)(b);if(null===t){let t=Error("You used a component, but it is not inside a relevant parent.");throw Error.captureStackTrace&&Error.captureStackTrace(t,e),t}return t}(),s=(0,u.T)(t);(0,m.e)(()=>l.register(r),[r,l.register]);let d={ref:s,...l.props,id:r};return i&&("onClick"in d&&(delete d.htmlFor,delete d.onClick),"onClick"in a&&delete a.onClick),(0,h.sY)({ourProps:d,theirProps:a,slot:l.slot||{},defaultTag:"label",name:l.name||"Label"})}),{}),w=(0,o.createContext)(null);w.displayName="GroupContext";let S=o.Fragment,k=Object.assign((0,h.yV)(function(e,t){let n=(0,c.M)(),{id:r="headlessui-switch-".concat(n),checked:m,defaultChecked:g=!1,onChange:v,name:b,value:x,form:S,...k}=e,E=(0,o.useContext)(w),C=(0,o.useRef)(null),O=(0,u.T)(C,t,null===E?null:E.setSwitch),[j,P]=(0,i.q)(m,v,g),M=(0,l.z)(()=>null==P?void 0:P(!j)),N=(0,l.z)(e=>{if((0,f.P)(e.currentTarget))return e.preventDefault();e.preventDefault(),M()}),I=(0,l.z)(e=>{e.key===y.R.Space?(e.preventDefault(),M()):e.key===y.R.Enter&&(0,p.g)(e.currentTarget)}),R=(0,l.z)(e=>e.preventDefault()),T=(0,o.useMemo)(()=>({checked:j}),[j]),A={id:r,ref:O,role:"switch",type:(0,s.f)(e,C),tabIndex:0,"aria-checked":j,"aria-labelledby":null==E?void 0:E.labelledby,"aria-describedby":null==E?void 0:E.describedby,onClick:N,onKeyUp:I,onKeyPress:R},_=(0,a.G)();return(0,o.useEffect)(()=>{var e;let t=null==(e=C.current)?void 0:e.closest("form");t&&void 
0!==g&&_.addEventListener(t,"reset",()=>{P(g)})},[C,P]),o.createElement(o.Fragment,null,null!=b&&j&&o.createElement(d._,{features:d.A.Hidden,...(0,h.oA)({as:"input",type:"checkbox",hidden:!0,readOnly:!0,form:S,checked:j,name:b,value:x})}),(0,h.sY)({ourProps:A,theirProps:k,slot:T,defaultTag:"button",name:"Switch"}))}),{Group:function(e){var t;let[n,r]=(0,o.useState)(null),[i,a]=function(){let[e,t]=(0,o.useState)([]);return[e.length>0?e.join(" "):void 0,(0,o.useMemo)(()=>function(e){let n=(0,l.z)(e=>(t(t=>[...t,e]),()=>t(t=>{let n=t.slice(),r=n.indexOf(e);return -1!==r&&n.splice(r,1),n}))),r=(0,o.useMemo)(()=>({register:n,slot:e.slot,name:e.name,props:e.props}),[n,e.slot,e.name,e.props]);return o.createElement(b.Provider,{value:r},e.children)},[t])]}(),[c,s]=function(){let[e,t]=(0,o.useState)([]);return[e.length>0?e.join(" "):void 0,(0,o.useMemo)(()=>function(e){let n=(0,l.z)(e=>(t(t=>[...t,e]),()=>t(t=>{let n=t.slice(),r=n.indexOf(e);return -1!==r&&n.splice(r,1),n}))),r=(0,o.useMemo)(()=>({register:n,slot:e.slot,name:e.name,props:e.props}),[n,e.slot,e.name,e.props]);return o.createElement(g.Provider,{value:r},e.children)},[t])]}(),u=(0,o.useMemo)(()=>({switch:n,setSwitch:r,labelledby:i,describedby:c}),[n,r,i,c]);return o.createElement(s,{name:"Switch.Description"},o.createElement(a,{name:"Switch.Label",props:{htmlFor:null==(t=u.switch)?void 0:t.id,onClick(e){n&&("LABEL"===e.currentTarget.tagName&&e.preventDefault(),n.click(),n.focus({preventScroll:!0}))}}},o.createElement(w.Provider,{value:u},(0,h.sY)({ourProps:{},theirProps:e,defaultTag:S,name:"Switch.Group"}))))},Label:x,Description:v});var E=n(44140),C=n(26898),O=n(65954),j=n(1153),P=n(1526);let 
M=(0,j.fn)("Switch"),N=o.forwardRef((e,t)=>{let{checked:n,defaultChecked:i=!1,onChange:a,color:l,name:c,error:s,errorMessage:u,disabled:d,required:f,tooltip:p,id:h}=e,m=(0,r._T)(e,["checked","defaultChecked","onChange","color","name","error","errorMessage","disabled","required","tooltip","id"]),g={bgColor:l?(0,j.bM)(l,C.K.background).bgColor:"bg-tremor-brand dark:bg-dark-tremor-brand",ringColor:l?(0,j.bM)(l,C.K.ring).ringColor:"ring-tremor-brand-muted dark:ring-dark-tremor-brand-muted"},[v,y]=(0,E.Z)(i,n),[b,x]=(0,o.useState)(!1),{tooltipProps:w,getReferenceProps:S}=(0,P.l)(300);return o.createElement("div",{className:"flex flex-row items-center justify-start"},o.createElement(P.Z,Object.assign({text:p},w)),o.createElement("div",Object.assign({ref:(0,j.lq)([t,w.refs.setReference]),className:(0,O.q)(M("root"),"flex flex-row relative h-5")},m,S),o.createElement("input",{type:"checkbox",className:(0,O.q)(M("input"),"absolute w-5 h-5 cursor-pointer left-0 top-0 opacity-0"),name:c,required:f,checked:v,onChange:e=>{e.preventDefault()}}),o.createElement(k,{checked:v,onChange:e=>{y(e),null==a||a(e)},disabled:d,className:(0,O.q)(M("switch"),"w-10 h-5 group relative inline-flex flex-shrink-0 cursor-pointer items-center justify-center rounded-tremor-full","focus:outline-none",d?"cursor-not-allowed":""),onFocus:()=>x(!0),onBlur:()=>x(!1),id:h},o.createElement("span",{className:(0,O.q)(M("sr-only"),"sr-only")},"Switch ",v?"on":"off"),o.createElement("span",{"aria-hidden":"true",className:(0,O.q)(M("background"),v?g.bgColor:"bg-tremor-border dark:bg-dark-tremor-border","pointer-events-none absolute mx-auto h-3 w-9 rounded-tremor-full transition-colors duration-100 ease-in-out")}),o.createElement("span",{"aria-hidden":"true",className:(0,O.q)(M("round"),v?(0,O.q)(g.bgColor,"translate-x-5 border-tremor-background dark:border-dark-tremor-background"):"translate-x-0 bg-tremor-border dark:bg-dark-tremor-border border-tremor-background 
dark:border-dark-tremor-background","pointer-events-none absolute left-0 inline-block h-5 w-5 transform rounded-tremor-full border-2 shadow-tremor-input duration-100 ease-in-out transition",b?(0,O.q)("ring-2",g.ringColor):"")}))),s&&u?o.createElement("p",{className:(0,O.q)(M("errorMessage"),"text-sm text-red-500 mt-1 ")},u):null)});N.displayName="Switch"},87452:function(e,t,n){"use strict";n.d(t,{Z:function(){return d},r:function(){return u}});var r=n(5853),o=n(21886);n(42698),n(64016);var i=n(8710);n(33232);var a=n(65954),l=n(1153),c=n(2265);let s=(0,l.fn)("Accordion"),u=(0,c.createContext)({isOpen:!1}),d=c.forwardRef((e,t)=>{var n;let{defaultOpen:l=!1,children:d,className:f}=e,p=(0,r._T)(e,["defaultOpen","children","className"]),h=null!==(n=(0,c.useContext)(i.Z))&&void 0!==n?n:(0,a.q)("rounded-tremor-default border");return c.createElement(o.p,Object.assign({as:"div",ref:t,className:(0,a.q)(s("root"),"overflow-hidden","bg-tremor-background border-tremor-border","dark:bg-dark-tremor-background dark:border-dark-tremor-border",h,f),defaultOpen:l},p),e=>{let{open:t}=e;return c.createElement(u.Provider,{value:{isOpen:t}},d)})});d.displayName="Accordion"},88829:function(e,t,n){"use strict";n.d(t,{Z:function(){return c}});var r=n(5853),o=n(2265),i=n(21886),a=n(65954);let l=(0,n(1153).fn)("AccordionBody"),c=o.forwardRef((e,t)=>{let{children:n,className:c}=e,s=(0,r._T)(e,["children","className"]);return o.createElement(i.p.Panel,Object.assign({ref:t,className:(0,a.q)(l("root"),"w-full text-tremor-default px-4 pb-3","text-tremor-content","dark:text-dark-tremor-content",c)},s),n)});c.displayName="AccordionBody"},72208:function(e,t,n){"use strict";n.d(t,{Z:function(){return u}});var r=n(5853),o=n(2265),i=n(21886);let a=e=>{var t=(0,r._T)(e,[]);return o.createElement("svg",Object.assign({xmlns:"http://www.w3.org/2000/svg",viewBox:"0 0 24 24",fill:"currentColor"},t),o.createElement("path",{d:"M11.9999 10.8284L7.0502 15.7782L5.63599 14.364L11.9999 8L18.3639 14.364L16.9497 
15.7782L11.9999 10.8284Z"}))};var l=n(87452),c=n(65954);let s=(0,n(1153).fn)("AccordionHeader"),u=o.forwardRef((e,t)=>{let{children:n,className:u}=e,d=(0,r._T)(e,["children","className"]),{isOpen:f}=(0,o.useContext)(l.r);return o.createElement(i.p.Button,Object.assign({ref:t,className:(0,c.q)(s("root"),"w-full flex items-center justify-between px-4 py-3","text-tremor-content-emphasis","dark:text-dark-tremor-content-emphasis",u)},d),o.createElement("div",{className:(0,c.q)(s("children"),"flex flex-1 text-inherit mr-4")},n),o.createElement("div",null,o.createElement(a,{className:(0,c.q)(s("arrowIcon"),"h-5 w-5 -mr-1","text-tremor-content-subtle","dark:text-dark-tremor-content-subtle",f?"transition-all":"transition-all -rotate-180")})))});u.displayName="AccordionHeader"},21626:function(e,t,n){"use strict";n.d(t,{Z:function(){return l}});var r=n(5853),o=n(2265),i=n(65954);let a=(0,n(1153).fn)("Table"),l=o.forwardRef((e,t)=>{let{children:n,className:l}=e,c=(0,r._T)(e,["children","className"]);return o.createElement("div",{className:(0,i.q)(a("root"),"overflow-auto",l)},o.createElement("table",Object.assign({ref:t,className:(0,i.q)(a("table"),"w-full text-tremor-default","text-tremor-content","dark:text-dark-tremor-content")},c),n))});l.displayName="Table"},97214:function(e,t,n){"use strict";n.d(t,{Z:function(){return l}});var r=n(5853),o=n(2265),i=n(65954);let a=(0,n(1153).fn)("TableBody"),l=o.forwardRef((e,t)=>{let{children:n,className:l}=e,c=(0,r._T)(e,["children","className"]);return o.createElement(o.Fragment,null,o.createElement("tbody",Object.assign({ref:t,className:(0,i.q)(a("root"),"align-top divide-y","divide-tremor-border","dark:divide-dark-tremor-border",l)},c),n))});l.displayName="TableBody"},28241:function(e,t,n){"use strict";n.d(t,{Z:function(){return l}});var r=n(5853),o=n(2265),i=n(65954);let a=(0,n(1153).fn)("TableCell"),l=o.forwardRef((e,t)=>{let{children:n,className:l}=e,c=(0,r._T)(e,["children","className"]);return 
o.createElement(o.Fragment,null,o.createElement("td",Object.assign({ref:t,className:(0,i.q)(a("root"),"align-middle whitespace-nowrap text-left p-4",l)},c),n))});l.displayName="TableCell"},58834:function(e,t,n){"use strict";n.d(t,{Z:function(){return l}});var r=n(5853),o=n(2265),i=n(65954);let a=(0,n(1153).fn)("TableHead"),l=o.forwardRef((e,t)=>{let{children:n,className:l}=e,c=(0,r._T)(e,["children","className"]);return o.createElement(o.Fragment,null,o.createElement("thead",Object.assign({ref:t,className:(0,i.q)(a("root"),"text-left","text-tremor-content","dark:text-dark-tremor-content",l)},c),n))});l.displayName="TableHead"},69552:function(e,t,n){"use strict";n.d(t,{Z:function(){return l}});var r=n(5853),o=n(2265),i=n(65954);let a=(0,n(1153).fn)("TableHeaderCell"),l=o.forwardRef((e,t)=>{let{children:n,className:l}=e,c=(0,r._T)(e,["children","className"]);return o.createElement(o.Fragment,null,o.createElement("th",Object.assign({ref:t,className:(0,i.q)(a("root"),"whitespace-nowrap text-left font-semibold top-0 px-4 py-3.5","text-tremor-content","dark:text-dark-tremor-content",l)},c),n))});l.displayName="TableHeaderCell"},71876:function(e,t,n){"use strict";n.d(t,{Z:function(){return l}});var r=n(5853),o=n(2265),i=n(65954);let a=(0,n(1153).fn)("TableRow"),l=o.forwardRef((e,t)=>{let{children:n,className:l}=e,c=(0,r._T)(e,["children","className"]);return o.createElement(o.Fragment,null,o.createElement("tr",Object.assign({ref:t,className:(0,i.q)(a("row"),l)},c),n))});l.displayName="TableRow"},97765:function(e,t,n){"use strict";n.d(t,{Z:function(){return c}});var r=n(5853),o=n(26898),i=n(65954),a=n(1153),l=n(2265);let c=l.forwardRef((e,t)=>{let{color:n,children:c,className:s}=e,u=(0,r._T)(e,["color","children","className"]);return l.createElement("p",Object.assign({ref:t,className:(0,i.q)(n?(0,a.bM)(n,o.K.lightText).textColor:"text-tremor-content-subtle dark:text-dark-tremor-content-subtle",s)},u),c)});c.displayName="Subtitle"},96889:function(e,t,n){"use 
strict";n.d(t,{Z:function(){return s}});var r=n(5853),o=n(2265),i=n(26898),a=n(65954),l=n(1153);let c=(0,l.fn)("BarList"),s=o.forwardRef((e,t)=>{var n;let s;let{data:u=[],color:d,valueFormatter:f=l.Cj,showAnimation:p=!1,className:h}=e,m=(0,r._T)(e,["data","color","valueFormatter","showAnimation","className"]),g=(n=u.map(e=>e.value),s=-1/0,n.forEach(e=>{s=Math.max(s,e)}),n.map(e=>0===e?0:Math.max(e/s*100,1)));return o.createElement("div",Object.assign({ref:t,className:(0,a.q)(c("root"),"flex justify-between space-x-6",h)},m),o.createElement("div",{className:(0,a.q)(c("bars"),"relative w-full")},u.map((e,t)=>{var n,r,s;let f=e.icon;return o.createElement("div",{key:null!==(n=e.key)&&void 0!==n?n:e.name,className:(0,a.q)(c("bar"),"flex items-center rounded-tremor-small bg-opacity-30","h-9",e.color||d?(0,l.bM)(null!==(r=e.color)&&void 0!==r?r:d,i.K.background).bgColor:"bg-tremor-brand-subtle dark:bg-dark-tremor-brand-subtle dark:bg-opacity-30",t===u.length-1?"mb-0":"mb-2"),style:{width:"".concat(g[t],"%"),transition:p?"all 1s":""}},o.createElement("div",{className:(0,a.q)("absolute max-w-full flex left-2")},f?o.createElement(f,{className:(0,a.q)(c("barIcon"),"flex-none h-5 w-5 mr-2","text-tremor-content","dark:text-dark-tremor-content")}):null,e.href?o.createElement("a",{href:e.href,target:null!==(s=e.target)&&void 0!==s?s:"_blank",rel:"noreferrer",className:(0,a.q)(c("barLink"),"whitespace-nowrap hover:underline truncate text-tremor-default","text-tremor-content-emphasis","dark:text-dark-tremor-content-emphasis")},e.name):o.createElement("p",{className:(0,a.q)(c("barText"),"whitespace-nowrap truncate text-tremor-default","text-tremor-content-emphasis","dark:text-dark-tremor-content-emphasis")},e.name)))})),o.createElement("div",{className:"text-right min-w-min"},u.map((e,t)=>{var n;return o.createElement("div",{key:null!==(n=e.key)&&void 0!==n?n:e.name,className:(0,a.q)(c("labelWrapper"),"flex justify-end 
items-center","h-9",t===u.length-1?"mb-0":"mb-2")},o.createElement("p",{className:(0,a.q)(c("labelText"),"whitespace-nowrap truncate text-tremor-default","text-tremor-content-emphasis","dark:text-dark-tremor-content-emphasis")},f(e.value)))})))});s.displayName="BarList"},44140:function(e,t,n){"use strict";n.d(t,{Z:function(){return o}});var r=n(2265);let o=(e,t)=>{let n=void 0!==t,[o,i]=(0,r.useState)(e);return[n?t:o,e=>{n||i(e)}]}},51646:function(e,t,n){"use strict";n.d(t,{Z:function(){return o}});var r=n(2265);function o(){let[,e]=r.useReducer(e=>e+1,0);return e}},12757:function(e,t,n){"use strict";n.d(t,{F:function(){return a},Z:function(){return i}});var r=n(36760),o=n.n(r);function i(e,t,n){return o()({["".concat(e,"-status-success")]:"success"===t,["".concat(e,"-status-warning")]:"warning"===t,["".concat(e,"-status-error")]:"error"===t,["".concat(e,"-status-validating")]:"validating"===t,["".concat(e,"-has-feedback")]:n})}let a=(e,t)=>t||e},67960:function(e,t,n){"use strict";n.d(t,{Z:function(){return e8}});var r=n(2265),o=n(36760),i=n.n(o),a=n(18694),l=n(71744),c=n(33759),s=e=>{let{prefixCls:t,className:n,style:o,size:a,shape:l}=e,c=i()({["".concat(t,"-lg")]:"large"===a,["".concat(t,"-sm")]:"small"===a}),s=i()({["".concat(t,"-circle")]:"circle"===l,["".concat(t,"-square")]:"square"===l,["".concat(t,"-round")]:"round"===l}),u=r.useMemo(()=>"number"==typeof a?{width:a,height:a,lineHeight:"".concat(a,"px")}:{},[a]);return r.createElement("span",{className:i()(t,c,s,n),style:Object.assign(Object.assign({},u),o)})},u=n(352),d=n(80669),f=n(3104);let p=new u.E4("ant-skeleton-loading",{"0%":{backgroundPosition:"100% 50%"},"100%":{backgroundPosition:"0 50%"}}),h=e=>({height:e,lineHeight:(0,u.bf)(e)}),m=e=>Object.assign({width:e},h(e)),g=e=>({background:e.skeletonLoadingBackground,backgroundSize:"400% 
100%",animationName:p,animationDuration:e.skeletonLoadingMotionDuration,animationTimingFunction:"ease",animationIterationCount:"infinite"}),v=(e,t)=>Object.assign({width:t(e).mul(5).equal(),minWidth:t(e).mul(5).equal()},h(e)),y=e=>{let{skeletonAvatarCls:t,gradientFromColor:n,controlHeight:r,controlHeightLG:o,controlHeightSM:i}=e;return{["".concat(t)]:Object.assign({display:"inline-block",verticalAlign:"top",background:n},m(r)),["".concat(t).concat(t,"-circle")]:{borderRadius:"50%"},["".concat(t).concat(t,"-lg")]:Object.assign({},m(o)),["".concat(t).concat(t,"-sm")]:Object.assign({},m(i))}},b=e=>{let{controlHeight:t,borderRadiusSM:n,skeletonInputCls:r,controlHeightLG:o,controlHeightSM:i,gradientFromColor:a,calc:l}=e;return{["".concat(r)]:Object.assign({display:"inline-block",verticalAlign:"top",background:a,borderRadius:n},v(t,l)),["".concat(r,"-lg")]:Object.assign({},v(o,l)),["".concat(r,"-sm")]:Object.assign({},v(i,l))}},x=e=>Object.assign({width:e},h(e)),w=e=>{let{skeletonImageCls:t,imageSizeBase:n,gradientFromColor:r,borderRadiusSM:o,calc:i}=e;return{["".concat(t)]:Object.assign(Object.assign({display:"flex",alignItems:"center",justifyContent:"center",verticalAlign:"top",background:r,borderRadius:o},x(i(n).mul(2).equal())),{["".concat(t,"-path")]:{fill:"#bfbfbf"},["".concat(t,"-svg")]:Object.assign(Object.assign({},x(n)),{maxWidth:i(n).mul(4).equal(),maxHeight:i(n).mul(4).equal()}),["".concat(t,"-svg").concat(t,"-svg-circle")]:{borderRadius:"50%"}}),["".concat(t).concat(t,"-circle")]:{borderRadius:"50%"}}},S=(e,t,n)=>{let{skeletonButtonCls:r}=e;return{["".concat(n).concat(r,"-circle")]:{width:t,minWidth:t,borderRadius:"50%"},["".concat(n).concat(r,"-round")]:{borderRadius:t}}},k=(e,t)=>Object.assign({width:t(e).mul(2).equal(),minWidth:t(e).mul(2).equal()},h(e)),E=e=>{let{borderRadiusSM:t,skeletonButtonCls:n,controlHeight:r,controlHeightLG:o,controlHeightSM:i,gradientFromColor:a,calc:l}=e;return 
Object.assign(Object.assign(Object.assign(Object.assign(Object.assign({["".concat(n)]:Object.assign({display:"inline-block",verticalAlign:"top",background:a,borderRadius:t,width:l(r).mul(2).equal(),minWidth:l(r).mul(2).equal()},k(r,l))},S(e,r,n)),{["".concat(n,"-lg")]:Object.assign({},k(o,l))}),S(e,o,"".concat(n,"-lg"))),{["".concat(n,"-sm")]:Object.assign({},k(i,l))}),S(e,i,"".concat(n,"-sm")))},C=e=>{let{componentCls:t,skeletonAvatarCls:n,skeletonTitleCls:r,skeletonParagraphCls:o,skeletonButtonCls:i,skeletonInputCls:a,skeletonImageCls:l,controlHeight:c,controlHeightLG:s,controlHeightSM:u,gradientFromColor:d,padding:f,marginSM:p,borderRadius:h,titleHeight:v,blockRadius:x,paragraphLiHeight:S,controlHeightXS:k,paragraphMarginTop:C}=e;return{["".concat(t)]:{display:"table",width:"100%",["".concat(t,"-header")]:{display:"table-cell",paddingInlineEnd:f,verticalAlign:"top",["".concat(n)]:Object.assign({display:"inline-block",verticalAlign:"top",background:d},m(c)),["".concat(n,"-circle")]:{borderRadius:"50%"},["".concat(n,"-lg")]:Object.assign({},m(s)),["".concat(n,"-sm")]:Object.assign({},m(u))},["".concat(t,"-content")]:{display:"table-cell",width:"100%",verticalAlign:"top",["".concat(r)]:{width:"100%",height:v,background:d,borderRadius:x,["+ ".concat(o)]:{marginBlockStart:u}},["".concat(o)]:{padding:0,"> li":{width:"100%",height:S,listStyle:"none",background:d,borderRadius:x,"+ li":{marginBlockStart:k}}},["".concat(o,"> li:last-child:not(:first-child):not(:nth-child(2))")]:{width:"61%"}},["&-round ".concat(t,"-content")]:{["".concat(r,", ").concat(o," > li")]:{borderRadius:h}}},["".concat(t,"-with-avatar ").concat(t,"-content")]:{["".concat(r)]:{marginBlockStart:p,["+ 
".concat(o)]:{marginBlockStart:C}}},["".concat(t).concat(t,"-element")]:Object.assign(Object.assign(Object.assign(Object.assign({display:"inline-block",width:"auto"},E(e)),y(e)),b(e)),w(e)),["".concat(t).concat(t,"-block")]:{width:"100%",["".concat(i)]:{width:"100%"},["".concat(a)]:{width:"100%"}},["".concat(t).concat(t,"-active")]:{["\n ".concat(r,",\n ").concat(o," > li,\n ").concat(n,",\n ").concat(i,",\n ").concat(a,",\n ").concat(l,"\n ")]:Object.assign({},g(e))}}};var O=(0,d.I$)("Skeleton",e=>{let{componentCls:t,calc:n}=e;return[C((0,f.TS)(e,{skeletonAvatarCls:"".concat(t,"-avatar"),skeletonTitleCls:"".concat(t,"-title"),skeletonParagraphCls:"".concat(t,"-paragraph"),skeletonButtonCls:"".concat(t,"-button"),skeletonInputCls:"".concat(t,"-input"),skeletonImageCls:"".concat(t,"-image"),imageSizeBase:n(e.controlHeight).mul(1.5).equal(),borderRadius:100,skeletonLoadingBackground:"linear-gradient(90deg, ".concat(e.gradientFromColor," 25%, ").concat(e.gradientToColor," 37%, ").concat(e.gradientFromColor," 63%)"),skeletonLoadingMotionDuration:"1.4s"}))]},e=>{let{colorFillContent:t,colorFill:n}=e;return{color:t,colorGradientEnd:n,gradientFromColor:t,gradientToColor:n,titleHeight:e.controlHeight/2,blockRadius:e.borderRadiusSM,paragraphMarginTop:e.marginLG+e.marginXXS,paragraphLiHeight:e.controlHeight/2}},{deprecatedTokens:[["color","gradientFromColor"],["colorGradientEnd","gradientToColor"]]}),j=n(1119),P={icon:{tag:"svg",attrs:{viewBox:"64 64 896 896",focusable:"false"},children:[{tag:"path",attrs:{d:"M888 792H200V168c0-4.4-3.6-8-8-8h-56c-4.4 0-8 3.6-8 8v688c0 4.4 3.6 8 8 8h752c4.4 0 8-3.6 8-8v-56c0-4.4-3.6-8-8-8zM288 604a64 64 0 10128 0 64 64 0 10-128 0zm118-224a48 48 0 1096 0 48 48 0 10-96 0zm158 228a96 96 0 10192 0 96 96 0 10-192 0zm148-314a56 56 0 10112 0 56 56 0 10-112 0z"}}]},name:"dot-chart",theme:"outlined"},M=n(55015),N=r.forwardRef(function(e,t){return r.createElement(M.Z,(0,j.Z)({},e,{ref:t,icon:P}))}),I=n(83145),R=e=>{let 
t=t=>{let{width:n,rows:r=2}=e;return Array.isArray(n)?n[t]:r-1===t?n:void 0},{prefixCls:n,className:o,style:a,rows:l}=e,c=(0,I.Z)(Array(l)).map((e,n)=>r.createElement("li",{key:n,style:{width:t(n)}}));return r.createElement("ul",{className:i()(n,o),style:a},c)},T=e=>{let{prefixCls:t,className:n,width:o,style:a}=e;return r.createElement("h3",{className:i()(t,n),style:Object.assign({width:o},a)})};function A(e){return e&&"object"==typeof e?e:{}}let _=e=>{let{prefixCls:t,loading:n,className:o,rootClassName:a,style:c,children:u,avatar:d=!1,title:f=!0,paragraph:p=!0,active:h,round:m}=e,{getPrefixCls:g,direction:v,skeleton:y}=r.useContext(l.E_),b=g("skeleton",t),[x,w,S]=O(b);if(n||!("loading"in e)){let e,t;let n=!!d,l=!!f,u=!!p;if(n){let t=Object.assign(Object.assign({prefixCls:"".concat(b,"-avatar")},l&&!u?{size:"large",shape:"square"}:{size:"large",shape:"circle"}),A(d));e=r.createElement("div",{className:"".concat(b,"-header")},r.createElement(s,Object.assign({},t)))}if(l||u){let e,o;if(l){let t=Object.assign(Object.assign({prefixCls:"".concat(b,"-title")},!n&&u?{width:"38%"}:n&&u?{width:"50%"}:{}),A(f));e=r.createElement(T,Object.assign({},t))}if(u){let e=Object.assign(Object.assign({prefixCls:"".concat(b,"-paragraph")},function(e,t){let n={};return e&&t||(n.width="61%"),!e&&t?n.rows=3:n.rows=2,n}(n,l)),A(p));o=r.createElement(R,Object.assign({},e))}t=r.createElement("div",{className:"".concat(b,"-content")},e,o)}let g=i()(b,{["".concat(b,"-with-avatar")]:n,["".concat(b,"-active")]:h,["".concat(b,"-rtl")]:"rtl"===v,["".concat(b,"-round")]:m},null==y?void 0:y.className,o,a,w,S);return x(r.createElement("div",{className:g,style:Object.assign(Object.assign({},null==y?void 0:y.style),c)},e,t))}return void 
0!==u?u:null};_.Button=e=>{let{prefixCls:t,className:n,rootClassName:o,active:c,block:u=!1,size:d="default"}=e,{getPrefixCls:f}=r.useContext(l.E_),p=f("skeleton",t),[h,m,g]=O(p),v=(0,a.Z)(e,["prefixCls"]),y=i()(p,"".concat(p,"-element"),{["".concat(p,"-active")]:c,["".concat(p,"-block")]:u},n,o,m,g);return h(r.createElement("div",{className:y},r.createElement(s,Object.assign({prefixCls:"".concat(p,"-button"),size:d},v))))},_.Avatar=e=>{let{prefixCls:t,className:n,rootClassName:o,active:c,shape:u="circle",size:d="default"}=e,{getPrefixCls:f}=r.useContext(l.E_),p=f("skeleton",t),[h,m,g]=O(p),v=(0,a.Z)(e,["prefixCls","className"]),y=i()(p,"".concat(p,"-element"),{["".concat(p,"-active")]:c},n,o,m,g);return h(r.createElement("div",{className:y},r.createElement(s,Object.assign({prefixCls:"".concat(p,"-avatar"),shape:u,size:d},v))))},_.Input=e=>{let{prefixCls:t,className:n,rootClassName:o,active:c,block:u,size:d="default"}=e,{getPrefixCls:f}=r.useContext(l.E_),p=f("skeleton",t),[h,m,g]=O(p),v=(0,a.Z)(e,["prefixCls"]),y=i()(p,"".concat(p,"-element"),{["".concat(p,"-active")]:c,["".concat(p,"-block")]:u},n,o,m,g);return h(r.createElement("div",{className:y},r.createElement(s,Object.assign({prefixCls:"".concat(p,"-input"),size:d},v))))},_.Image=e=>{let{prefixCls:t,className:n,rootClassName:o,style:a,active:c}=e,{getPrefixCls:s}=r.useContext(l.E_),u=s("skeleton",t),[d,f,p]=O(u),h=i()(u,"".concat(u,"-element"),{["".concat(u,"-active")]:c},n,o,f,p);return d(r.createElement("div",{className:h},r.createElement("div",{className:i()("".concat(u,"-image"),n),style:a},r.createElement("svg",{viewBox:"0 0 1098 1024",xmlns:"http://www.w3.org/2000/svg",className:"".concat(u,"-image-svg")},r.createElement("path",{d:"M365.714286 329.142857q0 45.714286-32.036571 77.677714t-77.677714 32.036571-77.677714-32.036571-32.036571-77.677714 32.036571-77.677714 77.677714-32.036571 77.677714 32.036571 32.036571 77.677714zM950.857143 548.571429l0 256-804.571429 0 0-109.714286 182.857143-182.857143 
91.428571 91.428571 292.571429-292.571429zM1005.714286 146.285714l-914.285714 0q-7.460571 0-12.873143 5.412571t-5.412571 12.873143l0 694.857143q0 7.460571 5.412571 12.873143t12.873143 5.412571l914.285714 0q7.460571 0 12.873143-5.412571t5.412571-12.873143l0-694.857143q0-7.460571-5.412571-12.873143t-12.873143-5.412571zM1097.142857 164.571429l0 694.857143q0 37.741714-26.843429 64.585143t-64.585143 26.843429l-914.285714 0q-37.741714 0-64.585143-26.843429t-26.843429-64.585143l0-694.857143q0-37.741714 26.843429-64.585143t64.585143-26.843429l914.285714 0q37.741714 0 64.585143 26.843429t26.843429 64.585143z",className:"".concat(u,"-image-path")})))))},_.Node=e=>{let{prefixCls:t,className:n,rootClassName:o,style:a,active:c,children:s}=e,{getPrefixCls:u}=r.useContext(l.E_),d=u("skeleton",t),[f,p,h]=O(d),m=i()(d,"".concat(d,"-element"),{["".concat(d,"-active")]:c},p,n,o,h),g=null!=s?s:r.createElement(N,null);return f(r.createElement("div",{className:m},r.createElement("div",{className:i()("".concat(d,"-image"),n),style:a},g)))};var D=n(49638),Z=n(39760),L=n(96473),z=n(11993),B=n(31686),F=n(26365),H=n(41154),q=n(6989),W=n(50506),K=n(79267),U=(0,r.createContext)(null),V=n(31474),G=n(58525),X=n(28791),$=n(53346),Y=function(e){var t=e.activeTabOffset,n=e.horizontal,o=e.rtl,i=e.indicator,a=void 0===i?{}:i,l=a.size,c=a.align,s=void 0===c?"center":c,u=(0,r.useState)(),d=(0,F.Z)(u,2),f=d[0],p=d[1],h=(0,r.useRef)(),m=r.useCallback(function(e){return"function"==typeof l?l(e):"number"==typeof l?l:e},[l]);function g(){$.Z.cancel(h.current)}return(0,r.useEffect)(function(){var e={};if(t){if(n){e.width=m(t.width);var r=o?"right":"left";"start"===s&&(e[r]=t[r]),"center"===s&&(e[r]=t[r]+t.width/2,e.transform=o?"translateX(50%)":"translateX(-50%)"),"end"===s&&(e[r]=t[r]+t.width,e.transform="translateX(-100%)")}else 
e.height=m(t.height),"start"===s&&(e.top=t.top),"center"===s&&(e.top=t.top+t.height/2,e.transform="translateY(-50%)"),"end"===s&&(e.top=t.top+t.height,e.transform="translateY(-100%)")}return g(),h.current=(0,$.Z)(function(){p(e)}),g},[t,n,o,s,m]),{style:f}},Q={width:0,height:0,left:0,top:0};function J(e,t){var n=r.useRef(e),o=r.useState({}),i=(0,F.Z)(o,2)[1];return[n.current,function(e){var r="function"==typeof e?e(n.current):e;r!==n.current&&t(r,n.current),n.current=r,i({})}]}var ee=n(27380);function et(e){var t=(0,r.useState)(0),n=(0,F.Z)(t,2),o=n[0],i=n[1],a=(0,r.useRef)(0),l=(0,r.useRef)();return l.current=e,(0,ee.o)(function(){var e;null===(e=l.current)||void 0===e||e.call(l)},[o]),function(){a.current===o&&(a.current+=1,i(a.current))}}var en={width:0,height:0,left:0,top:0,right:0};function er(e){var t;return e instanceof Map?(t={},e.forEach(function(e,n){t[n]=e})):t=e,JSON.stringify(t)}function eo(e){return String(e).replace(/"/g,"TABS_DQ")}function ei(e,t,n,r){return!!n&&!r&&!1!==e&&(void 0!==e||!1!==t&&null!==t)}var ea=r.forwardRef(function(e,t){var n=e.prefixCls,o=e.editable,i=e.locale,a=e.style;return o&&!1!==o.showAdd?r.createElement("button",{ref:t,type:"button",className:"".concat(n,"-nav-add"),style:a,"aria-label":(null==i?void 0:i.addAriaLabel)||"Add tab",onClick:function(e){o.onEdit("add",{event:e})}},o.addIcon||"+"):null}),el=r.forwardRef(function(e,t){var n,o=e.position,i=e.prefixCls,a=e.extra;if(!a)return null;var l={};return"object"!==(0,H.Z)(a)||r.isValidElement(a)?l.right=a:l=a,"right"===o&&(n=l.right),"left"===o&&(n=l.left),n?r.createElement("div",{className:"".concat(i,"-extra-content"),ref:t},n):null}),ec=n(71030),es=n(33082),eu=n(95814),ed=r.forwardRef(function(e,t){var 
n=e.prefixCls,o=e.id,a=e.tabs,l=e.locale,c=e.mobile,s=e.moreIcon,u=e.moreTransitionName,d=e.style,f=e.className,p=e.editable,h=e.tabBarGutter,m=e.rtl,g=e.removeAriaLabel,v=e.onTabClick,y=e.getPopupContainer,b=e.popupClassName,x=(0,r.useState)(!1),w=(0,F.Z)(x,2),S=w[0],k=w[1],E=(0,r.useState)(null),C=(0,F.Z)(E,2),O=C[0],j=C[1],P="".concat(o,"-more-popup"),M="".concat(n,"-dropdown"),N=null!==O?"".concat(P,"-").concat(O):null,I=null==l?void 0:l.dropdownAriaLabel,R=r.createElement(es.ZP,{onClick:function(e){v(e.key,e.domEvent),k(!1)},prefixCls:"".concat(M,"-menu"),id:P,tabIndex:-1,role:"listbox","aria-activedescendant":N,selectedKeys:[O],"aria-label":void 0!==I?I:"expanded dropdown"},a.map(function(e){var t=e.closable,n=e.disabled,i=e.closeIcon,a=e.key,l=e.label,c=ei(t,i,p,n);return r.createElement(es.sN,{key:a,id:"".concat(P,"-").concat(a),role:"option","aria-controls":o&&"".concat(o,"-panel-").concat(a),disabled:n},r.createElement("span",null,l),c&&r.createElement("button",{type:"button","aria-label":g||"remove",tabIndex:0,className:"".concat(M,"-menu-item-remove"),onClick:function(e){e.stopPropagation(),e.preventDefault(),e.stopPropagation(),p.onEdit("remove",{key:a,event:e})}},i||p.removeIcon||"\xd7"))}));function T(e){for(var t=a.filter(function(e){return!e.disabled}),n=t.findIndex(function(e){return e.key===O})||0,r=t.length,o=0;oMath.abs(l-n)?[l,c,s-t.x,u-t.y]:[n,r,i,o]},em=function(e){var t=e.current||{},n=t.offsetWidth,r=void 0===n?0:n,o=t.offsetHeight;if(e.current){var i=e.current.getBoundingClientRect(),a=i.width,l=i.height;if(1>Math.abs(a-r))return[a,l]}return[r,void 0===o?0:o]},eg=function(e,t){return e[t?0:1]},ev=r.forwardRef(function(e,t){var 
n,o,a,l,c,s,u,d,f,p,h,m,g,v,y,b,x,w,S,k,E,C,O,P,M,N,R,T,A,_,D,Z,L,H,q,W,K,$,ee,ei=e.className,ec=e.style,es=e.id,eu=e.animated,ed=e.activeKey,ev=e.rtl,ey=e.extra,eb=e.editable,ex=e.locale,ew=e.tabPosition,eS=e.tabBarGutter,ek=e.children,eE=e.onTabClick,eC=e.onTabScroll,eO=e.indicator,ej=r.useContext(U),eP=ej.prefixCls,eM=ej.tabs,eN=(0,r.useRef)(null),eI=(0,r.useRef)(null),eR=(0,r.useRef)(null),eT=(0,r.useRef)(null),eA=(0,r.useRef)(null),e_=(0,r.useRef)(null),eD=(0,r.useRef)(null),eZ="top"===ew||"bottom"===ew,eL=J(0,function(e,t){eZ&&eC&&eC({direction:e>t?"left":"right"})}),ez=(0,F.Z)(eL,2),eB=ez[0],eF=ez[1],eH=J(0,function(e,t){!eZ&&eC&&eC({direction:e>t?"top":"bottom"})}),eq=(0,F.Z)(eH,2),eW=eq[0],eK=eq[1],eU=(0,r.useState)([0,0]),eV=(0,F.Z)(eU,2),eG=eV[0],eX=eV[1],e$=(0,r.useState)([0,0]),eY=(0,F.Z)(e$,2),eQ=eY[0],eJ=eY[1],e0=(0,r.useState)([0,0]),e1=(0,F.Z)(e0,2),e2=e1[0],e6=e1[1],e3=(0,r.useState)([0,0]),e4=(0,F.Z)(e3,2),e5=e4[0],e8=e4[1],e7=(n=new Map,o=(0,r.useRef)([]),a=(0,r.useState)({}),l=(0,F.Z)(a,2)[1],c=(0,r.useRef)("function"==typeof n?n():n),s=et(function(){var e=c.current;o.current.forEach(function(t){e=t(e)}),o.current=[],c.current=e,l({})}),[c.current,function(e){o.current.push(e),s()}]),e9=(0,F.Z)(e7,2),te=e9[0],tt=e9[1],tn=(u=eQ[0],(0,r.useMemo)(function(){for(var e=new Map,t=te.get(null===(o=eM[0])||void 0===o?void 0:o.key)||Q,n=t.left+t.width,r=0;rtd?td:e}eZ&&ev?(tu=0,td=Math.max(0,to-tc)):(tu=Math.min(0,tc-to),td=0);var tp=(0,r.useRef)(null),th=(0,r.useState)(),tm=(0,F.Z)(th,2),tg=tm[0],tv=tm[1];function ty(){tv(Date.now())}function tb(){tp.current&&clearTimeout(tp.current)}d=function(e,t){function n(e,t){e(function(e){return 
tf(e+t)})}return!!tl&&(eZ?n(eF,e):n(eK,t),tb(),ty(),!0)},f=(0,r.useState)(),h=(p=(0,F.Z)(f,2))[0],m=p[1],g=(0,r.useState)(0),y=(v=(0,F.Z)(g,2))[0],b=v[1],x=(0,r.useState)(0),S=(w=(0,F.Z)(x,2))[0],k=w[1],E=(0,r.useState)(),O=(C=(0,F.Z)(E,2))[0],P=C[1],M=(0,r.useRef)(),N=(0,r.useRef)(),(R=(0,r.useRef)(null)).current={onTouchStart:function(e){var t=e.touches[0];m({x:t.screenX,y:t.screenY}),window.clearInterval(M.current)},onTouchMove:function(e){if(h){e.preventDefault();var t=e.touches[0],n=t.screenX,r=t.screenY;m({x:n,y:r});var o=n-h.x,i=r-h.y;d(o,i);var a=Date.now();b(a),k(a-y),P({x:o,y:i})}},onTouchEnd:function(){if(h&&(m(null),P(null),O)){var e=O.x/S,t=O.y/S;if(!(.1>Math.max(Math.abs(e),Math.abs(t)))){var n=e,r=t;M.current=window.setInterval(function(){if(.01>Math.abs(n)&&.01>Math.abs(r)){window.clearInterval(M.current);return}n*=.9046104802746175,r*=.9046104802746175,d(20*n,20*r)},20)}}},onWheel:function(e){var t=e.deltaX,n=e.deltaY,r=0,o=Math.abs(t),i=Math.abs(n);o===i?r="x"===N.current?t:n:o>i?(r=t,N.current="x"):(r=n,N.current="y"),d(-r,-r)&&e.preventDefault()}},r.useEffect(function(){function e(e){R.current.onTouchMove(e)}function t(e){R.current.onTouchEnd(e)}return document.addEventListener("touchmove",e,{passive:!1}),document.addEventListener("touchend",t,{passive:!1}),eT.current.addEventListener("touchstart",function(e){R.current.onTouchStart(e)},{passive:!1}),eT.current.addEventListener("wheel",function(e){R.current.onWheel(e)}),function(){document.removeEventListener("touchmove",e),document.removeEventListener("touchend",t)}},[]),(0,r.useEffect)(function(){return tb(),tg&&(tp.current=setTimeout(function(){tv(0)},100)),tb},[tg]);var tx=(T=eZ?eB:eW,L=(A=(0,B.Z)((0,B.Z)({},e),{},{tabs:eM})).tabs,H=A.tabPosition,q=A.rtl,["top","bottom"].includes(H)?(_="width",D=q?"right":"left",Z=Math.abs(T)):(_="height",D="top",Z=-T),(0,r.useMemo)(function(){if(!L.length)return[0,0];for(var e=L.length,t=e,n=0;nZ+tc){t=n-1;break}}for(var 
o=0,i=e-1;i>=0;i-=1)if((tn.get(L[i].key)||en)[D]=t?[0,0]:[o,t]},[tn,tc,to,ti,ta,Z,H,L.map(function(e){return e.key}).join("_"),q])),tw=(0,F.Z)(tx,2),tS=tw[0],tk=tw[1],tE=(0,G.Z)(function(){var e=arguments.length>0&&void 0!==arguments[0]?arguments[0]:ed,t=tn.get(e)||{width:0,height:0,left:0,right:0,top:0};if(eZ){var n=eB;ev?t.righteB+tc&&(n=t.right+t.width-tc):t.left<-eB?n=-t.left:t.left+t.width>-eB+tc&&(n=-(t.left+t.width-tc)),eK(0),eF(tf(n))}else{var r=eW;t.top<-eW?r=-t.top:t.top+t.height>-eW+tc&&(r=-(t.top+t.height-tc)),eF(0),eK(tf(r))}}),tC={};"top"===ew||"bottom"===ew?tC[ev?"marginRight":"marginLeft"]=eS:tC.marginTop=eS;var tO=eM.map(function(e,t){var n=e.key;return r.createElement(ep,{id:es,prefixCls:eP,key:n,tab:e,style:0===t?void 0:tC,closable:e.closable,editable:eb,active:n===ed,renderWrapper:ek,removeAriaLabel:null==ex?void 0:ex.removeAriaLabel,onClick:function(e){eE(n,e)},onFocus:function(){tE(n),ty(),eT.current&&(ev||(eT.current.scrollLeft=0),eT.current.scrollTop=0)}})}),tj=function(){return tt(function(){var e,t=new Map,n=null===(e=eA.current)||void 0===e?void 0:e.getBoundingClientRect();return eM.forEach(function(e){var r,o=e.key,i=null===(r=eA.current)||void 0===r?void 0:r.querySelector('[data-node-key="'.concat(eo(o),'"]'));if(i){var a=eh(i,n),l=(0,F.Z)(a,4),c=l[0],s=l[1],u=l[2],d=l[3];t.set(o,{width:c,height:s,left:u,top:d})}}),t})};(0,r.useEffect)(function(){tj()},[eM.map(function(e){return e.key}).join("_")]);var tP=et(function(){var e=em(eN),t=em(eI),n=em(eR);eX([e[0]-t[0]-n[0],e[1]-t[1]-n[1]]);var r=em(eD);e6(r),e8(em(e_));var o=em(eA);eJ([o[0]-r[0],o[1]-r[1]]),tj()}),tM=eM.slice(0,tS),tN=eM.slice(tk+1),tI=[].concat((0,I.Z)(tM),(0,I.Z)(tN)),tR=tn.get(ed),tT=Y({activeTabOffset:tR,horizontal:eZ,indicator:eO,rtl:ev}).style;(0,r.useEffect)(function(){tE()},[ed,tu,td,er(tR),er(tn),eZ]),(0,r.useEffect)(function(){tP()},[ev]);var tA=!!tI.length,t_="".concat(eP,"-nav-wrap");return 
eZ?ev?(K=eB>0,W=eB!==td):(W=eB<0,K=eB!==tu):($=eW<0,ee=eW!==tu),r.createElement(V.Z,{onResize:tP},r.createElement("div",{ref:(0,X.x1)(t,eN),role:"tablist",className:i()("".concat(eP,"-nav"),ei),style:ec,onKeyDown:function(){ty()}},r.createElement(el,{ref:eI,position:"left",extra:ey,prefixCls:eP}),r.createElement(V.Z,{onResize:tP},r.createElement("div",{className:i()(t_,(0,z.Z)((0,z.Z)((0,z.Z)((0,z.Z)({},"".concat(t_,"-ping-left"),W),"".concat(t_,"-ping-right"),K),"".concat(t_,"-ping-top"),$),"".concat(t_,"-ping-bottom"),ee)),ref:eT},r.createElement(V.Z,{onResize:tP},r.createElement("div",{ref:eA,className:"".concat(eP,"-nav-list"),style:{transform:"translate(".concat(eB,"px, ").concat(eW,"px)"),transition:tg?"none":void 0}},tO,r.createElement(ea,{ref:eD,prefixCls:eP,locale:ex,editable:eb,style:(0,B.Z)((0,B.Z)({},0===tO.length?void 0:tC),{},{visibility:tA?"hidden":null})}),r.createElement("div",{className:i()("".concat(eP,"-ink-bar"),(0,z.Z)({},"".concat(eP,"-ink-bar-animated"),eu.inkBar)),style:tT}))))),r.createElement(ef,(0,j.Z)({},e,{removeAriaLabel:null==ex?void 0:ex.removeAriaLabel,ref:e_,prefixCls:eP,tabs:tI,className:!tA&&ts,tabMoving:!!tg})),r.createElement(el,{ref:eR,position:"right",extra:ey,prefixCls:eP})))}),ey=r.forwardRef(function(e,t){var n=e.prefixCls,o=e.className,a=e.style,l=e.id,c=e.active,s=e.tabKey,u=e.children;return r.createElement("div",{id:l&&"".concat(l,"-panel-").concat(s),role:"tabpanel",tabIndex:c?0:-1,"aria-labelledby":l&&"".concat(l,"-tab-").concat(s),"aria-hidden":!c,style:a,className:i()(n,c&&"".concat(n,"-active"),o),ref:t},u)}),eb=["renderTabBar"],ex=["label","key"],ew=function(e){var t=e.renderTabBar,n=(0,q.Z)(e,eb),o=r.useContext(U).tabs;return t?t((0,B.Z)((0,B.Z)({},n),{},{panes:o.map(function(e){var t=e.label,n=e.key,o=(0,q.Z)(e,ex);return r.createElement(ey,(0,j.Z)({tab:t,key:n,tabKey:n},o))})}),ev):r.createElement(ev,n)},eS=n(47970),ek=["key","forceRender","style","className","destroyInactiveTabPane"],eE=function(e){var 
t=e.id,n=e.activeKey,o=e.animated,a=e.tabPosition,l=e.destroyInactiveTabPane,c=r.useContext(U),s=c.prefixCls,u=c.tabs,d=o.tabPane,f="".concat(s,"-tabpane");return r.createElement("div",{className:i()("".concat(s,"-content-holder"))},r.createElement("div",{className:i()("".concat(s,"-content"),"".concat(s,"-content-").concat(a),(0,z.Z)({},"".concat(s,"-content-animated"),d))},u.map(function(e){var a=e.key,c=e.forceRender,s=e.style,u=e.className,p=e.destroyInactiveTabPane,h=(0,q.Z)(e,ek),m=a===n;return r.createElement(eS.ZP,(0,j.Z)({key:a,visible:m,forceRender:c,removeOnLeave:!!(l||p),leavedClassName:"".concat(f,"-hidden")},o.tabPaneMotion),function(e,n){var o=e.style,l=e.className;return r.createElement(ey,(0,j.Z)({},h,{prefixCls:f,id:t,tabKey:a,animated:d,active:m,style:(0,B.Z)((0,B.Z)({},s),o),className:i()(u,l),ref:n}))})})))};n(32559);var eC=["id","prefixCls","className","items","direction","activeKey","defaultActiveKey","editable","animated","tabPosition","tabBarGutter","tabBarStyle","tabBarExtraContent","locale","moreIcon","moreTransitionName","destroyInactiveTabPane","renderTabBar","onChange","onTabClick","onTabScroll","getPopupContainer","popupClassName","indicator"],eO=0,ej=r.forwardRef(function(e,t){var n=e.id,o=e.prefixCls,a=void 0===o?"rc-tabs":o,l=e.className,c=e.items,s=e.direction,u=e.activeKey,d=e.defaultActiveKey,f=e.editable,p=e.animated,h=e.tabPosition,m=void 0===h?"top":h,g=e.tabBarGutter,v=e.tabBarStyle,y=e.tabBarExtraContent,b=e.locale,x=e.moreIcon,w=e.moreTransitionName,S=e.destroyInactiveTabPane,k=e.renderTabBar,E=e.onChange,C=e.onTabClick,O=e.onTabScroll,P=e.getPopupContainer,M=e.popupClassName,N=e.indicator,I=(0,q.Z)(e,eC),R=r.useMemo(function(){return(c||[]).filter(function(e){return e&&"object"===(0,H.Z)(e)&&"key"in e})},[c]),T="rtl"===s,A=function(){var e,t=arguments.length>0&&void 
0!==arguments[0]?arguments[0]:{inkBar:!0,tabPane:!1};return(e=!1===t?{inkBar:!1,tabPane:!1}:!0===t?{inkBar:!0,tabPane:!1}:(0,B.Z)({inkBar:!0},"object"===(0,H.Z)(t)?t:{})).tabPaneMotion&&void 0===e.tabPane&&(e.tabPane=!0),!e.tabPaneMotion&&e.tabPane&&(e.tabPane=!1),e}(p),_=(0,r.useState)(!1),D=(0,F.Z)(_,2),Z=D[0],L=D[1];(0,r.useEffect)(function(){L((0,K.Z)())},[]);var V=(0,W.Z)(function(){var e;return null===(e=R[0])||void 0===e?void 0:e.key},{value:u,defaultValue:d}),G=(0,F.Z)(V,2),X=G[0],$=G[1],Y=(0,r.useState)(function(){return R.findIndex(function(e){return e.key===X})}),Q=(0,F.Z)(Y,2),J=Q[0],ee=Q[1];(0,r.useEffect)(function(){var e,t=R.findIndex(function(e){return e.key===X});-1===t&&(t=Math.max(0,Math.min(J,R.length-1)),$(null===(e=R[t])||void 0===e?void 0:e.key)),ee(t)},[R.map(function(e){return e.key}).join("_"),X,J]);var et=(0,W.Z)(null,{value:n}),en=(0,F.Z)(et,2),er=en[0],eo=en[1];(0,r.useEffect)(function(){n||(eo("rc-tabs-".concat(eO)),eO+=1)},[]);var ei={id:er,activeKey:X,animated:A,tabPosition:m,rtl:T,mobile:Z},ea=(0,B.Z)((0,B.Z)({},ei),{},{editable:f,locale:b,moreIcon:x,moreTransitionName:w,tabBarGutter:g,onTabClick:function(e,t){null==C||C(e,t);var n=e!==X;$(e),n&&(null==E||E(e))},onTabScroll:O,extra:y,style:v,panes:null,getPopupContainer:P,popupClassName:M,indicator:N});return r.createElement(U.Provider,{value:{tabs:R,prefixCls:a}},r.createElement("div",(0,j.Z)({ref:t,id:n,className:i()(a,"".concat(a,"-").concat(m),(0,z.Z)((0,z.Z)((0,z.Z)({},"".concat(a,"-mobile"),Z),"".concat(a,"-editable"),f),"".concat(a,"-rtl"),T),l)},I),r.createElement(ew,(0,j.Z)({},ea,{renderTabBar:k})),r.createElement(eE,(0,j.Z)({destroyInactiveTabPane:S},ei,{animated:A}))))}),eP=n(64024),eM=n(68710);let eN={motionAppear:!1,motionEnter:!0,motionLeave:!0};var eI=n(45287),eR=function(e,t){var n={};for(var r in e)Object.prototype.hasOwnProperty.call(e,r)&&0>t.indexOf(r)&&(n[r]=e[r]);if(null!=e&&"function"==typeof Object.getOwnPropertySymbols)for(var 
o=0,r=Object.getOwnPropertySymbols(e);ot.indexOf(r[o])&&Object.prototype.propertyIsEnumerable.call(e,r[o])&&(n[r[o]]=e[r[o]]);return n},eT=n(12918),eA=n(18544),e_=e=>{let{componentCls:t,motionDurationSlow:n}=e;return[{[t]:{["".concat(t,"-switch")]:{"&-appear, &-enter":{transition:"none","&-start":{opacity:0},"&-active":{opacity:1,transition:"opacity ".concat(n)}},"&-leave":{position:"absolute",transition:"none",inset:0,"&-start":{opacity:1},"&-active":{opacity:0,transition:"opacity ".concat(n)}}}}},[(0,eA.oN)(e,"slide-up"),(0,eA.oN)(e,"slide-down")]]};let eD=e=>{let{componentCls:t,tabsCardPadding:n,cardBg:r,cardGutter:o,colorBorderSecondary:i,itemSelectedColor:a}=e;return{["".concat(t,"-card")]:{["> ".concat(t,"-nav, > div > ").concat(t,"-nav")]:{["".concat(t,"-tab")]:{margin:0,padding:n,background:r,border:"".concat((0,u.bf)(e.lineWidth)," ").concat(e.lineType," ").concat(i),transition:"all ".concat(e.motionDurationSlow," ").concat(e.motionEaseInOut)},["".concat(t,"-tab-active")]:{color:a,background:e.colorBgContainer},["".concat(t,"-ink-bar")]:{visibility:"hidden"}},["&".concat(t,"-top, &").concat(t,"-bottom")]:{["> ".concat(t,"-nav, > div > ").concat(t,"-nav")]:{["".concat(t,"-tab + ").concat(t,"-tab")]:{marginLeft:{_skip_check_:!0,value:(0,u.bf)(o)}}}},["&".concat(t,"-top")]:{["> ".concat(t,"-nav, > div > ").concat(t,"-nav")]:{["".concat(t,"-tab")]:{borderRadius:"".concat((0,u.bf)(e.borderRadiusLG)," ").concat((0,u.bf)(e.borderRadiusLG)," 0 0")},["".concat(t,"-tab-active")]:{borderBottomColor:e.colorBgContainer}}},["&".concat(t,"-bottom")]:{["> ".concat(t,"-nav, > div > ").concat(t,"-nav")]:{["".concat(t,"-tab")]:{borderRadius:"0 0 ".concat((0,u.bf)(e.borderRadiusLG)," ").concat((0,u.bf)(e.borderRadiusLG))},["".concat(t,"-tab-active")]:{borderTopColor:e.colorBgContainer}}},["&".concat(t,"-left, &").concat(t,"-right")]:{["> ".concat(t,"-nav, > div > ").concat(t,"-nav")]:{["".concat(t,"-tab + 
").concat(t,"-tab")]:{marginTop:(0,u.bf)(o)}}},["&".concat(t,"-left")]:{["> ".concat(t,"-nav, > div > ").concat(t,"-nav")]:{["".concat(t,"-tab")]:{borderRadius:{_skip_check_:!0,value:"".concat((0,u.bf)(e.borderRadiusLG)," 0 0 ").concat((0,u.bf)(e.borderRadiusLG))}},["".concat(t,"-tab-active")]:{borderRightColor:{_skip_check_:!0,value:e.colorBgContainer}}}},["&".concat(t,"-right")]:{["> ".concat(t,"-nav, > div > ").concat(t,"-nav")]:{["".concat(t,"-tab")]:{borderRadius:{_skip_check_:!0,value:"0 ".concat((0,u.bf)(e.borderRadiusLG)," ").concat((0,u.bf)(e.borderRadiusLG)," 0")}},["".concat(t,"-tab-active")]:{borderLeftColor:{_skip_check_:!0,value:e.colorBgContainer}}}}}}},eZ=e=>{let{componentCls:t,itemHoverColor:n,dropdownEdgeChildVerticalPadding:r}=e;return{["".concat(t,"-dropdown")]:Object.assign(Object.assign({},(0,eT.Wf)(e)),{position:"absolute",top:-9999,left:{_skip_check_:!0,value:-9999},zIndex:e.zIndexPopup,display:"block","&-hidden":{display:"none"},["".concat(t,"-dropdown-menu")]:{maxHeight:e.tabsDropdownHeight,margin:0,padding:"".concat((0,u.bf)(r)," 0"),overflowX:"hidden",overflowY:"auto",textAlign:{_skip_check_:!0,value:"left"},listStyleType:"none",backgroundColor:e.colorBgContainer,backgroundClip:"padding-box",borderRadius:e.borderRadiusLG,outline:"none",boxShadow:e.boxShadowSecondary,"&-item":Object.assign(Object.assign({},eT.vS),{display:"flex",alignItems:"center",minWidth:e.tabsDropdownWidth,margin:0,padding:"".concat((0,u.bf)(e.paddingXXS)," ").concat((0,u.bf)(e.paddingSM)),color:e.colorText,fontWeight:"normal",fontSize:e.fontSize,lineHeight:e.lineHeight,cursor:"pointer",transition:"all ".concat(e.motionDurationSlow),"> span":{flex:1,whiteSpace:"nowrap"},"&-remove":{flex:"none",marginLeft:{_skip_check_:!0,value:e.marginSM},color:e.colorTextDescription,fontSize:e.fontSizeSM,background:"transparent",border:0,cursor:"pointer","&:hover":{color:n}},"&:hover":{background:e.controlItemBgHover},"&-disabled":{"&, 
&:hover":{color:e.colorTextDisabled,background:"transparent",cursor:"not-allowed"}}})}})}},eL=e=>{let{componentCls:t,margin:n,colorBorderSecondary:r,horizontalMargin:o,verticalItemPadding:i,verticalItemMargin:a,calc:l}=e;return{["".concat(t,"-top, ").concat(t,"-bottom")]:{flexDirection:"column",["> ".concat(t,"-nav, > div > ").concat(t,"-nav")]:{margin:o,"&::before":{position:"absolute",right:{_skip_check_:!0,value:0},left:{_skip_check_:!0,value:0},borderBottom:"".concat((0,u.bf)(e.lineWidth)," ").concat(e.lineType," ").concat(r),content:"''"},["".concat(t,"-ink-bar")]:{height:e.lineWidthBold,"&-animated":{transition:"width ".concat(e.motionDurationSlow,", left ").concat(e.motionDurationSlow,",\n right ").concat(e.motionDurationSlow)}},["".concat(t,"-nav-wrap")]:{"&::before, &::after":{top:0,bottom:0,width:e.controlHeight},"&::before":{left:{_skip_check_:!0,value:0},boxShadow:e.boxShadowTabsOverflowLeft},"&::after":{right:{_skip_check_:!0,value:0},boxShadow:e.boxShadowTabsOverflowRight},["&".concat(t,"-nav-wrap-ping-left::before")]:{opacity:1},["&".concat(t,"-nav-wrap-ping-right::after")]:{opacity:1}}}},["".concat(t,"-top")]:{["> ".concat(t,"-nav,\n > div > ").concat(t,"-nav")]:{"&::before":{bottom:0},["".concat(t,"-ink-bar")]:{bottom:0}}},["".concat(t,"-bottom")]:{["> ".concat(t,"-nav, > div > ").concat(t,"-nav")]:{order:1,marginTop:n,marginBottom:0,"&::before":{top:0},["".concat(t,"-ink-bar")]:{top:0}},["> ".concat(t,"-content-holder, > div > ").concat(t,"-content-holder")]:{order:0}},["".concat(t,"-left, ").concat(t,"-right")]:{["> ".concat(t,"-nav, > div > ").concat(t,"-nav")]:{flexDirection:"column",minWidth:l(e.controlHeight).mul(1.25).equal(),["".concat(t,"-tab")]:{padding:i,textAlign:"center"},["".concat(t,"-tab + ").concat(t,"-tab")]:{margin:a},["".concat(t,"-nav-wrap")]:{flexDirection:"column","&::before, 
&::after":{right:{_skip_check_:!0,value:0},left:{_skip_check_:!0,value:0},height:e.controlHeight},"&::before":{top:0,boxShadow:e.boxShadowTabsOverflowTop},"&::after":{bottom:0,boxShadow:e.boxShadowTabsOverflowBottom},["&".concat(t,"-nav-wrap-ping-top::before")]:{opacity:1},["&".concat(t,"-nav-wrap-ping-bottom::after")]:{opacity:1}},["".concat(t,"-ink-bar")]:{width:e.lineWidthBold,"&-animated":{transition:"height ".concat(e.motionDurationSlow,", top ").concat(e.motionDurationSlow)}},["".concat(t,"-nav-list, ").concat(t,"-nav-operations")]:{flex:"1 0 auto",flexDirection:"column"}}},["".concat(t,"-left")]:{["> ".concat(t,"-nav, > div > ").concat(t,"-nav")]:{["".concat(t,"-ink-bar")]:{right:{_skip_check_:!0,value:0}}},["> ".concat(t,"-content-holder, > div > ").concat(t,"-content-holder")]:{marginLeft:{_skip_check_:!0,value:(0,u.bf)(l(e.lineWidth).mul(-1).equal())},borderLeft:{_skip_check_:!0,value:"".concat((0,u.bf)(e.lineWidth)," ").concat(e.lineType," ").concat(e.colorBorder)},["> ".concat(t,"-content > ").concat(t,"-tabpane")]:{paddingLeft:{_skip_check_:!0,value:e.paddingLG}}}},["".concat(t,"-right")]:{["> ".concat(t,"-nav, > div > ").concat(t,"-nav")]:{order:1,["".concat(t,"-ink-bar")]:{left:{_skip_check_:!0,value:0}}},["> ".concat(t,"-content-holder, > div > ").concat(t,"-content-holder")]:{order:0,marginRight:{_skip_check_:!0,value:l(e.lineWidth).mul(-1).equal()},borderRight:{_skip_check_:!0,value:"".concat((0,u.bf)(e.lineWidth)," ").concat(e.lineType," ").concat(e.colorBorder)},["> ".concat(t,"-content > ").concat(t,"-tabpane")]:{paddingRight:{_skip_check_:!0,value:e.paddingLG}}}}}},ez=e=>{let{componentCls:t,cardPaddingSM:n,cardPaddingLG:r,horizontalItemPaddingSM:o,horizontalItemPaddingLG:i}=e;return{[t]:{"&-small":{["> ".concat(t,"-nav")]:{["".concat(t,"-tab")]:{padding:o,fontSize:e.titleFontSizeSM}}},"&-large":{["> ".concat(t,"-nav")]:{["".concat(t,"-tab")]:{padding:i,fontSize:e.titleFontSizeLG}}}},["".concat(t,"-card")]:{["&".concat(t,"-small")]:{["> 
".concat(t,"-nav")]:{["".concat(t,"-tab")]:{padding:n}},["&".concat(t,"-bottom")]:{["> ".concat(t,"-nav ").concat(t,"-tab")]:{borderRadius:"0 0 ".concat((0,u.bf)(e.borderRadius)," ").concat((0,u.bf)(e.borderRadius))}},["&".concat(t,"-top")]:{["> ".concat(t,"-nav ").concat(t,"-tab")]:{borderRadius:"".concat((0,u.bf)(e.borderRadius)," ").concat((0,u.bf)(e.borderRadius)," 0 0")}},["&".concat(t,"-right")]:{["> ".concat(t,"-nav ").concat(t,"-tab")]:{borderRadius:{_skip_check_:!0,value:"0 ".concat((0,u.bf)(e.borderRadius)," ").concat((0,u.bf)(e.borderRadius)," 0")}}},["&".concat(t,"-left")]:{["> ".concat(t,"-nav ").concat(t,"-tab")]:{borderRadius:{_skip_check_:!0,value:"".concat((0,u.bf)(e.borderRadius)," 0 0 ").concat((0,u.bf)(e.borderRadius))}}}},["&".concat(t,"-large")]:{["> ".concat(t,"-nav")]:{["".concat(t,"-tab")]:{padding:r}}}}}},eB=e=>{let{componentCls:t,itemActiveColor:n,itemHoverColor:r,iconCls:o,tabsHorizontalItemMargin:i,horizontalItemPadding:a,itemSelectedColor:l,itemColor:c}=e,s="".concat(t,"-tab");return{[s]:{position:"relative",WebkitTouchCallout:"none",WebkitTapHighlightColor:"transparent",display:"inline-flex",alignItems:"center",padding:a,fontSize:e.titleFontSize,background:"transparent",border:0,outline:"none",cursor:"pointer",color:c,"&-btn, &-remove":Object.assign({"&:focus:not(:focus-visible), &:active":{color:n}},(0,eT.Qy)(e)),"&-btn":{outline:"none",transition:"all 0.3s",["".concat(s,"-icon:not(:last-child)")]:{marginInlineEnd:e.marginSM}},"&-remove":{flex:"none",marginRight:{_skip_check_:!0,value:e.calc(e.marginXXS).mul(-1).equal()},marginLeft:{_skip_check_:!0,value:e.marginXS},color:e.colorTextDescription,fontSize:e.fontSizeSM,background:"transparent",border:"none",outline:"none",cursor:"pointer",transition:"all ".concat(e.motionDurationSlow),"&:hover":{color:e.colorTextHeading}},"&:hover":{color:r},["&".concat(s,"-active 
").concat(s,"-btn")]:{color:l,textShadow:e.tabsActiveTextShadow},["&".concat(s,"-disabled")]:{color:e.colorTextDisabled,cursor:"not-allowed"},["&".concat(s,"-disabled ").concat(s,"-btn, &").concat(s,"-disabled ").concat(t,"-remove")]:{"&:focus, &:active":{color:e.colorTextDisabled}},["& ".concat(s,"-remove ").concat(o)]:{margin:0},["".concat(o,":not(:last-child)")]:{marginRight:{_skip_check_:!0,value:e.marginSM}}},["".concat(s," + ").concat(s)]:{margin:{_skip_check_:!0,value:i}}}},eF=e=>{let{componentCls:t,tabsHorizontalItemMarginRTL:n,iconCls:r,cardGutter:o,calc:i}=e;return{["".concat(t,"-rtl")]:{direction:"rtl",["".concat(t,"-nav")]:{["".concat(t,"-tab")]:{margin:{_skip_check_:!0,value:n},["".concat(t,"-tab:last-of-type")]:{marginLeft:{_skip_check_:!0,value:0}},[r]:{marginRight:{_skip_check_:!0,value:0},marginLeft:{_skip_check_:!0,value:(0,u.bf)(e.marginSM)}},["".concat(t,"-tab-remove")]:{marginRight:{_skip_check_:!0,value:(0,u.bf)(e.marginXS)},marginLeft:{_skip_check_:!0,value:(0,u.bf)(i(e.marginXXS).mul(-1).equal())},[r]:{margin:0}}}},["&".concat(t,"-left")]:{["> ".concat(t,"-nav")]:{order:1},["> ".concat(t,"-content-holder")]:{order:0}},["&".concat(t,"-right")]:{["> ".concat(t,"-nav")]:{order:0},["> ".concat(t,"-content-holder")]:{order:1}},["&".concat(t,"-card").concat(t,"-top, &").concat(t,"-card").concat(t,"-bottom")]:{["> ".concat(t,"-nav, > div > ").concat(t,"-nav")]:{["".concat(t,"-tab + ").concat(t,"-tab")]:{marginRight:{_skip_check_:!0,value:o},marginLeft:{_skip_check_:!0,value:0}}}}},["".concat(t,"-dropdown-rtl")]:{direction:"rtl"},["".concat(t,"-menu-item")]:{["".concat(t,"-dropdown-rtl")]:{textAlign:{_skip_check_:!0,value:"right"}}}}},eH=e=>{let{componentCls:t,tabsCardPadding:n,cardHeight:r,cardGutter:o,itemHoverColor:i,itemActiveColor:a,colorBorderSecondary:l}=e;return{[t]:Object.assign(Object.assign(Object.assign(Object.assign({},(0,eT.Wf)(e)),{display:"flex",["> ".concat(t,"-nav, > div > 
").concat(t,"-nav")]:{position:"relative",display:"flex",flex:"none",alignItems:"center",["".concat(t,"-nav-wrap")]:{position:"relative",display:"flex",flex:"auto",alignSelf:"stretch",overflow:"hidden",whiteSpace:"nowrap",transform:"translate(0)","&::before, &::after":{position:"absolute",zIndex:1,opacity:0,transition:"opacity ".concat(e.motionDurationSlow),content:"''",pointerEvents:"none"}},["".concat(t,"-nav-list")]:{position:"relative",display:"flex",transition:"opacity ".concat(e.motionDurationSlow)},["".concat(t,"-nav-operations")]:{display:"flex",alignSelf:"stretch"},["".concat(t,"-nav-operations-hidden")]:{position:"absolute",visibility:"hidden",pointerEvents:"none"},["".concat(t,"-nav-more")]:{position:"relative",padding:n,background:"transparent",border:0,color:e.colorText,"&::after":{position:"absolute",right:{_skip_check_:!0,value:0},bottom:0,left:{_skip_check_:!0,value:0},height:e.calc(e.controlHeightLG).div(8).equal(),transform:"translateY(100%)",content:"''"}},["".concat(t,"-nav-add")]:Object.assign({minWidth:r,minHeight:r,marginLeft:{_skip_check_:!0,value:o},padding:"0 ".concat((0,u.bf)(e.paddingXS)),background:"transparent",border:"".concat((0,u.bf)(e.lineWidth)," ").concat(e.lineType," ").concat(l),borderRadius:"".concat((0,u.bf)(e.borderRadiusLG)," ").concat((0,u.bf)(e.borderRadiusLG)," 0 0"),outline:"none",cursor:"pointer",color:e.colorText,transition:"all ".concat(e.motionDurationSlow," ").concat(e.motionEaseInOut),"&:hover":{color:i},"&:active, &:focus:not(:focus-visible)":{color:a}},(0,eT.Qy)(e))},["".concat(t,"-extra-content")]:{flex:"none"},["".concat(t,"-ink-bar")]:{position:"absolute",background:e.inkBarColor,pointerEvents:"none"}}),eB(e)),{["".concat(t,"-content")]:{position:"relative",width:"100%"},["".concat(t,"-content-holder")]:{flex:"auto",minWidth:0,minHeight:0},["".concat(t,"-tabpane")]:{outline:"none","&-hidden":{display:"none"}}}),["".concat(t,"-centered")]:{["> ".concat(t,"-nav, > div > 
").concat(t,"-nav")]:{["".concat(t,"-nav-wrap")]:{["&:not([class*='".concat(t,"-nav-wrap-ping'])")]:{justifyContent:"center"}}}}}};var eq=(0,d.I$)("Tabs",e=>{let t=(0,f.TS)(e,{tabsCardPadding:e.cardPadding,dropdownEdgeChildVerticalPadding:e.paddingXXS,tabsActiveTextShadow:"0 0 0.25px currentcolor",tabsDropdownHeight:200,tabsDropdownWidth:120,tabsHorizontalItemMargin:"0 0 0 ".concat((0,u.bf)(e.horizontalItemGutter)),tabsHorizontalItemMarginRTL:"0 0 0 ".concat((0,u.bf)(e.horizontalItemGutter))});return[ez(t),eF(t),eL(t),eZ(t),eD(t),eH(t),e_(t)]},e=>{let t=e.controlHeightLG;return{zIndexPopup:e.zIndexPopupBase+50,cardBg:e.colorFillAlter,cardHeight:t,cardPadding:"".concat((t-Math.round(e.fontSize*e.lineHeight))/2-e.lineWidth,"px ").concat(e.padding,"px"),cardPaddingSM:"".concat(1.5*e.paddingXXS,"px ").concat(e.padding,"px"),cardPaddingLG:"".concat(e.paddingXS,"px ").concat(e.padding,"px ").concat(1.5*e.paddingXXS,"px"),titleFontSize:e.fontSize,titleFontSizeLG:e.fontSizeLG,titleFontSizeSM:e.fontSize,inkBarColor:e.colorPrimary,horizontalMargin:"0 0 ".concat(e.margin,"px 0"),horizontalItemGutter:32,horizontalItemMargin:"",horizontalItemMarginRTL:"",horizontalItemPadding:"".concat(e.paddingSM,"px 0"),horizontalItemPaddingSM:"".concat(e.paddingXS,"px 0"),horizontalItemPaddingLG:"".concat(e.padding,"px 0"),verticalItemPadding:"".concat(e.paddingXS,"px ").concat(e.paddingLG,"px"),verticalItemMargin:"".concat(e.margin,"px 0 0 0"),itemColor:e.colorText,itemSelectedColor:e.colorPrimary,itemHoverColor:e.colorPrimaryHover,itemActiveColor:e.colorPrimaryActive,cardGutter:e.marginXXS/2}}),eW=function(e,t){var n={};for(var r in e)Object.prototype.hasOwnProperty.call(e,r)&&0>t.indexOf(r)&&(n[r]=e[r]);if(null!=e&&"function"==typeof Object.getOwnPropertySymbols)for(var o=0,r=Object.getOwnPropertySymbols(e);ot.indexOf(r[o])&&Object.prototype.propertyIsEnumerable.call(e,r[o])&&(n[r[o]]=e[r[o]]);return n};let eK=e=>{var t,n,o,a,s,u;let 
d;let{type:f,className:p,rootClassName:h,size:m,onEdit:g,hideAdd:v,centered:y,addIcon:b,popupClassName:x,children:w,items:S,animated:k,style:E,indicatorSize:C,indicator:O}=e,j=eW(e,["type","className","rootClassName","size","onEdit","hideAdd","centered","addIcon","popupClassName","children","items","animated","style","indicatorSize","indicator"]),{prefixCls:P,moreIcon:M=r.createElement(Z.Z,null)}=j,{direction:N,tabs:I,getPrefixCls:R,getPopupContainer:T}=r.useContext(l.E_),A=R("tabs",P),_=(0,eP.Z)(A),[z,B,F]=eq(A,_);"editable-card"===f&&(d={onEdit:(e,t)=>{let{key:n,event:r}=t;null==g||g("add"===e?r:n,e)},removeIcon:r.createElement(D.Z,null),addIcon:b||r.createElement(L.Z,null),showAdd:!0!==v});let H=R(),q=(0,c.Z)(m),W=S||(0,eI.Z)(w).map(e=>{if(r.isValidElement(e)){let{key:t,props:n}=e,r=n||{},{tab:o}=r,i=eR(r,["tab"]);return Object.assign(Object.assign({key:String(t)},i),{label:o})}return null}).filter(e=>e),K=function(e){let t,n=arguments.length>1&&void 0!==arguments[1]?arguments[1]:{inkBar:!0,tabPane:!1};return(t=!1===n?{inkBar:!1,tabPane:!1}:!0===n?{inkBar:!0,tabPane:!0}:Object.assign({inkBar:!0},"object"==typeof n?n:{})).tabPane&&(t.tabPaneMotion=Object.assign(Object.assign({},eN),{motionName:(0,eM.m)(e,"switch")})),t}(A,k),U=Object.assign(Object.assign({},null==I?void 0:I.style),E),V={align:null!==(t=null==O?void 0:O.align)&&void 0!==t?t:null===(n=null==I?void 0:I.indicator)||void 0===n?void 0:n.align,size:null!==(u=null!==(a=null!==(o=null==O?void 0:O.size)&&void 0!==o?o:C)&&void 0!==a?a:null===(s=null==I?void 0:I.indicator)||void 0===s?void 0:s.size)&&void 0!==u?u:null==I?void 0:I.indicatorSize};return z(r.createElement(ej,Object.assign({direction:N,getPopupContainer:T,moreTransitionName:"".concat(H,"-slide-up")},j,{items:W,className:i()({["".concat(A,"-").concat(q)]:q,["".concat(A,"-card")]:["card","editable-card"].includes(f),["".concat(A,"-editable-card")]:"editable-card"===f,["".concat(A,"-centered")]:y},null==I?void 
0:I.className,p,h,B,F,_),popupClassName:i()(x,B,F,_),style:U,editable:d,moreIcon:M,prefixCls:A,animated:K,indicator:V})))};eK.TabPane=()=>null;var eU=function(e,t){var n={};for(var r in e)Object.prototype.hasOwnProperty.call(e,r)&&0>t.indexOf(r)&&(n[r]=e[r]);if(null!=e&&"function"==typeof Object.getOwnPropertySymbols)for(var o=0,r=Object.getOwnPropertySymbols(e);ot.indexOf(r[o])&&Object.prototype.propertyIsEnumerable.call(e,r[o])&&(n[r[o]]=e[r[o]]);return n},eV=e=>{var{prefixCls:t,className:n,hoverable:o=!0}=e,a=eU(e,["prefixCls","className","hoverable"]);let{getPrefixCls:c}=r.useContext(l.E_),s=c("card",t),u=i()("".concat(s,"-grid"),n,{["".concat(s,"-grid-hoverable")]:o});return r.createElement("div",Object.assign({},a,{className:u}))};let eG=e=>{let{antCls:t,componentCls:n,headerHeight:r,cardPaddingBase:o,tabsMarginBottom:i}=e;return Object.assign(Object.assign({display:"flex",justifyContent:"center",flexDirection:"column",minHeight:r,marginBottom:-1,padding:"0 ".concat((0,u.bf)(o)),color:e.colorTextHeading,fontWeight:e.fontWeightStrong,fontSize:e.headerFontSize,background:e.headerBg,borderBottom:"".concat((0,u.bf)(e.lineWidth)," ").concat(e.lineType," ").concat(e.colorBorderSecondary),borderRadius:"".concat((0,u.bf)(e.borderRadiusLG)," ").concat((0,u.bf)(e.borderRadiusLG)," 0 0")},(0,eT.dF)()),{"&-wrapper":{width:"100%",display:"flex",alignItems:"center"},"&-title":Object.assign(Object.assign({display:"inline-block",flex:1},eT.vS),{["\n > ".concat(n,"-typography,\n > ").concat(n,"-typography-edit-content\n ")]:{insetInlineStart:0,marginTop:0,marginBottom:0}}),["".concat(t,"-tabs-top")]:{clear:"both",marginBottom:i,color:e.colorText,fontWeight:"normal",fontSize:e.fontSize,"&-bar":{borderBottom:"".concat((0,u.bf)(e.lineWidth)," ").concat(e.lineType," ").concat(e.colorBorderSecondary)}}})},eX=e=>{let{cardPaddingBase:t,colorBorderSecondary:n,cardShadow:r,lineWidth:o}=e;return{width:"33.33%",padding:t,border:0,borderRadius:0,boxShadow:"\n ".concat((0,u.bf)(o)," 0 0 0 
").concat(n,",\n 0 ").concat((0,u.bf)(o)," 0 0 ").concat(n,",\n ").concat((0,u.bf)(o)," ").concat((0,u.bf)(o)," 0 0 ").concat(n,",\n ").concat((0,u.bf)(o)," 0 0 0 ").concat(n," inset,\n 0 ").concat((0,u.bf)(o)," 0 0 ").concat(n," inset;\n "),transition:"all ".concat(e.motionDurationMid),"&-hoverable:hover":{position:"relative",zIndex:1,boxShadow:r}}},e$=e=>{let{componentCls:t,iconCls:n,actionsLiMargin:r,cardActionsIconSize:o,colorBorderSecondary:i,actionsBg:a}=e;return Object.assign(Object.assign({margin:0,padding:0,listStyle:"none",background:a,borderTop:"".concat((0,u.bf)(e.lineWidth)," ").concat(e.lineType," ").concat(i),display:"flex",borderRadius:"0 0 ".concat((0,u.bf)(e.borderRadiusLG)," ").concat((0,u.bf)(e.borderRadiusLG))},(0,eT.dF)()),{"& > li":{margin:r,color:e.colorTextDescription,textAlign:"center","> span":{position:"relative",display:"block",minWidth:e.calc(e.cardActionsIconSize).mul(2).equal(),fontSize:e.fontSize,lineHeight:e.lineHeight,cursor:"pointer","&:hover":{color:e.colorPrimary,transition:"color ".concat(e.motionDurationMid)},["a:not(".concat(t,"-btn), > ").concat(n)]:{display:"inline-block",width:"100%",color:e.colorTextDescription,lineHeight:(0,u.bf)(e.fontHeight),transition:"color ".concat(e.motionDurationMid),"&:hover":{color:e.colorPrimary}},["> ".concat(n)]:{fontSize:o,lineHeight:(0,u.bf)(e.calc(o).mul(e.lineHeight).equal())}},"&:not(:last-child)":{borderInlineEnd:"".concat((0,u.bf)(e.lineWidth)," ").concat(e.lineType," ").concat(i)}}})},eY=e=>Object.assign(Object.assign({margin:"".concat((0,u.bf)(e.calc(e.marginXXS).mul(-1).equal())," 0"),display:"flex"},(0,eT.dF)()),{"&-avatar":{paddingInlineEnd:e.padding},"&-detail":{overflow:"hidden",flex:1,"> 
div:not(:last-child)":{marginBottom:e.marginXS}},"&-title":Object.assign({color:e.colorTextHeading,fontWeight:e.fontWeightStrong,fontSize:e.fontSizeLG},eT.vS),"&-description":{color:e.colorTextDescription}}),eQ=e=>{let{componentCls:t,cardPaddingBase:n,colorFillAlter:r}=e;return{["".concat(t,"-head")]:{padding:"0 ".concat((0,u.bf)(n)),background:r,"&-title":{fontSize:e.fontSize}},["".concat(t,"-body")]:{padding:"".concat((0,u.bf)(e.padding)," ").concat((0,u.bf)(n))}}},eJ=e=>{let{componentCls:t}=e;return{overflow:"hidden",["".concat(t,"-body")]:{userSelect:"none"}}},e0=e=>{let{antCls:t,componentCls:n,cardShadow:r,cardHeadPadding:o,colorBorderSecondary:i,boxShadowTertiary:a,cardPaddingBase:l,extraColor:c}=e;return{[n]:Object.assign(Object.assign({},(0,eT.Wf)(e)),{position:"relative",background:e.colorBgContainer,borderRadius:e.borderRadiusLG,["&:not(".concat(n,"-bordered)")]:{boxShadow:a},["".concat(n,"-head")]:eG(e),["".concat(n,"-extra")]:{marginInlineStart:"auto",color:c,fontWeight:"normal",fontSize:e.fontSize},["".concat(n,"-body")]:Object.assign({padding:l,borderRadius:" 0 0 ".concat((0,u.bf)(e.borderRadiusLG)," ").concat((0,u.bf)(e.borderRadiusLG))},(0,eT.dF)()),["".concat(n,"-grid")]:eX(e),["".concat(n,"-cover")]:{"> *":{display:"block",width:"100%"},["img, img + ".concat(t,"-image-mask")]:{borderRadius:"".concat((0,u.bf)(e.borderRadiusLG)," ").concat((0,u.bf)(e.borderRadiusLG)," 0 0")}},["".concat(n,"-actions")]:e$(e),["".concat(n,"-meta")]:eY(e)}),["".concat(n,"-bordered")]:{border:"".concat((0,u.bf)(e.lineWidth)," ").concat(e.lineType," ").concat(i),["".concat(n,"-cover")]:{marginTop:-1,marginInlineStart:-1,marginInlineEnd:-1}},["".concat(n,"-hoverable")]:{cursor:"pointer",transition:"box-shadow ".concat(e.motionDurationMid,", border-color ").concat(e.motionDurationMid),"&:hover":{borderColor:"transparent",boxShadow:r}},["".concat(n,"-contain-grid")]:{borderRadius:"".concat((0,u.bf)(e.borderRadiusLG)," ").concat((0,u.bf)(e.borderRadiusLG)," 0 0 
"),["".concat(n,"-body")]:{display:"flex",flexWrap:"wrap"},["&:not(".concat(n,"-loading) ").concat(n,"-body")]:{marginBlockStart:e.calc(e.lineWidth).mul(-1).equal(),marginInlineStart:e.calc(e.lineWidth).mul(-1).equal(),padding:0}},["".concat(n,"-contain-tabs")]:{["> ".concat(n,"-head")]:{minHeight:0,["".concat(n,"-head-title, ").concat(n,"-extra")]:{paddingTop:o}}},["".concat(n,"-type-inner")]:eQ(e),["".concat(n,"-loading")]:eJ(e),["".concat(n,"-rtl")]:{direction:"rtl"}}},e1=e=>{let{componentCls:t,cardPaddingSM:n,headerHeightSM:r,headerFontSizeSM:o}=e;return{["".concat(t,"-small")]:{["> ".concat(t,"-head")]:{minHeight:r,padding:"0 ".concat((0,u.bf)(n)),fontSize:o,["> ".concat(t,"-head-wrapper")]:{["> ".concat(t,"-extra")]:{fontSize:e.fontSize}}},["> ".concat(t,"-body")]:{padding:n}},["".concat(t,"-small").concat(t,"-contain-tabs")]:{["> ".concat(t,"-head")]:{["".concat(t,"-head-title, ").concat(t,"-extra")]:{paddingTop:0,display:"flex",alignItems:"center"}}}}};var e2=(0,d.I$)("Card",e=>{let t=(0,f.TS)(e,{cardShadow:e.boxShadowCard,cardHeadPadding:e.padding,cardPaddingBase:e.paddingLG,cardActionsIconSize:e.fontSize,cardPaddingSM:12});return[e0(t),e1(t)]},e=>({headerBg:"transparent",headerFontSize:e.fontSizeLG,headerFontSizeSM:e.fontSize,headerHeight:e.fontSizeLG*e.lineHeightLG+2*e.padding,headerHeightSM:e.fontSize*e.lineHeight+2*e.paddingXS,actionsBg:e.colorBgContainer,actionsLiMargin:"".concat(e.paddingSM,"px 0"),tabsMarginBottom:-e.padding-e.lineWidth,extraColor:e.colorText})),e6=function(e,t){var n={};for(var r in e)Object.prototype.hasOwnProperty.call(e,r)&&0>t.indexOf(r)&&(n[r]=e[r]);if(null!=e&&"function"==typeof Object.getOwnPropertySymbols)for(var o=0,r=Object.getOwnPropertySymbols(e);ot.indexOf(r[o])&&Object.prototype.propertyIsEnumerable.call(e,r[o])&&(n[r[o]]=e[r[o]]);return n};let e3=e=>{let{prefixCls:t,actions:n=[]}=e;return 
r.createElement("ul",{className:"".concat(t,"-actions")},n.map((e,t)=>r.createElement("li",{style:{width:"".concat(100/n.length,"%")},key:"action-".concat(t)},r.createElement("span",null,e))))},e4=r.forwardRef((e,t)=>{let n;let{prefixCls:o,className:s,rootClassName:u,style:d,extra:f,headStyle:p={},bodyStyle:h={},title:m,loading:g,bordered:v=!0,size:y,type:b,cover:x,actions:w,tabList:S,children:k,activeTabKey:E,defaultActiveTabKey:C,tabBarExtraContent:O,hoverable:j,tabProps:P={}}=e,M=e6(e,["prefixCls","className","rootClassName","style","extra","headStyle","bodyStyle","title","loading","bordered","size","type","cover","actions","tabList","children","activeTabKey","defaultActiveTabKey","tabBarExtraContent","hoverable","tabProps"]),{getPrefixCls:N,direction:I,card:R}=r.useContext(l.E_),T=r.useMemo(()=>{let e=!1;return r.Children.forEach(k,t=>{t&&t.type&&t.type===eV&&(e=!0)}),e},[k]),A=N("card",o),[D,Z,L]=e2(A),z=r.createElement(_,{loading:!0,active:!0,paragraph:{rows:4},title:!1},k),B=void 0!==E,F=Object.assign(Object.assign({},P),{[B?"activeKey":"defaultActiveKey"]:B?E:C,tabBarExtraContent:O}),H=(0,c.Z)(y),q=H&&"default"!==H?H:"large",W=S?r.createElement(eK,Object.assign({size:q},F,{className:"".concat(A,"-head-tabs"),onChange:t=>{var n;null===(n=e.onTabChange)||void 0===n||n.call(e,t)},items:S.map(e=>{var{tab:t}=e;return Object.assign({label:t},e6(e,["tab"]))})})):null;(m||f||W)&&(n=r.createElement("div",{className:"".concat(A,"-head"),style:p},r.createElement("div",{className:"".concat(A,"-head-wrapper")},m&&r.createElement("div",{className:"".concat(A,"-head-title")},m),f&&r.createElement("div",{className:"".concat(A,"-extra")},f)),W));let K=x?r.createElement("div",{className:"".concat(A,"-cover")},x):null,U=r.createElement("div",{className:"".concat(A,"-body"),style:h},g?z:k),V=w&&w.length?r.createElement(e3,{prefixCls:A,actions:w}):null,G=(0,a.Z)(M,["onTabChange"]),X=i()(A,null==R?void 
0:R.className,{["".concat(A,"-loading")]:g,["".concat(A,"-bordered")]:v,["".concat(A,"-hoverable")]:j,["".concat(A,"-contain-grid")]:T,["".concat(A,"-contain-tabs")]:S&&S.length,["".concat(A,"-").concat(H)]:H,["".concat(A,"-type-").concat(b)]:!!b,["".concat(A,"-rtl")]:"rtl"===I},s,u,Z,L),$=Object.assign(Object.assign({},null==R?void 0:R.style),d);return D(r.createElement("div",Object.assign({ref:t},G,{className:X,style:$}),n,K,U,V))});var e5=function(e,t){var n={};for(var r in e)Object.prototype.hasOwnProperty.call(e,r)&&0>t.indexOf(r)&&(n[r]=e[r]);if(null!=e&&"function"==typeof Object.getOwnPropertySymbols)for(var o=0,r=Object.getOwnPropertySymbols(e);ot.indexOf(r[o])&&Object.prototype.propertyIsEnumerable.call(e,r[o])&&(n[r[o]]=e[r[o]]);return n};e4.Grid=eV,e4.Meta=e=>{let{prefixCls:t,className:n,avatar:o,title:a,description:c}=e,s=e5(e,["prefixCls","className","avatar","title","description"]),{getPrefixCls:u}=r.useContext(l.E_),d=u("card",t),f=i()("".concat(d,"-meta"),n),p=o?r.createElement("div",{className:"".concat(d,"-meta-avatar")},o):null,h=a?r.createElement("div",{className:"".concat(d,"-meta-title")},a):null,m=c?r.createElement("div",{className:"".concat(d,"-meta-description")},c):null,g=h||m?r.createElement("div",{className:"".concat(d,"-meta-detail")},h,m):null;return r.createElement("div",Object.assign({},s,{className:f}),p,g)};var e8=e4},69410:function(e,t,n){"use strict";var r=n(54998);t.Z=r.Z},91086:function(e,t,n){"use strict";var r=n(2265),o=n(71744),i=n(85180);t.Z=e=>{let{componentName:t}=e,{getPrefixCls:n}=(0,r.useContext)(o.E_),a=n("empty");switch(t){case"Table":case"List":return r.createElement(i.Z,{image:i.Z.PRESENTED_IMAGE_SIMPLE});case"Select":case"TreeSelect":case"Cascader":case"Transfer":case"Mentions":return r.createElement(i.Z,{image:i.Z.PRESENTED_IMAGE_SIMPLE,className:"".concat(a,"-small")});default:return r.createElement(i.Z,null)}}},80795:function(e,t,n){"use strict";n.d(t,{Z:function(){return H}});var 
r=n(2265),o=n(77565),i=n(36760),a=n.n(i),l=n(71030),c=n(74126),s=n(50506),u=n(18694),d=n(62236),f=n(92736),p=n(93942),h=n(19722),m=n(13613),g=n(95140),v=n(71744),y=n(45937),b=n(88208),x=n(29961),w=n(12918),S=n(18544),k=n(29382),E=n(691),C=n(88260),O=n(80669),j=n(3104),P=e=>{let{componentCls:t,menuCls:n,colorError:r,colorTextLightSolid:o}=e,i="".concat(n,"-item");return{["".concat(t,", ").concat(t,"-menu-submenu")]:{["".concat(n," ").concat(i)]:{["&".concat(i,"-danger:not(").concat(i,"-disabled)")]:{color:r,"&:hover":{color:o,backgroundColor:r}}}}}},M=n(34442),N=n(352);let I=e=>{let{componentCls:t,menuCls:n,zIndexPopup:r,dropdownArrowDistance:o,sizePopupArrow:i,antCls:a,iconCls:l,motionDurationMid:c,paddingBlock:s,fontSize:u,dropdownEdgeChildPadding:d,colorTextDisabled:f,fontSizeIcon:p,controlPaddingHorizontal:h,colorBgElevated:m}=e;return[{[t]:Object.assign(Object.assign({},(0,w.Wf)(e)),{position:"absolute",top:-9999,left:{_skip_check_:!0,value:-9999},zIndex:r,display:"block","&::before":{position:"absolute",insetBlock:e.calc(i).div(2).sub(o).equal(),zIndex:-9999,opacity:1e-4,content:'""'},["&-trigger".concat(a,"-btn")]:{["& > ".concat(l,"-down, & > ").concat(a,"-btn-icon > ").concat(l,"-down")]:{fontSize:p}},["".concat(t,"-wrap")]:{position:"relative",["".concat(a,"-btn > ").concat(l,"-down")]:{fontSize:p},["".concat(l,"-down::before")]:{transition:"transform ".concat(c)}},["".concat(t,"-wrap-open")]:{["".concat(l,"-down::before")]:{transform:"rotate(180deg)"}},"\n &-hidden,\n &-menu-hidden,\n &-menu-submenu-hidden\n ":{display:"none"},["&".concat(a,"-slide-down-enter").concat(a,"-slide-down-enter-active").concat(t,"-placement-bottomLeft,\n &").concat(a,"-slide-down-appear").concat(a,"-slide-down-appear-active").concat(t,"-placement-bottomLeft,\n &").concat(a,"-slide-down-enter").concat(a,"-slide-down-enter-active").concat(t,"-placement-bottom,\n &").concat(a,"-slide-down-appear").concat(a,"-slide-down-appear-active").concat(t,"-placement-bottom,\n 
&").concat(a,"-slide-down-enter").concat(a,"-slide-down-enter-active").concat(t,"-placement-bottomRight,\n &").concat(a,"-slide-down-appear").concat(a,"-slide-down-appear-active").concat(t,"-placement-bottomRight")]:{animationName:S.fJ},["&".concat(a,"-slide-up-enter").concat(a,"-slide-up-enter-active").concat(t,"-placement-topLeft,\n &").concat(a,"-slide-up-appear").concat(a,"-slide-up-appear-active").concat(t,"-placement-topLeft,\n &").concat(a,"-slide-up-enter").concat(a,"-slide-up-enter-active").concat(t,"-placement-top,\n &").concat(a,"-slide-up-appear").concat(a,"-slide-up-appear-active").concat(t,"-placement-top,\n &").concat(a,"-slide-up-enter").concat(a,"-slide-up-enter-active").concat(t,"-placement-topRight,\n &").concat(a,"-slide-up-appear").concat(a,"-slide-up-appear-active").concat(t,"-placement-topRight")]:{animationName:S.Qt},["&".concat(a,"-slide-down-leave").concat(a,"-slide-down-leave-active").concat(t,"-placement-bottomLeft,\n &").concat(a,"-slide-down-leave").concat(a,"-slide-down-leave-active").concat(t,"-placement-bottom,\n &").concat(a,"-slide-down-leave").concat(a,"-slide-down-leave-active").concat(t,"-placement-bottomRight")]:{animationName:S.Uw},["&".concat(a,"-slide-up-leave").concat(a,"-slide-up-leave-active").concat(t,"-placement-topLeft,\n &").concat(a,"-slide-up-leave").concat(a,"-slide-up-leave-active").concat(t,"-placement-top,\n &").concat(a,"-slide-up-leave").concat(a,"-slide-up-leave-active").concat(t,"-placement-topRight")]:{animationName:S.ly}})},(0,C.ZP)(e,m,{arrowPlacement:{top:!0,bottom:!0}}),{["".concat(t," ").concat(n)]:{position:"relative",margin:0},["".concat(n,"-submenu-popup")]:{position:"absolute",zIndex:r,background:"transparent",boxShadow:"none",transformOrigin:"0 0","ul, li":{listStyle:"none",margin:0}},["".concat(t,", 
").concat(t,"-menu-submenu")]:{[n]:Object.assign(Object.assign({padding:d,listStyleType:"none",backgroundColor:m,backgroundClip:"padding-box",borderRadius:e.borderRadiusLG,outline:"none",boxShadow:e.boxShadowSecondary},(0,w.Qy)(e)),{["".concat(n,"-item-group-title")]:{padding:"".concat((0,N.bf)(s)," ").concat((0,N.bf)(h)),color:e.colorTextDescription,transition:"all ".concat(c)},["".concat(n,"-item")]:{position:"relative",display:"flex",alignItems:"center"},["".concat(n,"-item-icon")]:{minWidth:u,marginInlineEnd:e.marginXS,fontSize:e.fontSizeSM},["".concat(n,"-title-content")]:{flex:"auto","> a":{color:"inherit",transition:"all ".concat(c),"&:hover":{color:"inherit"},"&::after":{position:"absolute",inset:0,content:'""'}}},["".concat(n,"-item, ").concat(n,"-submenu-title")]:Object.assign(Object.assign({clear:"both",margin:0,padding:"".concat((0,N.bf)(s)," ").concat((0,N.bf)(h)),color:e.colorText,fontWeight:"normal",fontSize:u,lineHeight:e.lineHeight,cursor:"pointer",transition:"all ".concat(c),borderRadius:e.borderRadiusSM,"&:hover, &-active":{backgroundColor:e.controlItemBgHover}},(0,w.Qy)(e)),{"&-selected":{color:e.colorPrimary,backgroundColor:e.controlItemBgActive,"&:hover, &-active":{backgroundColor:e.controlItemBgActiveHover}},"&-disabled":{color:f,cursor:"not-allowed","&:hover":{color:f,backgroundColor:m,cursor:"not-allowed"},a:{pointerEvents:"none"}},"&-divider":{height:1,margin:"".concat((0,N.bf)(e.marginXXS)," 0"),overflow:"hidden",lineHeight:0,backgroundColor:e.colorSplit},["".concat(t,"-menu-submenu-expand-icon")]:{position:"absolute",insetInlineEnd:e.paddingXS,["".concat(t,"-menu-submenu-arrow-icon")]:{marginInlineEnd:"0 !important",color:e.colorTextDescription,fontSize:p,fontStyle:"normal"}}}),["".concat(n,"-item-group-list")]:{margin:"0 
".concat((0,N.bf)(e.marginXS)),padding:0,listStyle:"none"},["".concat(n,"-submenu-title")]:{paddingInlineEnd:e.calc(h).add(e.fontSizeSM).equal()},["".concat(n,"-submenu-vertical")]:{position:"relative"},["".concat(n,"-submenu").concat(n,"-submenu-disabled ").concat(t,"-menu-submenu-title")]:{["&, ".concat(t,"-menu-submenu-arrow-icon")]:{color:f,backgroundColor:m,cursor:"not-allowed"}},["".concat(n,"-submenu-selected ").concat(t,"-menu-submenu-title")]:{color:e.colorPrimary}})}},[(0,S.oN)(e,"slide-up"),(0,S.oN)(e,"slide-down"),(0,k.Fm)(e,"move-up"),(0,k.Fm)(e,"move-down"),(0,E._y)(e,"zoom-big")]]};var R=(0,O.I$)("Dropdown",e=>{let{marginXXS:t,sizePopupArrow:n,paddingXXS:r,componentCls:o}=e,i=(0,j.TS)(e,{menuCls:"".concat(o,"-menu"),dropdownArrowDistance:e.calc(n).div(2).add(t).equal(),dropdownEdgeChildPadding:r});return[I(i),P(i)]},e=>Object.assign(Object.assign({zIndexPopup:e.zIndexPopupBase+50,paddingBlock:(e.controlHeight-e.fontSize*e.lineHeight)/2},(0,C.wZ)({contentRadius:e.borderRadiusLG,limitVerticalRadius:!0})),(0,M.w)(e))),T=n(64024);let A=e=>{let t;let{menu:n,arrow:i,prefixCls:p,children:w,trigger:S,disabled:k,dropdownRender:E,getPopupContainer:C,overlayClassName:O,rootClassName:j,overlayStyle:P,open:M,onOpenChange:N,visible:I,onVisibleChange:A,mouseEnterDelay:_=.15,mouseLeaveDelay:D=.1,autoAdjustOverflow:Z=!0,placement:L="",overlay:z,transitionName:B}=e,{getPopupContainer:F,getPrefixCls:H,direction:q,dropdown:W}=r.useContext(v.E_);(0,m.ln)("Dropdown");let K=r.useMemo(()=>{let e=H();return void 
0!==B?B:L.includes("top")?"".concat(e,"-slide-down"):"".concat(e,"-slide-up")},[H,L,B]),U=r.useMemo(()=>L?L.includes("Center")?L.slice(0,L.indexOf("Center")):L:"rtl"===q?"bottomRight":"bottomLeft",[L,q]),V=H("dropdown",p),G=(0,T.Z)(V),[X,$,Y]=R(V,G),[,Q]=(0,x.ZP)(),J=r.Children.only(w),ee=(0,h.Tm)(J,{className:a()("".concat(V,"-trigger"),{["".concat(V,"-rtl")]:"rtl"===q},J.props.className),disabled:k}),et=k?[]:S;et&&et.includes("contextMenu")&&(t=!0);let[en,er]=(0,s.Z)(!1,{value:null!=M?M:I}),eo=(0,c.zX)(e=>{null==N||N(e,{source:"trigger"}),null==A||A(e),er(e)}),ei=a()(O,j,$,Y,G,null==W?void 0:W.className,{["".concat(V,"-rtl")]:"rtl"===q}),ea=(0,f.Z)({arrowPointAtCenter:"object"==typeof i&&i.pointAtCenter,autoAdjustOverflow:Z,offset:Q.marginXXS,arrowWidth:i?Q.sizePopupArrow:0,borderRadius:Q.borderRadius}),el=r.useCallback(()=>{null!=n&&n.selectable&&null!=n&&n.multiple||(null==N||N(!1,{source:"menu"}),er(!1))},[null==n?void 0:n.selectable,null==n?void 0:n.multiple]),[ec,es]=(0,d.Cn)("Dropdown",null==P?void 0:P.zIndex),eu=r.createElement(l.Z,Object.assign({alignPoint:t},(0,u.Z)(e,["rootClassName"]),{mouseEnterDelay:_,mouseLeaveDelay:D,visible:en,builtinPlacements:ea,arrow:!!i,overlayClassName:ei,prefixCls:V,getPopupContainer:C||F,transitionName:K,trigger:et,overlay:()=>{let e;return e=(null==n?void 0:n.items)?r.createElement(y.Z,Object.assign({},n)):"function"==typeof z?z():z,E&&(e=E(e)),e=r.Children.only("string"==typeof e?r.createElement("span",null,e):e),r.createElement(b.J,{prefixCls:"".concat(V,"-menu"),rootClassName:a()(Y,G),expandIcon:r.createElement("span",{className:"".concat(V,"-menu-submenu-arrow")},r.createElement(o.Z,{className:"".concat(V,"-menu-submenu-arrow-icon")})),mode:"vertical",selectable:!1,onClick:el,validator:e=>{let{mode:t}=e}},e)},placement:U,onVisibleChange:eo,overlayStyle:Object.assign(Object.assign(Object.assign({},null==W?void 0:W.style),P),{zIndex:ec})}),ee);return 
ec&&(eu=r.createElement(g.Z.Provider,{value:es},eu)),X(eu)},_=(0,p.Z)(A,"dropdown",e=>e,function(e){return Object.assign(Object.assign({},e),{align:{overflow:{adjustX:!1,adjustY:!1}}})});A._InternalPanelDoNotUseOrYouWillBeFired=e=>r.createElement(_,Object.assign({},e),r.createElement("span",null));var D=n(39760),Z=n(73002),L=n(93142),z=n(65658),B=function(e,t){var n={};for(var r in e)Object.prototype.hasOwnProperty.call(e,r)&&0>t.indexOf(r)&&(n[r]=e[r]);if(null!=e&&"function"==typeof Object.getOwnPropertySymbols)for(var o=0,r=Object.getOwnPropertySymbols(e);ot.indexOf(r[o])&&Object.prototype.propertyIsEnumerable.call(e,r[o])&&(n[r[o]]=e[r[o]]);return n};let F=e=>{let{getPopupContainer:t,getPrefixCls:n,direction:o}=r.useContext(v.E_),{prefixCls:i,type:l="default",danger:c,disabled:s,loading:u,onClick:d,htmlType:f,children:p,className:h,menu:m,arrow:g,autoFocus:y,overlay:b,trigger:x,align:w,open:S,onOpenChange:k,placement:E,getPopupContainer:C,href:O,icon:j=r.createElement(D.Z,null),title:P,buttonsRender:M=e=>e,mouseEnterDelay:N,mouseLeaveDelay:I,overlayClassName:R,overlayStyle:T,destroyPopupOnHide:_,dropdownRender:F}=e,H=B(e,["prefixCls","type","danger","disabled","loading","onClick","htmlType","children","className","menu","arrow","autoFocus","overlay","trigger","align","open","onOpenChange","placement","getPopupContainer","href","icon","title","buttonsRender","mouseEnterDelay","mouseLeaveDelay","overlayClassName","overlayStyle","destroyPopupOnHide","dropdownRender"]),q=n("dropdown",i),W={menu:m,arrow:g,autoFocus:y,align:w,disabled:s,trigger:s?[]:x,onOpenChange:k,getPopupContainer:C||t,mouseEnterDelay:N,mouseLeaveDelay:I,overlayClassName:R,overlayStyle:T,destroyPopupOnHide:_,dropdownRender:F},{compactSize:K,compactItemClassnames:U}=(0,z.ri)(q,o),V=a()("".concat(q,"-button"),U,h);"overlay"in e&&(W.overlay=b),"open"in e&&(W.open=S),"placement"in 
e?W.placement=E:W.placement="rtl"===o?"bottomLeft":"bottomRight";let[G,X]=M([r.createElement(Z.ZP,{type:l,danger:c,disabled:s,loading:u,onClick:d,htmlType:f,href:O,title:P},p),r.createElement(Z.ZP,{type:l,danger:c,icon:j})]);return r.createElement(L.Z.Compact,Object.assign({className:V,size:K,block:!0},H),G,r.createElement(A,Object.assign({},W),X))};F.__ANT_BUTTON=!0,A.Button=F;var H=A},85180:function(e,t,n){"use strict";n.d(t,{Z:function(){return y}});var r=n(36760),o=n.n(r),i=n(2265),a=n(71744),l=n(55274),c=n(36360),s=n(29961),u=n(80669),d=n(3104);let f=e=>{let{componentCls:t,margin:n,marginXS:r,marginXL:o,fontSize:i,lineHeight:a}=e;return{[t]:{marginInline:r,fontSize:i,lineHeight:a,textAlign:"center",["".concat(t,"-image")]:{height:e.emptyImgHeight,marginBottom:r,opacity:e.opacityImage,img:{height:"100%"},svg:{maxWidth:"100%",height:"100%",margin:"auto"}},["".concat(t,"-description")]:{color:e.colorText},["".concat(t,"-footer")]:{marginTop:n},"&-normal":{marginBlock:o,color:e.colorTextDisabled,["".concat(t,"-description")]:{color:e.colorTextDisabled},["".concat(t,"-image")]:{height:e.emptyImgHeightMD}},"&-small":{marginBlock:r,color:e.colorTextDisabled,["".concat(t,"-image")]:{height:e.emptyImgHeightSM}}}}};var p=(0,u.I$)("Empty",e=>{let{componentCls:t,controlHeightLG:n,calc:r}=e;return[f((0,d.TS)(e,{emptyImgCls:"".concat(t,"-img"),emptyImgHeight:r(n).mul(2.5).equal(),emptyImgHeightMD:n,emptyImgHeightSM:r(n).mul(.875).equal()}))]}),h=function(e,t){var n={};for(var r in e)Object.prototype.hasOwnProperty.call(e,r)&&0>t.indexOf(r)&&(n[r]=e[r]);if(null!=e&&"function"==typeof Object.getOwnPropertySymbols)for(var o=0,r=Object.getOwnPropertySymbols(e);ot.indexOf(r[o])&&Object.prototype.propertyIsEnumerable.call(e,r[o])&&(n[r[o]]=e[r[o]]);return n};let m=i.createElement(()=>{let[,e]=(0,s.ZP)(),t=new c.C(e.colorBgBase).toHsl().l<.5?{opacity:.65}:{};return i.createElement("svg",{style:t,width:"184",height:"152",viewBox:"0 0 184 
152",xmlns:"http://www.w3.org/2000/svg"},i.createElement("g",{fill:"none",fillRule:"evenodd"},i.createElement("g",{transform:"translate(24 31.67)"},i.createElement("ellipse",{fillOpacity:".8",fill:"#F5F5F7",cx:"67.797",cy:"106.89",rx:"67.797",ry:"12.668"}),i.createElement("path",{d:"M122.034 69.674L98.109 40.229c-1.148-1.386-2.826-2.225-4.593-2.225h-51.44c-1.766 0-3.444.839-4.592 2.225L13.56 69.674v15.383h108.475V69.674z",fill:"#AEB8C2"}),i.createElement("path",{d:"M101.537 86.214L80.63 61.102c-1.001-1.207-2.507-1.867-4.048-1.867H31.724c-1.54 0-3.047.66-4.048 1.867L6.769 86.214v13.792h94.768V86.214z",fill:"url(#linearGradient-1)",transform:"translate(13.56)"}),i.createElement("path",{d:"M33.83 0h67.933a4 4 0 0 1 4 4v93.344a4 4 0 0 1-4 4H33.83a4 4 0 0 1-4-4V4a4 4 0 0 1 4-4z",fill:"#F5F5F7"}),i.createElement("path",{d:"M42.678 9.953h50.237a2 2 0 0 1 2 2V36.91a2 2 0 0 1-2 2H42.678a2 2 0 0 1-2-2V11.953a2 2 0 0 1 2-2zM42.94 49.767h49.713a2.262 2.262 0 1 1 0 4.524H42.94a2.262 2.262 0 0 1 0-4.524zM42.94 61.53h49.713a2.262 2.262 0 1 1 0 4.525H42.94a2.262 2.262 0 0 1 0-4.525zM121.813 105.032c-.775 3.071-3.497 5.36-6.735 5.36H20.515c-3.238 0-5.96-2.29-6.734-5.36a7.309 7.309 0 0 1-.222-1.79V69.675h26.318c2.907 0 5.25 2.448 5.25 5.42v.04c0 2.971 2.37 5.37 5.277 5.37h34.785c2.907 0 5.277-2.421 5.277-5.393V75.1c0-2.972 2.343-5.426 5.25-5.426h26.318v33.569c0 .617-.077 1.216-.221 1.789z",fill:"#DCE0E6"})),i.createElement("path",{d:"M149.121 33.292l-6.83 2.65a1 1 0 0 1-1.317-1.23l1.937-6.207c-2.589-2.944-4.109-6.534-4.109-10.408C138.802 8.102 148.92 0 161.402 0 173.881 0 184 8.102 184 18.097c0 9.995-10.118 18.097-22.599 18.097-4.528 0-8.744-1.066-12.28-2.902z",fill:"#DCE0E6"}),i.createElement("g",{transform:"translate(149.65 15.383)",fill:"#FFF"},i.createElement("ellipse",{cx:"20.654",cy:"3.167",rx:"2.849",ry:"2.815"}),i.createElement("path",{d:"M5.698 
5.63H0L2.898.704zM9.259.704h4.985V5.63H9.259z"}))))},null),g=i.createElement(()=>{let[,e]=(0,s.ZP)(),{colorFill:t,colorFillTertiary:n,colorFillQuaternary:r,colorBgContainer:o}=e,{borderColor:a,shadowColor:l,contentColor:u}=(0,i.useMemo)(()=>({borderColor:new c.C(t).onBackground(o).toHexShortString(),shadowColor:new c.C(n).onBackground(o).toHexShortString(),contentColor:new c.C(r).onBackground(o).toHexShortString()}),[t,n,r,o]);return i.createElement("svg",{width:"64",height:"41",viewBox:"0 0 64 41",xmlns:"http://www.w3.org/2000/svg"},i.createElement("g",{transform:"translate(0 1)",fill:"none",fillRule:"evenodd"},i.createElement("ellipse",{fill:l,cx:"32",cy:"33",rx:"32",ry:"7"}),i.createElement("g",{fillRule:"nonzero",stroke:a},i.createElement("path",{d:"M55 12.76L44.854 1.258C44.367.474 43.656 0 42.907 0H21.093c-.749 0-1.46.474-1.947 1.257L9 12.761V22h46v-9.24z"}),i.createElement("path",{d:"M41.613 15.931c0-1.605.994-2.93 2.227-2.931H55v18.137C55 33.26 53.68 35 52.05 35h-40.1C10.32 35 9 33.259 9 31.137V13h11.16c1.233 0 2.227 1.323 2.227 2.928v.022c0 1.605 1.005 2.901 2.237 2.901h14.752c1.232 0 2.237-1.308 2.237-2.913v-.007z",fill:u}))))},null),v=e=>{var{className:t,rootClassName:n,prefixCls:r,image:c=m,description:s,children:u,imageStyle:d,style:f}=e,v=h(e,["className","rootClassName","prefixCls","image","description","children","imageStyle","style"]);let{getPrefixCls:y,direction:b,empty:x}=i.useContext(a.E_),w=y("empty",r),[S,k,E]=p(w),[C]=(0,l.Z)("Empty"),O=void 0!==s?s:null==C?void 0:C.description,j=null;return j="string"==typeof c?i.createElement("img",{alt:"string"==typeof O?O:"empty",src:c}):c,S(i.createElement("div",Object.assign({className:o()(k,E,w,null==x?void 0:x.className,{["".concat(w,"-normal")]:c===g,["".concat(w,"-rtl")]:"rtl"===b},t,n),style:Object.assign(Object.assign({},null==x?void 
0:x.style),f)},v),i.createElement("div",{className:"".concat(w,"-image"),style:d},j),O&&i.createElement("div",{className:"".concat(w,"-description")},O),u&&i.createElement("div",{className:"".concat(w,"-footer")},u)))};v.PRESENTED_IMAGE_DEFAULT=m,v.PRESENTED_IMAGE_SIMPLE=g;var y=v},56250:function(e,t,n){"use strict";var r=n(2265),o=n(39109);let i=["outlined","borderless","filled"];t.Z=function(e){let t,n=arguments.length>1&&void 0!==arguments[1]?arguments[1]:void 0,a=(0,r.useContext)(o.pg);t=void 0!==e?e:!1===n?"borderless":null!=a?a:"outlined";let l=i.includes(t);return[t,l]}},20577:function(e,t,n){"use strict";n.d(t,{Z:function(){return em}});var r=n(2265),o=n(70464),i=n(1119),a={icon:{tag:"svg",attrs:{viewBox:"64 64 896 896",focusable:"false"},children:[{tag:"path",attrs:{d:"M890.5 755.3L537.9 269.2c-12.8-17.6-39-17.6-51.7 0L133.5 755.3A8 8 0 00140 768h75c5.1 0 9.9-2.5 12.9-6.6L512 369.8l284.1 391.6c3 4.1 7.8 6.6 12.9 6.6h75c6.5 0 10.3-7.4 6.5-12.7z"}}]},name:"up",theme:"outlined"},l=n(55015),c=r.forwardRef(function(e,t){return r.createElement(l.Z,(0,i.Z)({},e,{ref:t,icon:a}))}),s=n(36760),u=n.n(s),d=n(11993),f=n(41154),p=n(26365),h=n(6989),m=n(76405),g=n(25049);function v(){return"function"==typeof BigInt}function y(e){return!e&&0!==e&&!Number.isNaN(e)||!String(e).trim()}function b(e){var t=e.trim(),n=t.startsWith("-");n&&(t=t.slice(1)),(t=t.replace(/(\.\d*[^0])0*$/,"$1").replace(/\.0*$/,"").replace(/^0+/,"")).startsWith(".")&&(t="0".concat(t));var r=t||"0",o=r.split("."),i=o[0]||"0",a=o[1]||"0";"0"===i&&"0"===a&&(n=!1);var l=n?"-":"";return{negative:n,negativeStr:l,trimStr:r,integerStr:i,decimalStr:a,fullStr:"".concat(l).concat(r)}}function x(e){var t=String(e);return!Number.isNaN(Number(t))&&t.includes("e")}function w(e){var t=String(e);if(x(e)){var n=Number(t.slice(t.indexOf("e-")+2)),r=t.match(/\.(\d+)/);return null!=r&&r[1]&&(n+=r[1].length),n}return t.includes(".")&&k(t)?t.length-t.indexOf(".")-1:0}function S(e){var 
t=String(e);if(x(e)){if(e>Number.MAX_SAFE_INTEGER)return String(v()?BigInt(e).toString():Number.MAX_SAFE_INTEGER);if(e=this.add(e.negate().toString()).toNumber()}},{key:"toNumber",value:function(){return this.isNaN()?NaN:Number(this.toString())}},{key:"toString",value:function(){var e=!(arguments.length>0)||void 0===arguments[0]||arguments[0];return e?this.isInvalidate()?"":b("".concat(this.getMark()).concat(this.getIntegerStr(),".").concat(this.getDecimalStr())).fullStr:this.origin}}]),e}(),C=function(){function e(t){if((0,m.Z)(this,e),(0,d.Z)(this,"origin",""),(0,d.Z)(this,"number",void 0),(0,d.Z)(this,"empty",void 0),y(t)){this.empty=!0;return}this.origin=String(t),this.number=Number(t)}return(0,g.Z)(e,[{key:"negate",value:function(){return new e(-this.toNumber())}},{key:"add",value:function(t){if(this.isInvalidate())return new e(t);var n=Number(t);if(Number.isNaN(n))return this;var r=this.number+n;if(r>Number.MAX_SAFE_INTEGER)return new e(Number.MAX_SAFE_INTEGER);if(rNumber.MAX_SAFE_INTEGER)return new e(Number.MAX_SAFE_INTEGER);if(r=this.add(e.negate().toString()).toNumber()}},{key:"toNumber",value:function(){return this.number}},{key:"toString",value:function(){var e=!(arguments.length>0)||void 0===arguments[0]||arguments[0];return e?this.isInvalidate()?"":S(this.number):this.origin}}]),e}();function O(e){return v()?new E(e):new C(e)}function j(e,t,n){var r=arguments.length>3&&void 0!==arguments[3]&&arguments[3];if(""===e)return"";var o=b(e),i=o.negativeStr,a=o.integerStr,l=o.decimalStr,c="".concat(t).concat(l),s="".concat(i).concat(a);if(n>=0){var u=Number(l[n]);return u>=5&&!r?j(O(e).add("".concat(i,"0.").concat("0".repeat(n)).concat(10-u)).toString(),t,n,r):0===n?s:"".concat(s).concat(t).concat(l.padEnd(n,"0").slice(0,n))}return".0"===c?s:"".concat(s).concat(c)}var P=n(2027),M=n(27380),N=n(28791),I=n(32559),R=n(79267),T=function(){var e=(0,r.useState)(!1),t=(0,p.Z)(e,2),n=t[0],o=t[1];return(0,M.Z)(function(){o((0,R.Z)())},[]),n},A=n(53346);function _(e){var 
t=e.prefixCls,n=e.upNode,o=e.downNode,a=e.upDisabled,l=e.downDisabled,c=e.onStep,s=r.useRef(),f=r.useRef([]),p=r.useRef();p.current=c;var h=function(){clearTimeout(s.current)},m=function(e,t){e.preventDefault(),h(),p.current(t),s.current=setTimeout(function e(){p.current(t),s.current=setTimeout(e,200)},600)};if(r.useEffect(function(){return function(){h(),f.current.forEach(function(e){return A.Z.cancel(e)})}},[]),T())return null;var g="".concat(t,"-handler"),v=u()(g,"".concat(g,"-up"),(0,d.Z)({},"".concat(g,"-up-disabled"),a)),y=u()(g,"".concat(g,"-down"),(0,d.Z)({},"".concat(g,"-down-disabled"),l)),b=function(){return f.current.push((0,A.Z)(h))},x={unselectable:"on",role:"button",onMouseUp:b,onMouseLeave:b};return r.createElement("div",{className:"".concat(g,"-wrap")},r.createElement("span",(0,i.Z)({},x,{onMouseDown:function(e){m(e,!0)},"aria-label":"Increase Value","aria-disabled":a,className:v}),n||r.createElement("span",{unselectable:"on",className:"".concat(t,"-handler-up-inner")})),r.createElement("span",(0,i.Z)({},x,{onMouseDown:function(e){m(e,!1)},"aria-label":"Decrease Value","aria-disabled":l,className:y}),o||r.createElement("span",{unselectable:"on",className:"".concat(t,"-handler-down-inner")})))}function D(e){var t="number"==typeof e?S(e):b(e).fullStr;return t.includes(".")?b(t.replace(/(\d)\.(\d)/g,"$1$2.")).fullStr:e+"0"}var Z=n(55041),L=function(){var e=(0,r.useRef)(0),t=function(){A.Z.cancel(e.current)};return(0,r.useEffect)(function(){return t},[]),function(n){t(),e.current=(0,A.Z)(function(){n()})}},z=["prefixCls","className","style","min","max","step","defaultValue","value","disabled","readOnly","upHandler","downHandler","keyboard","wheel","controls","classNames","stringMode","parser","formatter","precision","decimalSeparator","onChange","onInput","onPressEnter","onStep","changeOnBlur"],B=["disabled","style","prefixCls","value","prefix","suffix","addonBefore","addonAfter","className","classNames"],F=function(e,t){return 
e||t.isEmpty()?t.toString():t.toNumber()},H=function(e){var t=O(e);return t.isInvalidate()?null:t},q=r.forwardRef(function(e,t){var n,o,a,l=e.prefixCls,c=void 0===l?"rc-input-number":l,s=e.className,m=e.style,g=e.min,v=e.max,y=e.step,b=void 0===y?1:y,x=e.defaultValue,E=e.value,C=e.disabled,P=e.readOnly,R=e.upHandler,T=e.downHandler,A=e.keyboard,Z=e.wheel,B=e.controls,q=(e.classNames,e.stringMode),W=e.parser,K=e.formatter,U=e.precision,V=e.decimalSeparator,G=e.onChange,X=e.onInput,$=e.onPressEnter,Y=e.onStep,Q=e.changeOnBlur,J=void 0===Q||Q,ee=(0,h.Z)(e,z),et="".concat(c,"-input"),en=r.useRef(null),er=r.useState(!1),eo=(0,p.Z)(er,2),ei=eo[0],ea=eo[1],el=r.useRef(!1),ec=r.useRef(!1),es=r.useRef(!1),eu=r.useState(function(){return O(null!=E?E:x)}),ed=(0,p.Z)(eu,2),ef=ed[0],ep=ed[1],eh=r.useCallback(function(e,t){return t?void 0:U>=0?U:Math.max(w(e),w(b))},[U,b]),em=r.useCallback(function(e){var t=String(e);if(W)return W(t);var n=t;return V&&(n=n.replace(V,".")),n.replace(/[^\w.-]+/g,"")},[W,V]),eg=r.useRef(""),ev=r.useCallback(function(e,t){if(K)return K(e,{userTyping:t,input:String(eg.current)});var n="number"==typeof e?S(e):e;if(!t){var r=eh(n,t);k(n)&&(V||r>=0)&&(n=j(n,V||".",r))}return n},[K,eh,V]),ey=r.useState(function(){var e=null!=x?x:E;return ef.isInvalidate()&&["string","number"].includes((0,f.Z)(e))?Number.isNaN(e)?"":e:ev(ef.toString(),!1)}),eb=(0,p.Z)(ey,2),ex=eb[0],ew=eb[1];function eS(e,t){ew(ev(e.isInvalidate()?e.toString(!1):e.toString(!t),t))}eg.current=ex;var ek=r.useMemo(function(){return H(v)},[v,U]),eE=r.useMemo(function(){return H(g)},[g,U]),eC=r.useMemo(function(){return!(!ek||!ef||ef.isInvalidate())&&ek.lessEquals(ef)},[ek,ef]),eO=r.useMemo(function(){return!(!eE||!ef||ef.isInvalidate())&&ef.lessEquals(eE)},[eE,ef]),ej=(n=en.current,o=(0,r.useRef)(null),[function(){try{var 
e=n.selectionStart,t=n.selectionEnd,r=n.value,i=r.substring(0,e),a=r.substring(t);o.current={start:e,end:t,value:r,beforeTxt:i,afterTxt:a}}catch(e){}},function(){if(n&&o.current&&ei)try{var e=n.value,t=o.current,r=t.beforeTxt,i=t.afterTxt,a=t.start,l=e.length;if(e.endsWith(i))l=e.length-o.current.afterTxt.length;else if(e.startsWith(r))l=r.length;else{var c=r[a-1],s=e.indexOf(c,a-1);-1!==s&&(l=s+1)}n.setSelectionRange(l,l)}catch(e){(0,I.ZP)(!1,"Something warning of cursor restore. Please fire issue about this: ".concat(e.message))}}]),eP=(0,p.Z)(ej,2),eM=eP[0],eN=eP[1],eI=function(e){return ek&&!e.lessEquals(ek)?ek:eE&&!eE.lessEquals(e)?eE:null},eR=function(e){return!eI(e)},eT=function(e,t){var n=e,r=eR(n)||n.isEmpty();if(n.isEmpty()||t||(n=eI(n)||n,r=!0),!P&&!C&&r){var o,i=n.toString(),a=eh(i,t);return a>=0&&!eR(n=O(j(i,".",a)))&&(n=O(j(i,".",a,!0))),n.equals(ef)||(o=n,void 0===E&&ep(o),null==G||G(n.isEmpty()?null:F(q,n)),void 0===E&&eS(n,t)),n}return ef},eA=L(),e_=function e(t){if(eM(),eg.current=t,ew(t),!ec.current){var n=O(em(t));n.isNaN()||eT(n,!0)}null==X||X(t),eA(function(){var n=t;W||(n=t.replace(/。/g,".")),n!==t&&e(n)})},eD=function(e){if((!e||!eC)&&(e||!eO)){el.current=!1;var t,n=O(es.current?D(b):b);e||(n=n.negate());var r=eT((ef||O(0)).add(n.toString()),!1);null==Y||Y(F(q,r),{offset:es.current?D(b):b,type:e?"up":"down"}),null===(t=en.current)||void 0===t||t.focus()}},eZ=function(e){var t=O(em(ex)),n=t;n=t.isNaN()?eT(ef,e):eT(t,e),void 0!==E?eS(ef,!1):n.isNaN()||eS(n,!1)};return r.useEffect(function(){var e=function(e){!1!==Z&&(eD(e.deltaY<0),e.preventDefault())},t=en.current;if(t)return t.addEventListener("wheel",e),function(){return t.removeEventListener("wheel",e)}},[eD]),(0,M.o)(function(){ef.isInvalidate()||eS(ef,!1)},[U,K]),(0,M.o)(function(){var e=O(E);ep(e);var 
t=O(em(ex));e.equals(t)&&el.current&&!K||eS(e,el.current)},[E]),(0,M.o)(function(){K&&eN()},[ex]),r.createElement("div",{className:u()(c,s,(a={},(0,d.Z)(a,"".concat(c,"-focused"),ei),(0,d.Z)(a,"".concat(c,"-disabled"),C),(0,d.Z)(a,"".concat(c,"-readonly"),P),(0,d.Z)(a,"".concat(c,"-not-a-number"),ef.isNaN()),(0,d.Z)(a,"".concat(c,"-out-of-range"),!ef.isInvalidate()&&!eR(ef)),a)),style:m,onFocus:function(){ea(!0)},onBlur:function(){J&&eZ(!1),ea(!1),el.current=!1},onKeyDown:function(e){var t=e.key,n=e.shiftKey;el.current=!0,es.current=n,"Enter"===t&&(ec.current||(el.current=!1),eZ(!1),null==$||$(e)),!1!==A&&!ec.current&&["Up","ArrowUp","Down","ArrowDown"].includes(t)&&(eD("Up"===t||"ArrowUp"===t),e.preventDefault())},onKeyUp:function(){el.current=!1,es.current=!1},onCompositionStart:function(){ec.current=!0},onCompositionEnd:function(){ec.current=!1,e_(en.current.value)},onBeforeInput:function(){el.current=!0}},(void 0===B||B)&&r.createElement(_,{prefixCls:c,upNode:R,downNode:T,upDisabled:eC,downDisabled:eO,onStep:eD}),r.createElement("div",{className:"".concat(et,"-wrap")},r.createElement("input",(0,i.Z)({autoComplete:"off",role:"spinbutton","aria-valuemin":g,"aria-valuemax":v,"aria-valuenow":ef.isInvalidate()?null:ef.toString(),step:b},ee,{ref:(0,N.sQ)(en,t),className:et,value:ex,onChange:function(e){e_(e.target.value)},disabled:C,readOnly:P}))))}),W=r.forwardRef(function(e,t){var n=e.disabled,o=e.style,a=e.prefixCls,l=e.value,c=e.prefix,s=e.suffix,u=e.addonBefore,d=e.addonAfter,f=e.className,p=e.classNames,m=(0,h.Z)(e,B),g=r.useRef(null);return r.createElement(P.Q,{className:f,triggerFocus:function(e){g.current&&(0,Z.nH)(g.current,e)},prefixCls:a,value:l,disabled:n,style:o,prefix:c,suffix:s,addonAfter:d,addonBefore:u,classNames:p,components:{affixWrapper:"div",groupWrapper:"div",wrapper:"div",groupAddon:"div"}},r.createElement(q,(0,i.Z)({prefixCls:a,disabled:n,ref:(0,N.sQ)(g,t),className:null==p?void 0:p.input},m)))});W.displayName="InputNumber";var 
K=n(12757),U=n(71744),V=n(13959),G=n(86586),X=n(64024),$=n(33759),Y=n(39109),Q=n(56250),J=n(65658),ee=n(352),et=n(31282),en=n(37433),er=n(65265),eo=n(12918),ei=n(17691),ea=n(80669),el=n(3104),ec=n(36360);let es=(e,t)=>{let{componentCls:n,borderRadiusSM:r,borderRadiusLG:o}=e,i="lg"===t?o:r;return{["&-".concat(t)]:{["".concat(n,"-handler-wrap")]:{borderStartEndRadius:i,borderEndEndRadius:i},["".concat(n,"-handler-up")]:{borderStartEndRadius:i},["".concat(n,"-handler-down")]:{borderEndEndRadius:i}}}},eu=e=>{let{componentCls:t,lineWidth:n,lineType:r,borderRadius:o,fontSizeLG:i,controlHeightLG:a,controlHeightSM:l,colorError:c,paddingInlineSM:s,paddingBlockSM:u,paddingBlockLG:d,paddingInlineLG:f,colorTextDescription:p,motionDurationMid:h,handleHoverColor:m,paddingInline:g,paddingBlock:v,handleBg:y,handleActiveBg:b,colorTextDisabled:x,borderRadiusSM:w,borderRadiusLG:S,controlWidth:k,handleOpacity:E,handleBorderColor:C,filledHandleBg:O,lineHeightLG:j,calc:P}=e;return[{[t]:Object.assign(Object.assign(Object.assign(Object.assign(Object.assign(Object.assign(Object.assign({},(0,eo.Wf)(e)),(0,et.ik)(e)),{display:"inline-block",width:k,margin:0,padding:0,borderRadius:o}),(0,er.qG)(e,{["".concat(t,"-handler-wrap")]:{background:y,["".concat(t,"-handler-down")]:{borderBlockStart:"".concat((0,ee.bf)(n)," ").concat(r," ").concat(C)}}})),(0,er.H8)(e,{["".concat(t,"-handler-wrap")]:{background:O,["".concat(t,"-handler-down")]:{borderBlockStart:"".concat((0,ee.bf)(n)," ").concat(r," ").concat(C)}},"&:focus-within":{["".concat(t,"-handler-wrap")]:{background:y}}})),(0,er.Mu)(e)),{"&-rtl":{direction:"rtl",["".concat(t,"-input")]:{direction:"rtl"}},"&-lg":{padding:0,fontSize:i,lineHeight:j,borderRadius:S,["input".concat(t,"-input")]:{height:P(a).sub(P(n).mul(2)).equal(),padding:"".concat((0,ee.bf)(d)," ").concat((0,ee.bf)(f))}},"&-sm":{padding:0,borderRadius:w,["input".concat(t,"-input")]:{height:P(l).sub(P(n).mul(2)).equal(),padding:"".concat((0,ee.bf)(u)," 
").concat((0,ee.bf)(s))}},"&-out-of-range":{["".concat(t,"-input-wrap")]:{input:{color:c}}},"&-group":Object.assign(Object.assign(Object.assign({},(0,eo.Wf)(e)),(0,et.s7)(e)),{"&-wrapper":Object.assign(Object.assign(Object.assign({display:"inline-block",textAlign:"start",verticalAlign:"top",["".concat(t,"-affix-wrapper")]:{width:"100%"},"&-lg":{["".concat(t,"-group-addon")]:{borderRadius:S,fontSize:e.fontSizeLG}},"&-sm":{["".concat(t,"-group-addon")]:{borderRadius:w}}},(0,er.ir)(e)),(0,er.S5)(e)),{["&:not(".concat(t,"-compact-first-item):not(").concat(t,"-compact-last-item)").concat(t,"-compact-item")]:{["".concat(t,", ").concat(t,"-group-addon")]:{borderRadius:0}},["&:not(".concat(t,"-compact-last-item)").concat(t,"-compact-first-item")]:{["".concat(t,", ").concat(t,"-group-addon")]:{borderStartEndRadius:0,borderEndEndRadius:0}},["&:not(".concat(t,"-compact-first-item)").concat(t,"-compact-last-item")]:{["".concat(t,", ").concat(t,"-group-addon")]:{borderStartStartRadius:0,borderEndStartRadius:0}}})}),["&-disabled ".concat(t,"-input")]:{cursor:"not-allowed"},[t]:{"&-input":Object.assign(Object.assign(Object.assign(Object.assign({},(0,eo.Wf)(e)),{width:"100%",padding:"".concat((0,ee.bf)(v)," ").concat((0,ee.bf)(g)),textAlign:"start",backgroundColor:"transparent",border:0,borderRadius:o,outline:0,transition:"all ".concat(h," linear"),appearance:"textfield",fontSize:"inherit"}),(0,et.nz)(e.colorTextPlaceholder)),{'&[type="number"]::-webkit-inner-spin-button, &[type="number"]::-webkit-outer-spin-button':{margin:0,webkitAppearance:"none",appearance:"none"}})}})},{[t]:Object.assign(Object.assign(Object.assign({["&:hover ".concat(t,"-handler-wrap, &-focused 
").concat(t,"-handler-wrap")]:{opacity:1},["".concat(t,"-handler-wrap")]:{position:"absolute",insetBlockStart:0,insetInlineEnd:0,width:e.handleWidth,height:"100%",borderStartStartRadius:0,borderStartEndRadius:o,borderEndEndRadius:o,borderEndStartRadius:0,opacity:E,display:"flex",flexDirection:"column",alignItems:"stretch",transition:"opacity ".concat(h," linear ").concat(h),["".concat(t,"-handler")]:{display:"flex",alignItems:"center",justifyContent:"center",flex:"auto",height:"40%",["\n ".concat(t,"-handler-up-inner,\n ").concat(t,"-handler-down-inner\n ")]:{marginInlineEnd:0,fontSize:e.handleFontSize}}},["".concat(t,"-handler")]:{height:"50%",overflow:"hidden",color:p,fontWeight:"bold",lineHeight:0,textAlign:"center",cursor:"pointer",borderInlineStart:"".concat((0,ee.bf)(n)," ").concat(r," ").concat(C),transition:"all ".concat(h," linear"),"&:active":{background:b},"&:hover":{height:"60%",["\n ".concat(t,"-handler-up-inner,\n ").concat(t,"-handler-down-inner\n ")]:{color:m}},"&-up-inner, &-down-inner":Object.assign(Object.assign({},(0,eo.Ro)()),{color:p,transition:"all ".concat(h," linear"),userSelect:"none"})},["".concat(t,"-handler-up")]:{borderStartEndRadius:o},["".concat(t,"-handler-down")]:{borderEndEndRadius:o}},es(e,"lg")),es(e,"sm")),{"&-disabled, &-readonly":{["".concat(t,"-handler-wrap")]:{display:"none"},["".concat(t,"-input")]:{color:"inherit"}},["\n ".concat(t,"-handler-up-disabled,\n ").concat(t,"-handler-down-disabled\n ")]:{cursor:"not-allowed"},["\n ".concat(t,"-handler-up-disabled:hover &-handler-up-inner,\n ").concat(t,"-handler-down-disabled:hover &-handler-down-inner\n ")]:{color:x}})}]},ed=e=>{let{componentCls:t,paddingBlock:n,paddingInline:r,inputAffixPadding:o,controlWidth:i,borderRadiusLG:a,borderRadiusSM:l,paddingInlineLG:c,paddingInlineSM:s,paddingBlockLG:u,paddingBlockSM:d}=e;return{["".concat(t,"-affix-wrapper")]:Object.assign(Object.assign({["input".concat(t,"-input")]:{padding:"".concat((0,ee.bf)(n)," 
0")}},(0,et.ik)(e)),{position:"relative",display:"inline-flex",width:i,padding:0,paddingInlineStart:r,"&-lg":{borderRadius:a,paddingInlineStart:c,["input".concat(t,"-input")]:{padding:"".concat((0,ee.bf)(u)," 0")}},"&-sm":{borderRadius:l,paddingInlineStart:s,["input".concat(t,"-input")]:{padding:"".concat((0,ee.bf)(d)," 0")}},["&:not(".concat(t,"-disabled):hover")]:{zIndex:1},"&-focused, &:focus":{zIndex:1},["&-disabled > ".concat(t,"-disabled")]:{background:"transparent"},["> div".concat(t)]:{width:"100%",border:"none",outline:"none",["&".concat(t,"-focused")]:{boxShadow:"none !important"}},"&::before":{display:"inline-block",width:0,visibility:"hidden",content:'"\\a0"'},["".concat(t,"-handler-wrap")]:{zIndex:2},[t]:{color:"inherit","&-prefix, &-suffix":{display:"flex",flex:"none",alignItems:"center",pointerEvents:"none"},"&-prefix":{marginInlineEnd:o},"&-suffix":{position:"absolute",insetBlockStart:0,insetInlineEnd:0,zIndex:1,height:"100%",marginInlineEnd:r,marginInlineStart:o}}})}};var ef=(0,ea.I$)("InputNumber",e=>{let t=(0,el.TS)(e,(0,en.e)(e));return[eu(t),ed(t),(0,ei.c)(t)]},e=>{var t;let n=null!==(t=e.handleVisible)&&void 0!==t?t:"auto";return Object.assign(Object.assign({},(0,en.T)(e)),{controlWidth:90,handleWidth:e.controlHeightSM-2*e.lineWidth,handleFontSize:e.fontSize/2,handleVisible:n,handleActiveBg:e.colorFillAlter,handleBg:e.colorBgContainer,filledHandleBg:new ec.C(e.colorFillSecondary).onBackground(e.colorBgContainer).toHexString(),handleHoverColor:e.colorPrimary,handleBorderColor:e.colorBorder,handleOpacity:!0===n?1:0})},{unitless:{handleOpacity:!0}}),ep=function(e,t){var n={};for(var r in e)Object.prototype.hasOwnProperty.call(e,r)&&0>t.indexOf(r)&&(n[r]=e[r]);if(null!=e&&"function"==typeof Object.getOwnPropertySymbols)for(var o=0,r=Object.getOwnPropertySymbols(e);ot.indexOf(r[o])&&Object.prototype.propertyIsEnumerable.call(e,r[o])&&(n[r[o]]=e[r[o]]);return n};let 
eh=r.forwardRef((e,t)=>{let{getPrefixCls:n,direction:i}=r.useContext(U.E_),a=r.useRef(null);r.useImperativeHandle(t,()=>a.current);let{className:l,rootClassName:s,size:d,disabled:f,prefixCls:p,addonBefore:h,addonAfter:m,prefix:g,bordered:v,readOnly:y,status:b,controls:x,variant:w}=e,S=ep(e,["className","rootClassName","size","disabled","prefixCls","addonBefore","addonAfter","prefix","bordered","readOnly","status","controls","variant"]),k=n("input-number",p),E=(0,X.Z)(k),[C,O,j]=ef(k,E),{compactSize:P,compactItemClassnames:M}=(0,J.ri)(k,i),N=r.createElement(c,{className:"".concat(k,"-handler-up-inner")}),I=r.createElement(o.Z,{className:"".concat(k,"-handler-down-inner")});"object"==typeof x&&(N=void 0===x.upIcon?N:r.createElement("span",{className:"".concat(k,"-handler-up-inner")},x.upIcon),I=void 0===x.downIcon?I:r.createElement("span",{className:"".concat(k,"-handler-down-inner")},x.downIcon));let{hasFeedback:R,status:T,isFormItemInput:A,feedbackIcon:_}=r.useContext(Y.aM),D=(0,K.F)(T,b),Z=(0,$.Z)(e=>{var t;return null!==(t=null!=d?d:P)&&void 0!==t?t:e}),L=r.useContext(G.Z),[z,B]=(0,Q.Z)(w,v),F=R&&r.createElement(r.Fragment,null,_),H=u()({["".concat(k,"-lg")]:"large"===Z,["".concat(k,"-sm")]:"small"===Z,["".concat(k,"-rtl")]:"rtl"===i,["".concat(k,"-in-form-item")]:A},O),q="".concat(k,"-group");return C(r.createElement(W,Object.assign({ref:a,disabled:null!=f?f:L,className:u()(j,E,l,s,M),upHandler:N,downHandler:I,prefixCls:k,readOnly:y,controls:"boolean"==typeof x?x:void 
0,prefix:g,suffix:F,addonAfter:m&&r.createElement(J.BR,null,r.createElement(Y.Ux,{override:!0,status:!0},m)),addonBefore:h&&r.createElement(J.BR,null,r.createElement(Y.Ux,{override:!0,status:!0},h)),classNames:{input:H,variant:u()({["".concat(k,"-").concat(z)]:B},(0,K.Z)(k,D,R)),affixWrapper:u()({["".concat(k,"-affix-wrapper-sm")]:"small"===Z,["".concat(k,"-affix-wrapper-lg")]:"large"===Z,["".concat(k,"-affix-wrapper-rtl")]:"rtl"===i},O),wrapper:u()({["".concat(q,"-rtl")]:"rtl"===i},O),groupWrapper:u()({["".concat(k,"-group-wrapper-sm")]:"small"===Z,["".concat(k,"-group-wrapper-lg")]:"large"===Z,["".concat(k,"-group-wrapper-rtl")]:"rtl"===i,["".concat(k,"-group-wrapper-").concat(z)]:B},(0,K.Z)("".concat(k,"-group-wrapper"),D,R),O)}},S)))});eh._InternalPanelDoNotUseOrYouWillBeFired=e=>r.createElement(V.ZP,{theme:{components:{InputNumber:{handleVisible:!0}}}},r.createElement(eh,Object.assign({},e)));var em=eh},65863:function(e,t,n){"use strict";n.d(t,{Z:function(){return S},n:function(){return w}});var r=n(2265),o=n(36760),i=n.n(o),a=n(2027),l=n(28791),c=n(12757),s=n(71744),u=n(86586),d=n(33759),f=n(39109),p=n(65658),h=n(39164),m=n(31282),g=n(64024),v=n(56250),y=n(39725),b=e=>{let t;return"object"==typeof e&&(null==e?void 0:e.clearIcon)?t=e:e&&(t={clearIcon:r.createElement(y.Z,null)}),t},x=function(e,t){var n={};for(var r in e)Object.prototype.hasOwnProperty.call(e,r)&&0>t.indexOf(r)&&(n[r]=e[r]);if(null!=e&&"function"==typeof Object.getOwnPropertySymbols)for(var o=0,r=Object.getOwnPropertySymbols(e);ot.indexOf(r[o])&&Object.prototype.propertyIsEnumerable.call(e,r[o])&&(n[r[o]]=e[r[o]]);return n};function w(e,t){if(!e)return;e.focus(t);let{cursor:n}=t||{};if(n){let t=e.value.length;switch(n){case"start":e.setSelectionRange(0,0);break;case"end":e.setSelectionRange(t,t);break;default:e.setSelectionRange(0,t)}}}var S=(0,r.forwardRef)((e,t)=>{var 
n;let{prefixCls:o,bordered:y=!0,status:w,size:S,disabled:k,onBlur:E,onFocus:C,suffix:O,allowClear:j,addonAfter:P,addonBefore:M,className:N,style:I,styles:R,rootClassName:T,onChange:A,classNames:_,variant:D}=e,Z=x(e,["prefixCls","bordered","status","size","disabled","onBlur","onFocus","suffix","allowClear","addonAfter","addonBefore","className","style","styles","rootClassName","onChange","classNames","variant"]),{getPrefixCls:L,direction:z,input:B}=r.useContext(s.E_),F=L("input",o),H=(0,r.useRef)(null),q=(0,g.Z)(F),[W,K,U]=(0,m.ZP)(F,q),{compactSize:V,compactItemClassnames:G}=(0,p.ri)(F,z),X=(0,d.Z)(e=>{var t;return null!==(t=null!=S?S:V)&&void 0!==t?t:e}),$=r.useContext(u.Z),{status:Y,hasFeedback:Q,feedbackIcon:J}=(0,r.useContext)(f.aM),ee=(0,c.F)(Y,w),et=!!(e.prefix||e.suffix||e.allowClear||e.showCount)||!!Q;(0,r.useRef)(et);let en=(0,h.Z)(H,!0),er=(Q||O)&&r.createElement(r.Fragment,null,O,Q&&J),eo=b(j),[ei,ea]=(0,v.Z)(D,y);return W(r.createElement(a.Z,Object.assign({ref:(0,l.sQ)(t,H),prefixCls:F,autoComplete:null==B?void 0:B.autoComplete},Z,{disabled:null!=k?k:$,onBlur:e=>{en(),null==E||E(e)},onFocus:e=>{en(),null==C||C(e)},style:Object.assign(Object.assign({},null==B?void 0:B.style),I),styles:Object.assign(Object.assign({},null==B?void 0:B.styles),R),suffix:er,allowClear:eo,className:i()(N,T,U,q,G,null==B?void 0:B.className),onChange:e=>{en(),null==A||A(e)},addonAfter:P&&r.createElement(p.BR,null,r.createElement(f.Ux,{override:!0,status:!0},P)),addonBefore:M&&r.createElement(p.BR,null,r.createElement(f.Ux,{override:!0,status:!0},M)),classNames:Object.assign(Object.assign(Object.assign({},_),null==B?void 0:B.classNames),{input:i()({["".concat(F,"-sm")]:"small"===X,["".concat(F,"-lg")]:"large"===X,["".concat(F,"-rtl")]:"rtl"===z},null==_?void 0:_.input,null===(n=null==B?void 0:B.classNames)||void 0===n?void 
0:n.input,K),variant:i()({["".concat(F,"-").concat(ei)]:ea},(0,c.Z)(F,ee)),affixWrapper:i()({["".concat(F,"-affix-wrapper-sm")]:"small"===X,["".concat(F,"-affix-wrapper-lg")]:"large"===X,["".concat(F,"-affix-wrapper-rtl")]:"rtl"===z},K),wrapper:i()({["".concat(F,"-group-rtl")]:"rtl"===z},K),groupWrapper:i()({["".concat(F,"-group-wrapper-sm")]:"small"===X,["".concat(F,"-group-wrapper-lg")]:"large"===X,["".concat(F,"-group-wrapper-rtl")]:"rtl"===z,["".concat(F,"-group-wrapper-").concat(ei)]:ea},(0,c.Z)("".concat(F,"-group-wrapper"),ee,Q),K)})})))})},90464:function(e,t,n){"use strict";n.d(t,{Z:function(){return L}});var r,o=n(2265),i=n(39725),a=n(36760),l=n.n(a),c=n(1119),s=n(11993),u=n(31686),d=n(83145),f=n(26365),p=n(6989),h=n(2027),m=n(96032),g=n(55041),v=n(50506),y=n(41154),b=n(31474),x=n(27380),w=n(53346),S=["letter-spacing","line-height","padding-top","padding-bottom","font-family","font-weight","font-size","font-variant","text-rendering","text-transform","width","text-indent","padding-left","padding-right","border-width","box-sizing","word-break","white-space"],k={},E=["prefixCls","onPressEnter","defaultValue","value","autoSize","onResize","className","style","disabled","onChange","onInternalAutoSize"],C=o.forwardRef(function(e,t){var n=e.prefixCls,i=(e.onPressEnter,e.defaultValue),a=e.value,d=e.autoSize,h=e.onResize,m=e.className,g=e.style,C=e.disabled,O=e.onChange,j=(e.onInternalAutoSize,(0,p.Z)(e,E)),P=(0,v.Z)(i,{value:a,postState:function(e){return null!=e?e:""}}),M=(0,f.Z)(P,2),N=M[0],I=M[1],R=o.useRef();o.useImperativeHandle(t,function(){return{textArea:R.current}});var T=o.useMemo(function(){return d&&"object"===(0,y.Z)(d)?[d.minRows,d.maxRows]:[]},[d]),A=(0,f.Z)(T,2),_=A[0],D=A[1],Z=!!d,L=function(){try{if(document.activeElement===R.current){var 
e=R.current,t=e.selectionStart,n=e.selectionEnd,r=e.scrollTop;R.current.setSelectionRange(t,n),R.current.scrollTop=r}}catch(e){}},z=o.useState(2),B=(0,f.Z)(z,2),F=B[0],H=B[1],q=o.useState(),W=(0,f.Z)(q,2),K=W[0],U=W[1],V=function(){H(0)};(0,x.Z)(function(){Z&&V()},[a,_,D,Z]),(0,x.Z)(function(){if(0===F)H(1);else if(1===F){var e=function(e){var t,n=arguments.length>1&&void 0!==arguments[1]&&arguments[1],o=arguments.length>2&&void 0!==arguments[2]?arguments[2]:null,i=arguments.length>3&&void 0!==arguments[3]?arguments[3]:null;r||((r=document.createElement("textarea")).setAttribute("tab-index","-1"),r.setAttribute("aria-hidden","true"),document.body.appendChild(r)),e.getAttribute("wrap")?r.setAttribute("wrap",e.getAttribute("wrap")):r.removeAttribute("wrap");var a=function(e){var t=arguments.length>1&&void 0!==arguments[1]&&arguments[1],n=e.getAttribute("id")||e.getAttribute("data-reactid")||e.getAttribute("name");if(t&&k[n])return k[n];var r=window.getComputedStyle(e),o=r.getPropertyValue("box-sizing")||r.getPropertyValue("-moz-box-sizing")||r.getPropertyValue("-webkit-box-sizing"),i=parseFloat(r.getPropertyValue("padding-bottom"))+parseFloat(r.getPropertyValue("padding-top")),a=parseFloat(r.getPropertyValue("border-bottom-width"))+parseFloat(r.getPropertyValue("border-top-width")),l={sizingStyle:S.map(function(e){return"".concat(e,":").concat(r.getPropertyValue(e))}).join(";"),paddingSize:i,borderSize:a,boxSizing:o};return t&&n&&(k[n]=l),l}(e,n),l=a.paddingSize,c=a.borderSize,s=a.boxSizing,u=a.sizingStyle;r.setAttribute("style","".concat(u,";").concat("\n min-height:0 !important;\n max-height:none !important;\n height:0 !important;\n visibility:hidden !important;\n overflow:hidden !important;\n position:absolute !important;\n z-index:-1000 !important;\n top:0 !important;\n right:0 !important;\n pointer-events: none !important;\n")),r.value=e.value||e.placeholder||"";var d=void 0,f=void 
0,p=r.scrollHeight;if("border-box"===s?p+=c:"content-box"===s&&(p-=l),null!==o||null!==i){r.value=" ";var h=r.scrollHeight-l;null!==o&&(d=h*o,"border-box"===s&&(d=d+l+c),p=Math.max(d,p)),null!==i&&(f=h*i,"border-box"===s&&(f=f+l+c),t=p>f?"":"hidden",p=Math.min(f,p))}var m={height:p,overflowY:t,resize:"none"};return d&&(m.minHeight=d),f&&(m.maxHeight=f),m}(R.current,!1,_,D);H(2),U(e)}else L()},[F]);var G=o.useRef(),X=function(){w.Z.cancel(G.current)};o.useEffect(function(){return X},[]);var $=(0,u.Z)((0,u.Z)({},g),Z?K:null);return(0===F||1===F)&&($.overflowY="hidden",$.overflowX="hidden"),o.createElement(b.Z,{onResize:function(e){2===F&&(null==h||h(e),d&&(X(),G.current=(0,w.Z)(function(){V()})))},disabled:!(d||h)},o.createElement("textarea",(0,c.Z)({},j,{ref:R,style:$,className:l()(n,m,(0,s.Z)({},"".concat(n,"-disabled"),C)),disabled:C,value:N,onChange:function(e){I(e.target.value),null==O||O(e)}})))}),O=["defaultValue","value","onFocus","onBlur","onChange","allowClear","maxLength","onCompositionStart","onCompositionEnd","suffix","prefixCls","showCount","count","className","style","disabled","hidden","classNames","styles","onResize"],j=o.forwardRef(function(e,t){var n,r,i,a=e.defaultValue,y=e.value,b=e.onFocus,x=e.onBlur,w=e.onChange,S=e.allowClear,k=e.maxLength,E=e.onCompositionStart,j=e.onCompositionEnd,P=e.suffix,M=e.prefixCls,N=void 0===M?"rc-textarea":M,I=e.showCount,R=e.count,T=e.className,A=e.style,_=e.disabled,D=e.hidden,Z=e.classNames,L=e.styles,z=e.onResize,B=(0,p.Z)(e,O),F=(0,v.Z)(a,{value:y,defaultValue:a}),H=(0,f.Z)(F,2),q=H[0],W=H[1],K=null==q?"":String(q),U=o.useState(!1),V=(0,f.Z)(U,2),G=V[0],X=V[1],$=o.useRef(!1),Y=o.useState(null),Q=(0,f.Z)(Y,2),J=Q[0],ee=Q[1],et=(0,o.useRef)(null),en=function(){var e;return null===(e=et.current)||void 0===e?void 
0:e.textArea},er=function(){en().focus()};(0,o.useImperativeHandle)(t,function(){return{resizableTextArea:et.current,focus:er,blur:function(){en().blur()}}}),(0,o.useEffect)(function(){X(function(e){return!_&&e})},[_]);var eo=o.useState(null),ei=(0,f.Z)(eo,2),ea=ei[0],el=ei[1];o.useEffect(function(){if(ea){var e;(e=en()).setSelectionRange.apply(e,(0,d.Z)(ea))}},[ea]);var ec=(0,m.Z)(R,I),es=null!==(n=ec.max)&&void 0!==n?n:k,eu=Number(es)>0,ed=ec.strategy(K),ef=!!es&&ed>es,ep=function(e,t){var n=t;!$.current&&ec.exceedFormatter&&ec.max&&ec.strategy(t)>ec.max&&(n=ec.exceedFormatter(t,{max:ec.max}),t!==n&&el([en().selectionStart||0,en().selectionEnd||0])),W(n),(0,g.rJ)(e.currentTarget,e,w,n)},eh=P;ec.show&&(i=ec.showFormatter?ec.showFormatter({value:K,count:ed,maxLength:es}):"".concat(ed).concat(eu?" / ".concat(es):""),eh=o.createElement(o.Fragment,null,eh,o.createElement("span",{className:l()("".concat(N,"-data-count"),null==Z?void 0:Z.count),style:null==L?void 0:L.count},i)));var em=!B.autoSize&&!I&&!S;return o.createElement(h.Q,{value:K,allowClear:S,handleReset:function(e){W(""),er(),(0,g.rJ)(en(),e,w)},suffix:eh,prefixCls:N,classNames:(0,u.Z)((0,u.Z)({},Z),{},{affixWrapper:l()(null==Z?void 0:Z.affixWrapper,(r={},(0,s.Z)(r,"".concat(N,"-show-count"),I),(0,s.Z)(r,"".concat(N,"-textarea-allow-clear"),S),r))}),disabled:_,focused:G,className:l()(T,ef&&"".concat(N,"-out-of-range")),style:(0,u.Z)((0,u.Z)({},A),J&&!em?{height:"auto"}:{}),dataAttrs:{affixWrapper:{"data-count":"string"==typeof i?i:void 0}},hidden:D},o.createElement(C,(0,c.Z)({},B,{maxLength:k,onKeyDown:function(e){var t=B.onPressEnter,n=B.onKeyDown;"Enter"===e.key&&t&&t(e),null==n||n(e)},onChange:function(e){ep(e,e.target.value)},onFocus:function(e){X(!0),null==b||b(e)},onBlur:function(e){X(!1),null==x||x(e)},onCompositionStart:function(e){$.current=!0,null==E||E(e)},onCompositionEnd:function(e){$.current=!1,ep(e,e.currentTarget.value),null==j||j(e)},className:l()(null==Z?void 
0:Z.textarea),style:(0,u.Z)((0,u.Z)({},null==L?void 0:L.textarea),{},{resize:null==A?void 0:A.resize}),disabled:_,prefixCls:N,onResize:function(e){var t;null==z||z(e),null!==(t=en())&&void 0!==t&&t.style.height&&ee(!0)},ref:et})))}),P=n(12757),M=n(71744),N=n(86586),I=n(33759),R=n(39109),T=n(65863),A=n(31282),_=n(64024),D=n(56250),Z=function(e,t){var n={};for(var r in e)Object.prototype.hasOwnProperty.call(e,r)&&0>t.indexOf(r)&&(n[r]=e[r]);if(null!=e&&"function"==typeof Object.getOwnPropertySymbols)for(var o=0,r=Object.getOwnPropertySymbols(e);ot.indexOf(r[o])&&Object.prototype.propertyIsEnumerable.call(e,r[o])&&(n[r[o]]=e[r[o]]);return n},L=(0,o.forwardRef)((e,t)=>{var n;let r;let{prefixCls:a,bordered:c=!0,size:s,disabled:u,status:d,allowClear:f,classNames:p,rootClassName:h,className:m,variant:g}=e,v=Z(e,["prefixCls","bordered","size","disabled","status","allowClear","classNames","rootClassName","className","variant"]),{getPrefixCls:y,direction:b}=o.useContext(M.E_),x=(0,I.Z)(s),w=o.useContext(N.Z),{status:S,hasFeedback:k,feedbackIcon:E}=o.useContext(R.aM),C=(0,P.F)(S,d),O=o.useRef(null);o.useImperativeHandle(t,()=>{var e;return{resizableTextArea:null===(e=O.current)||void 0===e?void 0:e.resizableTextArea,focus:e=>{var t,n;(0,T.n)(null===(n=null===(t=O.current)||void 0===t?void 0:t.resizableTextArea)||void 0===n?void 0:n.textArea,e)},blur:()=>{var e;return null===(e=O.current)||void 0===e?void 0:e.blur()}}});let L=y("input",a);"object"==typeof f&&(null==f?void 0:f.clearIcon)?r=f:f&&(r={clearIcon:o.createElement(i.Z,null)});let z=(0,_.Z)(L),[B,F,H]=(0,A.ZP)(L,z),[q,W]=(0,D.Z)(g,c);return B(o.createElement(j,Object.assign({},v,{disabled:null!=u?u:w,allowClear:r,className:l()(H,z,m,h),classNames:Object.assign(Object.assign({},p),{textarea:l()({["".concat(L,"-sm")]:"small"===x,["".concat(L,"-lg")]:"large"===x},F,null==p?void 
0:p.textarea),variant:l()({["".concat(L,"-").concat(q)]:W},(0,P.Z)(L,C)),affixWrapper:l()("".concat(L,"-textarea-affix-wrapper"),{["".concat(L,"-affix-wrapper-rtl")]:"rtl"===b,["".concat(L,"-affix-wrapper-sm")]:"small"===x,["".concat(L,"-affix-wrapper-lg")]:"large"===x,["".concat(L,"-textarea-show-count")]:e.showCount||(null===(n=e.count)||void 0===n?void 0:n.show)},F)}),prefixCls:L,suffix:k&&o.createElement("span",{className:"".concat(L,"-textarea-suffix")},E),ref:O})))})},39164:function(e,t,n){"use strict";n.d(t,{Z:function(){return o}});var r=n(2265);function o(e,t){let n=(0,r.useRef)([]),o=()=>{n.current.push(setTimeout(()=>{var t,n,r,o;(null===(t=e.current)||void 0===t?void 0:t.input)&&(null===(n=e.current)||void 0===n?void 0:n.input.getAttribute("type"))==="password"&&(null===(r=e.current)||void 0===r?void 0:r.input.hasAttribute("value"))&&(null===(o=e.current)||void 0===o||o.input.removeAttribute("value"))}))};return(0,r.useEffect)(()=>(t&&o(),()=>n.current.forEach(e=>{e&&clearTimeout(e)})),[]),o}},56632:function(e,t,n){"use strict";n.d(t,{Z:function(){return I}});var r=n(2265),o=n(36760),i=n.n(o),a=n(71744),l=n(39109),c=n(31282),s=n(65863),u=n(1119),d={icon:{tag:"svg",attrs:{viewBox:"64 64 896 896",focusable:"false"},children:[{tag:"path",attrs:{d:"M942.2 486.2Q889.47 375.11 816.7 305l-50.88 50.88C807.31 395.53 843.45 447.4 874.7 512 791.5 684.2 673.4 766 512 766q-72.67 0-133.87-22.38L323 798.75Q408 838 512 838q288.3 0 430.2-300.3a60.29 60.29 0 000-51.5zm-63.57-320.64L836 122.88a8 8 0 00-11.32 0L715.31 232.2Q624.86 186 512 186q-288.3 0-430.2 300.3a60.3 60.3 0 000 51.5q56.69 119.4 136.5 191.41L112.48 835a8 8 0 000 11.31L155.17 889a8 8 0 0011.31 0l712.15-712.12a8 8 0 000-11.32zM149.3 512C232.6 339.8 350.7 258 512 258c54.54 0 104.13 9.36 149.12 28.39l-70.3 70.3a176 176 0 00-238.13 238.13l-83.42 83.42C223.1 637.49 183.3 582.28 149.3 512zm246.7 0a112.11 112.11 0 01146.2-106.69L401.31 546.2A112 112 0 01396 512z"}},{tag:"path",attrs:{d:"M508 624c-3.46 
0-6.87-.16-10.25-.47l-52.82 52.82a176.09 176.09 0 00227.42-227.42l-52.82 52.82c.31 3.38.47 6.79.47 10.25a111.94 111.94 0 01-112 112z"}}]},name:"eye-invisible",theme:"outlined"},f=n(55015),p=r.forwardRef(function(e,t){return r.createElement(f.Z,(0,u.Z)({},e,{ref:t,icon:d}))}),h=n(6520),m=n(18694),g=n(28791),v=n(39164),y=function(e,t){var n={};for(var r in e)Object.prototype.hasOwnProperty.call(e,r)&&0>t.indexOf(r)&&(n[r]=e[r]);if(null!=e&&"function"==typeof Object.getOwnPropertySymbols)for(var o=0,r=Object.getOwnPropertySymbols(e);ot.indexOf(r[o])&&Object.prototype.propertyIsEnumerable.call(e,r[o])&&(n[r[o]]=e[r[o]]);return n};let b=e=>e?r.createElement(h.Z,null):r.createElement(p,null),x={click:"onClick",hover:"onMouseOver"},w=r.forwardRef((e,t)=>{let{visibilityToggle:n=!0}=e,o="object"==typeof n&&void 0!==n.visible,[l,c]=(0,r.useState)(()=>!!o&&n.visible),u=(0,r.useRef)(null);r.useEffect(()=>{o&&c(n.visible)},[o,n]);let d=(0,v.Z)(u),f=()=>{let{disabled:t}=e;t||(l&&d(),c(e=>{var t;let r=!e;return"object"==typeof n&&(null===(t=n.onVisibleChange)||void 0===t||t.call(n,r)),r}))},{className:p,prefixCls:h,inputPrefixCls:w,size:S}=e,k=y(e,["className","prefixCls","inputPrefixCls","size"]),{getPrefixCls:E}=r.useContext(a.E_),C=E("input",w),O=E("input-password",h),j=n&&(t=>{let{action:n="click",iconRender:o=b}=e,i=x[n]||"",a=o(l);return r.cloneElement(r.isValidElement(a)?a:r.createElement("span",null,a),{[i]:f,className:"".concat(t,"-icon"),key:"passwordIcon",onMouseDown:e=>{e.preventDefault()},onMouseUp:e=>{e.preventDefault()}})})(O),P=i()(O,p,{["".concat(O,"-").concat(S)]:!!S}),M=Object.assign(Object.assign({},(0,m.Z)(k,["suffix","iconRender","visibilityToggle"])),{type:l?"text":"password",className:P,prefixCls:C,suffix:j});return S&&(M.size=S),r.createElement(s.Z,Object.assign({ref:(0,g.sQ)(t,u)},M))});var S=n(29436),k=n(19722),E=n(73002),C=n(33759),O=n(65658),j=function(e,t){var n={};for(var r in 
e)Object.prototype.hasOwnProperty.call(e,r)&&0>t.indexOf(r)&&(n[r]=e[r]);if(null!=e&&"function"==typeof Object.getOwnPropertySymbols)for(var o=0,r=Object.getOwnPropertySymbols(e);ot.indexOf(r[o])&&Object.prototype.propertyIsEnumerable.call(e,r[o])&&(n[r[o]]=e[r[o]]);return n};let P=r.forwardRef((e,t)=>{let n;let{prefixCls:o,inputPrefixCls:l,className:c,size:u,suffix:d,enterButton:f=!1,addonAfter:p,loading:h,disabled:m,onSearch:v,onChange:y,onCompositionStart:b,onCompositionEnd:x}=e,w=j(e,["prefixCls","inputPrefixCls","className","size","suffix","enterButton","addonAfter","loading","disabled","onSearch","onChange","onCompositionStart","onCompositionEnd"]),{getPrefixCls:P,direction:M}=r.useContext(a.E_),N=r.useRef(!1),I=P("input-search",o),R=P("input",l),{compactSize:T}=(0,O.ri)(I,M),A=(0,C.Z)(e=>{var t;return null!==(t=null!=u?u:T)&&void 0!==t?t:e}),_=r.useRef(null),D=e=>{var t;document.activeElement===(null===(t=_.current)||void 0===t?void 0:t.input)&&e.preventDefault()},Z=e=>{var t,n;v&&v(null===(n=null===(t=_.current)||void 0===t?void 0:t.input)||void 0===n?void 0:n.value,e,{source:"input"})},L="boolean"==typeof f?r.createElement(S.Z,null):null,z="".concat(I,"-button"),B=f||{},F=B.type&&!0===B.type.__ANT_BUTTON;n=F||"button"===B.type?(0,k.Tm)(B,Object.assign({onMouseDown:D,onClick:e=>{var t,n;null===(n=null===(t=null==B?void 0:B.props)||void 0===t?void 0:t.onClick)||void 0===n||n.call(t,e),Z(e)},key:"enterButton"},F?{className:z,size:A}:{})):r.createElement(E.ZP,{className:z,type:f?"primary":void 0,size:A,disabled:m,key:"enterButton",onMouseDown:D,onClick:Z,loading:h,icon:L},f),p&&(n=[n,(0,k.Tm)(p,{key:"addonAfter"})]);let H=i()(I,{["".concat(I,"-rtl")]:"rtl"===M,["".concat(I,"-").concat(A)]:!!A,["".concat(I,"-with-button")]:!!f},c);return 
r.createElement(s.Z,Object.assign({ref:(0,g.sQ)(_,t),onPressEnter:e=>{N.current||h||Z(e)}},w,{size:A,onCompositionStart:e=>{N.current=!0,null==b||b(e)},onCompositionEnd:e=>{N.current=!1,null==x||x(e)},prefixCls:R,addonAfter:n,suffix:d,onChange:e=>{e&&e.target&&"click"===e.type&&v&&v(e.target.value,e,{source:"clear"}),y&&y(e)},className:H,disabled:m}))});var M=n(90464);let N=s.Z;N.Group=e=>{let{getPrefixCls:t,direction:n}=(0,r.useContext)(a.E_),{prefixCls:o,className:s}=e,u=t("input-group",o),d=t("input"),[f,p]=(0,c.ZP)(d),h=i()(u,{["".concat(u,"-lg")]:"large"===e.size,["".concat(u,"-sm")]:"small"===e.size,["".concat(u,"-compact")]:e.compact,["".concat(u,"-rtl")]:"rtl"===n},p,s),m=(0,r.useContext)(l.aM),g=(0,r.useMemo)(()=>Object.assign(Object.assign({},m),{isFormItemInput:!1}),[m]);return f(r.createElement("span",{className:h,style:e.style,onMouseEnter:e.onMouseEnter,onMouseLeave:e.onMouseLeave,onFocus:e.onFocus,onBlur:e.onBlur},r.createElement(l.aM.Provider,{value:g},e.children)))},N.Search=P,N.TextArea=M.Z,N.Password=w;var I=N},31282:function(e,t,n){"use strict";n.d(t,{ik:function(){return p},nz:function(){return u},s7:function(){return h},x0:function(){return f}});var r=n(352),o=n(12918),i=n(17691),a=n(80669),l=n(3104),c=n(37433),s=n(65265);let u=e=>({"&::-moz-placeholder":{opacity:1},"&::placeholder":{color:e,userSelect:"none"},"&:placeholder-shown":{textOverflow:"ellipsis"}}),d=e=>{let{paddingBlockLG:t,lineHeightLG:n,borderRadiusLG:o,paddingInlineLG:i}=e;return{padding:"".concat((0,r.bf)(t)," ").concat((0,r.bf)(i)),fontSize:e.inputFontSizeLG,lineHeight:n,borderRadius:o}},f=e=>({padding:"".concat((0,r.bf)(e.paddingBlockSM)," ").concat((0,r.bf)(e.paddingInlineSM)),fontSize:e.inputFontSizeSM,borderRadius:e.borderRadiusSM}),p=e=>Object.assign(Object.assign({position:"relative",display:"inline-block",width:"100%",minWidth:0,padding:"".concat((0,r.bf)(e.paddingBlock)," 
").concat((0,r.bf)(e.paddingInline)),color:e.colorText,fontSize:e.inputFontSize,lineHeight:e.lineHeight,borderRadius:e.borderRadius,transition:"all ".concat(e.motionDurationMid)},u(e.colorTextPlaceholder)),{"textarea&":{maxWidth:"100%",height:"auto",minHeight:e.controlHeight,lineHeight:e.lineHeight,verticalAlign:"bottom",transition:"all ".concat(e.motionDurationSlow,", height 0s"),resize:"vertical"},"&-lg":Object.assign({},d(e)),"&-sm":Object.assign({},f(e)),"&-rtl":{direction:"rtl"},"&-textarea-rtl":{direction:"rtl"}}),h=e=>{let{componentCls:t,antCls:n}=e;return{position:"relative",display:"table",width:"100%",borderCollapse:"separate",borderSpacing:0,"&[class*='col-']":{paddingInlineEnd:e.paddingXS,"&:last-child":{paddingInlineEnd:0}},["&-lg ".concat(t,", &-lg > ").concat(t,"-group-addon")]:Object.assign({},d(e)),["&-sm ".concat(t,", &-sm > ").concat(t,"-group-addon")]:Object.assign({},f(e)),["&-lg ".concat(n,"-select-single ").concat(n,"-select-selector")]:{height:e.controlHeightLG},["&-sm ".concat(n,"-select-single ").concat(n,"-select-selector")]:{height:e.controlHeightSM},["> ".concat(t)]:{display:"table-cell","&:not(:first-child):not(:last-child)":{borderRadius:0}},["".concat(t,"-group")]:{"&-addon, &-wrap":{display:"table-cell",width:1,whiteSpace:"nowrap",verticalAlign:"middle","&:not(:first-child):not(:last-child)":{borderRadius:0}},"&-wrap > *":{display:"block !important"},"&-addon":{position:"relative",padding:"0 ".concat((0,r.bf)(e.paddingInline)),color:e.colorText,fontWeight:"normal",fontSize:e.inputFontSize,textAlign:"center",borderRadius:e.borderRadius,transition:"all ".concat(e.motionDurationSlow),lineHeight:1,["".concat(n,"-select")]:{margin:"".concat((0,r.bf)(e.calc(e.paddingBlock).add(1).mul(-1).equal())," 
").concat((0,r.bf)(e.calc(e.paddingInline).mul(-1).equal())),["&".concat(n,"-select-single:not(").concat(n,"-select-customize-input):not(").concat(n,"-pagination-size-changer)")]:{["".concat(n,"-select-selector")]:{backgroundColor:"inherit",border:"".concat((0,r.bf)(e.lineWidth)," ").concat(e.lineType," transparent"),boxShadow:"none"}},"&-open, &-focused":{["".concat(n,"-select-selector")]:{color:e.colorPrimary}}},["".concat(n,"-cascader-picker")]:{margin:"-9px ".concat((0,r.bf)(e.calc(e.paddingInline).mul(-1).equal())),backgroundColor:"transparent",["".concat(n,"-cascader-input")]:{textAlign:"start",border:0,boxShadow:"none"}}}},["".concat(t)]:{width:"100%",marginBottom:0,textAlign:"inherit","&:focus":{zIndex:1,borderInlineEndWidth:1},"&:hover":{zIndex:1,borderInlineEndWidth:1,["".concat(t,"-search-with-button &")]:{zIndex:0}}},["> ".concat(t,":first-child, ").concat(t,"-group-addon:first-child")]:{borderStartEndRadius:0,borderEndEndRadius:0,["".concat(n,"-select ").concat(n,"-select-selector")]:{borderStartEndRadius:0,borderEndEndRadius:0}},["> ".concat(t,"-affix-wrapper")]:{["&:not(:first-child) ".concat(t)]:{borderStartStartRadius:0,borderEndStartRadius:0},["&:not(:last-child) ".concat(t)]:{borderStartEndRadius:0,borderEndEndRadius:0}},["> ".concat(t,":last-child, ").concat(t,"-group-addon:last-child")]:{borderStartStartRadius:0,borderEndStartRadius:0,["".concat(n,"-select ").concat(n,"-select-selector")]:{borderStartStartRadius:0,borderEndStartRadius:0}},["".concat(t,"-affix-wrapper")]:{"&:not(:last-child)":{borderStartEndRadius:0,borderEndEndRadius:0,["".concat(t,"-search &")]:{borderStartStartRadius:e.borderRadius,borderEndStartRadius:e.borderRadius}},["&:not(:first-child), ".concat(t,"-search &:not(:first-child)")]:{borderStartStartRadius:0,borderEndStartRadius:0}},["&".concat(t,"-group-compact")]:Object.assign(Object.assign({display:"block"},(0,o.dF)()),{["".concat(t,"-group-addon, ").concat(t,"-group-wrap, > 
").concat(t)]:{"&:not(:first-child):not(:last-child)":{borderInlineEndWidth:e.lineWidth,"&:hover":{zIndex:1},"&:focus":{zIndex:1}}},"& > *":{display:"inline-block",float:"none",verticalAlign:"top",borderRadius:0},["\n & > ".concat(t,"-affix-wrapper,\n & > ").concat(t,"-number-affix-wrapper,\n & > ").concat(n,"-picker-range\n ")]:{display:"inline-flex"},"& > *:not(:last-child)":{marginInlineEnd:e.calc(e.lineWidth).mul(-1).equal(),borderInlineEndWidth:e.lineWidth},["".concat(t)]:{float:"none"},["& > ".concat(n,"-select > ").concat(n,"-select-selector,\n & > ").concat(n,"-select-auto-complete ").concat(t,",\n & > ").concat(n,"-cascader-picker ").concat(t,",\n & > ").concat(t,"-group-wrapper ").concat(t)]:{borderInlineEndWidth:e.lineWidth,borderRadius:0,"&:hover":{zIndex:1},"&:focus":{zIndex:1}},["& > ".concat(n,"-select-focused")]:{zIndex:1},["& > ".concat(n,"-select > ").concat(n,"-select-arrow")]:{zIndex:1},["& > *:first-child,\n & > ".concat(n,"-select:first-child > ").concat(n,"-select-selector,\n & > ").concat(n,"-select-auto-complete:first-child ").concat(t,",\n & > ").concat(n,"-cascader-picker:first-child ").concat(t)]:{borderStartStartRadius:e.borderRadius,borderEndStartRadius:e.borderRadius},["& > *:last-child,\n & > ".concat(n,"-select:last-child > ").concat(n,"-select-selector,\n & > ").concat(n,"-cascader-picker:last-child ").concat(t,",\n & > ").concat(n,"-cascader-picker-focused:last-child ").concat(t)]:{borderInlineEndWidth:e.lineWidth,borderStartEndRadius:e.borderRadius,borderEndEndRadius:e.borderRadius},["& > ".concat(n,"-select-auto-complete ").concat(t)]:{verticalAlign:"top"},["".concat(t,"-group-wrapper + ").concat(t,"-group-wrapper")]:{marginInlineStart:e.calc(e.lineWidth).mul(-1).equal(),["".concat(t,"-affix-wrapper")]:{borderRadius:0}},["".concat(t,"-group-wrapper:not(:last-child)")]:{["&".concat(t,"-search > ").concat(t,"-group")]:{["& > ".concat(t,"-group-addon > ").concat(t,"-search-button")]:{borderRadius:0},["& > 
".concat(t)]:{borderStartStartRadius:e.borderRadius,borderStartEndRadius:0,borderEndEndRadius:0,borderEndStartRadius:e.borderRadius}}}})}},m=e=>{let{componentCls:t,controlHeightSM:n,lineWidth:r,calc:i}=e,a=i(n).sub(i(r).mul(2)).sub(16).div(2).equal();return{[t]:Object.assign(Object.assign(Object.assign(Object.assign(Object.assign(Object.assign({},(0,o.Wf)(e)),p(e)),(0,s.qG)(e)),(0,s.H8)(e)),(0,s.Mu)(e)),{'&[type="color"]':{height:e.controlHeight,["&".concat(t,"-lg")]:{height:e.controlHeightLG},["&".concat(t,"-sm")]:{height:n,paddingTop:a,paddingBottom:a}},'&[type="search"]::-webkit-search-cancel-button, &[type="search"]::-webkit-search-decoration':{"-webkit-appearance":"none"}})}},g=e=>{let{componentCls:t}=e;return{["".concat(t,"-clear-icon")]:{margin:0,color:e.colorTextQuaternary,fontSize:e.fontSizeIcon,verticalAlign:-1,cursor:"pointer",transition:"color ".concat(e.motionDurationSlow),"&:hover":{color:e.colorTextTertiary},"&:active":{color:e.colorText},"&-hidden":{visibility:"hidden"},"&-has-suffix":{margin:"0 ".concat((0,r.bf)(e.inputAffixPadding))}}}},v=e=>{let{componentCls:t,inputAffixPadding:n,colorTextDescription:r,motionDurationSlow:o,colorIcon:i,colorIconHover:a,iconCls:l}=e;return{["".concat(t,"-affix-wrapper")]:Object.assign(Object.assign(Object.assign(Object.assign({},p(e)),{display:"inline-flex",["&:not(".concat(t,"-disabled):hover")]:{zIndex:1,["".concat(t,"-search-with-button &")]:{zIndex:0}},"&-focused, &:focus":{zIndex:1},["> input".concat(t)]:{padding:0,fontSize:"inherit",border:"none",borderRadius:0,outline:"none",background:"transparent",color:"inherit","&::-ms-reveal":{display:"none"},"&:focus":{boxShadow:"none !important"}},"&::before":{display:"inline-block",width:0,visibility:"hidden",content:'"\\a0"'},["".concat(t)]:{"&-prefix, &-suffix":{display:"flex",flex:"none",alignItems:"center","> 
*:not(:last-child)":{marginInlineEnd:e.paddingXS}},"&-show-count-suffix":{color:r},"&-show-count-has-suffix":{marginInlineEnd:e.paddingXXS},"&-prefix":{marginInlineEnd:n},"&-suffix":{marginInlineStart:n}}}),g(e)),{["".concat(l).concat(t,"-password-icon")]:{color:i,cursor:"pointer",transition:"all ".concat(o),"&:hover":{color:a}}})}},y=e=>{let{componentCls:t,borderRadiusLG:n,borderRadiusSM:r}=e;return{["".concat(t,"-group")]:Object.assign(Object.assign(Object.assign({},(0,o.Wf)(e)),h(e)),{"&-rtl":{direction:"rtl"},"&-wrapper":Object.assign(Object.assign(Object.assign({display:"inline-block",width:"100%",textAlign:"start",verticalAlign:"top","&-rtl":{direction:"rtl"},"&-lg":{["".concat(t,"-group-addon")]:{borderRadius:n,fontSize:e.inputFontSizeLG}},"&-sm":{["".concat(t,"-group-addon")]:{borderRadius:r}}},(0,s.ir)(e)),(0,s.S5)(e)),{["&:not(".concat(t,"-compact-first-item):not(").concat(t,"-compact-last-item)").concat(t,"-compact-item")]:{["".concat(t,", ").concat(t,"-group-addon")]:{borderRadius:0}},["&:not(".concat(t,"-compact-last-item)").concat(t,"-compact-first-item")]:{["".concat(t,", ").concat(t,"-group-addon")]:{borderStartEndRadius:0,borderEndEndRadius:0}},["&:not(".concat(t,"-compact-first-item)").concat(t,"-compact-last-item")]:{["".concat(t,", ").concat(t,"-group-addon")]:{borderStartStartRadius:0,borderEndStartRadius:0}}})})}},b=e=>{let{componentCls:t,antCls:n}=e,r="".concat(t,"-search");return{[r]:{["".concat(t)]:{"&:hover, &:focus":{borderColor:e.colorPrimaryHover,["+ ".concat(t,"-group-addon ").concat(r,"-button:not(").concat(n,"-btn-primary)")]:{borderInlineStartColor:e.colorPrimaryHover}}},["".concat(t,"-affix-wrapper")]:{borderRadius:0},["".concat(t,"-lg")]:{lineHeight:e.calc(e.lineHeightLG).sub(2e-4).equal({unit:!1})},["> ".concat(t,"-group")]:{["> 
".concat(t,"-group-addon:last-child")]:{insetInlineStart:-1,padding:0,border:0,["".concat(r,"-button")]:{paddingTop:0,paddingBottom:0,borderStartStartRadius:0,borderStartEndRadius:e.borderRadius,borderEndEndRadius:e.borderRadius,borderEndStartRadius:0,boxShadow:"none"},["".concat(r,"-button:not(").concat(n,"-btn-primary)")]:{color:e.colorTextDescription,"&:hover":{color:e.colorPrimaryHover},"&:active":{color:e.colorPrimaryActive},["&".concat(n,"-btn-loading::before")]:{insetInlineStart:0,insetInlineEnd:0,insetBlockStart:0,insetBlockEnd:0}}}},["".concat(r,"-button")]:{height:e.controlHeight,"&:hover, &:focus":{zIndex:1}},["&-large ".concat(r,"-button")]:{height:e.controlHeightLG},["&-small ".concat(r,"-button")]:{height:e.controlHeightSM},"&-rtl":{direction:"rtl"},["&".concat(t,"-compact-item")]:{["&:not(".concat(t,"-compact-last-item)")]:{["".concat(t,"-group-addon")]:{["".concat(t,"-search-button")]:{marginInlineEnd:e.calc(e.lineWidth).mul(-1).equal(),borderRadius:0}}},["&:not(".concat(t,"-compact-first-item)")]:{["".concat(t,",").concat(t,"-affix-wrapper")]:{borderRadius:0}},["> ".concat(t,"-group-addon ").concat(t,"-search-button,\n > ").concat(t,",\n ").concat(t,"-affix-wrapper")]:{"&:hover,&:focus,&:active":{zIndex:2}},["> ".concat(t,"-affix-wrapper-focused")]:{zIndex:2}}}}},x=e=>{let{componentCls:t,paddingLG:n}=e,r="".concat(t,"-textarea");return{[r]:{position:"relative","&-show-count":{["> ".concat(t)]:{height:"100%"},["".concat(t,"-data-count")]:{position:"absolute",bottom:e.calc(e.fontSize).mul(e.lineHeight).mul(-1).equal(),insetInlineEnd:0,color:e.colorTextDescription,whiteSpace:"nowrap",pointerEvents:"none"}},"&-allow-clear":{["> ".concat(t)]:{paddingInlineEnd:n}},["&-affix-wrapper".concat(r,"-has-feedback")]:{["".concat(t)]:{paddingInlineEnd:n}},["&-affix-wrapper".concat(t,"-affix-wrapper")]:{padding:0,["> textarea".concat(t)]:{fontSize:"inherit",border:"none",outline:"none",background:"transparent","&:focus":{boxShadow:"none 
!important"}},["".concat(t,"-suffix")]:{margin:0,"> *:not(:last-child)":{marginInline:0},["".concat(t,"-clear-icon")]:{position:"absolute",insetInlineEnd:e.paddingXS,insetBlockStart:e.paddingXS},["".concat(r,"-suffix")]:{position:"absolute",top:0,insetInlineEnd:e.paddingInline,bottom:0,zIndex:1,display:"inline-flex",alignItems:"center",margin:"auto",pointerEvents:"none"}}}}}},w=e=>{let{componentCls:t}=e;return{["".concat(t,"-out-of-range")]:{["&, & input, & textarea, ".concat(t,"-show-count-suffix, ").concat(t,"-data-count")]:{color:e.colorError}}}};t.ZP=(0,a.I$)("Input",e=>{let t=(0,l.TS)(e,(0,c.e)(e));return[m(t),x(t),v(t),y(t),b(t),w(t),(0,i.c)(t)]},c.T)},37433:function(e,t,n){"use strict";n.d(t,{T:function(){return i},e:function(){return o}});var r=n(3104);function o(e){return(0,r.TS)(e,{inputAffixPadding:e.paddingXXS})}let i=e=>{let{controlHeight:t,fontSize:n,lineHeight:r,lineWidth:o,controlHeightSM:i,controlHeightLG:a,fontSizeLG:l,lineHeightLG:c,paddingSM:s,controlPaddingHorizontalSM:u,controlPaddingHorizontal:d,colorFillAlter:f,colorPrimaryHover:p,colorPrimary:h,controlOutlineWidth:m,controlOutline:g,colorErrorOutline:v,colorWarningOutline:y,colorBgContainer:b}=e;return{paddingBlock:Math.max(Math.round((t-n*r)/2*10)/10-o,0),paddingBlockSM:Math.max(Math.round((i-n*r)/2*10)/10-o,0),paddingBlockLG:Math.ceil((a-l*c)/2*10)/10-o,paddingInline:s-o,paddingInlineSM:u-o,paddingInlineLG:d-o,addonBg:f,activeBorderColor:h,hoverBorderColor:p,activeShadow:"0 0 0 ".concat(m,"px ").concat(g),errorActiveShadow:"0 0 0 ".concat(m,"px ").concat(v),warningActiveShadow:"0 0 0 ".concat(m,"px ").concat(y),hoverBg:b,activeBg:b,inputFontSize:n,inputFontSizeLG:l,inputFontSizeSM:n}}},65265:function(e,t,n){"use strict";n.d(t,{$U:function(){return l},H8:function(){return m},Mu:function(){return f},S5:function(){return v},Xy:function(){return a},ir:function(){return d},qG:function(){return s}});var r=n(352),o=n(3104);let 
i=e=>({borderColor:e.hoverBorderColor,backgroundColor:e.hoverBg}),a=e=>({color:e.colorTextDisabled,backgroundColor:e.colorBgContainerDisabled,borderColor:e.colorBorder,boxShadow:"none",cursor:"not-allowed",opacity:1,"&:hover:not([disabled])":Object.assign({},i((0,o.TS)(e,{hoverBorderColor:e.colorBorder,hoverBg:e.colorBgContainerDisabled})))}),l=(e,t)=>({background:e.colorBgContainer,borderWidth:e.lineWidth,borderStyle:e.lineType,borderColor:t.borderColor,"&:hover":{borderColor:t.hoverBorderColor,backgroundColor:e.hoverBg},"&:focus, &:focus-within":{borderColor:t.activeBorderColor,boxShadow:t.activeShadow,outline:0,backgroundColor:e.activeBg}}),c=(e,t)=>({["&".concat(e.componentCls,"-status-").concat(t.status,":not(").concat(e.componentCls,"-disabled)")]:Object.assign(Object.assign({},l(e,t)),{["".concat(e.componentCls,"-prefix, ").concat(e.componentCls,"-suffix")]:{color:t.affixColor}})}),s=(e,t)=>({"&-outlined":Object.assign(Object.assign(Object.assign(Object.assign(Object.assign({},l(e,{borderColor:e.colorBorder,hoverBorderColor:e.colorPrimaryHover,activeBorderColor:e.colorPrimary,activeShadow:e.activeShadow})),{["&".concat(e.componentCls,"-disabled, &[disabled]")]:Object.assign({},a(e))}),c(e,{status:"error",borderColor:e.colorError,hoverBorderColor:e.colorErrorBorderHover,activeBorderColor:e.colorError,activeShadow:e.errorActiveShadow,affixColor:e.colorError})),c(e,{status:"warning",borderColor:e.colorWarning,hoverBorderColor:e.colorWarningBorderHover,activeBorderColor:e.colorWarning,activeShadow:e.warningActiveShadow,affixColor:e.colorWarning})),t)}),u=(e,t)=>({["&".concat(e.componentCls,"-group-wrapper-status-").concat(t.status)]:{["".concat(e.componentCls,"-group-addon")]:{borderColor:t.addonBorderColor,color:t.addonColor}}}),d=e=>({"&-outlined":Object.assign(Object.assign(Object.assign({["".concat(e.componentCls,"-group")]:{"&-addon":{background:e.addonBg,border:"".concat((0,r.bf)(e.lineWidth)," ").concat(e.lineType," 
").concat(e.colorBorder)},"&-addon:first-child":{borderInlineEnd:0},"&-addon:last-child":{borderInlineStart:0}}},u(e,{status:"error",addonBorderColor:e.colorError,addonColor:e.colorErrorText})),u(e,{status:"warning",addonBorderColor:e.colorWarning,addonColor:e.colorWarningText})),{["&".concat(e.componentCls,"-group-wrapper-disabled")]:{["".concat(e.componentCls,"-group-addon")]:Object.assign({},a(e))}})}),f=(e,t)=>({"&-borderless":Object.assign({background:"transparent",border:"none","&:focus, &:focus-within":{outline:"none"},["&".concat(e.componentCls,"-disabled, &[disabled]")]:{color:e.colorTextDisabled}},t)}),p=(e,t)=>({background:t.bg,borderWidth:e.lineWidth,borderStyle:e.lineType,borderColor:"transparent","input&, & input, textarea&, & textarea":{color:null==t?void 0:t.inputColor},"&:hover":{background:t.hoverBg},"&:focus, &:focus-within":{outline:0,borderColor:t.activeBorderColor,backgroundColor:e.activeBg}}),h=(e,t)=>({["&".concat(e.componentCls,"-status-").concat(t.status,":not(").concat(e.componentCls,"-disabled)")]:Object.assign(Object.assign({},p(e,t)),{["".concat(e.componentCls,"-prefix, ").concat(e.componentCls,"-suffix")]:{color:t.affixColor}})}),m=(e,t)=>({"&-filled":Object.assign(Object.assign(Object.assign(Object.assign(Object.assign({},p(e,{bg:e.colorFillTertiary,hoverBg:e.colorFillSecondary,activeBorderColor:e.colorPrimary})),{["&".concat(e.componentCls,"-disabled, 
&[disabled]")]:Object.assign({},a(e))}),h(e,{status:"error",bg:e.colorErrorBg,hoverBg:e.colorErrorBgHover,activeBorderColor:e.colorError,inputColor:e.colorErrorText,affixColor:e.colorError})),h(e,{status:"warning",bg:e.colorWarningBg,hoverBg:e.colorWarningBgHover,activeBorderColor:e.colorWarning,inputColor:e.colorWarningText,affixColor:e.colorWarning})),t)}),g=(e,t)=>({["&".concat(e.componentCls,"-group-wrapper-status-").concat(t.status)]:{["".concat(e.componentCls,"-group-addon")]:{background:t.addonBg,color:t.addonColor}}}),v=e=>({"&-filled":Object.assign(Object.assign(Object.assign({["".concat(e.componentCls,"-group")]:{"&-addon":{background:e.colorFillTertiary},["".concat(e.componentCls,"-filled:not(:focus):not(:focus-within)")]:{"&:not(:first-child)":{borderInlineStart:"".concat((0,r.bf)(e.lineWidth)," ").concat(e.lineType," ").concat(e.colorSplit)},"&:not(:last-child)":{borderInlineEnd:"".concat((0,r.bf)(e.lineWidth)," ").concat(e.lineType," ").concat(e.colorSplit)}}}},g(e,{status:"error",addonBg:e.colorErrorBg,addonColor:e.colorErrorText})),g(e,{status:"warning",addonBg:e.colorWarningBg,addonColor:e.colorWarningText})),{["&".concat(e.componentCls,"-group-wrapper-disabled")]:{["".concat(e.componentCls,"-group")]:{"&-addon":{background:e.colorFillTertiary,color:e.colorTextDisabled},"&-addon:first-child":{borderInlineStart:"".concat((0,r.bf)(e.lineWidth)," ").concat(e.lineType," ").concat(e.colorBorder),borderTop:"".concat((0,r.bf)(e.lineWidth)," ").concat(e.lineType," ").concat(e.colorBorder),borderBottom:"".concat((0,r.bf)(e.lineWidth)," ").concat(e.lineType," ").concat(e.colorBorder)},"&-addon:last-child":{borderInlineEnd:"".concat((0,r.bf)(e.lineWidth)," ").concat(e.lineType," ").concat(e.colorBorder),borderTop:"".concat((0,r.bf)(e.lineWidth)," ").concat(e.lineType," ").concat(e.colorBorder),borderBottom:"".concat((0,r.bf)(e.lineWidth)," ").concat(e.lineType," ").concat(e.colorBorder)}}}})})},92239:function(e,t,n){"use strict";let 
r;n.d(t,{D:function(){return b},Z:function(){return w}});var o=n(2265),i=n(1119),a={icon:{tag:"svg",attrs:{viewBox:"0 0 1024 1024",focusable:"false"},children:[{tag:"path",attrs:{d:"M912 192H328c-4.4 0-8 3.6-8 8v56c0 4.4 3.6 8 8 8h584c4.4 0 8-3.6 8-8v-56c0-4.4-3.6-8-8-8zm0 284H328c-4.4 0-8 3.6-8 8v56c0 4.4 3.6 8 8 8h584c4.4 0 8-3.6 8-8v-56c0-4.4-3.6-8-8-8zm0 284H328c-4.4 0-8 3.6-8 8v56c0 4.4 3.6 8 8 8h584c4.4 0 8-3.6 8-8v-56c0-4.4-3.6-8-8-8zM104 228a56 56 0 10112 0 56 56 0 10-112 0zm0 284a56 56 0 10112 0 56 56 0 10-112 0zm0 284a56 56 0 10112 0 56 56 0 10-112 0z"}}]},name:"bars",theme:"outlined"},l=n(55015),c=o.forwardRef(function(e,t){return o.createElement(l.Z,(0,i.Z)({},e,{ref:t,icon:a}))}),s=n(15327),u=n(77565),d=n(36760),f=n.n(d),p=n(18694),h=e=>!isNaN(parseFloat(e))&&isFinite(e),m=n(71744),g=n(80856),v=function(e,t){var n={};for(var r in e)Object.prototype.hasOwnProperty.call(e,r)&&0>t.indexOf(r)&&(n[r]=e[r]);if(null!=e&&"function"==typeof Object.getOwnPropertySymbols)for(var o=0,r=Object.getOwnPropertySymbols(e);ot.indexOf(r[o])&&Object.prototype.propertyIsEnumerable.call(e,r[o])&&(n[r[o]]=e[r[o]]);return n};let y={xs:"479.98px",sm:"575.98px",md:"767.98px",lg:"991.98px",xl:"1199.98px",xxl:"1599.98px"},b=o.createContext({}),x=(r=0,function(){let e=arguments.length>0&&void 0!==arguments[0]?arguments[0]:"";return r+=1,"".concat(e).concat(r)});var w=o.forwardRef((e,t)=>{let{prefixCls:n,className:r,trigger:i,children:a,defaultCollapsed:l=!1,theme:d="dark",style:w={},collapsible:S=!1,reverseArrow:k=!1,width:E=200,collapsedWidth:C=80,zeroWidthTriggerStyle:O,breakpoint:j,onCollapse:P,onBreakpoint:M}=e,N=v(e,["prefixCls","className","trigger","children","defaultCollapsed","theme","style","collapsible","reverseArrow","width","collapsedWidth","zeroWidthTriggerStyle","breakpoint","onCollapse","onBreakpoint"]),{siderHook:I}=(0,o.useContext)(g.V),[R,T]=(0,o.useState)("collapsed"in e?e.collapsed:l),[A,_]=(0,o.useState)(!1);(0,o.useEffect)(()=>{"collapsed"in 
e&&T(e.collapsed)},[e.collapsed]);let D=(t,n)=>{"collapsed"in e||T(t),null==P||P(t,n)},Z=(0,o.useRef)();Z.current=e=>{_(e.matches),null==M||M(e.matches),R!==e.matches&&D(e.matches,"responsive")},(0,o.useEffect)(()=>{let e;function t(e){return Z.current(e)}if("undefined"!=typeof window){let{matchMedia:n}=window;if(n&&j&&j in y){e=n("screen and (max-width: ".concat(y[j],")"));try{e.addEventListener("change",t)}catch(n){e.addListener(t)}t(e)}}return()=>{try{null==e||e.removeEventListener("change",t)}catch(n){null==e||e.removeListener(t)}}},[j]),(0,o.useEffect)(()=>{let e=x("ant-sider-");return I.addSider(e),()=>I.removeSider(e)},[]);let L=()=>{D(!R,"clickTrigger")},{getPrefixCls:z}=(0,o.useContext)(m.E_),B=o.useMemo(()=>({siderCollapsed:R}),[R]);return o.createElement(b.Provider,{value:B},(()=>{let e=z("layout-sider",n),l=(0,p.Z)(N,["collapsed"]),m=R?C:E,g=h(m)?"".concat(m,"px"):String(m),v=0===parseFloat(String(C||0))?o.createElement("span",{onClick:L,className:f()("".concat(e,"-zero-width-trigger"),"".concat(e,"-zero-width-trigger-").concat(k?"right":"left")),style:O},i||o.createElement(c,null)):null,y={expanded:k?o.createElement(u.Z,null):o.createElement(s.Z,null),collapsed:k?o.createElement(s.Z,null):o.createElement(u.Z,null)}[R?"collapsed":"expanded"],b=null!==i?v||o.createElement("div",{className:"".concat(e,"-trigger"),onClick:L,style:{width:g}},i||y):null,x=Object.assign(Object.assign({},w),{flex:"0 0 ".concat(g),maxWidth:g,minWidth:g,width:g}),j=f()(e,"".concat(e,"-").concat(d),{["".concat(e,"-collapsed")]:!!R,["".concat(e,"-has-trigger")]:S&&null!==i&&!v,["".concat(e,"-below")]:!!A,["".concat(e,"-zero-width")]:0===parseFloat(g)},r);return o.createElement("aside",Object.assign({className:j},l,{style:x,ref:t}),o.createElement("div",{className:"".concat(e,"-children")},a),S||A&&v?b:null)})())})},80856:function(e,t,n){"use strict";n.d(t,{V:function(){return r}});let 
r=n(2265).createContext({siderHook:{addSider:()=>null,removeSider:()=>null}})},19226:function(e,t,n){"use strict";n.d(t,{default:function(){return C}});var r=n(83145),o=n(2265),i=n(36760),a=n.n(i),l=n(18694),c=n(71744),s=n(80856),u=n(45287),d=n(92239),f=n(352),p=n(80669),h=e=>{let{componentCls:t,bodyBg:n,lightSiderBg:r,lightTriggerBg:o,lightTriggerColor:i}=e;return{["".concat(t,"-sider-light")]:{background:r,["".concat(t,"-sider-trigger")]:{color:i,background:o},["".concat(t,"-sider-zero-width-trigger")]:{color:i,background:o,border:"1px solid ".concat(n),borderInlineStart:0}}}};let m=e=>{let{antCls:t,componentCls:n,colorText:r,triggerColor:o,footerBg:i,triggerBg:a,headerHeight:l,headerPadding:c,headerColor:s,footerPadding:u,triggerHeight:d,zeroTriggerHeight:p,zeroTriggerWidth:m,motionDurationMid:g,motionDurationSlow:v,fontSize:y,borderRadius:b,bodyBg:x,headerBg:w,siderBg:S}=e;return{[n]:Object.assign(Object.assign({display:"flex",flex:"auto",flexDirection:"column",minHeight:0,background:x,"&, *":{boxSizing:"border-box"},["&".concat(n,"-has-sider")]:{flexDirection:"row",["> ".concat(n,", > ").concat(n,"-content")]:{width:0}},["".concat(n,"-header, &").concat(n,"-footer")]:{flex:"0 0 auto"},["".concat(n,"-sider")]:{position:"relative",minWidth:0,background:S,transition:"all ".concat(g,", background 0s"),"&-children":{height:"100%",marginTop:-.1,paddingTop:.1,["".concat(t,"-menu").concat(t,"-menu-inline-collapsed")]:{width:"auto"}},"&-has-trigger":{paddingBottom:d},"&-right":{order:1},"&-trigger":{position:"fixed",bottom:0,zIndex:1,height:d,color:o,lineHeight:(0,f.bf)(d),textAlign:"center",background:a,cursor:"pointer",transition:"all ".concat(g)},"&-zero-width":{"> 
*":{overflow:"hidden"},"&-trigger":{position:"absolute",top:l,insetInlineEnd:e.calc(m).mul(-1).equal(),zIndex:1,width:m,height:p,color:o,fontSize:e.fontSizeXL,display:"flex",alignItems:"center",justifyContent:"center",background:S,borderStartStartRadius:0,borderStartEndRadius:b,borderEndEndRadius:b,borderEndStartRadius:0,cursor:"pointer",transition:"background ".concat(v," ease"),"&::after":{position:"absolute",inset:0,background:"transparent",transition:"all ".concat(v),content:'""'},"&:hover::after":{background:"rgba(255, 255, 255, 0.2)"},"&-right":{insetInlineStart:e.calc(m).mul(-1).equal(),borderStartStartRadius:b,borderStartEndRadius:0,borderEndEndRadius:0,borderEndStartRadius:b}}}}},h(e)),{"&-rtl":{direction:"rtl"}}),["".concat(n,"-header")]:{height:l,padding:c,color:s,lineHeight:(0,f.bf)(l),background:w,["".concat(t,"-menu")]:{lineHeight:"inherit"}},["".concat(n,"-footer")]:{padding:u,color:r,fontSize:y,background:i},["".concat(n,"-content")]:{flex:"auto",minHeight:0}}};var g=(0,p.I$)("Layout",e=>[m(e)],e=>{let{colorBgLayout:t,controlHeight:n,controlHeightLG:r,colorText:o,controlHeightSM:i,marginXXS:a,colorTextLightSolid:l,colorBgContainer:c}=e,s=1.25*r;return{colorBgHeader:"#001529",colorBgBody:t,colorBgTrigger:"#002140",bodyBg:t,headerBg:"#001529",headerHeight:2*n,headerPadding:"0 ".concat(s,"px"),headerColor:o,footerPadding:"".concat(i,"px ").concat(s,"px"),footerBg:t,siderBg:"#001529",triggerHeight:r+2*a,triggerBg:"#002140",triggerColor:l,zeroTriggerWidth:r,zeroTriggerHeight:r,lightSiderBg:c,lightTriggerBg:c,lightTriggerColor:o}},{deprecatedTokens:[["colorBgBody","bodyBg"],["colorBgHeader","headerBg"],["colorBgTrigger","triggerBg"]]}),v=function(e,t){var n={};for(var r in e)Object.prototype.hasOwnProperty.call(e,r)&&0>t.indexOf(r)&&(n[r]=e[r]);if(null!=e&&"function"==typeof Object.getOwnPropertySymbols)for(var o=0,r=Object.getOwnPropertySymbols(e);ot.indexOf(r[o])&&Object.prototype.propertyIsEnumerable.call(e,r[o])&&(n[r[o]]=e[r[o]]);return n};function 
y(e){let{suffixCls:t,tagName:n,displayName:r}=e;return e=>o.forwardRef((r,i)=>o.createElement(e,Object.assign({ref:i,suffixCls:t,tagName:n},r)))}let b=o.forwardRef((e,t)=>{let{prefixCls:n,suffixCls:r,className:i,tagName:l}=e,s=v(e,["prefixCls","suffixCls","className","tagName"]),{getPrefixCls:u}=o.useContext(c.E_),d=u("layout",n),[f,p,h]=g(d),m=r?"".concat(d,"-").concat(r):d;return f(o.createElement(l,Object.assign({className:a()(n||m,i,p,h),ref:t},s)))}),x=o.forwardRef((e,t)=>{let{direction:n}=o.useContext(c.E_),[i,f]=o.useState([]),{prefixCls:p,className:h,rootClassName:m,children:y,hasSider:b,tagName:x,style:w}=e,S=v(e,["prefixCls","className","rootClassName","children","hasSider","tagName","style"]),k=(0,l.Z)(S,["suffixCls"]),{getPrefixCls:E,layout:C}=o.useContext(c.E_),O=E("layout",p),j="boolean"==typeof b?b:!!i.length||(0,u.Z)(y).some(e=>e.type===d.Z),[P,M,N]=g(O),I=a()(O,{["".concat(O,"-has-sider")]:j,["".concat(O,"-rtl")]:"rtl"===n},null==C?void 0:C.className,h,m,M,N),R=o.useMemo(()=>({siderHook:{addSider:e=>{f(t=>[].concat((0,r.Z)(t),[e]))},removeSider:e=>{f(t=>t.filter(t=>t!==e))}}}),[]);return P(o.createElement(s.V.Provider,{value:R},o.createElement(x,Object.assign({ref:t,className:I,style:Object.assign(Object.assign({},null==C?void 0:C.style),w)},k),y)))}),w=y({tagName:"div",displayName:"Layout"})(x),S=y({suffixCls:"header",tagName:"header",displayName:"Header"})(b),k=y({suffixCls:"footer",tagName:"footer",displayName:"Footer"})(b),E=y({suffixCls:"content",tagName:"main",displayName:"Content"})(b);w.Header=S,w.Footer=k,w.Content=E,w.Sider=d.Z,w._InternalSiderContext=d.D;var C=w},88208:function(e,t,n){"use strict";n.d(t,{J:function(){return c}});var r=n(2265),o=n(74126),i=n(65658),a=function(e,t){var n={};for(var r in e)Object.prototype.hasOwnProperty.call(e,r)&&0>t.indexOf(r)&&(n[r]=e[r]);if(null!=e&&"function"==typeof Object.getOwnPropertySymbols)for(var 
o=0,r=Object.getOwnPropertySymbols(e);ot.indexOf(r[o])&&Object.prototype.propertyIsEnumerable.call(e,r[o])&&(n[r[o]]=e[r[o]]);return n};let l=r.createContext(null),c=r.forwardRef((e,t)=>{let{children:n}=e,c=a(e,["children"]),s=r.useContext(l),u=r.useMemo(()=>Object.assign(Object.assign({},s),c),[s,c.prefixCls,c.mode,c.selectable,c.rootClassName]),d=(0,o.t4)(n),f=(0,o.x1)(t,d?n.ref:null);return r.createElement(l.Provider,{value:u},r.createElement(i.BR,null,d?r.cloneElement(n,{ref:f}):n))});t.Z=l},45937:function(e,t,n){"use strict";n.d(t,{Z:function(){return U}});var r=n(2265),o=n(33082),i=n(92239),a=n(39760),l=n(36760),c=n.n(l),s=n(74126),u=n(18694),d=n(68710),f=n(19722),p=n(71744),h=function(e,t){var n={};for(var r in e)Object.prototype.hasOwnProperty.call(e,r)&&0>t.indexOf(r)&&(n[r]=e[r]);if(null!=e&&"function"==typeof Object.getOwnPropertySymbols)for(var o=0,r=Object.getOwnPropertySymbols(e);ot.indexOf(r[o])&&Object.prototype.propertyIsEnumerable.call(e,r[o])&&(n[r[o]]=e[r[o]]);return n},m=e=>{let{prefixCls:t,className:n,dashed:i}=e,a=h(e,["prefixCls","className","dashed"]),{getPrefixCls:l}=r.useContext(p.E_),s=l("menu",t),u=c()({["".concat(s,"-item-divider-dashed")]:!!i},n);return r.createElement(o.iz,Object.assign({className:u},a))},g=n(45287),v=n(98074);let y=(0,r.createContext)({prefixCls:"",firstLevel:!0,inlineCollapsed:!1});var b=e=>{var t;let{className:n,children:a,icon:l,title:s,danger:d}=e,{prefixCls:p,firstLevel:h,direction:m,disableMenuItemTitleTooltip:b,inlineCollapsed:x}=r.useContext(y),{siderCollapsed:w}=r.useContext(i.D),S=s;void 0===s?S=h?a:"":!1===s&&(S="");let k={title:S};w||x||(k.title=null,k.open=!1);let E=(0,g.Z)(a).length,C=r.createElement(o.ck,Object.assign({},(0,u.Z)(e,["title","icon","danger"]),{className:c()({["".concat(p,"-item-danger")]:d,["".concat(p,"-item-only-child")]:(l?E+1:E)===1},n),title:"string"==typeof s?s:void 0}),(0,f.Tm)(l,{className:c()((0,f.l$)(l)?null===(t=l.props)||void 0===t?void 
0:t.className:"","".concat(p,"-item-icon"))}),(e=>{let t=r.createElement("span",{className:"".concat(p,"-title-content")},a);return(!l||(0,f.l$)(a)&&"span"===a.type)&&a&&e&&h&&"string"==typeof a?r.createElement("div",{className:"".concat(p,"-inline-collapsed-noicon")},a.charAt(0)):t})(x));return b||(C=r.createElement(v.Z,Object.assign({},k,{placement:"rtl"===m?"left":"right",overlayClassName:"".concat(p,"-inline-collapsed-tooltip")}),C)),C},x=n(62236),w=e=>{var t;let n;let{popupClassName:i,icon:a,title:l,theme:s}=e,d=r.useContext(y),{prefixCls:p,inlineCollapsed:h,theme:m}=d,g=(0,o.Xl)();if(a){let e=(0,f.l$)(l)&&"span"===l.type;n=r.createElement(r.Fragment,null,(0,f.Tm)(a,{className:c()((0,f.l$)(a)?null===(t=a.props)||void 0===t?void 0:t.className:"","".concat(p,"-item-icon"))}),e?l:r.createElement("span",{className:"".concat(p,"-title-content")},l))}else n=h&&!g.length&&l&&"string"==typeof l?r.createElement("div",{className:"".concat(p,"-inline-collapsed-noicon")},l.charAt(0)):r.createElement("span",{className:"".concat(p,"-title-content")},l);let v=r.useMemo(()=>Object.assign(Object.assign({},d),{firstLevel:!1}),[d]),[b]=(0,x.Cn)("Menu");return r.createElement(y.Provider,{value:v},r.createElement(o.Wd,Object.assign({},(0,u.Z)(e,["icon"]),{title:n,popupClassName:c()(p,i,"".concat(p,"-").concat(s||m)),popupStyle:{zIndex:b}})))},S=function(e,t){var n={};for(var r in e)Object.prototype.hasOwnProperty.call(e,r)&&0>t.indexOf(r)&&(n[r]=e[r]);if(null!=e&&"function"==typeof Object.getOwnPropertySymbols)for(var o=0,r=Object.getOwnPropertySymbols(e);ot.indexOf(r[o])&&Object.prototype.propertyIsEnumerable.call(e,r[o])&&(n[r[o]]=e[r[o]]);return n},k=n(88208),E=n(352),C=n(36360),O=n(12918),j=n(63074),P=n(18544),M=n(691),N=n(80669),I=n(3104),R=e=>{let{componentCls:t,motionDurationSlow:n,horizontalLineHeight:r,colorSplit:o,lineWidth:i,lineType:a,itemPaddingInline:l}=e;return{["".concat(t,"-horizontal")]:{lineHeight:r,border:0,borderBottom:"".concat((0,E.bf)(i)," ").concat(a," 
").concat(o),boxShadow:"none","&::after":{display:"block",clear:"both",height:0,content:'"\\20"'},["".concat(t,"-item, ").concat(t,"-submenu")]:{position:"relative",display:"inline-block",verticalAlign:"bottom",paddingInline:l},["> ".concat(t,"-item:hover,\n > ").concat(t,"-item-active,\n > ").concat(t,"-submenu ").concat(t,"-submenu-title:hover")]:{backgroundColor:"transparent"},["".concat(t,"-item, ").concat(t,"-submenu-title")]:{transition:["border-color ".concat(n),"background ".concat(n)].join(",")},["".concat(t,"-submenu-arrow")]:{display:"none"}}}},T=e=>{let{componentCls:t,menuArrowOffset:n,calc:r}=e;return{["".concat(t,"-rtl")]:{direction:"rtl"},["".concat(t,"-submenu-rtl")]:{transformOrigin:"100% 0"},["".concat(t,"-rtl").concat(t,"-vertical,\n ").concat(t,"-submenu-rtl ").concat(t,"-vertical")]:{["".concat(t,"-submenu-arrow")]:{"&::before":{transform:"rotate(-45deg) translateY(".concat((0,E.bf)(r(n).mul(-1).equal()),")")},"&::after":{transform:"rotate(45deg) translateY(".concat((0,E.bf)(n),")")}}}}};let A=e=>Object.assign({},(0,O.oN)(e));var _=(e,t)=>{let{componentCls:n,itemColor:r,itemSelectedColor:o,groupTitleColor:i,itemBg:a,subMenuItemBg:l,itemSelectedBg:c,activeBarHeight:s,activeBarWidth:u,activeBarBorderWidth:d,motionDurationSlow:f,motionEaseInOut:p,motionEaseOut:h,itemPaddingInline:m,motionDurationMid:g,itemHoverColor:v,lineType:y,colorSplit:b,itemDisabledColor:x,dangerItemColor:w,dangerItemHoverColor:S,dangerItemSelectedColor:k,dangerItemActiveBg:C,dangerItemSelectedBg:O,popupBg:j,itemHoverBg:P,itemActiveBg:M,menuSubMenuBg:N,horizontalItemSelectedColor:I,horizontalItemSelectedBg:R,horizontalItemBorderRadius:T,horizontalItemHoverBg:_}=e;return{["".concat(n,"-").concat(t,", ").concat(n,"-").concat(t," > ").concat(n)]:{color:r,background:a,["&".concat(n,"-root:focus-visible")]:Object.assign({},A(e)),["".concat(n,"-item-group-title")]:{color:i},["".concat(n,"-submenu-selected")]:{["> 
".concat(n,"-submenu-title")]:{color:o}},["".concat(n,"-item-disabled, ").concat(n,"-submenu-disabled")]:{color:"".concat(x," !important")},["".concat(n,"-item:not(").concat(n,"-item-selected):not(").concat(n,"-submenu-selected)")]:{["&:hover, > ".concat(n,"-submenu-title:hover")]:{color:v}},["&:not(".concat(n,"-horizontal)")]:{["".concat(n,"-item:not(").concat(n,"-item-selected)")]:{"&:hover":{backgroundColor:P},"&:active":{backgroundColor:M}},["".concat(n,"-submenu-title")]:{"&:hover":{backgroundColor:P},"&:active":{backgroundColor:M}}},["".concat(n,"-item-danger")]:{color:w,["&".concat(n,"-item:hover")]:{["&:not(".concat(n,"-item-selected):not(").concat(n,"-submenu-selected)")]:{color:S}},["&".concat(n,"-item:active")]:{background:C}},["".concat(n,"-item a")]:{"&, &:hover":{color:"inherit"}},["".concat(n,"-item-selected")]:{color:o,["&".concat(n,"-item-danger")]:{color:k},"a, a:hover":{color:"inherit"}},["& ".concat(n,"-item-selected")]:{backgroundColor:c,["&".concat(n,"-item-danger")]:{backgroundColor:O}},["".concat(n,"-item, ").concat(n,"-submenu-title")]:{["&:not(".concat(n,"-item-disabled):focus-visible")]:Object.assign({},A(e))},["&".concat(n,"-submenu > ").concat(n)]:{backgroundColor:N},["&".concat(n,"-popup > ").concat(n)]:{backgroundColor:j},["&".concat(n,"-submenu-popup > ").concat(n)]:{backgroundColor:j},["&".concat(n,"-horizontal")]:Object.assign(Object.assign({},"dark"===t?{borderBottom:0}:{}),{["> ".concat(n,"-item, > ").concat(n,"-submenu")]:{top:d,marginTop:e.calc(d).mul(-1).equal(),marginBottom:0,borderRadius:T,"&::after":{position:"absolute",insetInline:m,bottom:0,borderBottom:"".concat((0,E.bf)(s)," solid transparent"),transition:"border-color ".concat(f," ").concat(p),content:'""'},"&:hover, &-active, 
&-open":{background:_,"&::after":{borderBottomWidth:s,borderBottomColor:I}},"&-selected":{color:I,backgroundColor:R,"&:hover":{backgroundColor:R},"&::after":{borderBottomWidth:s,borderBottomColor:I}}}}),["&".concat(n,"-root")]:{["&".concat(n,"-inline, &").concat(n,"-vertical")]:{borderInlineEnd:"".concat((0,E.bf)(d)," ").concat(y," ").concat(b)}},["&".concat(n,"-inline")]:{["".concat(n,"-sub").concat(n,"-inline")]:{background:l},["".concat(n,"-item")]:{position:"relative","&::after":{position:"absolute",insetBlock:0,insetInlineEnd:0,borderInlineEnd:"".concat((0,E.bf)(u)," solid ").concat(o),transform:"scaleY(0.0001)",opacity:0,transition:["transform ".concat(g," ").concat(h),"opacity ".concat(g," ").concat(h)].join(","),content:'""'},["&".concat(n,"-item-danger")]:{"&::after":{borderInlineEndColor:k}}},["".concat(n,"-selected, ").concat(n,"-item-selected")]:{"&::after":{transform:"scaleY(1)",opacity:1,transition:["transform ".concat(g," ").concat(p),"opacity ".concat(g," ").concat(p)].join(",")}}}}}};let D=e=>{let{componentCls:t,itemHeight:n,itemMarginInline:r,padding:o,menuArrowSize:i,marginXS:a,itemMarginBlock:l,itemWidth:c}=e,s=e.calc(i).add(o).add(a).equal();return{["".concat(t,"-item")]:{position:"relative",overflow:"hidden"},["".concat(t,"-item, ").concat(t,"-submenu-title")]:{height:n,lineHeight:(0,E.bf)(n),paddingInline:o,overflow:"hidden",textOverflow:"ellipsis",marginInline:r,marginBlock:l,width:c},["> ".concat(t,"-item,\n > ").concat(t,"-submenu > ").concat(t,"-submenu-title")]:{height:n,lineHeight:(0,E.bf)(n)},["".concat(t,"-item-group-list ").concat(t,"-submenu-title,\n ").concat(t,"-submenu-title")]:{paddingInlineEnd:s}}};var 
Z=e=>{let{componentCls:t,iconCls:n,itemHeight:r,colorTextLightSolid:o,dropdownWidth:i,controlHeightLG:a,motionDurationMid:l,motionEaseOut:c,paddingXL:s,itemMarginInline:u,fontSizeLG:d,motionDurationSlow:f,paddingXS:p,boxShadowSecondary:h,collapsedWidth:m,collapsedIconSize:g}=e,v={height:r,lineHeight:(0,E.bf)(r),listStylePosition:"inside",listStyleType:"disc"};return[{[t]:{"&-inline, &-vertical":Object.assign({["&".concat(t,"-root")]:{boxShadow:"none"}},D(e))},["".concat(t,"-submenu-popup")]:{["".concat(t,"-vertical")]:Object.assign(Object.assign({},D(e)),{boxShadow:h})}},{["".concat(t,"-submenu-popup ").concat(t,"-vertical").concat(t,"-sub")]:{minWidth:i,maxHeight:"calc(100vh - ".concat((0,E.bf)(e.calc(a).mul(2.5).equal()),")"),padding:"0",overflow:"hidden",borderInlineEnd:0,"&:not([class*='-active'])":{overflowX:"hidden",overflowY:"auto"}}},{["".concat(t,"-inline")]:{width:"100%",["&".concat(t,"-root")]:{["".concat(t,"-item, ").concat(t,"-submenu-title")]:{display:"flex",alignItems:"center",transition:["border-color ".concat(f),"background ".concat(f),"padding ".concat(l," ").concat(c)].join(","),["> ".concat(t,"-title-content")]:{flex:"auto",minWidth:0,overflow:"hidden",textOverflow:"ellipsis"},"> *":{flex:"none"}}},["".concat(t,"-sub").concat(t,"-inline")]:{padding:0,border:0,borderRadius:0,boxShadow:"none",["& > ".concat(t,"-submenu > ").concat(t,"-submenu-title")]:v,["& ".concat(t,"-item-group-title")]:{paddingInlineStart:s}},["".concat(t,"-item")]:v}},{["".concat(t,"-inline-collapsed")]:{width:m,["&".concat(t,"-root")]:{["".concat(t,"-item, ").concat(t,"-submenu ").concat(t,"-submenu-title")]:{["> ".concat(t,"-inline-collapsed-noicon")]:{fontSize:d,textAlign:"center"}}},["> ".concat(t,"-item,\n > ").concat(t,"-item-group > ").concat(t,"-item-group-list > ").concat(t,"-item,\n > ").concat(t,"-item-group > ").concat(t,"-item-group-list > ").concat(t,"-submenu > ").concat(t,"-submenu-title,\n > ").concat(t,"-submenu > 
").concat(t,"-submenu-title")]:{insetInlineStart:0,paddingInline:"calc(50% - ".concat((0,E.bf)(e.calc(d).div(2).equal())," - ").concat((0,E.bf)(u),")"),textOverflow:"clip",["\n ".concat(t,"-submenu-arrow,\n ").concat(t,"-submenu-expand-icon\n ")]:{opacity:0},["".concat(t,"-item-icon, ").concat(n)]:{margin:0,fontSize:g,lineHeight:(0,E.bf)(r),"+ span":{display:"inline-block",opacity:0}}},["".concat(t,"-item-icon, ").concat(n)]:{display:"inline-block"},"&-tooltip":{pointerEvents:"none",["".concat(t,"-item-icon, ").concat(n)]:{display:"none"},"a, a:hover":{color:o}},["".concat(t,"-item-group-title")]:Object.assign(Object.assign({},O.vS),{paddingInline:p})}}]};let L=e=>{let{componentCls:t,motionDurationSlow:n,motionDurationMid:r,motionEaseInOut:o,motionEaseOut:i,iconCls:a,iconSize:l,iconMarginInlineEnd:c}=e;return{["".concat(t,"-item, ").concat(t,"-submenu-title")]:{position:"relative",display:"block",margin:0,whiteSpace:"nowrap",cursor:"pointer",transition:["border-color ".concat(n),"background ".concat(n),"padding ".concat(n," ").concat(o)].join(","),["".concat(t,"-item-icon, ").concat(a)]:{minWidth:l,fontSize:l,transition:["font-size ".concat(r," ").concat(i),"margin ".concat(n," ").concat(o),"color ".concat(n)].join(","),"+ span":{marginInlineStart:c,opacity:1,transition:["opacity ".concat(n," ").concat(o),"margin ".concat(n),"color ".concat(n)].join(",")}},["".concat(t,"-item-icon")]:Object.assign({},(0,O.Ro)()),["&".concat(t,"-item-only-child")]:{["> ".concat(a,", > ").concat(t,"-item-icon")]:{marginInlineEnd:0}}},["".concat(t,"-item-disabled, ").concat(t,"-submenu-disabled")]:{background:"none !important",cursor:"not-allowed","&::after":{borderColor:"transparent !important"},a:{color:"inherit !important"},["> ".concat(t,"-submenu-title")]:{color:"inherit !important",cursor:"not-allowed"}}}},z=e=>{let{componentCls:t,motionDurationSlow:n,motionEaseInOut:r,borderRadius:o,menuArrowSize:i,menuArrowOffset:a}=e;return{["".concat(t,"-submenu")]:{"&-expand-icon, 
&-arrow":{position:"absolute",top:"50%",insetInlineEnd:e.margin,width:i,color:"currentcolor",transform:"translateY(-50%)",transition:"transform ".concat(n," ").concat(r,", opacity ").concat(n)},"&-arrow":{"&::before, &::after":{position:"absolute",width:e.calc(i).mul(.6).equal(),height:e.calc(i).mul(.15).equal(),backgroundColor:"currentcolor",borderRadius:o,transition:["background ".concat(n," ").concat(r),"transform ".concat(n," ").concat(r),"top ".concat(n," ").concat(r),"color ".concat(n," ").concat(r)].join(","),content:'""'},"&::before":{transform:"rotate(45deg) translateY(".concat((0,E.bf)(e.calc(a).mul(-1).equal()),")")},"&::after":{transform:"rotate(-45deg) translateY(".concat((0,E.bf)(a),")")}}}}},B=e=>{let{antCls:t,componentCls:n,fontSize:r,motionDurationSlow:o,motionDurationMid:i,motionEaseInOut:a,paddingXS:l,padding:c,colorSplit:s,lineWidth:u,zIndexPopup:d,borderRadiusLG:f,subMenuItemBorderRadius:p,menuArrowSize:h,menuArrowOffset:m,lineType:g,menuPanelMaskInset:v,groupTitleLineHeight:y,groupTitleFontSize:b}=e;return[{"":{["".concat(n)]:Object.assign(Object.assign({},(0,O.dF)()),{"&-hidden":{display:"none"}})},["".concat(n,"-submenu-hidden")]:{display:"none"}},{[n]:Object.assign(Object.assign(Object.assign(Object.assign(Object.assign(Object.assign(Object.assign({},(0,O.Wf)(e)),(0,O.dF)()),{marginBottom:0,paddingInlineStart:0,fontSize:r,lineHeight:0,listStyle:"none",outline:"none",transition:"width ".concat(o," cubic-bezier(0.2, 0, 0, 1) 0s"),"ul, ol":{margin:0,padding:0,listStyle:"none"},"&-overflow":{display:"flex",["".concat(n,"-item")]:{flex:"none"}},["".concat(n,"-item, ").concat(n,"-submenu, ").concat(n,"-submenu-title")]:{borderRadius:e.itemBorderRadius},["".concat(n,"-item-group-title")]:{padding:"".concat((0,E.bf)(l)," ").concat((0,E.bf)(c)),fontSize:b,lineHeight:y,transition:"all ".concat(o)},["&-horizontal ".concat(n,"-submenu")]:{transition:["border-color ".concat(o," ").concat(a),"background ".concat(o," 
").concat(a)].join(",")},["".concat(n,"-submenu, ").concat(n,"-submenu-inline")]:{transition:["border-color ".concat(o," ").concat(a),"background ".concat(o," ").concat(a),"padding ".concat(i," ").concat(a)].join(",")},["".concat(n,"-submenu ").concat(n,"-sub")]:{cursor:"initial",transition:["background ".concat(o," ").concat(a),"padding ".concat(o," ").concat(a)].join(",")},["".concat(n,"-title-content")]:{transition:"color ".concat(o),["> ".concat(t,"-typography-ellipsis-single-line")]:{display:"inline",verticalAlign:"unset"}},["".concat(n,"-item a")]:{"&::before":{position:"absolute",inset:0,backgroundColor:"transparent",content:'""'}},["".concat(n,"-item-divider")]:{overflow:"hidden",lineHeight:0,borderColor:s,borderStyle:g,borderWidth:0,borderTopWidth:u,marginBlock:u,padding:0,"&-dashed":{borderStyle:"dashed"}}}),L(e)),{["".concat(n,"-item-group")]:{["".concat(n,"-item-group-list")]:{margin:0,padding:0,["".concat(n,"-item, ").concat(n,"-submenu-title")]:{paddingInline:"".concat((0,E.bf)(e.calc(r).mul(2).equal())," ").concat((0,E.bf)(c))}}},"&-submenu":{"&-popup":{position:"absolute",zIndex:d,borderRadius:f,boxShadow:"none",transformOrigin:"0 0",["&".concat(n,"-submenu")]:{background:"transparent"},"&::before":{position:"absolute",inset:"".concat((0,E.bf)(v)," 0 0"),zIndex:-1,width:"100%",height:"100%",opacity:0,content:'""'}},"&-placement-rightTop::before":{top:0,insetInlineStart:v},"\n &-placement-leftTop,\n &-placement-bottomRight,\n ":{transformOrigin:"100% 0"},"\n &-placement-leftBottom,\n &-placement-topRight,\n ":{transformOrigin:"100% 100%"},"\n &-placement-rightBottom,\n &-placement-topLeft,\n ":{transformOrigin:"0 100%"},"\n &-placement-bottomLeft,\n &-placement-rightTop,\n ":{transformOrigin:"0 0"},"\n &-placement-leftTop,\n &-placement-leftBottom\n ":{paddingInlineEnd:e.paddingXS},"\n &-placement-rightTop,\n &-placement-rightBottom\n ":{paddingInlineStart:e.paddingXS},"\n &-placement-topRight,\n &-placement-topLeft\n 
":{paddingBottom:e.paddingXS},"\n &-placement-bottomRight,\n &-placement-bottomLeft\n ":{paddingTop:e.paddingXS},["> ".concat(n)]:Object.assign(Object.assign(Object.assign({borderRadius:f},L(e)),z(e)),{["".concat(n,"-item, ").concat(n,"-submenu > ").concat(n,"-submenu-title")]:{borderRadius:p},["".concat(n,"-submenu-title::after")]:{transition:"transform ".concat(o," ").concat(a)}})}}),z(e)),{["&-inline-collapsed ".concat(n,"-submenu-arrow,\n &-inline ").concat(n,"-submenu-arrow")]:{"&::before":{transform:"rotate(-45deg) translateX(".concat((0,E.bf)(m),")")},"&::after":{transform:"rotate(45deg) translateX(".concat((0,E.bf)(e.calc(m).mul(-1).equal()),")")}},["".concat(n,"-submenu-open").concat(n,"-submenu-inline > ").concat(n,"-submenu-title > ").concat(n,"-submenu-arrow")]:{transform:"translateY(".concat((0,E.bf)(e.calc(h).mul(.2).mul(-1).equal()),")"),"&::after":{transform:"rotate(-45deg) translateX(".concat((0,E.bf)(e.calc(m).mul(-1).equal()),")")},"&::before":{transform:"rotate(45deg) translateX(".concat((0,E.bf)(m),")")}}})},{["".concat(t,"-layout-header")]:{[n]:{lineHeight:"inherit"}}}]},F=e=>{var t,n,r;let{colorPrimary:o,colorError:i,colorTextDisabled:a,colorErrorBg:l,colorText:c,colorTextDescription:s,colorBgContainer:u,colorFillAlter:d,colorFillContent:f,lineWidth:p,lineWidthBold:h,controlItemBgActive:m,colorBgTextHover:g,controlHeightLG:v,lineHeight:y,colorBgElevated:b,marginXXS:x,padding:w,fontSize:S,controlHeightSM:k,fontSizeLG:E,colorTextLightSolid:O,colorErrorHover:j}=e,P=null!==(t=e.activeBarWidth)&&void 0!==t?t:0,M=null!==(n=e.activeBarBorderWidth)&&void 0!==n?n:p,N=null!==(r=e.itemMarginInline)&&void 0!==r?r:e.marginXXS,I=new 
C.C(O).setAlpha(.65).toRgbString();return{dropdownWidth:160,zIndexPopup:e.zIndexPopupBase+50,radiusItem:e.borderRadiusLG,itemBorderRadius:e.borderRadiusLG,radiusSubMenuItem:e.borderRadiusSM,subMenuItemBorderRadius:e.borderRadiusSM,colorItemText:c,itemColor:c,colorItemTextHover:c,itemHoverColor:c,colorItemTextHoverHorizontal:o,horizontalItemHoverColor:o,colorGroupTitle:s,groupTitleColor:s,colorItemTextSelected:o,itemSelectedColor:o,colorItemTextSelectedHorizontal:o,horizontalItemSelectedColor:o,colorItemBg:u,itemBg:u,colorItemBgHover:g,itemHoverBg:g,colorItemBgActive:f,itemActiveBg:m,colorSubItemBg:d,subMenuItemBg:d,colorItemBgSelected:m,itemSelectedBg:m,colorItemBgSelectedHorizontal:"transparent",horizontalItemSelectedBg:"transparent",colorActiveBarWidth:0,activeBarWidth:P,colorActiveBarHeight:h,activeBarHeight:h,colorActiveBarBorderSize:p,activeBarBorderWidth:M,colorItemTextDisabled:a,itemDisabledColor:a,colorDangerItemText:i,dangerItemColor:i,colorDangerItemTextHover:i,dangerItemHoverColor:i,colorDangerItemTextSelected:i,dangerItemSelectedColor:i,colorDangerItemBgActive:l,dangerItemActiveBg:l,colorDangerItemBgSelected:l,dangerItemSelectedBg:l,itemMarginInline:N,horizontalItemBorderRadius:0,horizontalItemHoverBg:"transparent",itemHeight:v,groupTitleLineHeight:y,collapsedWidth:2*v,popupBg:b,itemMarginBlock:x,itemPaddingInline:w,horizontalLineHeight:"".concat(1.15*v,"px"),iconSize:S,iconMarginInlineEnd:k-S,collapsedIconSize:E,groupTitleFontSize:S,darkItemDisabledColor:new C.C(O).setAlpha(.25).toRgbString(),darkItemColor:I,darkDangerItemColor:i,darkItemBg:"#001529",darkPopupBg:"#001529",darkSubMenuItemBg:"#000c17",darkItemSelectedColor:O,darkItemSelectedBg:o,darkDangerItemSelectedBg:i,darkItemHoverBg:"transparent",darkGroupTitleColor:I,darkItemHoverColor:O,darkDangerItemHoverColor:j,darkDangerItemSelectedColor:O,darkDangerItemActiveBg:i,itemWidth:P?"calc(100% + ".concat(M,"px)"):"calc(100% - ".concat(2*N,"px)")}};var H=n(64024),q=function(e,t){var n={};for(var r in 
e)Object.prototype.hasOwnProperty.call(e,r)&&0>t.indexOf(r)&&(n[r]=e[r]);if(null!=e&&"function"==typeof Object.getOwnPropertySymbols)for(var o=0,r=Object.getOwnPropertySymbols(e);ot.indexOf(r[o])&&Object.prototype.propertyIsEnumerable.call(e,r[o])&&(n[r[o]]=e[r[o]]);return n};let W=(0,r.forwardRef)((e,t)=>{var n,i;let l;let h=r.useContext(k.Z),g=h||{},{getPrefixCls:v,getPopupContainer:x,direction:E,menu:C}=r.useContext(p.E_),O=v(),{prefixCls:A,className:D,style:L,theme:z="light",expandIcon:W,_internalDisableMenuItemTitleTooltip:K,inlineCollapsed:U,siderCollapsed:V,items:G,children:X,rootClassName:$,mode:Y,selectable:Q,onClick:J,overflowedIndicatorPopupClassName:ee}=e,et=q(e,["prefixCls","className","style","theme","expandIcon","_internalDisableMenuItemTitleTooltip","inlineCollapsed","siderCollapsed","items","children","rootClassName","mode","selectable","onClick","overflowedIndicatorPopupClassName"]),en=(0,u.Z)(et,["collapsedWidth"]),er=r.useMemo(()=>G?function e(t){return(t||[]).map((t,n)=>{if(t&&"object"==typeof t){let{label:i,children:a,key:l,type:c}=t,s=S(t,["label","children","key","type"]),u=null!=l?l:"tmp-".concat(n);return a||"group"===c?"group"===c?r.createElement(o.BW,Object.assign({key:u},s,{title:i}),e(a)):r.createElement(w,Object.assign({key:u},s,{title:i}),e(a)):"divider"===c?r.createElement(m,Object.assign({key:u},s)):r.createElement(b,Object.assign({key:u},s),i)}return null}).filter(e=>e)}(G):G,[G])||X;null===(n=g.validator)||void 0===n||n.call(g,{mode:Y});let eo=(0,s.zX)(function(){var e;null==J||J.apply(void 0,arguments),null===(e=g.onClick)||void 0===e||e.call(g)}),ei=g.mode||Y,ea=null!=Q?Q:g.selectable,el=r.useMemo(()=>void 0!==V?V:U,[U,V]),ec={horizontal:{motionName:"".concat(O,"-slide-up")},inline:(0,d.Z)(O),other:{motionName:"".concat(O,"-zoom-big")}},es=v("menu",A||g.prefixCls),eu=(0,H.Z)(es),[ed,ef,ep]=function(e){let t=arguments.length>1&&void 0!==arguments[1]?arguments[1]:e,n=!(arguments.length>2)||void 
0===arguments[2]||arguments[2];return(0,N.I$)("Menu",e=>{let{colorBgElevated:t,colorPrimary:n,colorTextLightSolid:r,controlHeightLG:o,fontSize:i,darkItemColor:a,darkDangerItemColor:l,darkItemBg:c,darkSubMenuItemBg:s,darkItemSelectedColor:u,darkItemSelectedBg:d,darkDangerItemSelectedBg:f,darkItemHoverBg:p,darkGroupTitleColor:h,darkItemHoverColor:m,darkItemDisabledColor:g,darkDangerItemHoverColor:v,darkDangerItemSelectedColor:y,darkDangerItemActiveBg:b,popupBg:x,darkPopupBg:w}=e,S=e.calc(i).div(7).mul(5).equal(),k=(0,I.TS)(e,{menuArrowSize:S,menuHorizontalHeight:e.calc(o).mul(1.15).equal(),menuArrowOffset:e.calc(S).mul(.25).equal(),menuPanelMaskInset:-7,menuSubMenuBg:t,calc:e.calc,popupBg:x}),E=(0,I.TS)(k,{itemColor:a,itemHoverColor:m,groupTitleColor:h,itemSelectedColor:u,itemBg:c,popupBg:w,subMenuItemBg:s,itemActiveBg:"transparent",itemSelectedBg:d,activeBarHeight:0,activeBarBorderWidth:0,itemHoverBg:p,itemDisabledColor:g,dangerItemColor:l,dangerItemHoverColor:v,dangerItemSelectedColor:y,dangerItemActiveBg:b,dangerItemSelectedBg:f,menuSubMenuBg:s,horizontalItemSelectedColor:r,horizontalItemSelectedBg:n});return[B(k),R(k),Z(k),_(k,"light"),_(E,"dark"),T(k),(0,j.Z)(k),(0,P.oN)(k,"slide-up"),(0,P.oN)(k,"slide-down"),(0,M._y)(k,"zoom-big")]},F,{deprecatedTokens:[["colorGroupTitle","groupTitleColor"],["radiusItem","itemBorderRadius"],["radiusSubMenuItem","subMenuItemBorderRadius"],["colorItemText","itemColor"],["colorItemTextHover","itemHoverColor"],["colorItemTextHoverHorizontal","horizontalItemHoverColor"],["colorItemTextSelected","itemSelectedColor"],["colorItemTextSelectedHorizontal","horizontalItemSelectedColor"],["colorItemTextDisabled","itemDisabledColor"],["colorDangerItemText","dangerItemColor"],["colorDangerItemTextHover","dangerItemHoverColor"],["colorDangerItemTextSelected","dangerItemSelectedColor"],["colorDangerItemBgActive","dangerItemActiveBg"],["colorDangerItemBgSelected","dangerItemSelectedBg"],["colorItemBg","itemBg"],["colorItemBgHover","itemHoverBg"],
["colorSubItemBg","subMenuItemBg"],["colorItemBgActive","itemActiveBg"],["colorItemBgSelectedHorizontal","horizontalItemSelectedBg"],["colorActiveBarWidth","activeBarWidth"],["colorActiveBarHeight","activeBarHeight"],["colorActiveBarBorderSize","activeBarBorderWidth"],["colorItemBgSelected","itemSelectedBg"]],injectStyle:n,unitless:{groupTitleLineHeight:!0}})(e,t)}(es,eu,!h),eh=c()("".concat(es,"-").concat(z),null==C?void 0:C.className,D);if("function"==typeof W)l=W;else if(null===W||!1===W)l=null;else if(null===g.expandIcon||!1===g.expandIcon)l=null;else{let e=null!=W?W:g.expandIcon;l=(0,f.Tm)(e,{className:c()("".concat(es,"-submenu-expand-icon"),(0,f.l$)(e)?null===(i=e.props)||void 0===i?void 0:i.className:"")})}let em=r.useMemo(()=>({prefixCls:es,inlineCollapsed:el||!1,direction:E,firstLevel:!0,theme:z,mode:ei,disableMenuItemTitleTooltip:K}),[es,el,E,K,z]);return ed(r.createElement(k.Z.Provider,{value:null},r.createElement(y.Provider,{value:em},r.createElement(o.ZP,Object.assign({getPopupContainer:x,overflowedIndicator:r.createElement(a.Z,null),overflowedIndicatorPopupClassName:c()(es,"".concat(es,"-").concat(z),ee),mode:ei,selectable:ea,onClick:eo},en,{inlineCollapsed:el,style:Object.assign(Object.assign({},null==C?void 0:C.style),L),className:eh,prefixCls:es,direction:E,defaultMotions:ec,expandIcon:l,ref:t,rootClassName:c()($,ef,g.rootClassName,ep,eu)}),er))))}),K=(0,r.forwardRef)((e,t)=>{let n=(0,r.useRef)(null),o=r.useContext(i.D);return(0,r.useImperativeHandle)(t,()=>({menu:n.current,focus:e=>{var t;null===(t=n.current)||void 0===t||t.focus(e)}})),r.createElement(W,Object.assign({ref:n},e,o))});K.Item=b,K.SubMenu=w,K.Divider=m,K.ItemGroup=o.BW;var U=K},29967:function(e,t,n){"use strict";n.d(t,{ZP:function(){return _}});var r=n(2265),o=n(36760),i=n.n(o),a=n(50506),l=n(18242),c=n(71744),s=n(33759);let u=r.createContext(null),d=u.Provider,f=r.createContext(null),p=f.Provider;var 
h=n(20873),m=n(28791),g=n(6694),v=n(34709),y=n(86586),b=n(39109),x=n(352),w=n(12918),S=n(80669),k=n(3104);let E=e=>{let{componentCls:t,antCls:n}=e,r="".concat(t,"-group");return{[r]:Object.assign(Object.assign({},(0,w.Wf)(e)),{display:"inline-block",fontSize:0,["&".concat(r,"-rtl")]:{direction:"rtl"},["".concat(n,"-badge ").concat(n,"-badge-count")]:{zIndex:1},["> ".concat(n,"-badge:not(:first-child) > ").concat(n,"-button-wrapper")]:{borderInlineStart:"none"}})}},C=e=>{let{componentCls:t,wrapperMarginInlineEnd:n,colorPrimary:r,radioSize:o,motionDurationSlow:i,motionDurationMid:a,motionEaseInOutCirc:l,colorBgContainer:c,colorBorder:s,lineWidth:u,colorBgContainerDisabled:d,colorTextDisabled:f,paddingXS:p,dotColorDisabled:h,lineType:m,radioColor:g,radioBgColor:v,calc:y}=e,b="".concat(t,"-inner"),S=y(o).sub(y(4).mul(2)),k=y(1).mul(o).equal();return{["".concat(t,"-wrapper")]:Object.assign(Object.assign({},(0,w.Wf)(e)),{display:"inline-flex",alignItems:"baseline",marginInlineStart:0,marginInlineEnd:n,cursor:"pointer",["&".concat(t,"-wrapper-rtl")]:{direction:"rtl"},"&-disabled":{cursor:"not-allowed",color:e.colorTextDisabled},"&::after":{display:"inline-block",width:0,overflow:"hidden",content:'"\\a0"'},["".concat(t,"-checked::after")]:{position:"absolute",insetBlockStart:0,insetInlineStart:0,width:"100%",height:"100%",border:"".concat((0,x.bf)(u)," ").concat(m," ").concat(r),borderRadius:"50%",visibility:"hidden",content:'""'},[t]:Object.assign(Object.assign({},(0,w.Wf)(e)),{position:"relative",display:"inline-block",outline:"none",cursor:"pointer",alignSelf:"center",borderRadius:"50%"}),["".concat(t,"-wrapper:hover &,\n &:hover ").concat(b)]:{borderColor:r},["".concat(t,"-input:focus-visible + ").concat(b)]:Object.assign({},(0,w.oN)(e)),["".concat(t,":hover::after, ").concat(t,"-wrapper:hover 
&::after")]:{visibility:"visible"},["".concat(t,"-inner")]:{"&::after":{boxSizing:"border-box",position:"absolute",insetBlockStart:"50%",insetInlineStart:"50%",display:"block",width:k,height:k,marginBlockStart:y(1).mul(o).div(-2).equal(),marginInlineStart:y(1).mul(o).div(-2).equal(),backgroundColor:g,borderBlockStart:0,borderInlineStart:0,borderRadius:k,transform:"scale(0)",opacity:0,transition:"all ".concat(i," ").concat(l),content:'""'},boxSizing:"border-box",position:"relative",insetBlockStart:0,insetInlineStart:0,display:"block",width:k,height:k,backgroundColor:c,borderColor:s,borderStyle:"solid",borderWidth:u,borderRadius:"50%",transition:"all ".concat(a)},["".concat(t,"-input")]:{position:"absolute",inset:0,zIndex:1,cursor:"pointer",opacity:0},["".concat(t,"-checked")]:{[b]:{borderColor:r,backgroundColor:v,"&::after":{transform:"scale(".concat(e.calc(e.dotSize).div(o).equal(),")"),opacity:1,transition:"all ".concat(i," ").concat(l)}}},["".concat(t,"-disabled")]:{cursor:"not-allowed",[b]:{backgroundColor:d,borderColor:s,cursor:"not-allowed","&::after":{backgroundColor:h}},["".concat(t,"-input")]:{cursor:"not-allowed"},["".concat(t,"-disabled + span")]:{color:f,cursor:"not-allowed"},["&".concat(t,"-checked")]:{[b]:{"&::after":{transform:"scale(".concat(y(S).div(o).equal({unit:!1}),")")}}}},["span".concat(t," + 
*")]:{paddingInlineStart:p,paddingInlineEnd:p}})}},O=e=>{let{buttonColor:t,controlHeight:n,componentCls:r,lineWidth:o,lineType:i,colorBorder:a,motionDurationSlow:l,motionDurationMid:c,buttonPaddingInline:s,fontSize:u,buttonBg:d,fontSizeLG:f,controlHeightLG:p,controlHeightSM:h,paddingXS:m,borderRadius:g,borderRadiusSM:v,borderRadiusLG:y,buttonCheckedBg:b,buttonSolidCheckedColor:S,colorTextDisabled:k,colorBgContainerDisabled:E,buttonCheckedBgDisabled:C,buttonCheckedColorDisabled:O,colorPrimary:j,colorPrimaryHover:P,colorPrimaryActive:M,buttonSolidCheckedBg:N,buttonSolidCheckedHoverBg:I,buttonSolidCheckedActiveBg:R,calc:T}=e;return{["".concat(r,"-button-wrapper")]:{position:"relative",display:"inline-block",height:n,margin:0,paddingInline:s,paddingBlock:0,color:t,fontSize:u,lineHeight:(0,x.bf)(T(n).sub(T(o).mul(2)).equal()),background:d,border:"".concat((0,x.bf)(o)," ").concat(i," ").concat(a),borderBlockStartWidth:T(o).add(.02).equal(),borderInlineStartWidth:0,borderInlineEndWidth:o,cursor:"pointer",transition:["color ".concat(c),"background ".concat(c),"box-shadow ".concat(c)].join(","),a:{color:t},["> ".concat(r,"-button")]:{position:"absolute",insetBlockStart:0,insetInlineStart:0,zIndex:-1,width:"100%",height:"100%"},"&:not(:first-child)":{"&::before":{position:"absolute",insetBlockStart:T(o).mul(-1).equal(),insetInlineStart:T(o).mul(-1).equal(),display:"block",boxSizing:"content-box",width:1,height:"100%",paddingBlock:o,paddingInline:0,backgroundColor:a,transition:"background-color ".concat(l),content:'""'}},"&:first-child":{borderInlineStart:"".concat((0,x.bf)(o)," ").concat(i," ").concat(a),borderStartStartRadius:g,borderEndStartRadius:g},"&:last-child":{borderStartEndRadius:g,borderEndEndRadius:g},"&:first-child:last-child":{borderRadius:g},["".concat(r,"-group-large 
&")]:{height:p,fontSize:f,lineHeight:(0,x.bf)(T(p).sub(T(o).mul(2)).equal()),"&:first-child":{borderStartStartRadius:y,borderEndStartRadius:y},"&:last-child":{borderStartEndRadius:y,borderEndEndRadius:y}},["".concat(r,"-group-small &")]:{height:h,paddingInline:T(m).sub(o).equal(),paddingBlock:0,lineHeight:(0,x.bf)(T(h).sub(T(o).mul(2)).equal()),"&:first-child":{borderStartStartRadius:v,borderEndStartRadius:v},"&:last-child":{borderStartEndRadius:v,borderEndEndRadius:v}},"&:hover":{position:"relative",color:j},"&:has(:focus-visible)":Object.assign({},(0,w.oN)(e)),["".concat(r,"-inner, input[type='checkbox'], input[type='radio']")]:{width:0,height:0,opacity:0,pointerEvents:"none"},["&-checked:not(".concat(r,"-button-wrapper-disabled)")]:{zIndex:1,color:j,background:b,borderColor:j,"&::before":{backgroundColor:j},"&:first-child":{borderColor:j},"&:hover":{color:P,borderColor:P,"&::before":{backgroundColor:P}},"&:active":{color:M,borderColor:M,"&::before":{backgroundColor:M}}},["".concat(r,"-group-solid &-checked:not(").concat(r,"-button-wrapper-disabled)")]:{color:S,background:N,borderColor:N,"&:hover":{color:S,background:I,borderColor:I},"&:active":{color:S,background:R,borderColor:R}},"&-disabled":{color:k,backgroundColor:E,borderColor:a,cursor:"not-allowed","&:first-child, &:hover":{color:k,backgroundColor:E,borderColor:a}},["&-disabled".concat(r,"-button-wrapper-checked")]:{color:O,backgroundColor:C,borderColor:a,boxShadow:"none"}}}};var j=(0,S.I$)("Radio",e=>{let{controlOutline:t,controlOutlineWidth:n}=e,r="0 0 0 ".concat((0,x.bf)(n)," 
").concat(t),o=(0,k.TS)(e,{radioFocusShadow:r,radioButtonFocusShadow:r});return[E(o),C(o),O(o)]},e=>{let{wireframe:t,padding:n,marginXS:r,lineWidth:o,fontSizeLG:i,colorText:a,colorBgContainer:l,colorTextDisabled:c,controlItemBgActiveDisabled:s,colorTextLightSolid:u,colorPrimary:d,colorPrimaryHover:f,colorPrimaryActive:p,colorWhite:h}=e;return{radioSize:i,dotSize:t?i-8:i-(4+o)*2,dotColorDisabled:c,buttonSolidCheckedColor:u,buttonSolidCheckedBg:d,buttonSolidCheckedHoverBg:f,buttonSolidCheckedActiveBg:p,buttonBg:l,buttonCheckedBg:l,buttonColor:a,buttonCheckedBgDisabled:s,buttonCheckedColorDisabled:c,buttonPaddingInline:n-o,wrapperMarginInlineEnd:r,radioColor:t?d:h,radioBgColor:t?l:d}},{unitless:{radioSize:!0,dotSize:!0}}),P=n(64024),M=function(e,t){var n={};for(var r in e)Object.prototype.hasOwnProperty.call(e,r)&&0>t.indexOf(r)&&(n[r]=e[r]);if(null!=e&&"function"==typeof Object.getOwnPropertySymbols)for(var o=0,r=Object.getOwnPropertySymbols(e);ot.indexOf(r[o])&&Object.prototype.propertyIsEnumerable.call(e,r[o])&&(n[r[o]]=e[r[o]]);return n};let N=r.forwardRef((e,t)=>{var n,o;let a=r.useContext(u),l=r.useContext(f),{getPrefixCls:s,direction:d,radio:p}=r.useContext(c.E_),x=r.useRef(null),w=(0,m.sQ)(t,x),{isFormItemInput:S}=r.useContext(b.aM),{prefixCls:k,className:E,rootClassName:C,children:O,style:N,title:I}=e,R=M(e,["prefixCls","className","rootClassName","children","style","title"]),T=s("radio",k),A="button"===((null==a?void 0:a.optionType)||l),_=A?"".concat(T,"-button"):T,D=(0,P.Z)(T),[Z,L,z]=j(T,D),B=Object.assign({},R),F=r.useContext(y.Z);a&&(B.name=a.name,B.onChange=t=>{var n,r;null===(n=e.onChange)||void 0===n||n.call(e,t),null===(r=null==a?void 0:a.onChange)||void 0===r||r.call(a,t)},B.checked=e.value===a.value,B.disabled=null!==(n=B.disabled)&&void 0!==n?n:a.disabled),B.disabled=null!==(o=B.disabled)&&void 0!==o?o:F;let 
H=i()("".concat(_,"-wrapper"),{["".concat(_,"-wrapper-checked")]:B.checked,["".concat(_,"-wrapper-disabled")]:B.disabled,["".concat(_,"-wrapper-rtl")]:"rtl"===d,["".concat(_,"-wrapper-in-form-item")]:S},null==p?void 0:p.className,E,C,L,z,D);return Z(r.createElement(g.Z,{component:"Radio",disabled:B.disabled},r.createElement("label",{className:H,style:Object.assign(Object.assign({},null==p?void 0:p.style),N),onMouseEnter:e.onMouseEnter,onMouseLeave:e.onMouseLeave,title:I},r.createElement(h.Z,Object.assign({},B,{className:i()(B.className,!A&&v.A),type:"radio",prefixCls:_,ref:w})),void 0!==O?r.createElement("span",null,O):null)))}),I=r.forwardRef((e,t)=>{let{getPrefixCls:n,direction:o}=r.useContext(c.E_),[u,f]=(0,a.Z)(e.defaultValue,{value:e.value}),{prefixCls:p,className:h,rootClassName:m,options:g,buttonStyle:v="outline",disabled:y,children:b,size:x,style:w,id:S,onMouseEnter:k,onMouseLeave:E,onFocus:C,onBlur:O}=e,M=n("radio",p),I="".concat(M,"-group"),R=(0,P.Z)(M),[T,A,_]=j(M,R),D=b;g&&g.length>0&&(D=g.map(e=>"string"==typeof e||"number"==typeof e?r.createElement(N,{key:e.toString(),prefixCls:M,disabled:y,value:e,checked:u===e},e):r.createElement(N,{key:"radio-group-value-options-".concat(e.value),prefixCls:M,disabled:e.disabled||y,value:e.value,checked:u===e.value,title:e.title,style:e.style,id:e.id,required:e.required},e.label)));let Z=(0,s.Z)(x),L=i()(I,"".concat(I,"-").concat(v),{["".concat(I,"-").concat(Z)]:Z,["".concat(I,"-rtl")]:"rtl"===o},h,m,A,_,R);return T(r.createElement("div",Object.assign({},(0,l.Z)(e,{aria:!0,data:!0}),{className:L,style:w,onMouseEnter:k,onMouseLeave:E,onFocus:C,onBlur:O,id:S,ref:t}),r.createElement(d,{value:{onChange:t=>{let n=t.target.value;"value"in e||f(n);let{onChange:r}=e;r&&n!==u&&r(t)},value:u,disabled:e.disabled,name:e.name,optionType:e.optionType}},D)))});var R=r.memo(I),T=function(e,t){var n={};for(var r in e)Object.prototype.hasOwnProperty.call(e,r)&&0>t.indexOf(r)&&(n[r]=e[r]);if(null!=e&&"function"==typeof 
Object.getOwnPropertySymbols)for(var o=0,r=Object.getOwnPropertySymbols(e);ot.indexOf(r[o])&&Object.prototype.propertyIsEnumerable.call(e,r[o])&&(n[r[o]]=e[r[o]]);return n},A=r.forwardRef((e,t)=>{let{getPrefixCls:n}=r.useContext(c.E_),{prefixCls:o}=e,i=T(e,["prefixCls"]),a=n("radio",o);return r.createElement(p,{value:"button"},r.createElement(N,Object.assign({prefixCls:a},i,{type:"radio",ref:t})))});N.Button=A,N.Group=R,N.__ANT_RADIO=!0;var _=N},47451:function(e,t,n){"use strict";var r=n(10295);t.Z=r.Z},52787:function(e,t,n){"use strict";n.d(t,{default:function(){return tt}});var r=n(2265),o=n(36760),i=n.n(o),a=n(1119),l=n(83145),c=n(11993),s=n(31686),u=n(26365),d=n(6989),f=n(41154),p=n(50506),h=n(32559),m=n(27380),g=n(79267),v=n(95814),y=n(28791),b=function(e){var t=e.className,n=e.customizeIcon,o=e.customizeIconProps,a=e.children,l=e.onMouseDown,c=e.onClick,s="function"==typeof n?n(o):n;return r.createElement("span",{className:t,onMouseDown:function(e){e.preventDefault(),null==l||l(e)},style:{userSelect:"none",WebkitUserSelect:"none"},unselectable:"on",onClick:c,"aria-hidden":!0},void 0!==s?s:r.createElement("span",{className:i()(t.split(/\s+/).map(function(e){return"".concat(e,"-icon")}))},a))},x=function(e,t,n,o,i){var a=arguments.length>5&&void 0!==arguments[5]&&arguments[5],l=arguments.length>6?arguments[6]:void 0,c=arguments.length>7?arguments[7]:void 0,s=r.useMemo(function(){return"object"===(0,f.Z)(o)?o.clearIcon:i||void 0},[o,i]);return{allowClear:r.useMemo(function(){return!a&&!!o&&(!!n.length||!!l)&&!("combobox"===c&&""===l)},[o,a,n.length,l,c]),clearIcon:r.createElement(b,{className:"".concat(e,"-clear"),onMouseDown:t,customizeIcon:s},"\xd7")}},w=r.createContext(null);function S(){var e=arguments.length>0&&void 0!==arguments[0]?arguments[0]:250,t=r.useRef(null),n=r.useRef(null);return r.useEffect(function(){return function(){window.clearTimeout(n.current)}},[]),[function(){return 
t.current},function(r){(r||null===t.current)&&(t.current=r),window.clearTimeout(n.current),n.current=window.setTimeout(function(){t.current=null},e)}]}var k=n(18242),E=n(1699),C=r.forwardRef(function(e,t){var n,o=e.prefixCls,a=e.id,l=e.inputElement,c=e.disabled,u=e.tabIndex,d=e.autoFocus,f=e.autoComplete,p=e.editable,m=e.activeDescendantId,g=e.value,v=e.maxLength,b=e.onKeyDown,x=e.onMouseDown,w=e.onChange,S=e.onPaste,k=e.onCompositionStart,E=e.onCompositionEnd,C=e.open,O=e.attrs,j=l||r.createElement("input",null),P=j,M=P.ref,N=P.props,I=N.onKeyDown,R=N.onChange,T=N.onMouseDown,A=N.onCompositionStart,_=N.onCompositionEnd,D=N.style;return(0,h.Kp)(!("maxLength"in j.props),"Passing 'maxLength' to input element directly may not work because input in BaseSelect is controlled."),j=r.cloneElement(j,(0,s.Z)((0,s.Z)((0,s.Z)({type:"search"},N),{},{id:a,ref:(0,y.sQ)(t,M),disabled:c,tabIndex:u,autoComplete:f||"off",autoFocus:d,className:i()("".concat(o,"-selection-search-input"),null===(n=j)||void 0===n||null===(n=n.props)||void 0===n?void 0:n.className),role:"combobox","aria-expanded":C||!1,"aria-haspopup":"listbox","aria-owns":"".concat(a,"_list"),"aria-autocomplete":"list","aria-controls":"".concat(a,"_list"),"aria-activedescendant":C?m:void 0},O),{},{value:p?g:"",maxLength:v,readOnly:!p,unselectable:p?null:"on",style:(0,s.Z)((0,s.Z)({},D),{},{opacity:p?null:0}),onKeyDown:function(e){b(e),I&&I(e)},onMouseDown:function(e){x(e),T&&T(e)},onChange:function(e){w(e),R&&R(e)},onCompositionStart:function(e){k(e),A&&A(e)},onCompositionEnd:function(e){E(e),_&&_(e)},onPaste:S}))});function O(e){return Array.isArray(e)?e:void 0!==e?[e]:[]}var j="undefined"!=typeof window&&window.document&&window.document.documentElement;function P(e){return["string","number"].includes((0,f.Z)(e))}function M(e){var t=void 0;return e&&(P(e.title)?t=e.title.toString():P(e.label)&&(t=e.label.toString())),t}function N(e){var t;return null!==(t=e.key)&&void 0!==t?t:e.value}var 
I=function(e){e.preventDefault(),e.stopPropagation()},R=function(e){var t,n,o=e.id,a=e.prefixCls,l=e.values,s=e.open,d=e.searchValue,f=e.autoClearSearchValue,p=e.inputRef,h=e.placeholder,m=e.disabled,g=e.mode,v=e.showSearch,y=e.autoFocus,x=e.autoComplete,w=e.activeDescendantId,S=e.tabIndex,O=e.removeIcon,P=e.maxTagCount,R=e.maxTagTextLength,T=e.maxTagPlaceholder,A=void 0===T?function(e){return"+ ".concat(e.length," ...")}:T,_=e.tagRender,D=e.onToggleOpen,Z=e.onRemove,L=e.onInputChange,z=e.onInputPaste,B=e.onInputKeyDown,F=e.onInputMouseDown,H=e.onInputCompositionStart,q=e.onInputCompositionEnd,W=r.useRef(null),K=(0,r.useState)(0),U=(0,u.Z)(K,2),V=U[0],G=U[1],X=(0,r.useState)(!1),$=(0,u.Z)(X,2),Y=$[0],Q=$[1],J="".concat(a,"-selection"),ee=s||"multiple"===g&&!1===f||"tags"===g?d:"",et="tags"===g||"multiple"===g&&!1===f||v&&(s||Y);t=function(){G(W.current.scrollWidth)},n=[ee],j?r.useLayoutEffect(t,n):r.useEffect(t,n);var en=function(e,t,n,o,a){return r.createElement("span",{title:M(e),className:i()("".concat(J,"-item"),(0,c.Z)({},"".concat(J,"-item-disabled"),n))},r.createElement("span",{className:"".concat(J,"-item-content")},t),o&&r.createElement(b,{className:"".concat(J,"-item-remove"),onMouseDown:I,onClick:a,customizeIcon:O},"\xd7"))},er=r.createElement("div",{className:"".concat(J,"-search"),style:{width:V},onFocus:function(){Q(!0)},onBlur:function(){Q(!1)}},r.createElement(C,{ref:p,open:s,prefixCls:a,id:o,inputElement:null,disabled:m,autoFocus:y,autoComplete:x,editable:et,activeDescendantId:w,value:ee,onKeyDown:B,onMouseDown:F,onChange:L,onPaste:z,onCompositionStart:H,onCompositionEnd:q,tabIndex:S,attrs:(0,k.Z)(e,!0)}),r.createElement("span",{ref:W,className:"".concat(J,"-search-mirror"),"aria-hidden":!0},ee,"\xa0")),eo=r.createElement(E.Z,{prefixCls:"".concat(J,"-overflow"),data:l,renderItem:function(e){var t,n=e.disabled,o=e.label,i=e.value,a=!m&&!n,l=o;if("number"==typeof R&&("string"==typeof o||"number"==typeof o)){var 
c=String(l);c.length>R&&(l="".concat(c.slice(0,R),"..."))}var u=function(t){t&&t.stopPropagation(),Z(e)};return"function"==typeof _?(t=l,r.createElement("span",{onMouseDown:function(e){I(e),D(!s)}},_({label:t,value:i,disabled:n,closable:a,onClose:u}))):en(e,l,n,a,u)},renderRest:function(e){var t="function"==typeof A?A(e):A;return en({title:t},t,!1)},suffix:er,itemKey:N,maxCount:P});return r.createElement(r.Fragment,null,eo,!l.length&&!ee&&r.createElement("span",{className:"".concat(J,"-placeholder")},h))},T=function(e){var t=e.inputElement,n=e.prefixCls,o=e.id,i=e.inputRef,a=e.disabled,l=e.autoFocus,c=e.autoComplete,s=e.activeDescendantId,d=e.mode,f=e.open,p=e.values,h=e.placeholder,m=e.tabIndex,g=e.showSearch,v=e.searchValue,y=e.activeValue,b=e.maxLength,x=e.onInputKeyDown,w=e.onInputMouseDown,S=e.onInputChange,E=e.onInputPaste,O=e.onInputCompositionStart,j=e.onInputCompositionEnd,P=e.title,N=r.useState(!1),I=(0,u.Z)(N,2),R=I[0],T=I[1],A="combobox"===d,_=A||g,D=p[0],Z=v||"";A&&y&&!R&&(Z=y),r.useEffect(function(){A&&T(!1)},[A,y]);var L=("combobox"===d||!!f||!!g)&&!!Z,z=void 0===P?M(D):P,B=r.useMemo(function(){return D?null:r.createElement("span",{className:"".concat(n,"-selection-placeholder"),style:L?{visibility:"hidden"}:void 0},h)},[D,L,h,n]);return r.createElement(r.Fragment,null,r.createElement("span",{className:"".concat(n,"-selection-search")},r.createElement(C,{ref:i,prefixCls:n,id:o,open:f,inputElement:t,disabled:a,autoFocus:l,autoComplete:c,editable:_,activeDescendantId:s,value:Z,onKeyDown:x,onMouseDown:w,onChange:function(e){T(!0),S(e)},onPaste:E,onCompositionStart:O,onCompositionEnd:j,tabIndex:m,attrs:(0,k.Z)(e,!0),maxLength:A?b:void 0})),!A&&D?r.createElement("span",{className:"".concat(n,"-selection-item"),title:z,style:L?{visibility:"hidden"}:void 0},D.label):null,B)},A=r.forwardRef(function(e,t){var 
n=(0,r.useRef)(null),o=(0,r.useRef)(!1),i=e.prefixCls,l=e.open,c=e.mode,s=e.showSearch,d=e.tokenWithEnter,f=e.autoClearSearchValue,p=e.onSearch,h=e.onSearchSubmit,m=e.onToggleOpen,g=e.onInputKeyDown,y=e.domRef;r.useImperativeHandle(t,function(){return{focus:function(){n.current.focus()},blur:function(){n.current.blur()}}});var b=S(0),x=(0,u.Z)(b,2),w=x[0],k=x[1],E=(0,r.useRef)(null),C=function(e){!1!==p(e,!0,o.current)&&m(!0)},O={inputRef:n,onInputKeyDown:function(e){var t=e.which;(t===v.Z.UP||t===v.Z.DOWN)&&e.preventDefault(),g&&g(e),t!==v.Z.ENTER||"tags"!==c||o.current||l||null==h||h(e.target.value),[v.Z.ESC,v.Z.SHIFT,v.Z.BACKSPACE,v.Z.TAB,v.Z.WIN_KEY,v.Z.ALT,v.Z.META,v.Z.WIN_KEY_RIGHT,v.Z.CTRL,v.Z.SEMICOLON,v.Z.EQUALS,v.Z.CAPS_LOCK,v.Z.CONTEXT_MENU,v.Z.F1,v.Z.F2,v.Z.F3,v.Z.F4,v.Z.F5,v.Z.F6,v.Z.F7,v.Z.F8,v.Z.F9,v.Z.F10,v.Z.F11,v.Z.F12].includes(t)||m(!0)},onInputMouseDown:function(){k(!0)},onInputChange:function(e){var t=e.target.value;if(d&&E.current&&/[\r\n]/.test(E.current)){var n=E.current.replace(/[\r\n]+$/,"").replace(/\r\n/g," ").replace(/[\r\n]/g," ");t=t.replace(n,E.current)}E.current=null,C(t)},onInputPaste:function(e){var t=e.clipboardData,n=null==t?void 0:t.getData("text");E.current=n||""},onInputCompositionStart:function(){o.current=!0},onInputCompositionEnd:function(e){o.current=!1,"combobox"!==c&&C(e.target.value)}},j="multiple"===c||"tags"===c?r.createElement(R,(0,a.Z)({},e,O)):r.createElement(T,(0,a.Z)({},e,O));return r.createElement("div",{ref:y,className:"".concat(i,"-selector"),onClick:function(e){e.target!==n.current&&(void 0!==document.body.style.msTouchAction?setTimeout(function(){n.current.focus()}):n.current.focus())},onMouseDown:function(e){var 
t=w();e.target===n.current||t||"combobox"===c||e.preventDefault(),("combobox"===c||s&&t)&&l||(l&&!1!==f&&p("",!0,!1),m())}},j)}),_=n(97821),D=["prefixCls","disabled","visible","children","popupElement","animation","transitionName","dropdownStyle","dropdownClassName","direction","placement","builtinPlacements","dropdownMatchSelectWidth","dropdownRender","dropdownAlign","getPopupContainer","empty","getTriggerDOMNode","onPopupVisibleChange","onPopupMouseEnter"],Z=function(e){var t=!0===e?0:1;return{bottomLeft:{points:["tl","bl"],offset:[0,4],overflow:{adjustX:t,adjustY:1},htmlRegion:"scroll"},bottomRight:{points:["tr","br"],offset:[0,4],overflow:{adjustX:t,adjustY:1},htmlRegion:"scroll"},topLeft:{points:["bl","tl"],offset:[0,-4],overflow:{adjustX:t,adjustY:1},htmlRegion:"scroll"},topRight:{points:["br","tr"],offset:[0,-4],overflow:{adjustX:t,adjustY:1},htmlRegion:"scroll"}}},L=r.forwardRef(function(e,t){var n=e.prefixCls,o=(e.disabled,e.visible),l=e.children,u=e.popupElement,f=e.animation,p=e.transitionName,h=e.dropdownStyle,m=e.dropdownClassName,g=e.direction,v=e.placement,y=e.builtinPlacements,b=e.dropdownMatchSelectWidth,x=e.dropdownRender,w=e.dropdownAlign,S=e.getPopupContainer,k=e.empty,E=e.getTriggerDOMNode,C=e.onPopupVisibleChange,O=e.onPopupMouseEnter,j=(0,d.Z)(e,D),P="".concat(n,"-dropdown"),M=u;x&&(M=x(u));var N=r.useMemo(function(){return y||Z(b)},[y,b]),I=f?"".concat(P,"-").concat(f):p,R="number"==typeof b,T=r.useMemo(function(){return R?null:!1===b?"minWidth":"width"},[b,R]),A=h;R&&(A=(0,s.Z)((0,s.Z)({},A),{},{width:b}));var L=r.useRef(null);return r.useImperativeHandle(t,function(){return{getPopupElement:function(){return L.current}}}),r.createElement(_.Z,(0,a.Z)({},j,{showAction:C?["click"]:[],hideAction:C?["click"]:[],popupPlacement:v||("rtl"===(void 
0===g?"ltr":g)?"bottomRight":"bottomLeft"),builtinPlacements:N,prefixCls:P,popupTransitionName:I,popup:r.createElement("div",{ref:L,onMouseEnter:O},M),stretch:T,popupAlign:w,popupVisible:o,getPopupContainer:S,popupClassName:i()(m,(0,c.Z)({},"".concat(P,"-empty"),k)),popupStyle:A,getTriggerDOMNode:E,onPopupVisibleChange:C}),l)}),z=n(87099);function B(e,t){var n,r=e.key;return("value"in e&&(n=e.value),null!=r)?r:void 0!==n?n:"rc-index-key-".concat(t)}function F(e,t){var n=e||{},r=n.label,o=n.value,i=n.options,a=n.groupLabel,l=r||(t?"children":"label");return{label:l,value:o||"value",options:i||"options",groupLabel:a||l}}function H(e){var t=(0,s.Z)({},e);return"props"in t||Object.defineProperty(t,"props",{get:function(){return(0,h.ZP)(!1,"Return type is option instead of Option instance. Please read value directly instead of reading from `props`."),t}}),t}var q=function(e,t,n){if(!t||!t.length)return null;var r=!1,o=function e(t,n){var o=(0,z.Z)(n),i=o[0],a=o.slice(1);if(!i)return[t];var c=t.split(i);return r=r||c.length>1,c.reduce(function(t,n){return[].concat((0,l.Z)(t),(0,l.Z)(e(n,a)))},[]).filter(Boolean)}(e,t);return r?void 
0!==n?o.slice(0,n):o:null},W=r.createContext(null),K=["id","prefixCls","className","showSearch","tagRender","direction","omitDomProps","displayValues","onDisplayValuesChange","emptyOptions","notFoundContent","onClear","mode","disabled","loading","getInputElement","getRawInputElement","open","defaultOpen","onDropdownVisibleChange","activeValue","onActiveValueChange","activeDescendantId","searchValue","autoClearSearchValue","onSearch","onSearchSplit","tokenSeparators","allowClear","suffixIcon","clearIcon","OptionList","animation","transitionName","dropdownStyle","dropdownClassName","dropdownMatchSelectWidth","dropdownRender","dropdownAlign","placement","builtinPlacements","getPopupContainer","showAction","onFocus","onBlur","onKeyUp","onKeyDown","onMouseDown"],U=["value","onChange","removeIcon","placeholder","autoFocus","maxTagCount","maxTagTextLength","maxTagPlaceholder","choiceTransitionName","onInputKeyDown","onPopupScroll","tabIndex"],V=function(e){return"tags"===e||"multiple"===e},G=r.forwardRef(function(e,t){var n,o,h,k,E,C,O,j,P=e.id,M=e.prefixCls,N=e.className,I=e.showSearch,R=e.tagRender,T=e.direction,_=e.omitDomProps,D=e.displayValues,Z=e.onDisplayValuesChange,z=e.emptyOptions,B=e.notFoundContent,F=void 0===B?"Not Found":B,H=e.onClear,G=e.mode,X=e.disabled,$=e.loading,Y=e.getInputElement,Q=e.getRawInputElement,J=e.open,ee=e.defaultOpen,et=e.onDropdownVisibleChange,en=e.activeValue,er=e.onActiveValueChange,eo=e.activeDescendantId,ei=e.searchValue,ea=e.autoClearSearchValue,el=e.onSearch,ec=e.onSearchSplit,es=e.tokenSeparators,eu=e.allowClear,ed=e.suffixIcon,ef=e.clearIcon,ep=e.OptionList,eh=e.animation,em=e.transitionName,eg=e.dropdownStyle,ev=e.dropdownClassName,ey=e.dropdownMatchSelectWidth,eb=e.dropdownRender,ex=e.dropdownAlign,ew=e.placement,eS=e.builtinPlacements,ek=e.getPopupContainer,eE=e.showAction,eC=void 0===eE?[]:eE,eO=e.onFocus,ej=e.onBlur,eP=e.onKeyUp,eM=e.onKeyDown,eN=e.onMouseDown,eI=(0,d.Z)(e,K),eR=V(G),eT=(void 
0!==I?I:eR)||"combobox"===G,eA=(0,s.Z)({},eI);U.forEach(function(e){delete eA[e]}),null==_||_.forEach(function(e){delete eA[e]});var e_=r.useState(!1),eD=(0,u.Z)(e_,2),eZ=eD[0],eL=eD[1];r.useEffect(function(){eL((0,g.Z)())},[]);var ez=r.useRef(null),eB=r.useRef(null),eF=r.useRef(null),eH=r.useRef(null),eq=r.useRef(null),eW=r.useRef(!1),eK=function(){var e=arguments.length>0&&void 0!==arguments[0]?arguments[0]:10,t=r.useState(!1),n=(0,u.Z)(t,2),o=n[0],i=n[1],a=r.useRef(null),l=function(){window.clearTimeout(a.current)};return r.useEffect(function(){return l},[]),[o,function(t,n){l(),a.current=window.setTimeout(function(){i(t),n&&n()},e)},l]}(),eU=(0,u.Z)(eK,3),eV=eU[0],eG=eU[1],eX=eU[2];r.useImperativeHandle(t,function(){var e,t;return{focus:null===(e=eH.current)||void 0===e?void 0:e.focus,blur:null===(t=eH.current)||void 0===t?void 0:t.blur,scrollTo:function(e){var t;return null===(t=eq.current)||void 0===t?void 0:t.scrollTo(e)}}});var e$=r.useMemo(function(){if("combobox"!==G)return ei;var e,t=null===(e=D[0])||void 0===e?void 0:e.value;return"string"==typeof t||"number"==typeof t?String(t):""},[ei,G,D]),eY="combobox"===G&&"function"==typeof Y&&Y()||null,eQ="function"==typeof Q&&Q(),eJ=(0,y.x1)(eB,null==eQ||null===(k=eQ.props)||void 0===k?void 0:k.ref),e0=r.useState(!1),e1=(0,u.Z)(e0,2),e2=e1[0],e6=e1[1];(0,m.Z)(function(){e6(!0)},[]);var e3=(0,p.Z)(!1,{defaultValue:ee,value:J}),e4=(0,u.Z)(e3,2),e5=e4[0],e8=e4[1],e7=!!e2&&e5,e9=!F&&z;(X||e9&&e7&&"combobox"===G)&&(e7=!1);var te=!e9&&e7,tt=r.useCallback(function(e){var t=void 0!==e?e:!e7;X||(e8(t),e7!==t&&(null==et||et(t)))},[X,e7,e8,et]),tn=r.useMemo(function(){return(es||[]).some(function(e){return["\n","\r\n"].includes(e)})},[es]),tr=r.useContext(W)||{},to=tr.maxCount,ti=tr.rawValues,ta=function(e,t,n){if(!((null==ti?void 0:ti.size)>=to)){var r=!0,o=e;null==er||er(null);var 
i=q(e,es,to&&to-ti.size),a=n?null:i;return"combobox"!==G&&a&&(o="",null==ec||ec(a),tt(!1),r=!1),el&&e$!==o&&el(o,{source:t?"typing":"effect"}),r}};r.useEffect(function(){e7||eR||"combobox"===G||ta("",!1,!1)},[e7]),r.useEffect(function(){e5&&X&&e8(!1),X&&!eW.current&&eG(!1)},[X]);var tl=S(),tc=(0,u.Z)(tl,2),ts=tc[0],tu=tc[1],td=r.useRef(!1),tf=[];r.useEffect(function(){return function(){tf.forEach(function(e){return clearTimeout(e)}),tf.splice(0,tf.length)}},[]);var tp=r.useState({}),th=(0,u.Z)(tp,2)[1];eQ&&(C=function(e){tt(e)}),n=function(){var e;return[ez.current,null===(e=eF.current)||void 0===e?void 0:e.getPopupElement()]},o=!!eQ,(h=r.useRef(null)).current={open:te,triggerOpen:tt,customizedTrigger:o},r.useEffect(function(){function e(e){if(null===(t=h.current)||void 0===t||!t.customizedTrigger){var t,r=e.target;r.shadowRoot&&e.composed&&(r=e.composedPath()[0]||r),h.current.open&&n().filter(function(e){return e}).every(function(e){return!e.contains(r)&&e!==r})&&h.current.triggerOpen(!1)}}return window.addEventListener("mousedown",e),function(){return window.removeEventListener("mousedown",e)}},[]);var tm=r.useMemo(function(){return(0,s.Z)((0,s.Z)({},e),{},{notFoundContent:F,open:e7,triggerOpen:te,id:P,showSearch:eT,multiple:eR,toggleOpen:tt})},[e,F,te,e7,P,eT,eR,tt]),tg=!!ed||$;tg&&(O=r.createElement(b,{className:i()("".concat(M,"-arrow"),(0,c.Z)({},"".concat(M,"-arrow-loading"),$)),customizeIcon:ed,customizeIconProps:{loading:$,searchValue:e$,open:e7,focused:eV,showSearch:eT}}));var tv=x(M,function(){var e;null==H||H(),null===(e=eH.current)||void 
0===e||e.focus(),Z([],{type:"clear",values:D}),ta("",!1,!1)},D,eu,ef,X,e$,G),ty=tv.allowClear,tb=tv.clearIcon,tx=r.createElement(ep,{ref:eq}),tw=i()(M,N,(E={},(0,c.Z)(E,"".concat(M,"-focused"),eV),(0,c.Z)(E,"".concat(M,"-multiple"),eR),(0,c.Z)(E,"".concat(M,"-single"),!eR),(0,c.Z)(E,"".concat(M,"-allow-clear"),eu),(0,c.Z)(E,"".concat(M,"-show-arrow"),tg),(0,c.Z)(E,"".concat(M,"-disabled"),X),(0,c.Z)(E,"".concat(M,"-loading"),$),(0,c.Z)(E,"".concat(M,"-open"),e7),(0,c.Z)(E,"".concat(M,"-customize-input"),eY),(0,c.Z)(E,"".concat(M,"-show-search"),eT),E)),tS=r.createElement(L,{ref:eF,disabled:X,prefixCls:M,visible:te,popupElement:tx,animation:eh,transitionName:em,dropdownStyle:eg,dropdownClassName:ev,direction:T,dropdownMatchSelectWidth:ey,dropdownRender:eb,dropdownAlign:ex,placement:ew,builtinPlacements:eS,getPopupContainer:ek,empty:z,getTriggerDOMNode:function(){return eB.current},onPopupVisibleChange:C,onPopupMouseEnter:function(){th({})}},eQ?r.cloneElement(eQ,{ref:eJ}):r.createElement(A,(0,a.Z)({},e,{domRef:eB,prefixCls:M,inputElement:eY,ref:eH,id:P,showSearch:eT,autoClearSearchValue:ea,mode:G,activeDescendantId:eo,tagRender:R,values:D,open:e7,onToggleOpen:tt,activeValue:en,searchValue:e$,onSearch:ta,onSearchSubmit:function(e){e&&e.trim()&&el(e,{source:"submit"})},onRemove:function(e){Z(D.filter(function(t){return t!==e}),{type:"remove",values:[e]})},tokenWithEnter:tn})));return j=eQ?tS:r.createElement("div",(0,a.Z)({className:tw},eA,{ref:ez,onMouseDown:function(e){var t,n=e.target,r=null===(t=eF.current)||void 0===t?void 0:t.getPopupElement();if(r&&r.contains(n)){var o=setTimeout(function(){var e,t=tf.indexOf(o);-1!==t&&tf.splice(t,1),eX(),eZ||r.contains(document.activeElement)||null===(e=eH.current)||void 0===e||e.focus()});tf.push(o)}for(var i=arguments.length,a=Array(i>1?i-1:0),l=1;l=0;a-=1){var c=o[a];if(!c.disabled){o.splice(a,1),i=c;break}}i&&Z(o,{type:"remove",values:[i]})}for(var 
s=arguments.length,u=Array(s>1?s-1:0),d=1;d1?n-1:0),o=1;o=E},[p,E,null==N?void 0:N.size]),F=function(e){e.preventDefault()},H=function(e){var t;null===(t=z.current)||void 0===t||t.scrollTo("number"==typeof e?{index:e}:e)},q=function(e){for(var t=arguments.length>1&&void 0!==arguments[1]?arguments[1]:1,n=L.length,r=0;r1&&void 0!==arguments[1]&&arguments[1];G(e);var n={source:t?"keyboard":"mouse"},r=L[e];if(!r){O(null,-1,n);return}O(r.value,e,n)};(0,r.useEffect)(function(){X(!1!==j?q(0):-1)},[L.length,m]);var $=r.useCallback(function(e){return N.has(e)&&"combobox"!==h},[h,(0,l.Z)(N).toString(),N.size]);(0,r.useEffect)(function(){var e,t=setTimeout(function(){if(!p&&f&&1===N.size){var e=Array.from(N)[0],t=L.findIndex(function(t){return t.data.value===e});-1!==t&&(X(t),H(t))}});return f&&(null===(e=z.current)||void 0===e||e.scrollTo(void 0)),function(){return clearTimeout(t)}},[f,m]);var en=function(e){void 0!==e&&P(e,{selected:!N.has(e)}),p||g(!1)};if(r.useImperativeHandle(t,function(){return{onKeyDown:function(e){var t=e.which,n=e.ctrlKey;switch(t){case v.Z.N:case v.Z.P:case v.Z.UP:case v.Z.DOWN:var r=0;if(t===v.Z.UP?r=-1:t===v.Z.DOWN?r=1:/(mac\sos|macintosh)/i.test(navigator.appVersion)&&n&&(t===v.Z.N?r=1:t===v.Z.P&&(r=-1)),0!==r){var o=q(V+r,r);H(o),X(o,!0)}break;case v.Z.ENTER:var i,a=L[V];!a||null!=a&&null!==(i=a.data)&&void 0!==i&&i.disabled||B?en(void 0):en(a.value),f&&e.preventDefault();break;case v.Z.ESC:g(!1),f&&e.stopPropagation()}},onKeyUp:function(){},scrollTo:function(e){H(e)}}}),0===L.length)return r.createElement("div",{role:"listbox",id:"".concat(s,"_list"),className:"".concat(Z,"-empty"),onMouseDown:F},y);var er=Object.keys(I).map(function(e){return I[e]}),eo=function(e){return e.label};function ei(e,t){return{role:e.group?"presentation":"option",id:"".concat(s,"_list_").concat(t)}}var ea=function(e){var t=L[e];if(!t)return null;var n=t.data||{},o=n.value,i=t.group,l=(0,k.Z)(n,!0),c=eo(t);return 
t?r.createElement("div",(0,a.Z)({"aria-label":"string"!=typeof c||i?null:c},l,{key:e},ei(t,e),{"aria-selected":$(o)}),o):null},el={role:"listbox",id:"".concat(s,"_list")};return r.createElement(r.Fragment,null,R&&r.createElement("div",(0,a.Z)({},el,{style:{height:0,width:0,overflow:"hidden"}}),ea(V-1),ea(V),ea(V+1)),r.createElement(J.Z,{itemKey:"key",ref:z,data:L,height:A,itemHeight:_,fullHeight:!1,onMouseDown:F,onScroll:x,virtual:R,direction:T,innerProps:R?null:el},function(e,t){var n=e.group,o=e.groupOption,l=e.data,s=e.label,u=e.value,f=l.key;if(n){var p,h,m=null!==(h=l.title)&&void 0!==h?h:et(s)?s.toString():void 0;return r.createElement("div",{className:i()(Z,"".concat(Z,"-group")),title:m},void 0!==s?s:f)}var g=l.disabled,v=l.title,y=(l.children,l.style),x=l.className,w=(0,d.Z)(l,ee),S=(0,Q.Z)(w,er),E=$(u),C=g||!E&&B,O="".concat(Z,"-option"),j=i()(Z,O,x,(p={},(0,c.Z)(p,"".concat(O,"-grouped"),o),(0,c.Z)(p,"".concat(O,"-active"),V===t&&!C),(0,c.Z)(p,"".concat(O,"-disabled"),C),(0,c.Z)(p,"".concat(O,"-selected"),E),p)),P=eo(e),N=!M||"function"==typeof M||E,I="number"==typeof P?P:P||u,T=et(I)?I.toString():void 0;return void 0!==v&&(T=v),r.createElement("div",(0,a.Z)({},(0,k.Z)(S),R?{}:ei(e,t),{"aria-selected":E,className:j,title:T,onMouseMove:function(){V===t||C||X(t)},onClick:function(){C||en(u)},style:y}),r.createElement("div",{className:"".concat(O,"-content")},"function"==typeof D?D(e,{index:t}):I),r.isValidElement(M)||E,N&&r.createElement(b,{className:"".concat(Z,"-option-state"),customizeIcon:M,customizeIconProps:{value:u,disabled:C,isSelected:E}},E?"✓":null))}))}),er=function(e,t){var n=r.useRef({values:new Map,options:new Map});return[r.useMemo(function(){var r=n.current,o=r.values,i=r.options,a=e.map(function(e){if(void 0===e.label){var t;return(0,s.Z)((0,s.Z)({},e),{},{label:null===(t=o.get(e.value))||void 0===t?void 0:t.label})}return e}),l=new Map,c=new Map;return 
a.forEach(function(e){l.set(e.value,e),c.set(e.value,t.get(e.value)||i.get(e.value))}),n.current.values=l,n.current.options=c,a},[e,t]),r.useCallback(function(e){return t.get(e)||n.current.options.get(e)},[t])]};function eo(e,t){return O(e).join("").toUpperCase().includes(t)}var ei=n(94981),ea=0,el=(0,ei.Z)(),ec=n(45287),es=["children","value"],eu=["children"];function ed(e){var t=r.useRef();return t.current=e,r.useCallback(function(){return t.current.apply(t,arguments)},[])}var ef=["id","mode","prefixCls","backfill","fieldNames","inputValue","searchValue","onSearch","autoClearSearchValue","onSelect","onDeselect","dropdownMatchSelectWidth","filterOption","filterSort","optionFilterProp","optionLabelProp","options","optionRender","children","defaultActiveFirstOption","menuItemSelectedIcon","virtual","direction","listHeight","listItemHeight","value","defaultValue","labelInValue","onChange","maxCount"],ep=["inputValue"],eh=r.forwardRef(function(e,t){var n,o,i,h,m,g=e.id,v=e.mode,y=e.prefixCls,b=e.backfill,x=e.fieldNames,w=e.inputValue,S=e.searchValue,k=e.onSearch,E=e.autoClearSearchValue,C=void 0===E||E,j=e.onSelect,P=e.onDeselect,M=e.dropdownMatchSelectWidth,N=void 0===M||M,I=e.filterOption,R=e.filterSort,T=e.optionFilterProp,A=e.optionLabelProp,_=e.options,D=e.optionRender,Z=e.children,L=e.defaultActiveFirstOption,z=e.menuItemSelectedIcon,q=e.virtual,K=e.direction,U=e.listHeight,X=void 0===U?200:U,$=e.listItemHeight,Y=void 0===$?20:$,Q=e.value,J=e.defaultValue,ee=e.labelInValue,et=e.onChange,ei=e.maxCount,eh=(0,d.Z)(e,ef),em=(n=r.useState(),i=(o=(0,u.Z)(n,2))[0],h=o[1],r.useEffect(function(){var e;h("rc_select_".concat((el?(e=ea,ea+=1):e="TEST_OR_SSR",e)))},[]),g||i),eg=V(v),ev=!!(!_&&Z),ey=r.useMemo(function(){return(void 0!==I||"combobox"!==v)&&I},[I,v]),eb=r.useMemo(function(){return F(x,ev)},[JSON.stringify(x),ev]),ex=(0,p.Z)("",{value:void 0!==S?S:w,postState:function(e){return e||""}}),ew=(0,u.Z)(ex,2),eS=ew[0],ek=ew[1],eE=r.useMemo(function(){var 
e=_;_||(e=function e(t){var n=arguments.length>1&&void 0!==arguments[1]&&arguments[1];return(0,ec.Z)(t).map(function(t,o){if(!r.isValidElement(t)||!t.type)return null;var i,a,l,c,u,f=t.type.isSelectOptGroup,p=t.key,h=t.props,m=h.children,g=(0,d.Z)(h,eu);return n||!f?(i=t.key,l=(a=t.props).children,c=a.value,u=(0,d.Z)(a,es),(0,s.Z)({key:i,value:void 0!==c?c:i,children:l},u)):(0,s.Z)((0,s.Z)({key:"__RC_SELECT_GRP__".concat(null===p?o:p,"__"),label:p},g),{},{options:e(m)})}).filter(function(e){return e})}(Z));var t=new Map,n=new Map,o=function(e,t,n){n&&"string"==typeof n&&e.set(t[n],t)};return function e(r){for(var i=arguments.length>1&&void 0!==arguments[1]&&arguments[1],a=0;a1&&void 0!==arguments[1]?arguments[1]:{},n=t.fieldNames,r=t.childrenAsData,o=[],i=F(n,!1),a=i.label,l=i.value,c=i.options,s=i.groupLabel;return!function e(t,n){Array.isArray(t)&&t.forEach(function(t){if(!n&&c in t){var i=t[s];void 0===i&&r&&(i=t.label),o.push({key:B(t,o.length),group:!0,data:t,label:i}),e(t[c],!0)}else{var u=t[l];o.push({key:B(t,o.length),groupOption:n,data:t,label:t[a],value:u})}})}(e,!1),o}(eH,{fieldNames:eb,childrenAsData:ev})},[eH,eb,ev]),eW=function(e){var t=eP(e);if(eR(t),et&&(t.length!==e_.length||t.some(function(e,t){var n;return(null===(n=e_[t])||void 0===n?void 0:n.value)!==(null==e?void 0:e.value)}))){var n=ee?t:t.map(function(e){return e.value}),r=t.map(function(e){return H(eD(e.value))});et(eg?n:n[0],eg?r:r[0])}},eK=r.useState(null),eU=(0,u.Z)(eK,2),eV=eU[0],eG=eU[1],eX=r.useState(0),e$=(0,u.Z)(eX,2),eY=e$[0],eQ=e$[1],eJ=void 0!==L?L:"combobox"!==v,e0=r.useCallback(function(e,t){var n=arguments.length>2&&void 0!==arguments[2]?arguments[2]:{},r=n.source;eQ(t),b&&"combobox"===v&&null!==e&&"keyboard"===(void 0===r?"keyboard":r)&&eG(String(e))},[b,v]),e1=function(e,t,n){var r=function(){var t,n=eD(e);return[ee?{label:null==n?void 0:n[eb.label],value:e,key:null!==(t=null==n?void 0:n.key)&&void 0!==t?t:e}:e,H(n)]};if(t&&j){var o=r(),i=(0,u.Z)(o,2);j(i[0],i[1])}else 
if(!t&&P&&"clear"!==n){var a=r(),l=(0,u.Z)(a,2);P(l[0],l[1])}},e2=ed(function(e,t){var n=!eg||t.selected;eW(n?eg?[].concat((0,l.Z)(e_),[e]):[e]:e_.filter(function(t){return t.value!==e})),e1(e,n),"combobox"===v?eG(""):(!V||C)&&(ek(""),eG(""))}),e6=r.useMemo(function(){var e=!1!==q&&!1!==N;return(0,s.Z)((0,s.Z)({},eE),{},{flattenOptions:eq,onActiveValue:e0,defaultActiveFirstOption:eJ,onSelect:e2,menuItemSelectedIcon:z,rawValues:eL,fieldNames:eb,virtual:e,direction:K,listHeight:X,listItemHeight:Y,childrenAsData:ev,maxCount:ei,optionRender:D})},[ei,eE,eq,e0,eJ,e2,z,eL,eb,q,N,K,X,Y,ev,D]);return r.createElement(W.Provider,{value:e6},r.createElement(G,(0,a.Z)({},eh,{id:em,prefixCls:void 0===y?"rc-select":y,ref:t,omitDomProps:ep,mode:v,displayValues:eZ,onDisplayValuesChange:function(e,t){eW(e);var n=t.type,r=t.values;("remove"===n||"clear"===n)&&r.forEach(function(e){e1(e.value,!1,n)})},direction:K,searchValue:eS,onSearch:function(e,t){if(ek(e),eG(null),"submit"===t.source){var n=(e||"").trim();n&&(eW(Array.from(new Set([].concat((0,l.Z)(eL),[n])))),e1(n,!0),ek(""));return}"blur"!==t.source&&("combobox"===v&&eW(e),null==k||k(e))},autoClearSearchValue:C,onSearchSplit:function(e){var t=e;"tags"!==v&&(t=e.map(function(e){var t=eO.get(e);return null==t?void 0:t.value}).filter(function(e){return void 0!==e}));var n=Array.from(new Set([].concat((0,l.Z)(eL),(0,l.Z)(t))));eW(n),n.forEach(function(e){e1(e,!0)})},dropdownMatchSelectWidth:N,OptionList:en,emptyOptions:!eq.length,activeValue:eV,activeDescendantId:"".concat(em,"_list_").concat(eY)})))});eh.Option=$,eh.OptGroup=X;var em=n(62236),eg=n(68710),ev=n(93942),ey=n(12757),eb=n(71744),ex=n(91086),ew=n(86586),eS=n(64024),ek=n(33759),eE=n(39109),eC=n(56250),eO=n(65658),ej=n(29961);let eP=e=>{let 
t={overflow:{adjustX:!0,adjustY:!0,shiftY:!0},htmlRegion:"scroll"===e?"scroll":"visible",dynamicInset:!0};return{bottomLeft:Object.assign(Object.assign({},t),{points:["tl","bl"],offset:[0,4]}),bottomRight:Object.assign(Object.assign({},t),{points:["tr","br"],offset:[0,4]}),topLeft:Object.assign(Object.assign({},t),{points:["bl","tl"],offset:[0,-4]}),topRight:Object.assign(Object.assign({},t),{points:["br","tr"],offset:[0,-4]})}};var eM=n(12918),eN=n(17691),eI=n(80669),eR=n(3104),eT=n(18544),eA=n(29382);let e_=e=>{let{optionHeight:t,optionFontSize:n,optionLineHeight:r,optionPadding:o}=e;return{position:"relative",display:"block",minHeight:t,padding:o,color:e.colorText,fontWeight:"normal",fontSize:n,lineHeight:r,boxSizing:"border-box"}};var eD=e=>{let{antCls:t,componentCls:n}=e,r="".concat(n,"-item"),o="&".concat(t,"-slide-up-enter").concat(t,"-slide-up-enter-active"),i="&".concat(t,"-slide-up-appear").concat(t,"-slide-up-appear-active"),a="&".concat(t,"-slide-up-leave").concat(t,"-slide-up-leave-active"),l="".concat(n,"-dropdown-placement-");return[{["".concat(n,"-dropdown")]:Object.assign(Object.assign({},(0,eM.Wf)(e)),{position:"absolute",top:-9999,zIndex:e.zIndexPopup,boxSizing:"border-box",padding:e.paddingXXS,overflow:"hidden",fontSize:e.fontSize,fontVariant:"initial",backgroundColor:e.colorBgElevated,borderRadius:e.borderRadiusLG,outline:"none",boxShadow:e.boxShadowSecondary,["\n ".concat(o).concat(l,"bottomLeft,\n ").concat(i).concat(l,"bottomLeft\n ")]:{animationName:eT.fJ},["\n ".concat(o).concat(l,"topLeft,\n ").concat(i).concat(l,"topLeft,\n ").concat(o).concat(l,"topRight,\n ").concat(i).concat(l,"topRight\n ")]:{animationName:eT.Qt},["".concat(a).concat(l,"bottomLeft")]:{animationName:eT.Uw},["\n ".concat(a).concat(l,"topLeft,\n ").concat(a).concat(l,"topRight\n ")]:{animationName:eT.ly},"&-hidden":{display:"none"},["".concat(r)]:Object.assign(Object.assign({},e_(e)),{cursor:"pointer",transition:"background ".concat(e.motionDurationSlow," 
ease"),borderRadius:e.borderRadiusSM,"&-group":{color:e.colorTextDescription,fontSize:e.fontSizeSM,cursor:"default"},"&-option":{display:"flex","&-content":Object.assign({flex:"auto"},eM.vS),"&-state":{flex:"none",display:"flex",alignItems:"center"},["&-active:not(".concat(r,"-option-disabled)")]:{backgroundColor:e.optionActiveBg},["&-selected:not(".concat(r,"-option-disabled)")]:{color:e.optionSelectedColor,fontWeight:e.optionSelectedFontWeight,backgroundColor:e.optionSelectedBg,["".concat(r,"-option-state")]:{color:e.colorPrimary},["&:has(+ ".concat(r,"-option-selected:not(").concat(r,"-option-disabled))")]:{borderEndStartRadius:0,borderEndEndRadius:0,["& + ".concat(r,"-option-selected:not(").concat(r,"-option-disabled)")]:{borderStartStartRadius:0,borderStartEndRadius:0}}},"&-disabled":{["&".concat(r,"-option-selected")]:{backgroundColor:e.colorBgContainerDisabled},color:e.colorTextDisabled,cursor:"not-allowed"},"&-grouped":{paddingInlineStart:e.calc(e.controlPaddingHorizontal).mul(2).equal()}}}),"&-rtl":{direction:"rtl"}})},(0,eT.oN)(e,"slide-up"),(0,eT.oN)(e,"slide-down"),(0,eA.Fm)(e,"move-up"),(0,eA.Fm)(e,"move-down")]},eZ=n(352);let eL=e=>{let{multipleSelectItemHeight:t,selectHeight:n,lineWidth:r}=e;return e.calc(n).sub(t).div(2).sub(r).equal()};function 
ez(e,t){let{componentCls:n,iconCls:r}=e,o="".concat(n,"-selection-overflow"),i=e.multipleSelectItemHeight,a=eL(e),l=t?"".concat(n,"-").concat(t):"";return{["".concat(n,"-multiple").concat(l)]:{fontSize:e.fontSize,[o]:{position:"relative",display:"flex",flex:"auto",flexWrap:"wrap",maxWidth:"100%","&-item":{flex:"none",alignSelf:"center",maxWidth:"100%",display:"inline-flex"}},["".concat(n,"-selector")]:{display:"flex",flexWrap:"wrap",alignItems:"center",height:"100%",paddingInline:e.calc(2).mul(2).equal(),paddingBlock:e.calc(a).sub(2).equal(),borderRadius:e.borderRadius,["".concat(n,"-show-search&")]:{cursor:"text"},["".concat(n,"-disabled&")]:{background:e.multipleSelectorBgDisabled,cursor:"not-allowed"},"&:after":{display:"inline-block",width:0,margin:"".concat((0,eZ.bf)(2)," 0"),lineHeight:(0,eZ.bf)(i),visibility:"hidden",content:'"\\a0"'}},["\n &".concat(n,"-show-arrow ").concat(n,"-selector,\n &").concat(n,"-allow-clear ").concat(n,"-selector\n ")]:{paddingInlineEnd:e.calc(e.fontSizeIcon).add(e.controlPaddingHorizontal).equal()},["".concat(n,"-selection-item")]:{display:"flex",alignSelf:"center",flex:"none",boxSizing:"border-box",maxWidth:"100%",height:i,marginTop:2,marginBottom:2,lineHeight:(0,eZ.bf)(e.calc(i).sub(e.calc(e.lineWidth).mul(2)).equal()),borderRadius:e.borderRadiusSM,cursor:"default",transition:"font-size ".concat(e.motionDurationSlow,", line-height ").concat(e.motionDurationSlow,", height 
").concat(e.motionDurationSlow),marginInlineEnd:e.calc(2).mul(2).equal(),paddingInlineStart:e.paddingXS,paddingInlineEnd:e.calc(e.paddingXS).div(2).equal(),["".concat(n,"-disabled&")]:{color:e.multipleItemColorDisabled,borderColor:e.multipleItemBorderColorDisabled,cursor:"not-allowed"},"&-content":{display:"inline-block",marginInlineEnd:e.calc(e.paddingXS).div(2).equal(),overflow:"hidden",whiteSpace:"pre",textOverflow:"ellipsis"},"&-remove":Object.assign(Object.assign({},(0,eM.Ro)()),{display:"inline-flex",alignItems:"center",color:e.colorIcon,fontWeight:"bold",fontSize:10,lineHeight:"inherit",cursor:"pointer",["> ".concat(r)]:{verticalAlign:"-0.2em"},"&:hover":{color:e.colorIconHover}})},["".concat(o,"-item + ").concat(o,"-item")]:{["".concat(n,"-selection-search")]:{marginInlineStart:0}},["".concat(o,"-item-suffix")]:{height:"100%"},["".concat(n,"-selection-search")]:{display:"inline-flex",position:"relative",maxWidth:"100%",marginInlineStart:e.calc(e.inputPaddingHorizontalBase).sub(a).equal(),"\n &-input,\n &-mirror\n ":{height:i,fontFamily:e.fontFamily,lineHeight:(0,eZ.bf)(i),transition:"all ".concat(e.motionDurationSlow)},"&-input":{width:"100%",minWidth:4.1},"&-mirror":{position:"absolute",top:0,insetInlineStart:0,insetInlineEnd:"auto",zIndex:999,whiteSpace:"pre",visibility:"hidden"}},["".concat(n,"-selection-placeholder")]:{position:"absolute",top:"50%",insetInlineStart:e.inputPaddingHorizontalBase,insetInlineEnd:e.inputPaddingHorizontalBase,transform:"translateY(-50%)",transition:"all ".concat(e.motionDurationSlow)}}}}var 
eB=e=>{let{componentCls:t}=e,n=(0,eR.TS)(e,{selectHeight:e.controlHeightSM,multipleSelectItemHeight:e.controlHeightXS,borderRadius:e.borderRadiusSM,borderRadiusSM:e.borderRadiusXS}),r=(0,eR.TS)(e,{fontSize:e.fontSizeLG,selectHeight:e.controlHeightLG,multipleSelectItemHeight:e.multipleItemHeightLG,borderRadius:e.borderRadiusLG,borderRadiusSM:e.borderRadius});return[ez(e),ez(n,"sm"),{["".concat(t,"-multiple").concat(t,"-sm")]:{["".concat(t,"-selection-placeholder")]:{insetInline:e.calc(e.controlPaddingHorizontalSM).sub(e.lineWidth).equal()},["".concat(t,"-selection-search")]:{marginInlineStart:2}}},ez(r,"lg")]};function eF(e,t){let{componentCls:n,inputPaddingHorizontalBase:r,borderRadius:o}=e,i=e.calc(e.controlHeight).sub(e.calc(e.lineWidth).mul(2)).equal(),a=t?"".concat(n,"-").concat(t):"";return{["".concat(n,"-single").concat(a)]:{fontSize:e.fontSize,height:e.controlHeight,["".concat(n,"-selector")]:Object.assign(Object.assign({},(0,eM.Wf)(e,!0)),{display:"flex",borderRadius:o,["".concat(n,"-selection-search")]:{position:"absolute",top:0,insetInlineStart:r,insetInlineEnd:r,bottom:0,"&-input":{width:"100%",WebkitAppearance:"textfield"}},["\n ".concat(n,"-selection-item,\n ").concat(n,"-selection-placeholder\n ")]:{padding:0,lineHeight:(0,eZ.bf)(i),transition:"all ".concat(e.motionDurationSlow,", visibility 0s"),alignSelf:"center"},["".concat(n,"-selection-placeholder")]:{transition:"none",pointerEvents:"none"},[["&:after","".concat(n,"-selection-item:empty:after"),"".concat(n,"-selection-placeholder:empty:after")].join(",")]:{display:"inline-block",width:0,visibility:"hidden",content:'"\\a0"'}}),["\n &".concat(n,"-show-arrow ").concat(n,"-selection-item,\n &").concat(n,"-show-arrow ").concat(n,"-selection-placeholder\n ")]:{paddingInlineEnd:e.showArrowPaddingInlineEnd},["&".concat(n,"-open ").concat(n,"-selection-item")]:{color:e.colorTextPlaceholder},["&:not(".concat(n,"-customize-input)")]:{["".concat(n,"-selector")]:{width:"100%",height:"100%",padding:"0 
".concat((0,eZ.bf)(r)),["".concat(n,"-selection-search-input")]:{height:i},"&:after":{lineHeight:(0,eZ.bf)(i)}}},["&".concat(n,"-customize-input")]:{["".concat(n,"-selector")]:{"&:after":{display:"none"},["".concat(n,"-selection-search")]:{position:"static",width:"100%"},["".concat(n,"-selection-placeholder")]:{position:"absolute",insetInlineStart:0,insetInlineEnd:0,padding:"0 ".concat((0,eZ.bf)(r)),"&:after":{display:"none"}}}}}}}let eH=(e,t)=>{let{componentCls:n,antCls:r,controlOutlineWidth:o}=e;return{["&:not(".concat(n,"-customize-input) ").concat(n,"-selector")]:{border:"".concat((0,eZ.bf)(e.lineWidth)," ").concat(e.lineType," ").concat(t.borderColor),background:e.selectorBg},["&:not(".concat(n,"-disabled):not(").concat(n,"-customize-input):not(").concat(r,"-pagination-size-changer)")]:{["&:hover ".concat(n,"-selector")]:{borderColor:t.hoverBorderHover},["".concat(n,"-focused& ").concat(n,"-selector")]:{borderColor:t.activeBorderColor,boxShadow:"0 0 0 ".concat((0,eZ.bf)(o)," ").concat(t.activeShadowColor),outline:0}}}},eq=(e,t)=>({["&".concat(e.componentCls,"-status-").concat(t.status)]:Object.assign({},eH(e,t))}),eW=e=>({"&-outlined":Object.assign(Object.assign(Object.assign(Object.assign({},eH(e,{borderColor:e.colorBorder,hoverBorderHover:e.colorPrimaryHover,activeBorderColor:e.colorPrimary,activeShadowColor:e.controlOutline})),eq(e,{status:"error",borderColor:e.colorError,hoverBorderHover:e.colorErrorHover,activeBorderColor:e.colorError,activeShadowColor:e.colorErrorOutline})),eq(e,{status:"warning",borderColor:e.colorWarning,hoverBorderHover:e.colorWarningHover,activeBorderColor:e.colorWarning,activeShadowColor:e.colorWarningOutline})),{["&".concat(e.componentCls,"-disabled")]:{["&:not(".concat(e.componentCls,"-customize-input) ").concat(e.componentCls,"-selector")]:{background:e.colorBgContainerDisabled,color:e.colorTextDisabled}},["&".concat(e.componentCls,"-multiple 
").concat(e.componentCls,"-selection-item")]:{background:e.multipleItemBg,border:"".concat((0,eZ.bf)(e.lineWidth)," ").concat(e.lineType," ").concat(e.multipleItemBorderColor)}})}),eK=(e,t)=>{let{componentCls:n,antCls:r}=e;return{["&:not(".concat(n,"-customize-input) ").concat(n,"-selector")]:{background:t.bg,border:"".concat((0,eZ.bf)(e.lineWidth)," ").concat(e.lineType," transparent"),color:t.color},["&:not(".concat(n,"-disabled):not(").concat(n,"-customize-input):not(").concat(r,"-pagination-size-changer)")]:{["&:hover ".concat(n,"-selector")]:{background:t.hoverBg},["".concat(n,"-focused& ").concat(n,"-selector")]:{background:e.selectorBg,borderColor:t.activeBorderColor,outline:0}}}},eU=(e,t)=>({["&".concat(e.componentCls,"-status-").concat(t.status)]:Object.assign({},eK(e,t))}),eV=e=>({"&-filled":Object.assign(Object.assign(Object.assign(Object.assign({},eK(e,{bg:e.colorFillTertiary,hoverBg:e.colorFillSecondary,activeBorderColor:e.colorPrimary,color:e.colorText})),eU(e,{status:"error",bg:e.colorErrorBg,hoverBg:e.colorErrorBgHover,activeBorderColor:e.colorError,color:e.colorError})),eU(e,{status:"warning",bg:e.colorWarningBg,hoverBg:e.colorWarningBgHover,activeBorderColor:e.colorWarning,color:e.colorWarning})),{["&".concat(e.componentCls,"-disabled")]:{["&:not(".concat(e.componentCls,"-customize-input) ").concat(e.componentCls,"-selector")]:{borderColor:e.colorBorder,background:e.colorBgContainerDisabled,color:e.colorTextDisabled}},["&".concat(e.componentCls,"-multiple ").concat(e.componentCls,"-selection-item")]:{background:e.colorBgContainer,border:"".concat((0,eZ.bf)(e.lineWidth)," ").concat(e.lineType," ").concat(e.colorSplit)}})}),eG=e=>({"&-borderless":{["".concat(e.componentCls,"-selector")]:{background:"transparent",borderColor:"transparent"},["&".concat(e.componentCls,"-disabled")]:{["&:not(".concat(e.componentCls,"-customize-input) ").concat(e.componentCls,"-selector")]:{color:e.colorTextDisabled}},["&".concat(e.componentCls,"-multiple 
").concat(e.componentCls,"-selection-item")]:{background:e.multipleItemBg,border:"".concat((0,eZ.bf)(e.lineWidth)," ").concat(e.lineType," ").concat(e.multipleItemBorderColor)}}});var eX=e=>({[e.componentCls]:Object.assign(Object.assign(Object.assign({},eW(e)),eV(e)),eG(e))});let e$=e=>{let{componentCls:t}=e;return{position:"relative",transition:"all ".concat(e.motionDurationMid," ").concat(e.motionEaseInOut),input:{cursor:"pointer"},["".concat(t,"-show-search&")]:{cursor:"text",input:{cursor:"auto",color:"inherit",height:"100%"}},["".concat(t,"-disabled&")]:{cursor:"not-allowed",input:{cursor:"not-allowed"}}}},eY=e=>{let{componentCls:t}=e;return{["".concat(t,"-selection-search-input")]:{margin:0,padding:0,background:"transparent",border:"none",outline:"none",appearance:"none",fontFamily:"inherit","&::-webkit-search-cancel-button":{display:"none","-webkit-appearance":"none"}}}},eQ=e=>{let{antCls:t,componentCls:n,inputPaddingHorizontalBase:r,iconCls:o}=e;return{[n]:Object.assign(Object.assign({},(0,eM.Wf)(e)),{position:"relative",display:"inline-block",cursor:"pointer",["&:not(".concat(n,"-customize-input) ").concat(n,"-selector")]:Object.assign(Object.assign({},e$(e)),eY(e)),["".concat(n,"-selection-item")]:Object.assign(Object.assign({flex:1,fontWeight:"normal",position:"relative",userSelect:"none"},eM.vS),{["> ".concat(t,"-typography")]:{display:"inline"}}),["".concat(n,"-selection-placeholder")]:Object.assign(Object.assign({},eM.vS),{flex:1,color:e.colorTextPlaceholder,pointerEvents:"none"}),["".concat(n,"-arrow")]:Object.assign(Object.assign({},(0,eM.Ro)()),{position:"absolute",top:"50%",insetInlineStart:"auto",insetInlineEnd:r,height:e.fontSizeIcon,marginTop:e.calc(e.fontSizeIcon).mul(-1).div(2).equal(),color:e.colorTextQuaternary,fontSize:e.fontSizeIcon,lineHeight:1,textAlign:"center",pointerEvents:"none",display:"flex",alignItems:"center",transition:"opacity ".concat(e.motionDurationSlow," ease"),[o]:{verticalAlign:"top",transition:"transform 
".concat(e.motionDurationSlow),"> svg":{verticalAlign:"top"},["&:not(".concat(n,"-suffix)")]:{pointerEvents:"auto"}},["".concat(n,"-disabled &")]:{cursor:"not-allowed"},"> *:not(:last-child)":{marginInlineEnd:8}}),["".concat(n,"-clear")]:{position:"absolute",top:"50%",insetInlineStart:"auto",insetInlineEnd:r,zIndex:1,display:"inline-block",width:e.fontSizeIcon,height:e.fontSizeIcon,marginTop:e.calc(e.fontSizeIcon).mul(-1).div(2).equal(),color:e.colorTextQuaternary,fontSize:e.fontSizeIcon,fontStyle:"normal",lineHeight:1,textAlign:"center",textTransform:"none",cursor:"pointer",opacity:0,transition:"color ".concat(e.motionDurationMid," ease, opacity ").concat(e.motionDurationSlow," ease"),textRendering:"auto","&:before":{display:"block"},"&:hover":{color:e.colorTextTertiary}},"&:hover":{["".concat(n,"-clear")]:{opacity:1},["".concat(n,"-arrow:not(:last-child)")]:{opacity:0}}}),["".concat(n,"-has-feedback")]:{["".concat(n,"-clear")]:{insetInlineEnd:e.calc(r).add(e.fontSize).add(e.paddingXS).equal()}}}},eJ=e=>{let{componentCls:t}=e;return[{[t]:{["&".concat(t,"-in-form-item")]:{width:"100%"}}},eQ(e),function(e){let{componentCls:t}=e,n=e.calc(e.controlPaddingHorizontalSM).sub(e.lineWidth).equal();return[eF(e),eF((0,eR.TS)(e,{controlHeight:e.controlHeightSM,borderRadius:e.borderRadiusSM}),"sm"),{["".concat(t,"-single").concat(t,"-sm")]:{["&:not(".concat(t,"-customize-input)")]:{["".concat(t,"-selection-search")]:{insetInlineStart:n,insetInlineEnd:n},["".concat(t,"-selector")]:{padding:"0 ".concat((0,eZ.bf)(n))},["&".concat(t,"-show-arrow ").concat(t,"-selection-search")]:{insetInlineEnd:e.calc(n).add(e.calc(e.fontSize).mul(1.5)).equal()},["\n &".concat(t,"-show-arrow ").concat(t,"-selection-item,\n &").concat(t,"-show-arrow ").concat(t,"-selection-placeholder\n 
")]:{paddingInlineEnd:e.calc(e.fontSize).mul(1.5).equal()}}}},eF((0,eR.TS)(e,{controlHeight:e.singleItemHeightLG,fontSize:e.fontSizeLG,borderRadius:e.borderRadiusLG}),"lg")]}(e),eB(e),eD(e),{["".concat(t,"-rtl")]:{direction:"rtl"}},(0,eN.c)(e,{borderElCls:"".concat(t,"-selector"),focusElCls:"".concat(t,"-focused")})]};var e0=(0,eI.I$)("Select",(e,t)=>{let{rootPrefixCls:n}=t,r=(0,eR.TS)(e,{rootPrefixCls:n,inputPaddingHorizontalBase:e.calc(e.paddingSM).sub(1).equal(),multipleSelectItemHeight:e.multipleItemHeight,selectHeight:e.controlHeight});return[eJ(r),eX(r)]},e=>{let{fontSize:t,lineHeight:n,controlHeight:r,controlPaddingHorizontal:o,zIndexPopupBase:i,colorText:a,fontWeightStrong:l,controlItemBgActive:c,controlItemBgHover:s,colorBgContainer:u,colorFillSecondary:d,controlHeightLG:f,controlHeightSM:p,colorBgContainerDisabled:h,colorTextDisabled:m}=e;return{zIndexPopup:i+50,optionSelectedColor:a,optionSelectedFontWeight:l,optionSelectedBg:c,optionActiveBg:s,optionPadding:"".concat((r-t*n)/2,"px ").concat(o,"px"),optionFontSize:t,optionLineHeight:n,optionHeight:r,selectorBg:u,clearBg:u,singleItemHeightLG:f,multipleItemBg:d,multipleItemBorderColor:"transparent",multipleItemHeight:p,multipleItemHeightLG:r,multipleSelectorBgDisabled:h,multipleItemColorDisabled:m,multipleItemBorderColorDisabled:"transparent",showArrowPaddingInlineEnd:Math.ceil(1.25*e.fontSize)}},{unitless:{optionLineHeight:!0,optionSelectedFontWeight:!0}}),e1=n(9738),e2=n(39725),e6=n(49638),e3=n(70464),e4=n(61935),e5=n(29436),e8=function(e,t){var n={};for(var r in e)Object.prototype.hasOwnProperty.call(e,r)&&0>t.indexOf(r)&&(n[r]=e[r]);if(null!=e&&"function"==typeof Object.getOwnPropertySymbols)for(var o=0,r=Object.getOwnPropertySymbols(e);ot.indexOf(r[o])&&Object.prototype.propertyIsEnumerable.call(e,r[o])&&(n[r[o]]=e[r[o]]);return n};let e7="SECRET_COMBOBOX_MODE_DO_NOT_USE",e9=r.forwardRef((e,t)=>{var n,o,a;let 
l;let{prefixCls:c,bordered:s,className:u,rootClassName:d,getPopupContainer:f,popupClassName:p,dropdownClassName:h,listHeight:m=256,placement:g,listItemHeight:v,size:y,disabled:b,notFoundContent:x,status:w,builtinPlacements:S,dropdownMatchSelectWidth:k,popupMatchSelectWidth:E,direction:C,style:O,allowClear:j,variant:P,dropdownStyle:M,transitionName:N,tagRender:I,maxCount:R}=e,T=e8(e,["prefixCls","bordered","className","rootClassName","getPopupContainer","popupClassName","dropdownClassName","listHeight","placement","listItemHeight","size","disabled","notFoundContent","status","builtinPlacements","dropdownMatchSelectWidth","popupMatchSelectWidth","direction","style","allowClear","variant","dropdownStyle","transitionName","tagRender","maxCount"]),{getPopupContainer:A,getPrefixCls:_,renderEmpty:D,direction:Z,virtual:L,popupMatchSelectWidth:z,popupOverflow:B,select:F}=r.useContext(eb.E_),[,H]=(0,ej.ZP)(),q=null!=v?v:null==H?void 0:H.controlHeight,W=_("select",c),K=_(),U=null!=C?C:Z,{compactSize:V,compactItemClassnames:G}=(0,eO.ri)(W,U),[X,$]=(0,eC.Z)(P,s),Y=(0,eS.Z)(W),[J,ee,et]=e0(W,Y),en=r.useMemo(()=>{let{mode:t}=e;return"combobox"===t?void 0:t===e7?"combobox":t},[e.mode]),er="multiple"===en||"tags"===en,eo=(o=e.suffixIcon,void 0!==(a=e.showArrow)?a:null!==o),ei=null!==(n=null!=E?E:k)&&void 0!==n?n:z,{status:ea,hasFeedback:el,isFormItemInput:ec,feedbackIcon:es}=r.useContext(eE.aM),eu=(0,ey.F)(ea,w);l=void 0!==x?x:"combobox"===en?null:(null==D?void 0:D("Select"))||r.createElement(ex.Z,{componentName:"Select"});let{suffixIcon:ed,itemIcon:ef,removeIcon:ep,clearIcon:ev}=function(e){let{suffixIcon:t,clearIcon:n,menuItemSelectedIcon:o,removeIcon:i,loading:a,multiple:l,hasFeedback:c,prefixCls:s,showSuffixIcon:u,feedbackIcon:d,showArrow:f,componentName:p}=e,h=null!=n?n:r.createElement(e2.Z,null),m=e=>null!==t||c||f?r.createElement(r.Fragment,null,!1!==u&&e,c&&d):null,g=null;if(void 0!==t)g=m(t);else if(a)g=m(r.createElement(e4.Z,{spin:!0}));else{let 
e="".concat(s,"-suffix");g=t=>{let{open:n,showSearch:o}=t;return n&&o?m(r.createElement(e5.Z,{className:e})):m(r.createElement(e3.Z,{className:e}))}}let v=null;return v=void 0!==o?o:l?r.createElement(e1.Z,null):null,{clearIcon:h,suffixIcon:g,itemIcon:v,removeIcon:void 0!==i?i:r.createElement(e6.Z,null)}}(Object.assign(Object.assign({},T),{multiple:er,hasFeedback:el,feedbackIcon:es,showSuffixIcon:eo,prefixCls:W,componentName:"Select"})),eM=(0,Q.Z)(T,["suffixIcon","itemIcon"]),eN=i()(p||h,{["".concat(W,"-dropdown-").concat(U)]:"rtl"===U},d,et,Y,ee),eI=(0,ek.Z)(e=>{var t;return null!==(t=null!=y?y:V)&&void 0!==t?t:e}),eR=r.useContext(ew.Z),eT=i()({["".concat(W,"-lg")]:"large"===eI,["".concat(W,"-sm")]:"small"===eI,["".concat(W,"-rtl")]:"rtl"===U,["".concat(W,"-").concat(X)]:$,["".concat(W,"-in-form-item")]:ec},(0,ey.Z)(W,eu,el),G,null==F?void 0:F.className,u,d,et,Y,ee),eA=r.useMemo(()=>void 0!==g?g:"rtl"===U?"bottomRight":"bottomLeft",[g,U]),[e_]=(0,em.Cn)("SelectLike",null==M?void 0:M.zIndex);return J(r.createElement(eh,Object.assign({ref:t,virtual:L,showSearch:null==F?void 0:F.showSearch},eM,{style:Object.assign(Object.assign({},null==F?void 0:F.style),O),dropdownMatchSelectWidth:ei,transitionName:(0,eg.m)(K,"slide-up",N),builtinPlacements:S||eP(B),listHeight:m,listItemHeight:q,mode:en,prefixCls:W,placement:eA,direction:U,suffixIcon:ed,menuItemSelectedIcon:ef,removeIcon:ep,allowClear:!0===j?{clearIcon:ev}:j,notFoundContent:l,className:eT,getPopupContainer:f||A,dropdownClassName:eN,disabled:null!=b?b:eR,dropdownStyle:Object.assign(Object.assign({},M),{zIndex:e_}),maxCount:er?R:void 0,tagRender:er?I:void 0})))}),te=(0,ev.Z)(e9);e9.SECRET_COMBOBOX_MODE_DO_NOT_USE=e7,e9.Option=$,e9.OptGroup=X,e9._InternalPanelDoNotUseOrYouWillBeFired=te;var tt=e9},93142:function(e,t,n){"use strict";n.d(t,{Z:function(){return v}});var r=n(2265),o=n(36760),i=n.n(o),a=n(45287);function l(e){return["small","middle","large"].includes(e)}function c(e){return!!e&&"number"==typeof 
e&&!Number.isNaN(e)}var s=n(71744),u=n(65658);let d=r.createContext({latestIndex:0}),f=d.Provider;var p=e=>{let{className:t,index:n,children:o,split:i,style:a}=e,{latestIndex:l}=r.useContext(d);return null==o?null:r.createElement(r.Fragment,null,r.createElement("div",{className:t,style:a},o),nt.indexOf(r)&&(n[r]=e[r]);if(null!=e&&"function"==typeof Object.getOwnPropertySymbols)for(var o=0,r=Object.getOwnPropertySymbols(e);ot.indexOf(r[o])&&Object.prototype.propertyIsEnumerable.call(e,r[o])&&(n[r[o]]=e[r[o]]);return n};let g=r.forwardRef((e,t)=>{var n,o;let{getPrefixCls:u,space:d,direction:g}=r.useContext(s.E_),{size:v=(null==d?void 0:d.size)||"small",align:y,className:b,rootClassName:x,children:w,direction:S="horizontal",prefixCls:k,split:E,style:C,wrap:O=!1,classNames:j,styles:P}=e,M=m(e,["size","align","className","rootClassName","children","direction","prefixCls","split","style","wrap","classNames","styles"]),[N,I]=Array.isArray(v)?v:[v,v],R=l(I),T=l(N),A=c(I),_=c(N),D=(0,a.Z)(w,{keepEmpty:!0}),Z=void 0===y&&"horizontal"===S?"center":y,L=u("space",k),[z,B,F]=(0,h.Z)(L),H=i()(L,null==d?void 0:d.className,B,"".concat(L,"-").concat(S),{["".concat(L,"-rtl")]:"rtl"===g,["".concat(L,"-align-").concat(Z)]:Z,["".concat(L,"-gap-row-").concat(I)]:R,["".concat(L,"-gap-col-").concat(N)]:T},b,x,F),q=i()("".concat(L,"-item"),null!==(n=null==j?void 0:j.item)&&void 0!==n?n:null===(o=null==d?void 0:d.classNames)||void 0===o?void 0:o.item),W=0,K=D.map((e,t)=>{var n,o;null!=e&&(W=t);let i=e&&e.key||"".concat(q,"-").concat(t);return r.createElement(p,{className:q,key:i,index:t,split:E,style:null!==(n=null==P?void 0:P.item)&&void 0!==n?n:null===(o=null==d?void 0:d.styles)||void 0===o?void 0:o.item},e)}),U=r.useMemo(()=>({latestIndex:W}),[W]);if(0===D.length)return null;let V={};return O&&(V.flexWrap="wrap"),!T&&_&&(V.columnGap=N),!R&&A&&(V.rowGap=I),z(r.createElement("div",Object.assign({ref:t,className:H,style:Object.assign(Object.assign(Object.assign({},V),null==d?void 
0:d.style),C)},M),r.createElement(f,{value:U},K)))});g.Compact=u.ZP;var v=g},87908:function(e,t,n){"use strict";n.d(t,{Z:function(){return x}});var r=n(2265),o=n(36760),i=n.n(o),a=n(18694),l=n(19722),c=n(71744),s=n(352),u=n(12918),d=n(80669),f=n(3104);let p=new s.E4("antSpinMove",{to:{opacity:1}}),h=new s.E4("antRotate",{to:{transform:"rotate(405deg)"}}),m=e=>{let{componentCls:t,calc:n}=e;return{["".concat(t)]:Object.assign(Object.assign({},(0,u.Wf)(e)),{position:"absolute",display:"none",color:e.colorPrimary,fontSize:0,textAlign:"center",verticalAlign:"middle",opacity:0,transition:"transform ".concat(e.motionDurationSlow," ").concat(e.motionEaseInOutCirc),"&-spinning":{position:"static",display:"inline-block",opacity:1},["".concat(t,"-text")]:{fontSize:e.fontSize,paddingTop:n(n(e.dotSize).sub(e.fontSize)).div(2).add(2).equal()},"&-fullscreen":{position:"fixed",width:"100vw",height:"100vh",backgroundColor:e.colorBgMask,zIndex:e.zIndexPopupBase,inset:0,display:"flex",alignItems:"center",flexDirection:"column",justifyContent:"center",opacity:0,visibility:"hidden",transition:"all ".concat(e.motionDurationMid),"&-show":{opacity:1,visibility:"visible"},["".concat(t,"-dot ").concat(t,"-dot-item")]:{backgroundColor:e.colorWhite},["".concat(t,"-text")]:{color:e.colorTextLightSolid}},"&-nested-loading":{position:"relative",["> div > ".concat(t)]:{position:"absolute",top:0,insetInlineStart:0,zIndex:4,display:"block",width:"100%",height:"100%",maxHeight:e.contentHeight,["".concat(t,"-dot")]:{position:"absolute",top:"50%",insetInlineStart:"50%",margin:n(e.dotSize).mul(-1).div(2).equal()},["".concat(t,"-text")]:{position:"absolute",top:"50%",width:"100%",textShadow:"0 1px 2px ".concat(e.colorBgContainer)},["&".concat(t,"-show-text 
").concat(t,"-dot")]:{marginTop:n(e.dotSize).div(2).mul(-1).sub(10).equal()},"&-sm":{["".concat(t,"-dot")]:{margin:n(e.dotSizeSM).mul(-1).div(2).equal()},["".concat(t,"-text")]:{paddingTop:n(n(e.dotSizeSM).sub(e.fontSize)).div(2).add(2).equal()},["&".concat(t,"-show-text ").concat(t,"-dot")]:{marginTop:n(e.dotSizeSM).div(2).mul(-1).sub(10).equal()}},"&-lg":{["".concat(t,"-dot")]:{margin:n(e.dotSizeLG).mul(-1).div(2).equal()},["".concat(t,"-text")]:{paddingTop:n(n(e.dotSizeLG).sub(e.fontSize)).div(2).add(2).equal()},["&".concat(t,"-show-text ").concat(t,"-dot")]:{marginTop:n(e.dotSizeLG).div(2).mul(-1).sub(10).equal()}}},["".concat(t,"-container")]:{position:"relative",transition:"opacity ".concat(e.motionDurationSlow),"&::after":{position:"absolute",top:0,insetInlineEnd:0,bottom:0,insetInlineStart:0,zIndex:10,width:"100%",height:"100%",background:e.colorBgContainer,opacity:0,transition:"all ".concat(e.motionDurationSlow),content:'""',pointerEvents:"none"}},["".concat(t,"-blur")]:{clear:"both",opacity:.5,userSelect:"none",pointerEvents:"none","&::after":{opacity:.4,pointerEvents:"auto"}}},"&-tip":{color:e.spinDotDefault},["".concat(t,"-dot")]:{position:"relative",display:"inline-block",fontSize:e.dotSize,width:"1em",height:"1em","&-item":{position:"absolute",display:"block",width:n(e.dotSize).sub(n(e.marginXXS).div(2)).div(2).equal(),height:n(e.dotSize).sub(n(e.marginXXS).div(2)).div(2).equal(),backgroundColor:e.colorPrimary,borderRadius:"100%",transform:"scale(0.75)",transformOrigin:"50% 
50%",opacity:.3,animationName:p,animationDuration:"1s",animationIterationCount:"infinite",animationTimingFunction:"linear",animationDirection:"alternate","&:nth-child(1)":{top:0,insetInlineStart:0,animationDelay:"0s"},"&:nth-child(2)":{top:0,insetInlineEnd:0,animationDelay:"0.4s"},"&:nth-child(3)":{insetInlineEnd:0,bottom:0,animationDelay:"0.8s"},"&:nth-child(4)":{bottom:0,insetInlineStart:0,animationDelay:"1.2s"}},"&-spin":{transform:"rotate(45deg)",animationName:h,animationDuration:"1.2s",animationIterationCount:"infinite",animationTimingFunction:"linear"}},["&-sm ".concat(t,"-dot")]:{fontSize:e.dotSizeSM,i:{width:n(n(e.dotSizeSM).sub(n(e.marginXXS).div(2))).div(2).equal(),height:n(n(e.dotSizeSM).sub(n(e.marginXXS).div(2))).div(2).equal()}},["&-lg ".concat(t,"-dot")]:{fontSize:e.dotSizeLG,i:{width:n(n(e.dotSizeLG).sub(e.marginXXS)).div(2).equal(),height:n(n(e.dotSizeLG).sub(e.marginXXS)).div(2).equal()}},["&".concat(t,"-show-text ").concat(t,"-text")]:{display:"block"}})}};var g=(0,d.I$)("Spin",e=>[m((0,f.TS)(e,{spinDotDefault:e.colorTextDescription}))],e=>{let{controlHeightLG:t,controlHeight:n}=e;return{contentHeight:400,dotSize:t/2,dotSizeSM:.35*t,dotSizeLG:n}}),v=function(e,t){var n={};for(var r in e)Object.prototype.hasOwnProperty.call(e,r)&&0>t.indexOf(r)&&(n[r]=e[r]);if(null!=e&&"function"==typeof Object.getOwnPropertySymbols)for(var o=0,r=Object.getOwnPropertySymbols(e);ot.indexOf(r[o])&&Object.prototype.propertyIsEnumerable.call(e,r[o])&&(n[r[o]]=e[r[o]]);return n};let y=null,b=e=>{let{prefixCls:t,spinning:n=!0,delay:o=0,className:s,rootClassName:u,size:d="default",tip:f,wrapperClassName:p,style:h,children:m,fullscreen:b}=e,x=v(e,["prefixCls","spinning","delay","className","rootClassName","size","tip","wrapperClassName","style","children","fullscreen"]),{getPrefixCls:w}=r.useContext(c.E_),S=w("spin",t),[k,E,C]=g(S),[O,j]=r.useState(()=>n&&(!n||!o||!!isNaN(Number(o))));r.useEffect(()=>{if(n){var e;let t=function(e,t,n){var r,o=n||{},i=o.noTrailing,a=void 
0!==i&&i,l=o.noLeading,c=void 0!==l&&l,s=o.debounceMode,u=void 0===s?void 0:s,d=!1,f=0;function p(){r&&clearTimeout(r)}function h(){for(var n=arguments.length,o=Array(n),i=0;ie?c?(f=Date.now(),a||(r=setTimeout(u?m:h,e))):h():!0!==a&&(r=setTimeout(u?m:h,void 0===u?e-s:e)))}return h.cancel=function(e){var t=(e||{}).upcomingOnly;p(),d=!(void 0!==t&&t)},h}(o,()=>{j(!0)},{debounceMode:!1!==(void 0!==(e=({}).atBegin)&&e)});return t(),()=>{var e;null===(e=null==t?void 0:t.cancel)||void 0===e||e.call(t)}}j(!1)},[o,n]);let P=r.useMemo(()=>void 0!==m&&!b,[m,b]),{direction:M,spin:N}=r.useContext(c.E_),I=i()(S,null==N?void 0:N.className,{["".concat(S,"-sm")]:"small"===d,["".concat(S,"-lg")]:"large"===d,["".concat(S,"-spinning")]:O,["".concat(S,"-show-text")]:!!f,["".concat(S,"-fullscreen")]:b,["".concat(S,"-fullscreen-show")]:b&&O,["".concat(S,"-rtl")]:"rtl"===M},s,u,E,C),R=i()("".concat(S,"-container"),{["".concat(S,"-blur")]:O}),T=(0,a.Z)(x,["indicator"]),A=Object.assign(Object.assign({},null==N?void 0:N.style),h),_=r.createElement("div",Object.assign({},T,{style:A,className:I,"aria-live":"polite","aria-busy":O}),function(e,t){let{indicator:n}=t,o="".concat(e,"-dot");return null===n?null:(0,l.l$)(n)?(0,l.Tm)(n,{className:i()(n.props.className,o)}):(0,l.l$)(y)?(0,l.Tm)(y,{className:i()(y.props.className,o)}):r.createElement("span",{className:i()(o,"".concat(e,"-dot-spin"))},r.createElement("i",{className:"".concat(e,"-dot-item"),key:1}),r.createElement("i",{className:"".concat(e,"-dot-item"),key:2}),r.createElement("i",{className:"".concat(e,"-dot-item"),key:3}),r.createElement("i",{className:"".concat(e,"-dot-item"),key:4}))}(S,e),f&&(P||b)?r.createElement("div",{className:"".concat(S,"-text")},f):null);return k(P?r.createElement("div",Object.assign({},T,{className:i()("".concat(S,"-nested-loading"),p,E,C)}),O&&r.createElement("div",{key:"loading"},_),r.createElement("div",{className:R,key:"container"},m)):_)};b.setDefaultIndicator=e=>{y=e};var 
x=b},29382:function(e,t,n){"use strict";n.d(t,{Fm:function(){return f}});var r=n(352),o=n(37133);let i=new r.E4("antMoveDownIn",{"0%":{transform:"translate3d(0, 100%, 0)",transformOrigin:"0 0",opacity:0},"100%":{transform:"translate3d(0, 0, 0)",transformOrigin:"0 0",opacity:1}}),a=new r.E4("antMoveDownOut",{"0%":{transform:"translate3d(0, 0, 0)",transformOrigin:"0 0",opacity:1},"100%":{transform:"translate3d(0, 100%, 0)",transformOrigin:"0 0",opacity:0}}),l=new r.E4("antMoveLeftIn",{"0%":{transform:"translate3d(-100%, 0, 0)",transformOrigin:"0 0",opacity:0},"100%":{transform:"translate3d(0, 0, 0)",transformOrigin:"0 0",opacity:1}}),c=new r.E4("antMoveLeftOut",{"0%":{transform:"translate3d(0, 0, 0)",transformOrigin:"0 0",opacity:1},"100%":{transform:"translate3d(-100%, 0, 0)",transformOrigin:"0 0",opacity:0}}),s=new r.E4("antMoveRightIn",{"0%":{transform:"translate3d(100%, 0, 0)",transformOrigin:"0 0",opacity:0},"100%":{transform:"translate3d(0, 0, 0)",transformOrigin:"0 0",opacity:1}}),u=new r.E4("antMoveRightOut",{"0%":{transform:"translate3d(0, 0, 0)",transformOrigin:"0 0",opacity:1},"100%":{transform:"translate3d(100%, 0, 0)",transformOrigin:"0 0",opacity:0}}),d={"move-up":{inKeyframes:new r.E4("antMoveUpIn",{"0%":{transform:"translate3d(0, -100%, 0)",transformOrigin:"0 0",opacity:0},"100%":{transform:"translate3d(0, 0, 0)",transformOrigin:"0 0",opacity:1}}),outKeyframes:new r.E4("antMoveUpOut",{"0%":{transform:"translate3d(0, 0, 0)",transformOrigin:"0 0",opacity:1},"100%":{transform:"translate3d(0, -100%, 0)",transformOrigin:"0 0",opacity:0}})},"move-down":{inKeyframes:i,outKeyframes:a},"move-left":{inKeyframes:l,outKeyframes:c},"move-right":{inKeyframes:s,outKeyframes:u}},f=(e,t)=>{let{antCls:n}=e,r="".concat(n,"-").concat(t),{inKeyframes:i,outKeyframes:a}=d[t];return[(0,o.R)(r,i,a,e.motionDurationMid),{["\n ".concat(r,"-enter,\n ").concat(r,"-appear\n 
")]:{opacity:0,animationTimingFunction:e.motionEaseOutCirc},["".concat(r,"-leave")]:{animationTimingFunction:e.motionEaseInOutCirc}}]}},18544:function(e,t,n){"use strict";n.d(t,{Qt:function(){return l},Uw:function(){return a},fJ:function(){return i},ly:function(){return c},oN:function(){return d}});var r=n(352),o=n(37133);let i=new r.E4("antSlideUpIn",{"0%":{transform:"scaleY(0.8)",transformOrigin:"0% 0%",opacity:0},"100%":{transform:"scaleY(1)",transformOrigin:"0% 0%",opacity:1}}),a=new r.E4("antSlideUpOut",{"0%":{transform:"scaleY(1)",transformOrigin:"0% 0%",opacity:1},"100%":{transform:"scaleY(0.8)",transformOrigin:"0% 0%",opacity:0}}),l=new r.E4("antSlideDownIn",{"0%":{transform:"scaleY(0.8)",transformOrigin:"100% 100%",opacity:0},"100%":{transform:"scaleY(1)",transformOrigin:"100% 100%",opacity:1}}),c=new r.E4("antSlideDownOut",{"0%":{transform:"scaleY(1)",transformOrigin:"100% 100%",opacity:1},"100%":{transform:"scaleY(0.8)",transformOrigin:"100% 100%",opacity:0}}),s=new r.E4("antSlideLeftIn",{"0%":{transform:"scaleX(0.8)",transformOrigin:"0% 0%",opacity:0},"100%":{transform:"scaleX(1)",transformOrigin:"0% 0%",opacity:1}}),u={"slide-up":{inKeyframes:i,outKeyframes:a},"slide-down":{inKeyframes:l,outKeyframes:c},"slide-left":{inKeyframes:s,outKeyframes:new r.E4("antSlideLeftOut",{"0%":{transform:"scaleX(1)",transformOrigin:"0% 0%",opacity:1},"100%":{transform:"scaleX(0.8)",transformOrigin:"0% 0%",opacity:0}})},"slide-right":{inKeyframes:new r.E4("antSlideRightIn",{"0%":{transform:"scaleX(0.8)",transformOrigin:"100% 0%",opacity:0},"100%":{transform:"scaleX(1)",transformOrigin:"100% 0%",opacity:1}}),outKeyframes:new r.E4("antSlideRightOut",{"0%":{transform:"scaleX(1)",transformOrigin:"100% 0%",opacity:1},"100%":{transform:"scaleX(0.8)",transformOrigin:"100% 0%",opacity:0}})}},d=(e,t)=>{let{antCls:n}=e,r="".concat(n,"-").concat(t),{inKeyframes:i,outKeyframes:a}=u[t];return[(0,o.R)(r,i,a,e.motionDurationMid),{["\n ".concat(r,"-enter,\n ").concat(r,"-appear\n 
")]:{transform:"scale(0)",transformOrigin:"0% 0%",opacity:0,animationTimingFunction:e.motionEaseOutQuint,"&-prepare":{transform:"scale(1)"}},["".concat(r,"-leave")]:{animationTimingFunction:e.motionEaseInQuint}}]}},76122:function(e,t,n){"use strict";n.d(t,{N:function(){return r}});let r=e=>({color:e.colorLink,textDecoration:"none",outline:"none",cursor:"pointer",transition:"color ".concat(e.motionDurationSlow),"&:focus, &:hover":{color:e.colorLinkHover},"&:active":{color:e.colorLinkActive}})},63709:function(e,t,n){"use strict";n.d(t,{Z:function(){return R}});var r=n(2265),o=n(61935),i=n(36760),a=n.n(i),l=n(1119),c=n(11993),s=n(26365),u=n(6989),d=n(50506),f=n(95814),p=["prefixCls","className","checked","defaultChecked","disabled","loadingIcon","checkedChildren","unCheckedChildren","onClick","onChange","onKeyDown"],h=r.forwardRef(function(e,t){var n,o=e.prefixCls,i=void 0===o?"rc-switch":o,h=e.className,m=e.checked,g=e.defaultChecked,v=e.disabled,y=e.loadingIcon,b=e.checkedChildren,x=e.unCheckedChildren,w=e.onClick,S=e.onChange,k=e.onKeyDown,E=(0,u.Z)(e,p),C=(0,d.Z)(!1,{value:m,defaultValue:g}),O=(0,s.Z)(C,2),j=O[0],P=O[1];function M(e,t){var n=j;return v||(P(n=e),null==S||S(n,t)),n}var N=a()(i,h,(n={},(0,c.Z)(n,"".concat(i,"-checked"),j),(0,c.Z)(n,"".concat(i,"-disabled"),v),n));return r.createElement("button",(0,l.Z)({},E,{type:"button",role:"switch","aria-checked":j,disabled:v,className:N,ref:t,onKeyDown:function(e){e.which===f.Z.LEFT?M(!1,e):e.which===f.Z.RIGHT&&M(!0,e),null==k||k(e)},onClick:function(e){var t=M(!j,e);null==w||w(t,e)}}),y,r.createElement("span",{className:"".concat(i,"-inner")},r.createElement("span",{className:"".concat(i,"-inner-checked")},b),r.createElement("span",{className:"".concat(i,"-inner-unchecked")},x)))});h.displayName="Switch";var m=n(6694),g=n(71744),v=n(86586),y=n(33759),b=n(352),x=n(36360),w=n(12918),S=n(80669),k=n(3104);let 
E=e=>{let{componentCls:t,trackHeightSM:n,trackPadding:r,trackMinWidthSM:o,innerMinMarginSM:i,innerMaxMarginSM:a,handleSizeSM:l,calc:c}=e,s="".concat(t,"-inner"),u=(0,b.bf)(c(l).add(c(r).mul(2)).equal()),d=(0,b.bf)(c(a).mul(2).equal());return{[t]:{["&".concat(t,"-small")]:{minWidth:o,height:n,lineHeight:(0,b.bf)(n),["".concat(t,"-inner")]:{paddingInlineStart:a,paddingInlineEnd:i,["".concat(s,"-checked")]:{marginInlineStart:"calc(-100% + ".concat(u," - ").concat(d,")"),marginInlineEnd:"calc(100% - ".concat(u," + ").concat(d,")")},["".concat(s,"-unchecked")]:{marginTop:c(n).mul(-1).equal(),marginInlineStart:0,marginInlineEnd:0}},["".concat(t,"-handle")]:{width:l,height:l},["".concat(t,"-loading-icon")]:{top:c(c(l).sub(e.switchLoadingIconSize)).div(2).equal(),fontSize:e.switchLoadingIconSize},["&".concat(t,"-checked")]:{["".concat(t,"-inner")]:{paddingInlineStart:i,paddingInlineEnd:a,["".concat(s,"-checked")]:{marginInlineStart:0,marginInlineEnd:0},["".concat(s,"-unchecked")]:{marginInlineStart:"calc(100% - ".concat(u," + ").concat(d,")"),marginInlineEnd:"calc(-100% + ".concat(u," - ").concat(d,")")}},["".concat(t,"-handle")]:{insetInlineStart:"calc(100% - ".concat((0,b.bf)(c(l).add(r).equal()),")")}},["&:not(".concat(t,"-disabled):active")]:{["&:not(".concat(t,"-checked) ").concat(s)]:{["".concat(s,"-unchecked")]:{marginInlineStart:c(e.marginXXS).div(2).equal(),marginInlineEnd:c(e.marginXXS).mul(-1).div(2).equal()}},["&".concat(t,"-checked ").concat(s)]:{["".concat(s,"-checked")]:{marginInlineStart:c(e.marginXXS).mul(-1).div(2).equal(),marginInlineEnd:c(e.marginXXS).div(2).equal()}}}}}}},C=e=>{let{componentCls:t,handleSize:n,calc:r}=e;return{[t]:{["".concat(t,"-loading-icon").concat(e.iconCls)]:{position:"relative",top:r(r(n).sub(e.fontSize)).div(2).equal(),color:e.switchLoadingIconColor,verticalAlign:"top"},["&".concat(t,"-checked 
").concat(t,"-loading-icon")]:{color:e.switchColor}}}},O=e=>{let{componentCls:t,trackPadding:n,handleBg:r,handleShadow:o,handleSize:i,calc:a}=e,l="".concat(t,"-handle");return{[t]:{[l]:{position:"absolute",top:n,insetInlineStart:n,width:i,height:i,transition:"all ".concat(e.switchDuration," ease-in-out"),"&::before":{position:"absolute",top:0,insetInlineEnd:0,bottom:0,insetInlineStart:0,backgroundColor:r,borderRadius:a(i).div(2).equal(),boxShadow:o,transition:"all ".concat(e.switchDuration," ease-in-out"),content:'""'}},["&".concat(t,"-checked ").concat(l)]:{insetInlineStart:"calc(100% - ".concat((0,b.bf)(a(i).add(n).equal()),")")},["&:not(".concat(t,"-disabled):active")]:{["".concat(l,"::before")]:{insetInlineEnd:e.switchHandleActiveInset,insetInlineStart:0},["&".concat(t,"-checked ").concat(l,"::before")]:{insetInlineEnd:0,insetInlineStart:e.switchHandleActiveInset}}}}},j=e=>{let{componentCls:t,trackHeight:n,trackPadding:r,innerMinMargin:o,innerMaxMargin:i,handleSize:a,calc:l}=e,c="".concat(t,"-inner"),s=(0,b.bf)(l(a).add(l(r).mul(2)).equal()),u=(0,b.bf)(l(i).mul(2).equal());return{[t]:{[c]:{display:"block",overflow:"hidden",borderRadius:100,height:"100%",paddingInlineStart:i,paddingInlineEnd:o,transition:"padding-inline-start ".concat(e.switchDuration," ease-in-out, padding-inline-end ").concat(e.switchDuration," ease-in-out"),["".concat(c,"-checked, ").concat(c,"-unchecked")]:{display:"block",color:e.colorTextLightSolid,fontSize:e.fontSizeSM,transition:"margin-inline-start ".concat(e.switchDuration," ease-in-out, margin-inline-end ").concat(e.switchDuration," ease-in-out"),pointerEvents:"none"},["".concat(c,"-checked")]:{marginInlineStart:"calc(-100% + ".concat(s," - ").concat(u,")"),marginInlineEnd:"calc(100% - ".concat(s," + ").concat(u,")")},["".concat(c,"-unchecked")]:{marginTop:l(n).mul(-1).equal(),marginInlineStart:0,marginInlineEnd:0}},["&".concat(t,"-checked 
").concat(c)]:{paddingInlineStart:o,paddingInlineEnd:i,["".concat(c,"-checked")]:{marginInlineStart:0,marginInlineEnd:0},["".concat(c,"-unchecked")]:{marginInlineStart:"calc(100% - ".concat(s," + ").concat(u,")"),marginInlineEnd:"calc(-100% + ".concat(s," - ").concat(u,")")}},["&:not(".concat(t,"-disabled):active")]:{["&:not(".concat(t,"-checked) ").concat(c)]:{["".concat(c,"-unchecked")]:{marginInlineStart:l(r).mul(2).equal(),marginInlineEnd:l(r).mul(-1).mul(2).equal()}},["&".concat(t,"-checked ").concat(c)]:{["".concat(c,"-checked")]:{marginInlineStart:l(r).mul(-1).mul(2).equal(),marginInlineEnd:l(r).mul(2).equal()}}}}}},P=e=>{let{componentCls:t,trackHeight:n,trackMinWidth:r}=e;return{[t]:Object.assign(Object.assign(Object.assign(Object.assign({},(0,w.Wf)(e)),{position:"relative",display:"inline-block",boxSizing:"border-box",minWidth:r,height:n,lineHeight:"".concat((0,b.bf)(n)),verticalAlign:"middle",background:e.colorTextQuaternary,border:"0",borderRadius:100,cursor:"pointer",transition:"all ".concat(e.motionDurationMid),userSelect:"none",["&:hover:not(".concat(t,"-disabled)")]:{background:e.colorTextTertiary}}),(0,w.Qy)(e)),{["&".concat(t,"-checked")]:{background:e.switchColor,["&:hover:not(".concat(t,"-disabled)")]:{background:e.colorPrimaryHover}},["&".concat(t,"-loading, &").concat(t,"-disabled")]:{cursor:"not-allowed",opacity:e.switchDisabledOpacity,"*":{boxShadow:"none",cursor:"not-allowed"}},["&".concat(t,"-rtl")]:{direction:"rtl"}})}};var M=(0,S.I$)("Switch",e=>{let t=(0,k.TS)(e,{switchDuration:e.motionDurationMid,switchColor:e.colorPrimary,switchDisabledOpacity:e.opacityLoading,switchLoadingIconSize:e.calc(e.fontSizeIcon).mul(.75).equal(),switchLoadingIconColor:"rgba(0, 0, 0, 
".concat(e.opacityLoading,")"),switchHandleActiveInset:"-30%"});return[P(t),j(t),O(t),C(t),E(t)]},e=>{let{fontSize:t,lineHeight:n,controlHeight:r,colorWhite:o}=e,i=t*n,a=r/2,l=i-4,c=a-4;return{trackHeight:i,trackHeightSM:a,trackMinWidth:2*l+8,trackMinWidthSM:2*c+4,trackPadding:2,handleBg:o,handleSize:l,handleSizeSM:c,handleShadow:"0 2px 4px 0 ".concat(new x.C("#00230b").setAlpha(.2).toRgbString()),innerMinMargin:l/2,innerMaxMargin:l+2+4,innerMinMarginSM:c/2,innerMaxMarginSM:c+2+4}}),N=function(e,t){var n={};for(var r in e)Object.prototype.hasOwnProperty.call(e,r)&&0>t.indexOf(r)&&(n[r]=e[r]);if(null!=e&&"function"==typeof Object.getOwnPropertySymbols)for(var o=0,r=Object.getOwnPropertySymbols(e);ot.indexOf(r[o])&&Object.prototype.propertyIsEnumerable.call(e,r[o])&&(n[r[o]]=e[r[o]]);return n};let I=r.forwardRef((e,t)=>{let{prefixCls:n,size:i,disabled:l,loading:c,className:s,rootClassName:u,style:f,checked:p,value:b,defaultChecked:x,defaultValue:w,onChange:S}=e,k=N(e,["prefixCls","size","disabled","loading","className","rootClassName","style","checked","value","defaultChecked","defaultValue","onChange"]),[E,C]=(0,d.Z)(!1,{value:null!=p?p:b,defaultValue:null!=x?x:w}),{getPrefixCls:O,direction:j,switch:P}=r.useContext(g.E_),I=r.useContext(v.Z),R=(null!=l?l:I)||c,T=O("switch",n),A=r.createElement("div",{className:"".concat(T,"-handle")},c&&r.createElement(o.Z,{className:"".concat(T,"-loading-icon")})),[_,D,Z]=M(T),L=(0,y.Z)(i),z=a()(null==P?void 0:P.className,{["".concat(T,"-small")]:"small"===L,["".concat(T,"-loading")]:c,["".concat(T,"-rtl")]:"rtl"===j},s,u,D,Z),B=Object.assign(Object.assign({},null==P?void 0:P.style),f);return _(r.createElement(m.Z,{component:"Switch"},r.createElement(h,Object.assign({},k,{checked:E,onChange:function(){C(arguments.length<=0?void 0:arguments[0]),null==S||S.apply(void 0,arguments)},prefixCls:T,className:z,style:B,disabled:R,ref:t,loadingIcon:A}))))});I.__ANT_SWITCH=!0;var R=I},28181:function(e,t,n){"use 
strict";n.d(t,{Z:function(){return of}});var r,o,i=n(2265),a={},l="rc-table-internal-hook",c=n(26365),s=n(58525),u=n(27380),d=n(16671),f=n(54887);function p(e){var t=i.createContext(void 0);return{Context:t,Provider:function(e){var n=e.value,r=e.children,o=i.useRef(n);o.current=n;var a=i.useState(function(){return{getValue:function(){return o.current},listeners:new Set}}),l=(0,c.Z)(a,1)[0];return(0,u.Z)(function(){(0,f.unstable_batchedUpdates)(function(){l.listeners.forEach(function(e){e(n)})})},[n]),i.createElement(t.Provider,{value:l},r)},defaultValue:e}}function h(e,t){var n=(0,s.Z)("function"==typeof t?t:function(e){if(void 0===t)return e;if(!Array.isArray(t))return e[t];var n={};return t.forEach(function(t){n[t]=e[t]}),n}),r=i.useContext(null==e?void 0:e.Context),o=r||{},a=o.listeners,l=o.getValue,f=i.useRef();f.current=n(r?l():null==e?void 0:e.defaultValue);var p=i.useState({}),h=(0,c.Z)(p,2)[1];return(0,u.Z)(function(){if(r)return a.add(e),function(){a.delete(e)};function e(e){var t=n(e);(0,d.Z)(f.current,t,!0)||h({})}},[r]),f.current}var m=n(1119),g=n(28791);function v(){var e=i.createContext(null);function t(){return i.useContext(e)}return{makeImmutable:function(n,r){var o=(0,g.Yr)(n),a=function(a,l){var c=o?{ref:l}:{},s=i.useRef(0),u=i.useRef(a);return null!==t()?i.createElement(n,(0,m.Z)({},a,c)):((!r||r(u.current,a))&&(s.current+=1),u.current=a,i.createElement(e.Provider,{value:s.current},i.createElement(n,(0,m.Z)({},a,c))))};return o?i.forwardRef(a):a},responseImmutable:function(e,n){var r=(0,g.Yr)(e),o=function(n,o){return t(),i.createElement(e,(0,m.Z)({},n,r?{ref:o}:{}))};return r?i.memo(i.forwardRef(o),n):i.memo(o,n)},useImmutableMark:t}}var y=v();y.makeImmutable,y.responseImmutable,y.useImmutableMark;var b=v(),x=b.makeImmutable,w=b.responseImmutable,S=b.useImmutableMark,k=p();i.memo(function(){var e,t,n,r,o,a=(t=i.useRef(0),t.current+=1,n=i.useRef(void 0),r=[],Object.keys(e||{}).map(function(t){var o;(null==e?void 
0:e[t])!==(null===(o=n.current)||void 0===o?void 0:o[t])&&r.push(t)}),n.current=e,o=i.useRef([]),r.length&&(o.current=r),i.useDebugValue(t.current),i.useDebugValue(o.current.join(", ")),t.current);return i.createElement("h1",null,"Render Times: ",a)}).displayName="RenderBlock";var E=n(41154),C=n(31686),O=n(11993),j=n(36760),P=n.n(j),M=n(6397),N=n(16847),I=n(32559),R=i.createContext({renderWithProps:!1});function T(e){var t=[],n={};return e.forEach(function(e){for(var r=e||{},o=r.key,i=r.dataIndex,a=o||(null==i?[]:Array.isArray(i)?i:[i]).join("-")||"RC_TABLE_KEY";n[a];)a="".concat(a,"_next");n[a]=!0,t.push(a)}),t}var A=n(74126),_=function(e){var t,n=e.ellipsis,r=e.rowType,o=e.children,a=!0===n?{showTitle:!0}:n;return a&&(a.showTitle||"header"===r)&&("string"==typeof o||"number"==typeof o?t=o.toString():i.isValidElement(o)&&"string"==typeof o.props.children&&(t=o.props.children)),t},D=i.memo(function(e){var t,n,r,o,a,l,s,u,f,p,g=e.component,v=e.children,y=e.ellipsis,b=e.scope,x=e.prefixCls,w=e.className,j=e.align,I=e.record,T=e.render,D=e.dataIndex,Z=e.renderIndex,L=e.shouldCellUpdate,z=e.index,B=e.rowType,F=e.colSpan,H=e.rowSpan,q=e.fixLeft,W=e.fixRight,K=e.firstFixLeft,U=e.lastFixLeft,V=e.firstFixRight,G=e.lastFixRight,X=e.appendNode,$=e.additionalProps,Y=void 0===$?{}:$,Q=e.isSticky,J="".concat(x,"-cell"),ee=h(k,["supportSticky","allColumnsFixedLeft"]),et=ee.supportSticky,en=ee.allColumnsFixedLeft,er=(t=i.useContext(R),n=S(),(0,M.Z)(function(){if(null!=v)return[v];var e=null==D||""===D?[]:Array.isArray(D)?D:[D],n=(0,N.Z)(I,e),r=n,o=void 0;if(T){var a=T(n,I,Z);!a||"object"!==(0,E.Z)(a)||Array.isArray(a)||i.isValidElement(a)?r=a:(r=a.children,o=a.props,t.renderWithProps=!0)}return[r,o]},[n,I,v,D,T,Z],function(e,n){if(L){var r=(0,c.Z)(e,2)[1];return L((0,c.Z)(n,2)[1],r)}return!!t.renderWithProps||!(0,d.Z)(e,n,!0)})),eo=(0,c.Z)(er,2),ei=eo[0],ea=eo[1],el={},ec="number"==typeof q&&et,es="number"==typeof 
W&&et;ec&&(el.position="sticky",el.left=q),es&&(el.position="sticky",el.right=W);var eu=null!==(r=null!==(o=null!==(a=null==ea?void 0:ea.colSpan)&&void 0!==a?a:Y.colSpan)&&void 0!==o?o:F)&&void 0!==r?r:1,ed=null!==(l=null!==(s=null!==(u=null==ea?void 0:ea.rowSpan)&&void 0!==u?u:Y.rowSpan)&&void 0!==s?s:H)&&void 0!==l?l:1,ef=h(k,function(e){var t,n;return[(t=ed||1,n=e.hoverStartRow,z<=e.hoverEndRow&&z+t-1>=n),e.onHover]}),ep=(0,c.Z)(ef,2),eh=ep[0],em=ep[1],eg=(0,A.zX)(function(e){var t;I&&em(z,z+ed-1),null==Y||null===(t=Y.onMouseEnter)||void 0===t||t.call(Y,e)}),ev=(0,A.zX)(function(e){var t;I&&em(-1,-1),null==Y||null===(t=Y.onMouseLeave)||void 0===t||t.call(Y,e)});if(0===eu||0===ed)return null;var ey=null!==(f=Y.title)&&void 0!==f?f:_({rowType:B,ellipsis:y,children:ei}),eb=P()(J,w,(p={},(0,O.Z)((0,O.Z)((0,O.Z)((0,O.Z)((0,O.Z)((0,O.Z)((0,O.Z)((0,O.Z)((0,O.Z)((0,O.Z)(p,"".concat(J,"-fix-left"),ec&&et),"".concat(J,"-fix-left-first"),K&&et),"".concat(J,"-fix-left-last"),U&&et),"".concat(J,"-fix-left-all"),U&&en&&et),"".concat(J,"-fix-right"),es&&et),"".concat(J,"-fix-right-first"),V&&et),"".concat(J,"-fix-right-last"),G&&et),"".concat(J,"-ellipsis"),y),"".concat(J,"-with-append"),X),"".concat(J,"-fix-sticky"),(ec||es)&&Q&&et),(0,O.Z)(p,"".concat(J,"-row-hover"),!ea&&eh)),Y.className,null==ea?void 0:ea.className),ex={};j&&(ex.textAlign=j);var ew=(0,C.Z)((0,C.Z)((0,C.Z)((0,C.Z)({},el),Y.style),ex),null==ea?void 0:ea.style),eS=ei;return"object"!==(0,E.Z)(eS)||Array.isArray(eS)||i.isValidElement(eS)||(eS=null),y&&(U||V)&&(eS=i.createElement("span",{className:"".concat(J,"-content")},eS)),i.createElement(g,(0,m.Z)({},ea,Y,{className:eb,style:ew,title:ey,scope:b,onMouseEnter:eg,onMouseLeave:ev,colSpan:1!==eu?eu:null,rowSpan:1!==ed?ed:null}),X,eS)});function Z(e,t,n,r,o,i){var a,l,c=n[e]||{},s=n[t]||{};"left"===c.fixed?a=r.left["rtl"===o?t:e]:"right"===s.fixed&&(l=r.right["rtl"===o?e:t]);var u=!1,d=!1,f=!1,p=!1,h=n[t+1],m=n[e-1],g=!(null!=i&&i.children);return"rtl"===o?void 
0!==a?p=!(m&&"left"===m.fixed)&&g:void 0!==l&&(f=!(h&&"right"===h.fixed)&&g):void 0!==a?u=!(h&&"left"===h.fixed)&&g:void 0!==l&&(d=!(m&&"right"===m.fixed)&&g),{fixLeft:a,fixRight:l,lastFixLeft:u,firstFixRight:d,lastFixRight:f,firstFixLeft:p,isSticky:r.isSticky}}var L=i.createContext({}),z=n(6989),B=["children"];function F(e){return e.children}F.Row=function(e){var t=e.children,n=(0,z.Z)(e,B);return i.createElement("tr",n,t)},F.Cell=function(e){var t=e.className,n=e.index,r=e.children,o=e.colSpan,a=void 0===o?1:o,l=e.rowSpan,c=e.align,s=h(k,["prefixCls","direction"]),u=s.prefixCls,d=s.direction,f=i.useContext(L),p=f.scrollColumnIndex,g=f.stickyOffsets,v=f.flattenColumns,y=f.columns,b=n+a-1+1===p?a+1:a,x=Z(n,n+b-1,v,g,d,null==y?void 0:y[n]);return i.createElement(D,(0,m.Z)({className:t,index:n,component:"td",prefixCls:u,record:null,dataIndex:null,align:c,colSpan:b,rowSpan:l,render:function(){return r}},x))};var H=w(function(e){var t=e.children,n=e.stickyOffsets,r=e.flattenColumns,o=e.columns,a=h(k,"prefixCls"),l=r.length-1,c=r[l],s=i.useMemo(function(){return{stickyOffsets:n,flattenColumns:r,scrollColumnIndex:null!=c&&c.scrollbar?l:null,columns:o}},[c,r,l,n,o]);return i.createElement(L.Provider,{value:s},i.createElement("tfoot",{className:"".concat(a,"-summary")},t))}),q=n(31474),W=n(2857),K=n(10281),U=n(3208),V=n(18242);function G(e,t,n,r){return i.useMemo(function(){if(null!=n&&n.size){for(var o=[],i=0;i<(null==e?void 0:e.length);i+=1)!function e(t,n,r,o,i,a,l){t.push({record:n,indent:r,index:l});var c=a(n),s=null==i?void 0:i.has(c);if(n&&Array.isArray(n[o])&&s)for(var u=0;u1?n-1:0),o=1;o=0;c-=1){var s=t[c],u=n&&n[c],d=u&&u[ei];if(s||d||l){var f=d||{},p=(f.columnType,(0,z.Z)(f,ea));o.unshift(i.createElement("col",(0,m.Z)({key:c,style:{width:s}},p))),l=!0}}return 
i.createElement("colgroup",null,o)},ec=n(83145),es=["className","noData","columns","flattenColumns","colWidths","columCount","stickyOffsets","direction","fixHeader","stickyTopOffset","stickyBottomOffset","stickyClassName","onScroll","maxContentScroll","children"],eu=i.forwardRef(function(e,t){var n=e.className,r=e.noData,o=e.columns,a=e.flattenColumns,l=e.colWidths,c=e.columCount,s=e.stickyOffsets,u=e.direction,d=e.fixHeader,f=e.stickyTopOffset,p=e.stickyBottomOffset,m=e.stickyClassName,v=e.onScroll,y=e.maxContentScroll,b=e.children,x=(0,z.Z)(e,es),w=h(k,["prefixCls","scrollbarSize","isSticky"]),S=w.prefixCls,E=w.scrollbarSize,j=w.isSticky,M=j&&!d?0:E,N=i.useRef(null),I=i.useCallback(function(e){(0,g.mH)(t,e),(0,g.mH)(N,e)},[]);i.useEffect(function(){var e;function t(e){var t=e.currentTarget,n=e.deltaX;n&&(v({currentTarget:t,scrollLeft:t.scrollLeft+n}),e.preventDefault())}return null===(e=N.current)||void 0===e||e.addEventListener("wheel",t),function(){var e;null===(e=N.current)||void 0===e||e.removeEventListener("wheel",t)}},[]);var R=i.useMemo(function(){return a.every(function(e){return e.width})},[a]),T=a[a.length-1],A={fixed:T?T.fixed:null,scrollbar:!0,onHeaderCell:function(){return{className:"".concat(S,"-cell-scrollbar")}}},_=(0,i.useMemo)(function(){return M?[].concat((0,ec.Z)(o),[A]):o},[M,o]),D=(0,i.useMemo)(function(){return M?[].concat((0,ec.Z)(a),[A]):a},[M,a]),Z=(0,i.useMemo)(function(){var e=s.right,t=s.left;return(0,C.Z)((0,C.Z)({},s),{},{left:"rtl"===u?[].concat((0,ec.Z)(t.map(function(e){return e+M})),[0]):t,right:"rtl"===u?e:[].concat((0,ec.Z)(e.map(function(e){return e+M})),[0]),isSticky:j})},[M,s,j]),L=(0,i.useMemo)(function(){for(var e=[],t=0;t1?"colgroup":"col":null,ellipsis:a.ellipsis,align:a.align,component:a.title?l:c,prefixCls:f,key:g[t]},s,{additionalProps:n,rowType:"header"}))}))}ef.displayName="HeaderRow";var ep=w(function(e){var 
t=e.stickyOffsets,n=e.columns,r=e.flattenColumns,o=e.onHeaderRow,a=h(k,["prefixCls","getComponent"]),l=a.prefixCls,c=a.getComponent,s=i.useMemo(function(){return function(e){var t=[];!function e(n,r){var o=arguments.length>2&&void 0!==arguments[2]?arguments[2]:0;t[o]=t[o]||[];var i=r;return n.filter(Boolean).map(function(n){var r={key:n.key,className:n.className||"",children:n.title,column:n,colStart:i},a=1,l=n.children;return l&&l.length>0&&(a=e(l,i,o+1).reduce(function(e,t){return e+t},0),r.hasSubColumns=!0),"colSpan"in n&&(a=n.colSpan),"rowSpan"in n&&(r.rowSpan=n.rowSpan),r.colSpan=a,r.colEnd=r.colStart+a-1,t[o].push(r),i+=a,a})}(e,0);for(var n=t.length,r=function(e){t[e].forEach(function(t){("rowSpan"in t)||t.hasSubColumns||(t.rowSpan=n-e)})},o=0;o1&&void 0!==arguments[1]?arguments[1]:"";return"number"==typeof t?t:t.endsWith("%")?e*parseFloat(t)/100:null}var eg=["children"],ev=["fixed"];function ey(e){return(0,eh.Z)(e).filter(function(e){return i.isValidElement(e)}).map(function(e){var t=e.key,n=e.props,r=n.children,o=(0,z.Z)(n,eg),i=(0,C.Z)({key:t},o);return r&&(i.children=ey(r)),i})}function eb(e){var t=arguments.length>1&&void 0!==arguments[1]?arguments[1]:"key";return e.filter(function(e){return e&&"object"===(0,E.Z)(e)}).reduce(function(e,n,r){var o=n.fixed,i=!0===o?"left":o,a="".concat(t,"-").concat(r),l=n.children;return l&&l.length>0?[].concat((0,ec.Z)(e),(0,ec.Z)(eb(l,a).map(function(e){return(0,C.Z)({fixed:i},e)}))):[].concat((0,ec.Z)(e),[(0,C.Z)((0,C.Z)({key:a},n),{},{fixed:i})])},[])}var ex=function(e,t){var n=e.prefixCls,r=e.columns,o=e.children,l=e.expandable,s=e.expandedKeys,u=e.columnTitle,d=e.getRowKey,f=e.onTriggerExpand,p=e.expandIcon,h=e.rowExpandable,m=e.expandIconColumnIndex,g=e.direction,v=e.expandRowByClick,y=e.columnWidth,b=e.fixed,x=e.scrollWidth,w=e.clientWidth,S=i.useMemo(function(){return function e(t){return t.filter(function(e){return e&&"object"===(0,E.Z)(e)&&!e.hidden}).map(function(t){var n=t.children;return 
n&&n.length>0?(0,C.Z)((0,C.Z)({},t),{},{children:e(n)}):t})}((r||ey(o)||[]).slice())},[r,o]),k=i.useMemo(function(){if(l){var e,t=S.slice();if(!t.includes(a)){var r=m||0;r>=0&&t.splice(r,0,a)}var o=t.indexOf(a);t=t.filter(function(e,t){return e!==a||t===o});var c=S[o];e=("left"===b||b)&&!m?"left":("right"===b||b)&&m===S.length?"right":c?c.fixed:null;var g=(0,O.Z)((0,O.Z)((0,O.Z)((0,O.Z)((0,O.Z)((0,O.Z)({},ei,{className:"".concat(n,"-expand-icon-col"),columnType:"EXPAND_COLUMN"}),"title",u),"fixed",e),"className","".concat(n,"-row-expand-icon-cell")),"width",y),"render",function(e,t,r){var o=d(t,r),a=p({prefixCls:n,expanded:s.has(o),expandable:!h||h(t),record:t,onExpand:f});return v?i.createElement("span",{onClick:function(e){return e.stopPropagation()}},a):a});return t.map(function(e){return e===a?g:e})}return S.filter(function(e){return e!==a})},[l,S,d,s,p,g]),j=i.useMemo(function(){var e=k;return t&&(e=t(e)),e.length||(e=[{render:function(){return null}}]),e},[t,k,g]),P=i.useMemo(function(){return"rtl"===g?eb(j).map(function(e){var t=e.fixed,n=(0,z.Z)(e,ev),r=t;return"left"===t?r="right":"right"===t&&(r="left"),(0,C.Z)({fixed:r},n)}):eb(j)},[j,g,x]),M=i.useMemo(function(){if(x&&x>0){var e=0,t=0;P.forEach(function(n){var r=em(x,n.width);r?e+=r:t+=1});var n=Math.max(x,w),r=Math.max(n-e,t),o=t,i=r/t,a=0,l=P.map(function(e){var t=(0,C.Z)({},e),n=em(x,t.width);if(n)t.width=n;else{var l=Math.floor(i);t.width=1===o?r:l,r-=l,o-=1}return a+=t.width,t});if(a=f&&(r=f-p),a({scrollLeft:r/f*(d+2)}),x.current.x=e.pageX},I=function(){if(o.current){var 
e=eO(o.current).top,t=e+o.current.offsetHeight,n=s===window?document.documentElement.scrollTop+window.innerHeight:eO(s).top+s.clientHeight;t-(0,U.Z)()<=n||e>=n-l?b(function(e){return(0,C.Z)((0,C.Z)({},e),{},{isHiddenScrollBar:!0})}):b(function(e){return(0,C.Z)((0,C.Z)({},e),{},{isHiddenScrollBar:!1})})}},R=function(e){b(function(t){return(0,C.Z)((0,C.Z)({},t),{},{scrollLeft:e/d*f||0})})};return(i.useImperativeHandle(t,function(){return{setScrollLeft:R}}),i.useEffect(function(){var e=eC(document.body,"mouseup",M,!1),t=eC(document.body,"mousemove",N,!1);return I(),function(){e.remove(),t.remove()}},[p,E]),i.useEffect(function(){var e=eC(s,"scroll",I,!1),t=eC(window,"resize",I,!1);return function(){e.remove(),t.remove()}},[s]),i.useEffect(function(){y.isHiddenScrollBar||b(function(e){var t=o.current;return t?(0,C.Z)((0,C.Z)({},e),{},{scrollLeft:t.scrollLeft/t.scrollWidth*t.clientWidth}):e})},[y.isHiddenScrollBar]),d<=f||!p||y.isHiddenScrollBar)?null:i.createElement("div",{style:{height:(0,U.Z)(),width:f,bottom:l},className:"".concat(u,"-sticky-scroll")},i.createElement("div",{onMouseDown:function(e){e.persist(),x.current.delta=e.pageX-y.scrollLeft,x.current.x=0,j(!0),e.preventDefault()},ref:m,className:P()("".concat(u,"-sticky-scroll-bar"),(0,O.Z)({},"".concat(u,"-sticky-scroll-bar-active"),E)),style:{width:"".concat(p,"px"),transform:"translate3d(".concat(y.scrollLeft,"px, 0, 0)")}}))}),eP="rc-table",eM=[],eN={};function eI(){return"No Data"}var eR=i.forwardRef(function(e,t){var 
n,r=(0,C.Z)({rowKey:"key",prefixCls:eP,emptyText:eI},e),o=r.prefixCls,a=r.className,u=r.rowClassName,f=r.style,p=r.data,h=r.rowKey,g=r.scroll,v=r.tableLayout,y=r.direction,b=r.title,x=r.footer,w=r.summary,S=r.caption,j=r.id,I=r.showHeader,R=r.components,A=r.emptyText,_=r.onRow,D=r.onHeaderRow,L=r.internalHooks,B=r.transformColumns,G=r.internalRefs,X=r.tailor,$=r.getContainerWidth,Y=r.sticky,Q=p||eM,J=!!Q.length,ee=L===l,et=i.useCallback(function(e,t){return(0,N.Z)(R,e)||t},[R]),en=i.useMemo(function(){return"function"==typeof h?h:function(e){return e&&e[h]}},[h]),ei=et(["body"]),ea=(tH=i.useState(-1),tW=(tq=(0,c.Z)(tH,2))[0],tK=tq[1],tU=i.useState(-1),tG=(tV=(0,c.Z)(tU,2))[0],tX=tV[1],[tW,tG,i.useCallback(function(e,t){tK(e),tX(t)},[])]),es=(0,c.Z)(ea,3),eu=es[0],ef=es[1],eh=es[2],em=(tJ=(tY=r.expandable,tQ=(0,z.Z)(r,eo),!1===(t$="expandable"in r?(0,C.Z)((0,C.Z)({},tQ),tY):tQ).showExpandColumn&&(t$.expandIconColumnIndex=-1),t$).expandIcon,t0=t$.expandedRowKeys,t1=t$.defaultExpandedRowKeys,t2=t$.defaultExpandAllRows,t6=t$.expandedRowRender,t3=t$.onExpand,t4=t$.onExpandedRowsChange,t5=t$.childrenColumnName||"children",t8=i.useMemo(function(){return t6?"row":!!(r.expandable&&r.internalHooks===l&&r.expandable.__PARENT_RENDER_ICON__||Q.some(function(e){return e&&"object"===(0,E.Z)(e)&&e[t5]}))&&"nest"},[!!t6,Q]),t7=i.useState(function(){if(t1)return t1;if(t2){var e;return e=[],function t(n){(n||[]).forEach(function(n,r){e.push(en(n,r)),t(n[t5])})}(Q),e}return[]}),ne=(t9=(0,c.Z)(t7,2))[0],nt=t9[1],nn=i.useMemo(function(){return new Set(t0||ne||[])},[t0,ne]),nr=i.useCallback(function(e){var t,n=en(e,Q.indexOf(e)),r=nn.has(n);r?(nn.delete(n),t=(0,ec.Z)(nn)):t=[].concat((0,ec.Z)(nn),[n]),nt(t),t3&&t3(!r,e),t4&&t4(t)},[en,nn,Q,t3,t4]),[t$,t8,nn,tJ||ew,t5,nr]),eg=(0,c.Z)(em,6),ev=eg[0],ey=eg[1],eb=eg[2],eC=eg[3],eO=eg[4],eR=eg[5],eT=null==g?void 
0:g.x,eA=i.useState(0),e_=(0,c.Z)(eA,2),eD=e_[0],eZ=e_[1],eL=ex((0,C.Z)((0,C.Z)((0,C.Z)({},r),ev),{},{expandable:!!ev.expandedRowRender,columnTitle:ev.columnTitle,expandedKeys:eb,getRowKey:en,onTriggerExpand:eR,expandIcon:eC,expandIconColumnIndex:ev.expandIconColumnIndex,direction:y,scrollWidth:ee&&X&&"number"==typeof eT?eT:null,clientWidth:eD}),ee?B:null),ez=(0,c.Z)(eL,3),eB=ez[0],eF=ez[1],eH=ez[2],eq=null!=eH?eH:eT,eW=i.useMemo(function(){return{columns:eB,flattenColumns:eF}},[eB,eF]),eK=i.useRef(),eU=i.useRef(),eV=i.useRef(),eG=i.useRef();i.useImperativeHandle(t,function(){return{nativeElement:eK.current,scrollTo:function(e){var t;if(eV.current instanceof HTMLElement){var n=e.index,r=e.top,o=e.key;if(r)null===(i=eV.current)||void 0===i||i.scrollTo({top:r});else{var i,a,l=null!=o?o:en(Q[n]);null===(a=eV.current.querySelector('[data-row-key="'.concat(l,'"]')))||void 0===a||a.scrollIntoView()}}else null!==(t=eV.current)&&void 0!==t&&t.scrollTo&&eV.current.scrollTo(e)}}});var eX=i.useRef(),e$=i.useState(!1),eY=(0,c.Z)(e$,2),eQ=eY[0],eJ=eY[1],e0=i.useState(!1),e1=(0,c.Z)(e0,2),e2=e1[0],e6=e1[1],e3=eS(new Map),e4=(0,c.Z)(e3,2),e5=e4[0],e8=e4[1],e7=T(eF).map(function(e){return e5.get(e)}),e9=i.useMemo(function(){return e7},[e7.join("_")]),te=(no=eF.length,(0,i.useMemo)(function(){for(var e=[],t=[],n=0,r=0,o=0;o0)):(eJ(i>0),e6(i1?b-T:0,pointerEvents:"auto"}),_=i.useMemo(function(){return f?R<=1:0===N||0===R||R>1},[R,N,f]);_?A.visibility="hidden":f&&(A.height=null==p?void 0:p(R));var Z={};return(0===R||0===N)&&(Z.rowSpan=1,Z.colSpan=1),i.createElement(D,(0,m.Z)({className:P()(y,d),ellipsis:r.ellipsis,align:r.align,scope:r.rowScope,component:"div",prefixCls:n.prefixCls,key:S,record:s,index:l,renderIndex:c,dataIndex:v,render:_?function(){return null}:g,shouldCellUpdate:r.shouldCellUpdate},k,{appendNode:E,additionalProps:(0,C.Z)((0,C.Z)({},O),{},{style:A},Z)}))},eL=["data","index","className","rowKey","style","extra","getHeight"],ez=w(i.forwardRef(function(e,t){var 
n,r=e.data,o=e.index,a=e.className,l=e.rowKey,c=e.style,s=e.extra,u=e.getHeight,d=(0,z.Z)(e,eL),f=r.record,p=r.indent,g=r.index,v=h(k,["prefixCls","flattenColumns","fixColumn","componentWidth","scrollX"]),y=v.scrollX,b=v.flattenColumns,x=v.prefixCls,w=v.fixColumn,S=v.componentWidth,E=X(f,l,o,p),j=E.rowSupportExpand,M=E.expanded,N=E.rowProps,I=E.expandedRowRender,R=E.expandedRowClassName;if(j&&M){var T=I(f,o,p+1,M),A=null==R?void 0:R(f,o,p),_={};w&&(_={style:(0,O.Z)({},"--virtual-width","".concat(S,"px"))});var Z="".concat(x,"-expanded-row-cell");n=i.createElement("div",{className:P()("".concat(x,"-expanded-row"),"".concat(x,"-expanded-row-level-").concat(p+1),A)},i.createElement(D,{component:"div",prefixCls:x,className:P()(Z,(0,O.Z)({},"".concat(Z,"-fixed"),w)),additionalProps:_},T))}var L=(0,C.Z)((0,C.Z)({},c),{},{width:y});s&&(L.position="absolute",L.pointerEvents="none");var B=i.createElement("div",(0,m.Z)({},N,d,{ref:j?null:t,className:P()(a,"".concat(x,"-row"),null==N?void 0:N.className,(0,O.Z)({},"".concat(x,"-row-extra"),s)),style:(0,C.Z)((0,C.Z)({},L),null==N?void 0:N.style)}),b.map(function(e,t){return i.createElement(eZ,{key:t,rowInfo:E,column:e,colIndex:t,indent:p,index:o,renderIndex:g,record:f,inverse:s,getHeight:u})}));return j?i.createElement("div",{ref:t},B,n):B})),eB=w(i.forwardRef(function(e,t){var n,r=e.data,o=e.onScroll,a=h(k,["flattenColumns","onColumnResize","getRowKey","prefixCls","expandedKeys","childrenColumnName","emptyNode","scrollX"]),l=a.flattenColumns,s=a.onColumnResize,u=a.getRowKey,d=a.expandedKeys,f=a.prefixCls,p=a.childrenColumnName,g=a.emptyNode,v=a.scrollX,y=h(e_),b=y.sticky,x=y.scrollY,w=y.listItemHeight,S=i.useRef(),C=G(r,p,d,u),O=i.useMemo(function(){var e=0;return l.map(function(t){var n=t.width,r=t.key;return e+=n,[r,n,e]})},[l]),j=i.useMemo(function(){return O.map(function(e){return e[2]})},[O]);i.useEffect(function(){O.forEach(function(e){var t=(0,c.Z)(e,2);s(t[0],t[1])})},[O]),i.useImperativeHandle(t,function(){var 
e={scrollTo:function(e){var t;null===(t=S.current)||void 0===t||t.scrollTo(e)}};return Object.defineProperty(e,"scrollLeft",{get:function(){var e;return(null===(e=S.current)||void 0===e?void 0:e.getScrollInfo().x)||0},set:function(e){var t;null===(t=S.current)||void 0===t||t.scrollTo({left:e})}}),e});var M=function(e,t){var n=null===(o=C[t])||void 0===o?void 0:o.record,r=e.onCell;if(r){var o,i,a=r(n,t);return null!==(i=null==a?void 0:a.rowSpan)&&void 0!==i?i:1}return 1},N=i.useMemo(function(){return{columnsOffset:j}},[j]),I="".concat(f,"-tbody");if(C.length){var R={};b&&(R.position="sticky",R.bottom=0,"object"===(0,E.Z)(b)&&b.offsetScroll&&(R.bottom=b.offsetScroll)),n=i.createElement(eA.Z,{fullHeight:!1,ref:S,styles:{horizontalScrollBar:R},className:P()(I,"".concat(I,"-virtual")),height:x,itemHeight:w||24,data:C,itemKey:function(e){return u(e.record)},scrollWidth:v,onVirtualScroll:function(e){o({scrollLeft:e.x})},extraRender:function(e){var t=e.start,n=e.end,r=e.getSize,o=e.offsetY;if(n<0)return null;for(var a=l.filter(function(e){return 0===M(e,t)}),c=t,s=function(e){if(!(a=a.filter(function(t){return 0===M(t,e)})).length)return c=e,1},d=t;d>=0&&!s(d);d-=1);for(var f=l.filter(function(e){return 1!==M(e,n)}),p=n,h=function(e){if(!(f=f.filter(function(t){return 1!==M(t,e)})).length)return p=Math.max(e-1,n),1},m=n;m1})&&g.push(e)},y=c;y<=p;y+=1)if(v(y))continue;return g.map(function(e){var t=C[e],n=u(t.record,e),a=r(n);return i.createElement(ez,{key:e,data:t,rowKey:n,index:e,style:{top:-o+a.top},extra:!0,getHeight:function(t){var o=e+t-1,i=r(n,u(C[o].record,o));return i.bottom-i.top}})})}},function(e,t,n){var r=u(e.record,t);return i.createElement(ez,(0,m.Z)({data:e,rowKey:r,index:t},n))})}else n=i.createElement("div",{className:P()("".concat(f,"-placeholder"))},i.createElement(D,{component:"div",prefixCls:f},g));return i.createElement(eD.Provider,{value:N},n)})),eF=function(e,t){var n=t.ref,r=t.onScroll;return 
i.createElement(eB,{ref:n,data:e,onScroll:r})},eH=i.forwardRef(function(e,t){var n=e.columns,r=e.scroll,o=e.sticky,a=e.prefixCls,c=void 0===a?eP:a,s=e.className,u=e.listItemHeight,d=e.components,f=r||{},p=f.x,h=f.y;"number"!=typeof p&&(p=1),"number"!=typeof h&&(h=500);var g=i.useMemo(function(){return{sticky:o,scrollY:h,listItemHeight:u}},[o,h,u]);return i.createElement(e_.Provider,{value:g},i.createElement(eT,(0,m.Z)({},e,{className:P()(s,"".concat(c,"-virtual")),scroll:(0,C.Z)((0,C.Z)({},r),{},{x:p}),components:(0,C.Z)((0,C.Z)({},d),{},{body:eF}),columns:n,internalHooks:l,tailor:!0,ref:t})))});x(eH,void 0);var eq=n(70464),eW=n(76405),eK=n(25049),eU=n(63496),eV=n(15354),eG=n(15900),eX=i.createContext(null),e$=i.memo(function(e){for(var t,n=e.prefixCls,r=e.level,o=e.isStart,a=e.isEnd,l="".concat(n,"-indent-unit"),c=[],s=0;s1&&void 0!==arguments[1]?arguments[1]:null;return n.map(function(s,u){for(var d,f=eJ(r?r.pos:"0",u),p=e0(s[i],f),h=0;h1&&void 0!==arguments[1]?arguments[1]:{},f=d.initWrapper,p=d.processEntity,h=d.onProcessFinished,m=d.externalGetKey,g=d.childrenPropName,v=d.fieldNames,y=arguments.length>2?arguments[2]:void 0,b={},x={},w={posEntities:b,keyEntities:x};return f&&(w=f(w)||w),t=function(e){var t=e.node,n=e.index,r=e.pos,o=e.key,i=e.parentPos,a=e.level,l={node:t,nodes:e.nodes,index:n,key:o,pos:r,level:a},c=e0(o,r);b[r]=l,x[c]=l,l.parent=b[i],l.parent&&(l.parent.children=l.parent.children||[],l.parent.children.push(l)),p&&p(l,w)},n={externalGetKey:m||y,childrenPropName:g,fieldNames:v},i=(o=("object"===(0,E.Z)(n)?n:{externalGetKey:n})||{}).childrenPropName,a=o.externalGetKey,c=(l=e1(o.fieldNames)).key,s=l.children,u=i||s,a?"string"==typeof a?r=function(e){return e[a]}:"function"==typeof a&&(r=function(e){return a(e)}):r=function(e,t){return e0(e[c],t)},function n(o,i,a,l){var c=o?o[u]:e,s=o?eJ(a.pos,i):"0",d=o?[].concat((0,ec.Z)(l),[o]):[];if(o){var 
f=r(o,s);t({node:o,index:i,pos:s,key:f,parentPos:a.node?a.pos:null,level:a.level+1,nodes:d})}c&&c.forEach(function(e,t){n(e,t,{node:o,pos:s,level:a?a.level+1:-1},d)})}(null),h&&h(w),w}function e4(e,t){var n=t.expandedKeys,r=t.selectedKeys,o=t.loadedKeys,i=t.loadingKeys,a=t.checkedKeys,l=t.halfCheckedKeys,c=t.dragOverNodeKey,s=t.dropPosition,u=t.keyEntities[e];return{eventKey:e,expanded:-1!==n.indexOf(e),selected:-1!==r.indexOf(e),loaded:-1!==o.indexOf(e),loading:-1!==i.indexOf(e),checked:-1!==a.indexOf(e),halfChecked:-1!==l.indexOf(e),pos:String(u?u.pos:""),dragOver:c===e&&0===s,dragOverGapTop:c===e&&-1===s,dragOverGapBottom:c===e&&1===s}}function e5(e){var t=e.data,n=e.expanded,r=e.selected,o=e.checked,i=e.loaded,a=e.loading,l=e.halfChecked,c=e.dragOver,s=e.dragOverGapTop,u=e.dragOverGapBottom,d=e.pos,f=e.active,p=e.eventKey,h=(0,C.Z)((0,C.Z)({},t),{},{expanded:n,selected:r,checked:o,loaded:i,loading:a,halfChecked:l,dragOver:c,dragOverGapTop:s,dragOverGapBottom:u,pos:d,active:f,key:p});return"props"in h||Object.defineProperty(h,"props",{get:function(){return(0,I.ZP)(!1,"Second param return from event is node data instead of TreeNode instance. 
Please read value directly instead of reading from `props`."),e}}),h}var e8=["eventKey","className","style","dragOver","dragOverGapTop","dragOverGapBottom","isLeaf","isStart","isEnd","expanded","selected","checked","halfChecked","loading","domRef","active","data","onMouseMove","selectable"],e7="open",e9="close",te=function(e){(0,eV.Z)(n,e);var t=(0,eG.Z)(n);function n(){var e;(0,eW.Z)(this,n);for(var r=arguments.length,o=Array(r),a=0;a=0&&n.splice(r,1),n}function tr(e,t){var n=(e||[]).slice();return -1===n.indexOf(t)&&n.push(t),n}function to(e){return e.split("-")}function ti(e,t,n,r,o,i,a,l,c,s){var u,d,f=e.clientX,p=e.clientY,h=e.target.getBoundingClientRect(),m=h.top,g=h.height,v=(("rtl"===s?-1:1)*(((null==o?void 0:o.x)||0)-f)-12)/r,y=l[n.props.eventKey];if(p-1.5?i({dragNode:O,dropNode:j,dropPosition:1})?k=1:P=!1:i({dragNode:O,dropNode:j,dropPosition:0})?k=0:i({dragNode:O,dropNode:j,dropPosition:1})?k=1:P=!1:i({dragNode:O,dropNode:j,dropPosition:1})?k=1:P=!1,{dropPosition:k,dropLevelOffset:E,dropTargetKey:y.key,dropTargetPos:y.pos,dragOverNodeKey:S,dropContainerKey:0===k?null:(null===(d=y.parent)||void 0===d?void 0:d.key)||null,dropAllowed:P}}function ta(e,t){if(e)return t.multiple?e.slice():e.length?[e[0]]:e}function tl(e){var t;if(!e)return null;if(Array.isArray(e))t={checkedKeys:e,halfCheckedKeys:void 0};else{if("object"!==(0,E.Z)(e))return(0,I.ZP)(!1,"`checkedKeys` is not an array or an object"),null;t={checkedKeys:e.checked||void 0,halfCheckedKeys:e.halfChecked||void 0}}return t}function tc(e,t){var n=new Set;return(e||[]).forEach(function(e){!function e(r){if(!n.has(r)){var o=t[r];if(o){n.add(r);var i=o.parent;!o.node.disabled&&i&&e(i.key)}}}(e)}),(0,ec.Z)(n)}function ts(e,t){var n=new Set;return e.forEach(function(e){t.has(e)||n.add(e)}),n}function tu(e){var t=e||{},n=t.disabled,r=t.disableCheckbox,o=t.checkable;return!!(n||r)||!1===o}function td(e,t,n,r){var o,i=[];o=r||tu;var a=new Set(e.filter(function(e){var t=!!n[e];return t||i.push(e),t})),l=new 
Map,c=0;return Object.keys(n).forEach(function(e){var t=n[e],r=t.level,o=l.get(r);o||(o=new Set,l.set(r,o)),o.add(t),c=Math.max(c,r)}),(0,I.ZP)(!i.length,"Tree missing follow keys: ".concat(i.slice(0,100).map(function(e){return"'".concat(e,"'")}).join(", "))),!0===t?function(e,t,n,r){for(var o=new Set(e),i=new Set,a=0;a<=n;a+=1)(t.get(a)||new Set).forEach(function(e){var t=e.key,n=e.node,i=e.children,a=void 0===i?[]:i;o.has(t)&&!r(n)&&a.filter(function(e){return!r(e.node)}).forEach(function(e){o.add(e.key)})});for(var l=new Set,c=n;c>=0;c-=1)(t.get(c)||new Set).forEach(function(e){var t=e.parent;if(!(r(e.node)||!e.parent||l.has(e.parent.key))){if(r(e.parent.node)){l.add(t.key);return}var n=!0,a=!1;(t.children||[]).filter(function(e){return!r(e.node)}).forEach(function(e){var t=e.key,r=o.has(t);n&&!r&&(n=!1),!a&&(r||i.has(t))&&(a=!0)}),n&&o.add(t.key),a&&i.add(t.key),l.add(t.key)}});return{checkedKeys:Array.from(o),halfCheckedKeys:Array.from(ts(i,o))}}(a,l,c,o):function(e,t,n,r,o){for(var i=new Set(e),a=new Set(t),l=0;l<=r;l+=1)(n.get(l)||new Set).forEach(function(e){var t=e.key,n=e.node,r=e.children,l=void 0===r?[]:r;i.has(t)||a.has(t)||o(n)||l.filter(function(e){return!o(e.node)}).forEach(function(e){i.delete(e.key)})});a=new Set;for(var c=new Set,s=r;s>=0;s-=1)(n.get(s)||new Set).forEach(function(e){var t=e.parent;if(!(o(e.node)||!e.parent||c.has(e.parent.key))){if(o(e.parent.node)){c.add(t.key);return}var n=!0,r=!1;(t.children||[]).filter(function(e){return!o(e.node)}).forEach(function(e){var t=e.key,o=i.has(t);n&&!o&&(n=!1),!r&&(o||a.has(t))&&(r=!0)}),n||i.delete(t.key),r&&a.add(t.key),c.add(t.key)}});return{checkedKeys:Array.from(i),halfCheckedKeys:Array.from(ts(a,i))}}(a,t.halfCheckedKeys,l,c,o)}tt.displayName="TreeNode",tt.isTreeNode=1;var tf=n(50506),tp=n(13613),th=n(20873),tm=n(6694),tg=n(34709),tv=n(71744),ty=n(86586),tb=n(64024),tx=n(39109);let tw=i.createContext(null);var tS=n(352),tk=n(12918),tE=n(3104),tC=n(80669);let 
tO=e=>{let{checkboxCls:t}=e,n="".concat(t,"-wrapper");return[{["".concat(t,"-group")]:Object.assign(Object.assign({},(0,tk.Wf)(e)),{display:"inline-flex",flexWrap:"wrap",columnGap:e.marginXS,["> ".concat(e.antCls,"-row")]:{flex:1}}),[n]:Object.assign(Object.assign({},(0,tk.Wf)(e)),{display:"inline-flex",alignItems:"baseline",cursor:"pointer","&:after":{display:"inline-block",width:0,overflow:"hidden",content:"'\\a0'"},["& + ".concat(n)]:{marginInlineStart:0},["&".concat(n,"-in-form-item")]:{'input[type="checkbox"]':{width:14,height:14}}}),[t]:Object.assign(Object.assign({},(0,tk.Wf)(e)),{position:"relative",whiteSpace:"nowrap",lineHeight:1,cursor:"pointer",borderRadius:e.borderRadiusSM,alignSelf:"center",["".concat(t,"-input")]:{position:"absolute",inset:0,zIndex:1,cursor:"pointer",opacity:0,margin:0,["&:focus-visible + ".concat(t,"-inner")]:Object.assign({},(0,tk.oN)(e))},["".concat(t,"-inner")]:{boxSizing:"border-box",display:"block",width:e.checkboxSize,height:e.checkboxSize,direction:"ltr",backgroundColor:e.colorBgContainer,border:"".concat((0,tS.bf)(e.lineWidth)," ").concat(e.lineType," ").concat(e.colorBorder),borderRadius:e.borderRadiusSM,borderCollapse:"separate",transition:"all ".concat(e.motionDurationSlow),"&:after":{boxSizing:"border-box",position:"absolute",top:"50%",insetInlineStart:"25%",display:"table",width:e.calc(e.checkboxSize).div(14).mul(5).equal(),height:e.calc(e.checkboxSize).div(14).mul(8).equal(),border:"".concat((0,tS.bf)(e.lineWidthBold)," solid ").concat(e.colorWhite),borderTop:0,borderInlineStart:0,transform:"rotate(45deg) scale(0) translate(-50%,-50%)",opacity:0,content:'""',transition:"all ".concat(e.motionDurationFast," ").concat(e.motionEaseInBack,", opacity ").concat(e.motionDurationFast)}},"& + span":{paddingInlineStart:e.paddingXS,paddingInlineEnd:e.paddingXS}})},{["\n ".concat(n,":not(").concat(n,"-disabled),\n ").concat(t,":not(").concat(t,"-disabled)\n ")]:{["&:hover 
".concat(t,"-inner")]:{borderColor:e.colorPrimary}},["".concat(n,":not(").concat(n,"-disabled)")]:{["&:hover ".concat(t,"-checked:not(").concat(t,"-disabled) ").concat(t,"-inner")]:{backgroundColor:e.colorPrimaryHover,borderColor:"transparent"},["&:hover ".concat(t,"-checked:not(").concat(t,"-disabled):after")]:{borderColor:e.colorPrimaryHover}}},{["".concat(t,"-checked")]:{["".concat(t,"-inner")]:{backgroundColor:e.colorPrimary,borderColor:e.colorPrimary,"&:after":{opacity:1,transform:"rotate(45deg) scale(1) translate(-50%,-50%)",transition:"all ".concat(e.motionDurationMid," ").concat(e.motionEaseOutBack," ").concat(e.motionDurationFast)}}},["\n ".concat(n,"-checked:not(").concat(n,"-disabled),\n ").concat(t,"-checked:not(").concat(t,"-disabled)\n ")]:{["&:hover ".concat(t,"-inner")]:{backgroundColor:e.colorPrimaryHover,borderColor:"transparent"}}},{[t]:{"&-indeterminate":{["".concat(t,"-inner")]:{backgroundColor:e.colorBgContainer,borderColor:e.colorBorder,"&:after":{top:"50%",insetInlineStart:"50%",width:e.calc(e.fontSizeLG).div(2).equal(),height:e.calc(e.fontSizeLG).div(2).equal(),backgroundColor:e.colorPrimary,border:0,transform:"translate(-50%, -50%) scale(1)",opacity:1,content:'""'}}}}},{["".concat(n,"-disabled")]:{cursor:"not-allowed"},["".concat(t,"-disabled")]:{["&, ".concat(t,"-input")]:{cursor:"not-allowed",pointerEvents:"none"},["".concat(t,"-inner")]:{background:e.colorBgContainerDisabled,borderColor:e.colorBorder,"&:after":{borderColor:e.colorTextDisabled}},"&:after":{display:"none"},"& + span":{color:e.colorTextDisabled},["&".concat(t,"-indeterminate ").concat(t,"-inner::after")]:{background:e.colorTextDisabled}}}]};function tj(e,t){return[tO((0,tE.TS)(t,{checkboxCls:".".concat(e),checkboxSize:t.controlInteractiveSize}))]}var tP=(0,tC.I$)("Checkbox",(e,t)=>{let{prefixCls:n}=t;return[tj(n,e)]}),tM=function(e,t){var n={};for(var r in e)Object.prototype.hasOwnProperty.call(e,r)&&0>t.indexOf(r)&&(n[r]=e[r]);if(null!=e&&"function"==typeof 
Object.getOwnPropertySymbols)for(var o=0,r=Object.getOwnPropertySymbols(e);ot.indexOf(r[o])&&Object.prototype.propertyIsEnumerable.call(e,r[o])&&(n[r[o]]=e[r[o]]);return n};let tN=i.forwardRef((e,t)=>{var n;let{prefixCls:r,className:o,rootClassName:a,children:l,indeterminate:c=!1,style:s,onMouseEnter:u,onMouseLeave:d,skipGroup:f=!1,disabled:p}=e,h=tM(e,["prefixCls","className","rootClassName","children","indeterminate","style","onMouseEnter","onMouseLeave","skipGroup","disabled"]),{getPrefixCls:m,direction:g,checkbox:v}=i.useContext(tv.E_),y=i.useContext(tw),{isFormItemInput:b}=i.useContext(tx.aM),x=i.useContext(ty.Z),w=null!==(n=(null==y?void 0:y.disabled)||p)&&void 0!==n?n:x,S=i.useRef(h.value);i.useEffect(()=>{null==y||y.registerValue(h.value)},[]),i.useEffect(()=>{if(!f)return h.value!==S.current&&(null==y||y.cancelValue(S.current),null==y||y.registerValue(h.value),S.current=h.value),()=>null==y?void 0:y.cancelValue(h.value)},[h.value]);let k=m("checkbox",r),E=(0,tb.Z)(k),[C,O,j]=tP(k,E),M=Object.assign({},h);y&&!f&&(M.onChange=function(){h.onChange&&h.onChange.apply(h,arguments),y.toggleOption&&y.toggleOption({label:l,value:h.value})},M.name=y.name,M.checked=y.value.includes(h.value));let N=P()("".concat(k,"-wrapper"),{["".concat(k,"-rtl")]:"rtl"===g,["".concat(k,"-wrapper-checked")]:M.checked,["".concat(k,"-wrapper-disabled")]:w,["".concat(k,"-wrapper-in-form-item")]:b},null==v?void 0:v.className,o,a,j,E,O),I=P()({["".concat(k,"-indeterminate")]:c},tg.A,O),R=c?"mixed":void 0;return C(i.createElement(tm.Z,{component:"Checkbox",disabled:w},i.createElement("label",{className:N,style:Object.assign(Object.assign({},null==v?void 0:v.style),s),onMouseEnter:u,onMouseLeave:d},i.createElement(th.Z,Object.assign({"aria-checked":R},M,{prefixCls:k,className:I,disabled:w,ref:t})),void 0!==l&&i.createElement("span",null,l))))});var tI=function(e,t){var n={};for(var r in e)Object.prototype.hasOwnProperty.call(e,r)&&0>t.indexOf(r)&&(n[r]=e[r]);if(null!=e&&"function"==typeof 
Object.getOwnPropertySymbols)for(var o=0,r=Object.getOwnPropertySymbols(e);ot.indexOf(r[o])&&Object.prototype.propertyIsEnumerable.call(e,r[o])&&(n[r[o]]=e[r[o]]);return n};let tR=i.forwardRef((e,t)=>{let{defaultValue:n,children:r,options:o=[],prefixCls:a,className:l,rootClassName:c,style:s,onChange:u}=e,d=tI(e,["defaultValue","children","options","prefixCls","className","rootClassName","style","onChange"]),{getPrefixCls:f,direction:p}=i.useContext(tv.E_),[h,m]=i.useState(d.value||n||[]),[g,v]=i.useState([]);i.useEffect(()=>{"value"in d&&m(d.value||[])},[d.value]);let y=i.useMemo(()=>o.map(e=>"string"==typeof e||"number"==typeof e?{label:e,value:e}:e),[o]),b=f("checkbox",a),x="".concat(b,"-group"),w=(0,tb.Z)(b),[S,k,E]=tP(b,w),C=(0,eY.Z)(d,["value","disabled"]),O=o.length?y.map(e=>i.createElement(tN,{prefixCls:b,key:e.value.toString(),disabled:"disabled"in e?e.disabled:d.disabled,value:e.value,checked:h.includes(e.value),onChange:e.onChange,className:"".concat(x,"-item"),style:e.style,title:e.title,id:e.id,required:e.required},e.label)):r,j={toggleOption:e=>{let t=h.indexOf(e.value),n=(0,ec.Z)(h);-1===t?n.push(e.value):n.splice(t,1),"value"in d||m(n),null==u||u(n.filter(e=>g.includes(e)).sort((e,t)=>y.findIndex(t=>t.value===e)-y.findIndex(e=>e.value===t)))},value:h,disabled:d.disabled,name:d.name,registerValue:e=>{v(t=>[].concat((0,ec.Z)(t),[e]))},cancelValue:e=>{v(t=>t.filter(t=>t!==e))}},M=P()(x,{["".concat(x,"-rtl")]:"rtl"===p},l,c,E,w,k);return S(i.createElement("div",Object.assign({className:M,style:s},C,{ref:t}),i.createElement(tw.Provider,{value:j},O)))});tN.Group=tR,tN.__ANT_CHECKBOX=!0;var tT=n(80795),tA=n(29967);let t_={},tD="SELECT_ALL",tZ="SELECT_INVERT",tL="SELECT_NONE",tz=[],tB=(e,t)=>{let n=[];return(t||[]).forEach(t=>{n.push(t),t&&"object"==typeof t&&e in t&&(n=[].concat((0,ec.Z)(n),(0,ec.Z)(tB(e,t[e]))))}),n};var 
tF=(e,t)=>{let{preserveSelectedRowKeys:n,selectedRowKeys:r,defaultSelectedRowKeys:o,getCheckboxProps:a,onChange:l,onSelect:c,onSelectAll:s,onSelectInvert:u,onSelectNone:d,onSelectMultiple:f,columnWidth:p,type:h,selections:m,fixed:g,renderCell:v,hideSelectAll:y,checkStrictly:b=!0}=t||{},{prefixCls:x,data:w,pageData:S,getRecordByKey:k,getRowKey:E,expandType:C,childrenColumnName:O,locale:j,getPopupContainer:M}=e,N=(0,tp.ln)("Table"),[I,R]=function(e){let[t,n]=(0,i.useState)(null);return[(0,i.useCallback)((r,o,i)=>{let a=null!=t?t:r,l=Math.max(a||0,r),c=o.slice(Math.min(a||0,r),l+1).map(t=>e(t)),s=c.some(e=>!i.has(e)),u=[];return c.forEach(e=>{s?(i.has(e)||u.push(e),i.add(e)):(i.delete(e),u.push(e))}),n(s?l:null),u},[t]),e=>{n(e)}]}(e=>e),[T,A]=(0,tf.Z)(r||o||tz,{value:r}),_=i.useRef(new Map),D=(0,i.useCallback)(e=>{if(n){let t=new Map;e.forEach(e=>{let n=k(e);!n&&_.current.has(e)&&(n=_.current.get(e)),t.set(e,n)}),_.current=t}},[k,n]);i.useEffect(()=>{D(T)},[T]);let{keyEntities:Z}=(0,i.useMemo)(()=>{if(b)return{keyEntities:null};let e=w;if(n){let t=new Set(w.map((e,t)=>E(e,t))),n=Array.from(_.current).reduce((e,n)=>{let[r,o]=n;return t.has(r)?e:e.concat(o)},[]);e=[].concat((0,ec.Z)(e),(0,ec.Z)(n))}return e3(e,{externalGetKey:E,childrenPropName:O})},[w,E,b,O,n]),L=(0,i.useMemo)(()=>tB(O,S),[O,S]),z=(0,i.useMemo)(()=>{let e=new Map;return L.forEach((t,n)=>{let r=E(t,n),o=(a?a(t):null)||{};e.set(r,o)}),e},[L,E,a]),B=(0,i.useCallback)(e=>{var t;return!!(null===(t=z.get(E(e)))||void 0===t?void 0:t.disabled)},[z,E]),[F,H]=(0,i.useMemo)(()=>{if(b)return[T||[],[]];let{checkedKeys:e,halfCheckedKeys:t}=td(T,!0,Z,B);return[e||[],t]},[T,b,Z,B]),q=(0,i.useMemo)(()=>new Set("radio"===h?F.slice(0,1):F),[F,h]),W=(0,i.useMemo)(()=>"radio"===h?new Set:new Set(H),[H,h]);i.useEffect(()=>{t||A(tz)},[!!t]);let K=(0,i.useCallback)((e,t)=>{let r,o;D(e),n?(r=e,o=e.map(e=>_.current.get(e))):(r=[],o=[],e.forEach(e=>{let t=k(e);void 
0!==t&&(r.push(e),o.push(t))})),A(r),null==l||l(r,o,{type:t})},[A,k,l,n]),U=(0,i.useCallback)((e,t,n,r)=>{if(c){let o=n.map(e=>k(e));c(k(e),t,o,r)}K(n,"single")},[c,k,K]),V=(0,i.useMemo)(()=>!m||y?null:(!0===m?[tD,tZ,tL]:m).map(e=>e===tD?{key:"all",text:j.selectionAll,onSelect(){K(w.map((e,t)=>E(e,t)).filter(e=>{let t=z.get(e);return!(null==t?void 0:t.disabled)||q.has(e)}),"all")}}:e===tZ?{key:"invert",text:j.selectInvert,onSelect(){let e=new Set(q);S.forEach((t,n)=>{let r=E(t,n),o=z.get(r);(null==o?void 0:o.disabled)||(e.has(r)?e.delete(r):e.add(r))});let t=Array.from(e);u&&(N.deprecated(!1,"onSelectInvert","onChange"),u(t)),K(t,"invert")}}:e===tL?{key:"none",text:j.selectNone,onSelect(){null==d||d(),K(Array.from(q).filter(e=>{let t=z.get(e);return null==t?void 0:t.disabled}),"none")}}:e).map(e=>Object.assign(Object.assign({},e),{onSelect:function(){for(var t,n=arguments.length,r=Array(n),o=0;o{var n;let r,o,a;if(!t)return e.filter(e=>e!==t_);let l=(0,ec.Z)(e),c=new Set(q),u=L.map(E).filter(e=>!z.get(e).disabled),d=u.every(e=>c.has(e)),w=u.some(e=>c.has(e));if("radio"!==h){let e;if(V){let t={getPopupContainer:M,items:V.map((e,t)=>{let{key:n,text:r,onSelect:o}=e;return{key:null!=n?n:t,onClick:()=>{null==o||o(u)},label:r}})};e=i.createElement("div",{className:"".concat(x,"-selection-extra")},i.createElement(tT.Z,{menu:t,getPopupContainer:M},i.createElement("span",null,i.createElement(eq.Z,null))))}let t=L.map((e,t)=>{let n=E(e,t),r=z.get(n)||{};return Object.assign({checked:c.has(n)},r)}).filter(e=>{let{disabled:t}=e;return t}),n=!!t.length&&t.length===L.length,a=n&&t.every(e=>{let{checked:t}=e;return t}),l=n&&t.some(e=>{let{checked:t}=e;return t});o=i.createElement(tN,{checked:n?a:!!L.length&&d,indeterminate:n?!a&&l:!d&&w,onChange:()=>{let e=[];d?u.forEach(t=>{c.delete(t),e.push(t)}):u.forEach(t=>{c.has(t)||(c.add(t),e.push(t))});let t=Array.from(c);null==s||s(!d,t.map(e=>k(e)),e.map(e=>k(e))),K(t,"all"),R(null)},disabled:0===L.length||n,"aria-label":e?"Custom 
selection":"Select all",skipGroup:!0}),r=!y&&i.createElement("div",{className:"".concat(x,"-selection")},o,e)}if(a="radio"===h?(e,t,n)=>{let r=E(t,n),o=c.has(r);return{node:i.createElement(tA.ZP,Object.assign({},z.get(r),{checked:o,onClick:e=>e.stopPropagation(),onChange:e=>{c.has(r)||U(r,!0,[r],e.nativeEvent)}})),checked:o}}:(e,t,n)=>{var r;let o;let a=E(t,n),l=c.has(a),s=W.has(a),d=z.get(a);return o="nest"===C?s:null!==(r=null==d?void 0:d.indeterminate)&&void 0!==r?r:s,{node:i.createElement(tN,Object.assign({},d,{indeterminate:o,checked:l,skipGroup:!0,onClick:e=>e.stopPropagation(),onChange:e=>{let{nativeEvent:t}=e,{shiftKey:n}=t,r=u.findIndex(e=>e===a),o=F.some(e=>u.includes(e));if(n&&b&&o){let e=I(r,u,c),t=Array.from(c);null==f||f(!l,t.map(e=>k(e)),e.map(e=>k(e))),K(t,"multiple")}else if(b){let e=l?tn(F,a):tr(F,a);U(a,!l,e,t)}else{let{checkedKeys:e,halfCheckedKeys:n}=td([].concat((0,ec.Z)(F),[a]),!0,Z,B),r=e;if(l){let t=new Set(e);t.delete(a),r=td(Array.from(t),{checked:!1,halfCheckedKeys:n},Z,B).checkedKeys}U(a,!l,r,t)}l?R(null):R(r)}})),checked:l}},!l.includes(t_)){if(0===l.findIndex(e=>{var t;return(null===(t=e[ei])||void 0===t?void 0:t.columnType)==="EXPAND_COLUMN"})){let[e,...t]=l;l=[e,t_].concat((0,ec.Z)(t))}else l=[t_].concat((0,ec.Z)(l))}let S=l.indexOf(t_),O=(l=l.filter((e,t)=>e!==t_||t===S))[S-1],j=l[S+1],N=g;void 0===N&&((null==j?void 0:j.fixed)!==void 0?N=j.fixed:(null==O?void 0:O.fixed)!==void 0&&(N=O.fixed)),N&&O&&(null===(n=O[ei])||void 0===n?void 0:n.columnType)==="EXPAND_COLUMN"&&void 0===O.fixed&&(O.fixed=N);let T=P()("".concat(x,"-selection-col"),{["".concat(x,"-selection-col-with-dropdown")]:m&&"checkbox"===h}),A={fixed:N,width:p,className:"".concat(x,"-selection-column"),title:(null==t?void 0:t.columnTitle)?"function"==typeof t.columnTitle?t.columnTitle(o):t.columnTitle:r,render:(e,t,n)=>{let{node:r,checked:o}=a(e,t,n);return v?v(o,t,n,r):r},onCell:t.onCell,[ei]:{className:T}};return 
l.map(e=>e===t_?A:e)},[E,L,t,F,q,W,p,V,C,z,f,U,B]),q]},tH=n(53346);function tq(e){return null!=e&&e===e.window}var tW=n(91086),tK=n(33759),tU=n(51646),tV=n(6543),tG=function(){let e=!(arguments.length>0)||void 0===arguments[0]||arguments[0],t=(0,i.useRef)({}),n=(0,tU.Z)(),r=(0,tV.ZP)();return(0,u.Z)(()=>{let o=r.subscribe(r=>{t.current=r,e&&n()});return()=>r.unsubscribe(o)},[]),t.current},tX=n(13823),t$={icon:{tag:"svg",attrs:{viewBox:"64 64 896 896",focusable:"false"},children:[{tag:"path",attrs:{d:"M272.9 512l265.4-339.1c4.1-5.2.4-12.9-6.3-12.9h-77.3c-4.9 0-9.6 2.3-12.6 6.1L186.8 492.3a31.99 31.99 0 000 39.5l255.3 326.1c3 3.9 7.7 6.1 12.6 6.1H532c6.7 0 10.4-7.7 6.3-12.9L272.9 512zm304 0l265.4-339.1c4.1-5.2.4-12.9-6.3-12.9h-77.3c-4.9 0-9.6 2.3-12.6 6.1L490.8 492.3a31.99 31.99 0 000 39.5l255.3 326.1c3 3.9 7.7 6.1 12.6 6.1H836c6.7 0 10.4-7.7 6.3-12.9L576.9 512z"}}]},name:"double-left",theme:"outlined"},tY=n(55015),tQ=i.forwardRef(function(e,t){return i.createElement(tY.Z,(0,m.Z)({},e,{ref:t,icon:t$}))}),tJ={icon:{tag:"svg",attrs:{viewBox:"64 64 896 896",focusable:"false"},children:[{tag:"path",attrs:{d:"M533.2 492.3L277.9 166.1c-3-3.9-7.7-6.1-12.6-6.1H188c-6.7 0-10.4 7.7-6.3 12.9L447.1 512 181.7 851.1A7.98 7.98 0 00188 864h77.3c4.9 0 9.6-2.3 12.6-6.1l255.3-326.1c9.1-11.7 9.1-27.9 0-39.5zm304 0L581.9 166.1c-3-3.9-7.7-6.1-12.6-6.1H492c-6.7 0-10.4 7.7-6.3 12.9L751.1 512 485.7 851.1A7.98 7.98 0 00492 864h77.3c4.9 0 9.6-2.3 12.6-6.1l255.3-326.1c9.1-11.7 9.1-27.9 0-39.5z"}}]},name:"double-right",theme:"outlined"},t0=i.forwardRef(function(e,t){return i.createElement(tY.Z,(0,m.Z)({},e,{ref:t,icon:tJ}))}),t1=n(15327),t2=n(77565),t6=n(95814),t3={items_per_page:"条/页",jump_to:"跳至",jump_to_confirm:"确定",page:"页",prev_page:"上一页",next_page:"下一页",prev_5:"向前 5 页",next_5:"向后 5 页",prev_3:"向前 3 页",next_3:"向后 3 页",page_size:"页码"},t4=["10","20","50","100"],t5=function(e){var t=e.pageSizeOptions,n=void 
0===t?t4:t,r=e.locale,o=e.changeSize,a=e.pageSize,l=e.goButton,s=e.quickGo,u=e.rootPrefixCls,d=e.selectComponentClass,f=e.selectPrefixCls,p=e.disabled,h=e.buildOptionText,m=i.useState(""),g=(0,c.Z)(m,2),v=g[0],y=g[1],b=function(){return!v||Number.isNaN(v)?void 0:Number(v)},x="function"==typeof h?h:function(e){return"".concat(e," ").concat(r.items_per_page)},w=function(e){""!==v&&(e.keyCode===t6.Z.ENTER||"click"===e.type)&&(y(""),null==s||s(b()))},S="".concat(u,"-options");if(!o&&!s)return null;var k=null,E=null,C=null;if(o&&d){var O=(n.some(function(e){return e.toString()===a.toString()})?n:n.concat([a.toString()]).sort(function(e,t){return(Number.isNaN(Number(e))?0:Number(e))-(Number.isNaN(Number(t))?0:Number(t))})).map(function(e,t){return i.createElement(d.Option,{key:t,value:e.toString()},x(e))});k=i.createElement(d,{disabled:p,prefixCls:f,showSearch:!1,className:"".concat(S,"-size-changer"),optionLabelProp:"children",popupMatchSelectWidth:!1,value:(a||n[0]).toString(),onChange:function(e){null==o||o(Number(e))},getPopupContainer:function(e){return e.parentNode},"aria-label":r.page_size,defaultOpen:!1},O)}return s&&(l&&(C="boolean"==typeof l?i.createElement("button",{type:"button",onClick:w,onKeyUp:w,disabled:p,className:"".concat(S,"-quick-jumper-button")},r.jump_to_confirm):i.createElement("span",{onClick:w,onKeyUp:w},l)),E=i.createElement("div",{className:"".concat(S,"-quick-jumper")},r.jump_to,i.createElement("input",{disabled:p,type:"text",value:v,onChange:function(e){y(e.target.value)},onKeyUp:w,onBlur:function(e){!l&&""!==v&&(y(""),e.relatedTarget&&(e.relatedTarget.className.indexOf("".concat(u,"-item-link"))>=0||e.relatedTarget.className.indexOf("".concat(u,"-item"))>=0)||null==s||s(b()))},"aria-label":r.page}),r.page,C)),i.createElement("li",{className:S},k,E)},t8=function(e){var 
t,n=e.rootPrefixCls,r=e.page,o=e.active,a=e.className,l=e.showTitle,c=e.onClick,s=e.onKeyPress,u=e.itemRender,d="".concat(n,"-item"),f=P()(d,"".concat(d,"-").concat(r),(t={},(0,O.Z)(t,"".concat(d,"-active"),o),(0,O.Z)(t,"".concat(d,"-disabled"),!r),t),a),p=u(r,"page",i.createElement("a",{rel:"nofollow"},r));return p?i.createElement("li",{title:l?String(r):null,className:f,onClick:function(){c(r)},onKeyDown:function(e){s(e,c,r)},tabIndex:0},p):null},t7=function(e,t,n){return n};function t9(){}function ne(e){var t=Number(e);return"number"==typeof t&&!Number.isNaN(t)&&isFinite(t)&&Math.floor(t)===t}function nt(e,t,n){return Math.floor((n-1)/(void 0===e?t:e))+1}var nn=function(e){var t,n,r,o,a,l=e.prefixCls,s=void 0===l?"rc-pagination":l,u=e.selectPrefixCls,d=e.className,f=e.selectComponentClass,p=e.current,h=e.defaultCurrent,g=e.total,v=void 0===g?0:g,y=e.pageSize,b=e.defaultPageSize,x=e.onChange,w=void 0===x?t9:x,S=e.hideOnSinglePage,k=e.showPrevNextJumpers,E=e.showQuickJumper,j=e.showLessItems,M=e.showTitle,N=void 0===M||M,I=e.onShowSizeChange,R=void 0===I?t9:I,T=e.locale,A=void 0===T?t3:T,_=e.style,D=e.totalBoundaryShowSizeChanger,Z=e.disabled,L=e.simple,z=e.showTotal,B=e.showSizeChanger,F=e.pageSizeOptions,H=e.itemRender,q=void 0===H?t7:H,W=e.jumpPrevIcon,K=e.jumpNextIcon,U=e.prevIcon,G=e.nextIcon,X=i.useRef(null),$=(0,tf.Z)(10,{value:y,defaultValue:void 0===b?10:b}),Y=(0,c.Z)($,2),Q=Y[0],J=Y[1],ee=(0,tf.Z)(1,{value:p,defaultValue:void 0===h?1:h,postState:function(e){return Math.max(1,Math.min(e,nt(void 0,Q,v)))}}),et=(0,c.Z)(ee,2),en=et[0],er=et[1],eo=i.useState(en),ei=(0,c.Z)(eo,2),ea=ei[0],el=ei[1];(0,i.useEffect)(function(){el(en)},[en]);var ec=Math.max(1,en-(j?3:5)),es=Math.min(nt(void 0,Q,v),en+(j?3:5));function eu(t,n){var r=t||i.createElement("button",{type:"button","aria-label":n,className:"".concat(s,"-item-link")});return"function"==typeof t&&(r=i.createElement(t,(0,C.Z)({},e))),r}function ed(e){var t=e.target.value,n=nt(void 
0,Q,v);return""===t?t:Number.isNaN(Number(t))?ea:t>=n?n:Number(t)}var ef=v>Q&&E;function ep(e){var t=ed(e);switch(t!==ea&&el(t),e.keyCode){case t6.Z.ENTER:eh(t);break;case t6.Z.UP:eh(t-1);break;case t6.Z.DOWN:eh(t+1)}}function eh(e){if(ne(e)&&e!==en&&ne(v)&&v>0&&!Z){var t=nt(void 0,Q,v),n=e;return e>t?n=t:e<1&&(n=1),n!==ea&&el(n),er(n),null==w||w(n,Q),n}return en}var em=en>1,eg=en(void 0===D?50:D);function ey(){em&&eh(en-1)}function eb(){eg&&eh(en+1)}function ex(){eh(ec)}function ew(){eh(es)}function eS(e,t){if("Enter"===e.key||e.charCode===t6.Z.ENTER||e.keyCode===t6.Z.ENTER){for(var n=arguments.length,r=Array(n>2?n-2:0),o=2;ov?v:en*Q])),ej=null,eP=nt(void 0,Q,v);if(S&&v<=Q)return null;var eM=[],eN={rootPrefixCls:s,onClick:eh,onKeyPress:eS,showTitle:N,itemRender:q,page:-1},eI=en-1>0?en-1:0,eR=en+1=2*eD&&3!==en&&(eM[0]=i.cloneElement(eM[0],{className:P()("".concat(s,"-item-after-jump-prev"),eM[0].props.className)}),eM.unshift(eE)),eP-en>=2*eD&&en!==eP-2){var eK=eM[eM.length-1];eM[eM.length-1]=i.cloneElement(eK,{className:P()("".concat(s,"-item-before-jump-next"),eK.props.className)}),eM.push(ej)}1!==eH&&eM.unshift(i.createElement(t8,(0,m.Z)({},eN,{key:1,page:1}))),eq!==eP&&eM.push(i.createElement(t8,(0,m.Z)({},eN,{key:eP,page:eP})))}var eU=(t=q(eI,"prev",eu(U,"prev page")),i.isValidElement(t)?i.cloneElement(t,{disabled:!em}):t);if(eU){var eV=!em||!eP;eU=i.createElement("li",{title:N?A.prev_page:null,onClick:ey,tabIndex:eV?null:0,onKeyDown:function(e){eS(e,ey)},className:P()("".concat(s,"-prev"),(0,O.Z)({},"".concat(s,"-disabled"),eV)),"aria-disabled":eV},eU)}var eG=(n=q(eR,"next",eu(G,"next page")),i.isValidElement(n)?i.cloneElement(n,{disabled:!eg}):n);eG&&(L?(o=!eg,a=em?0:null):a=(o=!eg||!eP)?null:0,eG=i.createElement("li",{title:N?A.next_page:null,onClick:eb,tabIndex:a,onKeyDown:function(e){eS(e,eb)},className:P()("".concat(s,"-next"),(0,O.Z)({},"".concat(s,"-disabled"),o)),"aria-disabled":o},eG));var 
eX=P()(s,d,(r={},(0,O.Z)(r,"".concat(s,"-simple"),L),(0,O.Z)(r,"".concat(s,"-disabled"),Z),r));return i.createElement("ul",(0,m.Z)({className:eX,style:_,ref:X},eC),eO,eU,L?e_:eM,eG,i.createElement(t5,{locale:A,rootPrefixCls:s,disabled:Z,selectComponentClass:f,selectPrefixCls:void 0===u?"rc-select":u,changeSize:ev?function(e){var t=nt(e,Q,v),n=en>t&&0!==t?t:en;J(e),el(n),null==R||R(en,e),er(n),null==w||w(n,e)}:null,pageSize:Q,pageSizeOptions:F,quickGo:ef?eh:null,goButton:eA}))},nr=n(96257),no=n(55274),ni=n(52787);let na=e=>i.createElement(ni.default,Object.assign({},e,{showSearch:!0,size:"small"})),nl=e=>i.createElement(ni.default,Object.assign({},e,{showSearch:!0,size:"middle"}));na.Option=ni.default.Option,nl.Option=ni.default.Option;var nc=n(31282),ns=n(37433),nu=n(65265);let nd=e=>{let{componentCls:t}=e;return{["".concat(t,"-disabled")]:{"&, &:hover":{cursor:"not-allowed",["".concat(t,"-item-link")]:{color:e.colorTextDisabled,cursor:"not-allowed"}},"&:focus-visible":{cursor:"not-allowed",["".concat(t,"-item-link")]:{color:e.colorTextDisabled,cursor:"not-allowed"}}},["&".concat(t,"-disabled")]:{cursor:"not-allowed",["".concat(t,"-item")]:{cursor:"not-allowed","&:hover, &:active":{backgroundColor:"transparent"},a:{color:e.colorTextDisabled,backgroundColor:"transparent",border:"none",cursor:"not-allowed"},"&-active":{borderColor:e.colorBorder,backgroundColor:e.itemActiveBgDisabled,"&:hover, &:active":{backgroundColor:e.itemActiveBgDisabled},a:{color:e.itemActiveColorDisabled}}},["".concat(t,"-item-link")]:{color:e.colorTextDisabled,cursor:"not-allowed","&:hover, &:active":{backgroundColor:"transparent"},["".concat(t,"-simple&")]:{backgroundColor:"transparent","&:hover, &:active":{backgroundColor:"transparent"}}},["".concat(t,"-simple-pager")]:{color:e.colorTextDisabled},["".concat(t,"-jump-prev, ").concat(t,"-jump-next")]:{["".concat(t,"-item-link-icon")]:{opacity:0},["".concat(t,"-item-ellipsis")]:{opacity:1}}},["&".concat(t,"-simple")]:{["".concat(t,"-prev, 
").concat(t,"-next")]:{["&".concat(t,"-disabled ").concat(t,"-item-link")]:{"&:hover, &:active":{backgroundColor:"transparent"}}}}}},nf=e=>{let{componentCls:t}=e;return{["&".concat(t,"-mini ").concat(t,"-total-text, &").concat(t,"-mini ").concat(t,"-simple-pager")]:{height:e.itemSizeSM,lineHeight:(0,tS.bf)(e.itemSizeSM)},["&".concat(t,"-mini ").concat(t,"-item")]:{minWidth:e.itemSizeSM,height:e.itemSizeSM,margin:0,lineHeight:(0,tS.bf)(e.calc(e.itemSizeSM).sub(2).equal())},["&".concat(t,"-mini:not(").concat(t,"-disabled) ").concat(t,"-item:not(").concat(t,"-item-active)")]:{backgroundColor:"transparent",borderColor:"transparent","&:hover":{backgroundColor:e.colorBgTextHover},"&:active":{backgroundColor:e.colorBgTextActive}},["&".concat(t,"-mini ").concat(t,"-prev, &").concat(t,"-mini ").concat(t,"-next")]:{minWidth:e.itemSizeSM,height:e.itemSizeSM,margin:0,lineHeight:(0,tS.bf)(e.itemSizeSM)},["&".concat(t,"-mini:not(").concat(t,"-disabled)")]:{["".concat(t,"-prev, ").concat(t,"-next")]:{["&:hover ".concat(t,"-item-link")]:{backgroundColor:e.colorBgTextHover},["&:active ".concat(t,"-item-link")]:{backgroundColor:e.colorBgTextActive},["&".concat(t,"-disabled:hover ").concat(t,"-item-link")]:{backgroundColor:"transparent"}}},["\n &".concat(t,"-mini ").concat(t,"-prev ").concat(t,"-item-link,\n &").concat(t,"-mini ").concat(t,"-next ").concat(t,"-item-link\n ")]:{backgroundColor:"transparent",borderColor:"transparent","&::after":{height:e.itemSizeSM,lineHeight:(0,tS.bf)(e.itemSizeSM)}},["&".concat(t,"-mini ").concat(t,"-jump-prev, &").concat(t,"-mini ").concat(t,"-jump-next")]:{height:e.itemSizeSM,marginInlineEnd:0,lineHeight:(0,tS.bf)(e.itemSizeSM)},["&".concat(t,"-mini 
").concat(t,"-options")]:{marginInlineStart:e.paginationMiniOptionsMarginInlineStart,"&-size-changer":{top:e.miniOptionsSizeChangerTop},"&-quick-jumper":{height:e.itemSizeSM,lineHeight:(0,tS.bf)(e.itemSizeSM),input:Object.assign(Object.assign({},(0,nc.x0)(e)),{width:e.paginationMiniQuickJumperInputWidth,height:e.controlHeightSM})}}}},np=e=>{let{componentCls:t}=e;return{["\n &".concat(t,"-simple ").concat(t,"-prev,\n &").concat(t,"-simple ").concat(t,"-next\n ")]:{height:e.itemSizeSM,lineHeight:(0,tS.bf)(e.itemSizeSM),verticalAlign:"top",["".concat(t,"-item-link")]:{height:e.itemSizeSM,backgroundColor:"transparent",border:0,"&:hover":{backgroundColor:e.colorBgTextHover},"&:active":{backgroundColor:e.colorBgTextActive},"&::after":{height:e.itemSizeSM,lineHeight:(0,tS.bf)(e.itemSizeSM)}}},["&".concat(t,"-simple ").concat(t,"-simple-pager")]:{display:"inline-block",height:e.itemSizeSM,marginInlineEnd:e.marginXS,input:{boxSizing:"border-box",height:"100%",marginInlineEnd:e.marginXS,padding:"0 ".concat((0,tS.bf)(e.paginationItemPaddingInline)),textAlign:"center",backgroundColor:e.itemInputBg,border:"".concat((0,tS.bf)(e.lineWidth)," ").concat(e.lineType," ").concat(e.colorBorder),borderRadius:e.borderRadius,outline:"none",transition:"border-color ".concat(e.motionDurationMid),color:"inherit","&:hover":{borderColor:e.colorPrimary},"&:focus":{borderColor:e.colorPrimaryHover,boxShadow:"".concat((0,tS.bf)(e.inputOutlineOffset)," 0 ").concat((0,tS.bf)(e.controlOutlineWidth)," ").concat(e.controlOutline)},"&[disabled]":{color:e.colorTextDisabled,backgroundColor:e.colorBgContainerDisabled,borderColor:e.colorBorder,cursor:"not-allowed"}}}}},nh=e=>{let{componentCls:t}=e;return{["".concat(t,"-jump-prev, ").concat(t,"-jump-next")]:{outline:0,["".concat(t,"-item-container")]:{position:"relative",["".concat(t,"-item-link-icon")]:{color:e.colorPrimary,fontSize:e.fontSizeSM,opacity:0,transition:"all 
".concat(e.motionDurationMid),"&-svg":{top:0,insetInlineEnd:0,bottom:0,insetInlineStart:0,margin:"auto"}},["".concat(t,"-item-ellipsis")]:{position:"absolute",top:0,insetInlineEnd:0,bottom:0,insetInlineStart:0,display:"block",margin:"auto",color:e.colorTextDisabled,fontFamily:"Arial, Helvetica, sans-serif",letterSpacing:e.paginationEllipsisLetterSpacing,textAlign:"center",textIndent:e.paginationEllipsisTextIndent,opacity:1,transition:"all ".concat(e.motionDurationMid)}},"&:hover":{["".concat(t,"-item-link-icon")]:{opacity:1},["".concat(t,"-item-ellipsis")]:{opacity:0}}},["\n ".concat(t,"-prev,\n ").concat(t,"-jump-prev,\n ").concat(t,"-jump-next\n ")]:{marginInlineEnd:e.marginXS},["\n ".concat(t,"-prev,\n ").concat(t,"-next,\n ").concat(t,"-jump-prev,\n ").concat(t,"-jump-next\n ")]:{display:"inline-block",minWidth:e.itemSize,height:e.itemSize,color:e.colorText,fontFamily:e.fontFamily,lineHeight:"".concat((0,tS.bf)(e.itemSize)),textAlign:"center",verticalAlign:"middle",listStyle:"none",borderRadius:e.borderRadius,cursor:"pointer",transition:"all ".concat(e.motionDurationMid)},["".concat(t,"-prev, ").concat(t,"-next")]:{fontFamily:"Arial, Helvetica, sans-serif",outline:0,button:{color:e.colorText,cursor:"pointer",userSelect:"none"},["".concat(t,"-item-link")]:{display:"block",width:"100%",height:"100%",padding:0,fontSize:e.fontSizeSM,textAlign:"center",backgroundColor:"transparent",border:"".concat((0,tS.bf)(e.lineWidth)," ").concat(e.lineType," transparent"),borderRadius:e.borderRadius,outline:"none",transition:"all ".concat(e.motionDurationMid)},["&:hover ".concat(t,"-item-link")]:{backgroundColor:e.colorBgTextHover},["&:active 
".concat(t,"-item-link")]:{backgroundColor:e.colorBgTextActive},["&".concat(t,"-disabled:hover")]:{["".concat(t,"-item-link")]:{backgroundColor:"transparent"}}},["".concat(t,"-slash")]:{marginInlineEnd:e.paginationSlashMarginInlineEnd,marginInlineStart:e.paginationSlashMarginInlineStart},["".concat(t,"-options")]:{display:"inline-block",marginInlineStart:e.margin,verticalAlign:"middle","&-size-changer.-select":{display:"inline-block",width:"auto"},"&-quick-jumper":{display:"inline-block",height:e.controlHeight,marginInlineStart:e.marginXS,lineHeight:(0,tS.bf)(e.controlHeight),verticalAlign:"top",input:Object.assign(Object.assign(Object.assign({},(0,nc.ik)(e)),(0,nu.$U)(e,{borderColor:e.colorBorder,hoverBorderColor:e.colorPrimaryHover,activeBorderColor:e.colorPrimary,activeShadow:e.activeShadow})),{"&[disabled]":Object.assign({},(0,nu.Xy)(e)),width:e.calc(e.controlHeightLG).mul(1.25).equal(),height:e.controlHeight,boxSizing:"border-box",margin:0,marginInlineStart:e.marginXS,marginInlineEnd:e.marginXS})}}}},nm=e=>{let{componentCls:t}=e;return{["".concat(t,"-item")]:{display:"inline-block",minWidth:e.itemSize,height:e.itemSize,marginInlineEnd:e.marginXS,fontFamily:e.fontFamily,lineHeight:(0,tS.bf)(e.calc(e.itemSize).sub(2).equal()),textAlign:"center",verticalAlign:"middle",listStyle:"none",backgroundColor:"transparent",border:"".concat((0,tS.bf)(e.lineWidth)," ").concat(e.lineType," transparent"),borderRadius:e.borderRadius,outline:0,cursor:"pointer",userSelect:"none",a:{display:"block",padding:"0 ".concat((0,tS.bf)(e.paginationItemPaddingInline)),color:e.colorText,"&:hover":{textDecoration:"none"}},["&:not(".concat(t,"-item-active)")]:{"&:hover":{transition:"all ".concat(e.motionDurationMid),backgroundColor:e.colorBgTextHover},"&:active":{backgroundColor:e.colorBgTextActive}},"&-active":{fontWeight:e.fontWeightStrong,backgroundColor:e.itemActiveBg,borderColor:e.colorPrimary,a:{color:e.colorPrimary},"&:hover":{borderColor:e.colorPrimaryHover},"&:hover 
a":{color:e.colorPrimaryHover}}}}},ng=e=>{let{componentCls:t}=e;return{[t]:Object.assign(Object.assign(Object.assign(Object.assign(Object.assign(Object.assign(Object.assign(Object.assign({},(0,tk.Wf)(e)),{"ul, ol":{margin:0,padding:0,listStyle:"none"},"&::after":{display:"block",clear:"both",height:0,overflow:"hidden",visibility:"hidden",content:'""'},["".concat(t,"-total-text")]:{display:"inline-block",height:e.itemSize,marginInlineEnd:e.marginXS,lineHeight:(0,tS.bf)(e.calc(e.itemSize).sub(2).equal()),verticalAlign:"middle"}}),nm(e)),nh(e)),np(e)),nf(e)),nd(e)),{["@media only screen and (max-width: ".concat(e.screenLG,"px)")]:{["".concat(t,"-item")]:{"&-after-jump-prev, &-before-jump-next":{display:"none"}}},["@media only screen and (max-width: ".concat(e.screenSM,"px)")]:{["".concat(t,"-options")]:{display:"none"}}}),["&".concat(e.componentCls,"-rtl")]:{direction:"rtl"}}},nv=e=>{let{componentCls:t}=e;return{["".concat(t,":not(").concat(t,"-disabled)")]:{["".concat(t,"-item")]:Object.assign({},(0,tk.Qy)(e)),["".concat(t,"-jump-prev, ").concat(t,"-jump-next")]:{"&:focus-visible":Object.assign({["".concat(t,"-item-link-icon")]:{opacity:1},["".concat(t,"-item-ellipsis")]:{opacity:0}},(0,tk.oN)(e))},["".concat(t,"-prev, ").concat(t,"-next")]:{["&:focus-visible 
".concat(t,"-item-link")]:Object.assign({},(0,tk.oN)(e))}}}},ny=e=>Object.assign({itemBg:e.colorBgContainer,itemSize:e.controlHeight,itemSizeSM:e.controlHeightSM,itemActiveBg:e.colorBgContainer,itemLinkBg:e.colorBgContainer,itemActiveColorDisabled:e.colorTextDisabled,itemActiveBgDisabled:e.controlItemBgActiveDisabled,itemInputBg:e.colorBgContainer,miniOptionsSizeChangerTop:0},(0,ns.T)(e)),nb=e=>(0,tE.TS)(e,{inputOutlineOffset:0,paginationMiniOptionsMarginInlineStart:e.calc(e.marginXXS).div(2).equal(),paginationMiniQuickJumperInputWidth:e.calc(e.controlHeightLG).mul(1.1).equal(),paginationItemPaddingInline:e.calc(e.marginXXS).mul(1.5).equal(),paginationEllipsisLetterSpacing:e.calc(e.marginXXS).div(2).equal(),paginationSlashMarginInlineStart:e.marginXXS,paginationSlashMarginInlineEnd:e.marginSM,paginationEllipsisTextIndent:"0.13em"},(0,ns.e)(e));var nx=(0,tC.I$)("Pagination",e=>{let t=nb(e);return[ng(t),nv(t)]},ny),nw=n(29961);let nS=e=>{let{componentCls:t}=e;return{["".concat(t).concat(t,"-bordered").concat(t,"-disabled:not(").concat(t,"-mini)")]:{"&, &:hover":{["".concat(t,"-item-link")]:{borderColor:e.colorBorder}},"&:focus-visible":{["".concat(t,"-item-link")]:{borderColor:e.colorBorder}},["".concat(t,"-item, ").concat(t,"-item-link")]:{backgroundColor:e.colorBgContainerDisabled,borderColor:e.colorBorder,["&:hover:not(".concat(t,"-item-active)")]:{backgroundColor:e.colorBgContainerDisabled,borderColor:e.colorBorder,a:{color:e.colorTextDisabled}},["&".concat(t,"-item-active")]:{backgroundColor:e.itemActiveBgDisabled}},["".concat(t,"-prev, ").concat(t,"-next")]:{"&:hover button":{backgroundColor:e.colorBgContainerDisabled,borderColor:e.colorBorder,color:e.colorTextDisabled},["".concat(t,"-item-link")]:{backgroundColor:e.colorBgContainerDisabled,borderColor:e.colorBorder}}},["".concat(t).concat(t,"-bordered:not(").concat(t,"-mini)")]:{["".concat(t,"-prev, ").concat(t,"-next")]:{"&:hover 
button":{borderColor:e.colorPrimaryHover,backgroundColor:e.itemBg},["".concat(t,"-item-link")]:{backgroundColor:e.itemLinkBg,borderColor:e.colorBorder},["&:hover ".concat(t,"-item-link")]:{borderColor:e.colorPrimary,backgroundColor:e.itemBg,color:e.colorPrimary},["&".concat(t,"-disabled")]:{["".concat(t,"-item-link")]:{borderColor:e.colorBorder,color:e.colorTextDisabled}}},["".concat(t,"-item")]:{backgroundColor:e.itemBg,border:"".concat((0,tS.bf)(e.lineWidth)," ").concat(e.lineType," ").concat(e.colorBorder),["&:hover:not(".concat(t,"-item-active)")]:{borderColor:e.colorPrimary,backgroundColor:e.itemBg,a:{color:e.colorPrimary}},"&-active":{borderColor:e.colorPrimary}}}}};var nk=(0,tC.bk)(["Pagination","bordered"],e=>[nS(nb(e))],ny),nE=function(e,t){var n={};for(var r in e)Object.prototype.hasOwnProperty.call(e,r)&&0>t.indexOf(r)&&(n[r]=e[r]);if(null!=e&&"function"==typeof Object.getOwnPropertySymbols)for(var o=0,r=Object.getOwnPropertySymbols(e);ot.indexOf(r[o])&&Object.prototype.propertyIsEnumerable.call(e,r[o])&&(n[r[o]]=e[r[o]]);return n},nC=e=>{let{prefixCls:t,selectPrefixCls:n,className:r,rootClassName:o,style:a,size:l,locale:c,selectComponentClass:s,responsive:u,showSizeChanger:d}=e,f=nE(e,["prefixCls","selectPrefixCls","className","rootClassName","style","size","locale","selectComponentClass","responsive","showSizeChanger"]),{xs:p}=tG(u),[,h]=(0,nw.ZP)(),{getPrefixCls:m,direction:g,pagination:v={}}=i.useContext(tv.E_),y=m("pagination",t),[b,x,w]=nx(y),S=null!=d?d:v.showSizeChanger,k=i.useMemo(()=>{let 
e=i.createElement("span",{className:"".concat(y,"-item-ellipsis")},"•••"),t=i.createElement("button",{className:"".concat(y,"-item-link"),type:"button",tabIndex:-1},"rtl"===g?i.createElement(t2.Z,null):i.createElement(t1.Z,null));return{prevIcon:t,nextIcon:i.createElement("button",{className:"".concat(y,"-item-link"),type:"button",tabIndex:-1},"rtl"===g?i.createElement(t1.Z,null):i.createElement(t2.Z,null)),jumpPrevIcon:i.createElement("a",{className:"".concat(y,"-item-link")},i.createElement("div",{className:"".concat(y,"-item-container")},"rtl"===g?i.createElement(t0,{className:"".concat(y,"-item-link-icon")}):i.createElement(tQ,{className:"".concat(y,"-item-link-icon")}),e)),jumpNextIcon:i.createElement("a",{className:"".concat(y,"-item-link")},i.createElement("div",{className:"".concat(y,"-item-container")},"rtl"===g?i.createElement(tQ,{className:"".concat(y,"-item-link-icon")}):i.createElement(t0,{className:"".concat(y,"-item-link-icon")}),e))}},[g,y]),[E]=(0,no.Z)("Pagination",nr.Z),C=Object.assign(Object.assign({},E),c),O=(0,tK.Z)(l),j="small"===O||!!(p&&!O&&u),M=m("select",n),N=P()({["".concat(y,"-mini")]:j,["".concat(y,"-rtl")]:"rtl"===g,["".concat(y,"-bordered")]:h.wireframe},null==v?void 0:v.className,r,o,x,w),I=Object.assign(Object.assign({},null==v?void 0:v.style),a);return b(i.createElement(i.Fragment,null,h.wireframe&&i.createElement(nk,{prefixCls:y}),i.createElement(nn,Object.assign({},k,f,{style:I,prefixCls:y,selectPrefixCls:M,className:N,selectComponentClass:s||(j?na:nl),locale:C,showSizeChanger:S}))))},nO=n(87908);function nj(e,t){return"key"in e&&void 0!==e.key&&null!==e.key?e.key:e.dataIndex?Array.isArray(e.dataIndex)?e.dataIndex.join("."):e.dataIndex:t}function nP(e,t){return t?"".concat(t,"-").concat(e):"".concat(e)}function nM(e,t){return"function"==typeof e?e(t):e}var nN={icon:{tag:"svg",attrs:{viewBox:"64 64 896 896",focusable:"false"},children:[{tag:"path",attrs:{d:"M349 838c0 17.7 14.2 32 31.8 32h262.4c17.6 0 31.8-14.3 
31.8-32V642H349v196zm531.1-684H143.9c-24.5 0-39.8 26.7-27.5 48l221.3 376h348.8l221.3-376c12.1-21.3-3.2-48-27.7-48z"}}]},name:"filter",theme:"filled"},nI=i.forwardRef(function(e,t){return i.createElement(tY.Z,(0,m.Z)({},e,{ref:t,icon:nN}))}),nR=n(73002),nT=n(85180),nA=n(45937),n_=n(88208);function nD(e){if(null==e)throw TypeError("Cannot destructure "+e)}var nZ=n(47970),nL=["className","style","motion","motionNodes","motionType","onMotionStart","onMotionEnd","active","treeNodeRequiredProps"],nz=function(e,t){var n,r,o,a,l,s=e.className,d=e.style,f=e.motion,p=e.motionNodes,h=e.motionType,g=e.onMotionStart,v=e.onMotionEnd,y=e.active,b=e.treeNodeRequiredProps,x=(0,z.Z)(e,nL),w=i.useState(!0),S=(0,c.Z)(w,2),k=S[0],E=S[1],C=i.useContext(eX).prefixCls,O=p&&"hide"!==h;(0,u.Z)(function(){p&&O!==k&&E(O)},[p]);var j=i.useRef(!1),M=function(){p&&!j.current&&(j.current=!0,v())};return(n=function(){p&&g()},r=i.useState(!1),a=(o=(0,c.Z)(r,2))[0],l=o[1],(0,u.Z)(function(){if(a)return n(),function(){M()}},[a]),(0,u.Z)(function(){return l(!0),function(){l(!1)}},[]),p)?i.createElement(nZ.ZP,(0,m.Z)({ref:t,visible:k},f,{motionAppear:"show"===h,onVisibleChanged:function(e){O===e&&M()}}),function(e,t){var n=e.className,r=e.style;return i.createElement("div",{ref:t,className:P()("".concat(C,"-treenode-motion"),n),style:r},p.map(function(e){var t=(0,m.Z)({},(nD(e.data),e.data)),n=e.title,r=e.key,o=e.isStart,a=e.isEnd;delete t.children;var l=e4(r,b);return i.createElement(tt,(0,m.Z)({},t,l,{title:n,active:y,data:e.data,key:r,isStart:o,isEnd:a}))}))}):i.createElement(tt,(0,m.Z)({domRef:t,className:s,style:d},x,{active:y}))};nz.displayName="MotionTreeNode";var nB=i.forwardRef(nz);function nF(e,t,n){var r=e.findIndex(function(e){return e.key===n}),o=e[r+1],i=t.findIndex(function(e){return e.key===n});if(o){var a=t.findIndex(function(e){return e.key===o.key});return t.slice(i+1,a)}return t.slice(i+1)}var 
nH=["prefixCls","data","selectable","checkable","expandedKeys","selectedKeys","checkedKeys","loadedKeys","loadingKeys","halfCheckedKeys","keyEntities","disabled","dragging","dragOverNodeKey","dropPosition","motion","height","itemHeight","virtual","focusable","activeItem","focused","tabIndex","onKeyDown","onFocus","onBlur","onActiveChange","onListChangeStart","onListChangeEnd"],nq={width:0,height:0,display:"flex",overflow:"hidden",opacity:0,border:0,padding:0,margin:0},nW=function(){},nK="RC_TREE_MOTION_".concat(Math.random()),nU={key:nK},nV={key:nK,level:0,index:0,pos:"0",node:nU,nodes:[nU]},nG={parent:null,children:[],pos:nV.pos,data:nU,title:null,key:nK,isStart:[],isEnd:[]};function nX(e,t,n,r){return!1!==t&&n?e.slice(0,Math.ceil(n/r)+1):e}function n$(e){return e0(e.key,e.pos)}var nY=i.forwardRef(function(e,t){var n=e.prefixCls,r=e.data,o=(e.selectable,e.checkable,e.expandedKeys),a=e.selectedKeys,l=e.checkedKeys,s=e.loadedKeys,d=e.loadingKeys,f=e.halfCheckedKeys,p=e.keyEntities,h=e.disabled,g=e.dragging,v=e.dragOverNodeKey,y=e.dropPosition,b=e.motion,x=e.height,w=e.itemHeight,S=e.virtual,k=e.focusable,E=e.activeItem,C=e.focused,O=e.tabIndex,j=e.onKeyDown,P=e.onFocus,M=e.onBlur,N=e.onActiveChange,I=e.onListChangeStart,R=e.onListChangeEnd,T=(0,z.Z)(e,nH),A=i.useRef(null),_=i.useRef(null);i.useImperativeHandle(t,function(){return{scrollTo:function(e){A.current.scrollTo(e)},getIndentWidth:function(){return _.current.offsetWidth}}});var D=i.useState(o),Z=(0,c.Z)(D,2),L=Z[0],B=Z[1],F=i.useState(r),H=(0,c.Z)(F,2),q=H[0],W=H[1],K=i.useState(r),U=(0,c.Z)(K,2),V=U[0],G=U[1],X=i.useState([]),$=(0,c.Z)(X,2),Y=$[0],Q=$[1],J=i.useState(null),ee=(0,c.Z)(J,2),et=ee[0],en=ee[1],er=i.useRef(r);function eo(){var e=er.current;W(e),G(e),Q([]),en(null),R()}er.current=r,(0,u.Z)(function(){B(o);var e=function(){var e=arguments.length>0&&void 0!==arguments[0]?arguments[0]:[],t=arguments.length>1&&void 
0!==arguments[1]?arguments[1]:[],n=e.length,r=t.length;if(1!==Math.abs(n-r))return{add:!1,key:null};function o(e,t){var n=new Map;e.forEach(function(e){n.set(e,!0)});var r=t.filter(function(e){return!n.has(e)});return 1===r.length?r[0]:null}return n ").concat(t);return t}(E)),i.createElement("div",null,i.createElement("input",{style:nq,disabled:!1===k||h,tabIndex:!1!==k?O:null,onKeyDown:j,onFocus:P,onBlur:M,value:"",onChange:nW,"aria-label":"for screen reader"})),i.createElement("div",{className:"".concat(n,"-treenode"),"aria-hidden":!0,style:{position:"absolute",pointerEvents:"none",visibility:"hidden",height:0,overflow:"hidden",border:0,padding:0}},i.createElement("div",{className:"".concat(n,"-indent")},i.createElement("div",{ref:_,className:"".concat(n,"-indent-unit")}))),i.createElement(eA.Z,(0,m.Z)({},T,{data:ei,itemKey:n$,height:x,fullHeight:!1,virtual:S,itemHeight:w,prefixCls:"".concat(n,"-list"),ref:A,onVisibleChange:function(e,t){var n=new Set(e);t.filter(function(e){return!n.has(e)}).some(function(e){return n$(e)===nK})&&eo()}}),function(e){var t=e.pos,n=(0,m.Z)({},(nD(e.data),e.data)),r=e.title,o=e.key,a=e.isStart,l=e.isEnd,c=e0(o,t);delete n.key,delete n.children;var s=e4(c,ea);return i.createElement(nB,(0,m.Z)({},n,s,{title:r,active:!!E&&o===E.key,pos:t,data:e.data,isStart:a,isEnd:l,motion:b,motionNodes:o===nK?Y:null,motionType:et,onMotionStart:I,onMotionEnd:eo,treeNodeRequiredProps:ea,onMouseMove:function(){N(null)}}))}))});nY.displayName="NodeList";var nQ=function(e){(0,eV.Z)(n,e);var t=(0,eG.Z)(n);function n(){var e;(0,eW.Z)(this,n);for(var r=arguments.length,o=Array(r),a=0;a0&&void 0!==arguments[0]?arguments[0]:[];t.forEach(function(t){var n=t.key,o=t.children;r.push(n),e(o)})}(a[c].children),r),indent:e.listRef.current.getIndentWidth()}),e.setExpandedKeys(s),window.addEventListener("dragend",e.onWindowDragEnd),null==l||l({event:t,node:e5(n.props)})},e.onNodeDragEnter=function(t,n){var 
r=e.state,o=r.expandedKeys,i=r.keyEntities,a=r.dragChildrenKeys,l=r.flattenNodes,c=r.indent,s=e.props,u=s.onDragEnter,d=s.onExpand,f=s.allowDrop,p=s.direction,h=n.props,m=h.pos,g=h.eventKey,v=(0,eU.Z)(e).dragNode;if(e.currentMouseOverDroppableNodeKey!==g&&(e.currentMouseOverDroppableNodeKey=g),!v){e.resetDragState();return}var y=ti(t,v,n,c,e.dragStartMousePosition,f,l,i,o,p),b=y.dropPosition,x=y.dropLevelOffset,w=y.dropTargetKey,S=y.dropContainerKey,k=y.dropTargetPos,E=y.dropAllowed,C=y.dragOverNodeKey;if(-1!==a.indexOf(w)||!E||(e.delayedDragEnterLogic||(e.delayedDragEnterLogic={}),Object.keys(e.delayedDragEnterLogic).forEach(function(t){clearTimeout(e.delayedDragEnterLogic[t])}),v.props.eventKey!==n.props.eventKey&&(t.persist(),e.delayedDragEnterLogic[m]=window.setTimeout(function(){if(null!==e.state.draggingNodeKey){var r=(0,ec.Z)(o),a=i[n.props.eventKey];a&&(a.children||[]).length&&(r=tr(o,n.props.eventKey)),"expandedKeys"in e.props||e.setExpandedKeys(r),null==d||d(r,{node:e5(n.props),expanded:!0,nativeEvent:t.nativeEvent})}},800)),v.props.eventKey===w&&0===x)){e.resetDragState();return}e.setState({dragOverNodeKey:C,dropPosition:b,dropLevelOffset:x,dropTargetKey:w,dropContainerKey:S,dropTargetPos:k,dropAllowed:E}),null==u||u({event:t,node:e5(n.props),expandedKeys:o})},e.onNodeDragOver=function(t,n){var r=e.state,o=r.dragChildrenKeys,i=r.flattenNodes,a=r.keyEntities,l=r.expandedKeys,c=r.indent,s=e.props,u=s.onDragOver,d=s.allowDrop,f=s.direction,p=(0,eU.Z)(e).dragNode;if(p){var 
h=ti(t,p,n,c,e.dragStartMousePosition,d,i,a,l,f),m=h.dropPosition,g=h.dropLevelOffset,v=h.dropTargetKey,y=h.dropContainerKey,b=h.dropAllowed,x=h.dropTargetPos,w=h.dragOverNodeKey;-1===o.indexOf(v)&&b&&(p.props.eventKey===v&&0===g?null===e.state.dropPosition&&null===e.state.dropLevelOffset&&null===e.state.dropTargetKey&&null===e.state.dropContainerKey&&null===e.state.dropTargetPos&&!1===e.state.dropAllowed&&null===e.state.dragOverNodeKey||e.resetDragState():m===e.state.dropPosition&&g===e.state.dropLevelOffset&&v===e.state.dropTargetKey&&y===e.state.dropContainerKey&&x===e.state.dropTargetPos&&b===e.state.dropAllowed&&w===e.state.dragOverNodeKey||e.setState({dropPosition:m,dropLevelOffset:g,dropTargetKey:v,dropContainerKey:y,dropTargetPos:x,dropAllowed:b,dragOverNodeKey:w}),null==u||u({event:t,node:e5(n.props)}))}},e.onNodeDragLeave=function(t,n){e.currentMouseOverDroppableNodeKey!==n.props.eventKey||t.currentTarget.contains(t.relatedTarget)||(e.resetDragState(),e.currentMouseOverDroppableNodeKey=null);var r=e.props.onDragLeave;null==r||r({event:t,node:e5(n.props)})},e.onWindowDragEnd=function(t){e.onNodeDragEnd(t,null,!0),window.removeEventListener("dragend",e.onWindowDragEnd)},e.onNodeDragEnd=function(t,n){var r=e.props.onDragEnd;e.setState({dragOverNodeKey:null}),e.cleanDragState(),null==r||r({event:t,node:e5(n.props)}),e.dragNode=null,window.removeEventListener("dragend",e.onWindowDragEnd)},e.onNodeDrop=function(t,n){var r,o=arguments.length>2&&void 0!==arguments[2]&&arguments[2],i=e.state,a=i.dragChildrenKeys,l=i.dropPosition,c=i.dropTargetKey,s=i.dropTargetPos;if(i.dropAllowed){var u=e.props.onDrop;if(e.setState({dragOverNodeKey:null}),e.cleanDragState(),null!==c){var d=(0,C.Z)((0,C.Z)({},e4(c,e.getTreeNodeRequiredProps())),{},{active:(null===(r=e.getActiveItem())||void 0===r?void 0:r.key)===c,data:e.state.keyEntities[c].node}),f=-1!==a.indexOf(c);(0,I.ZP)(!f,"Can not drop to dragNode's children node. This is a bug of rc-tree. 
Please report an issue.");var p=to(s),h={event:t,node:e5(d),dragNode:e.dragNode?e5(e.dragNode.props):null,dragNodesKeys:[e.dragNode.props.eventKey].concat(a),dropToGap:0!==l,dropPosition:l+Number(p[p.length-1])};o||null==u||u(h),e.dragNode=null}}},e.cleanDragState=function(){null!==e.state.draggingNodeKey&&e.setState({draggingNodeKey:null,dropPosition:null,dropContainerKey:null,dropTargetKey:null,dropLevelOffset:null,dropAllowed:!0,dragOverNodeKey:null}),e.dragStartMousePosition=null,e.currentMouseOverDroppableNodeKey=null},e.triggerExpandActionExpand=function(t,n){var r=e.state,o=r.expandedKeys,i=r.flattenNodes,a=n.expanded,l=n.key;if(!n.isLeaf&&!t.shiftKey&&!t.metaKey&&!t.ctrlKey){var c=i.filter(function(e){return e.key===l})[0],s=e5((0,C.Z)((0,C.Z)({},e4(l,e.getTreeNodeRequiredProps())),{},{data:c.data}));e.setExpandedKeys(a?tn(o,l):tr(o,l)),e.onNodeExpand(t,s)}},e.onNodeClick=function(t,n){var r=e.props,o=r.onClick;"click"===r.expandAction&&e.triggerExpandActionExpand(t,n),null==o||o(t,n)},e.onNodeDoubleClick=function(t,n){var r=e.props,o=r.onDoubleClick;"doubleClick"===r.expandAction&&e.triggerExpandActionExpand(t,n),null==o||o(t,n)},e.onNodeSelect=function(t,n){var r=e.state.selectedKeys,o=e.state,i=o.keyEntities,a=o.fieldNames,l=e.props,c=l.onSelect,s=l.multiple,u=n.selected,d=n[a.key],f=!u,p=(r=f?s?tr(r,d):[d]:tn(r,d)).map(function(e){var t=i[e];return t?t.node:null}).filter(function(e){return e});e.setUncontrolledState({selectedKeys:r}),null==c||c(r,{event:"select",selected:f,node:n,selectedNodes:p,nativeEvent:t.nativeEvent})},e.onNodeCheck=function(t,n,r){var o,i=e.state,a=i.keyEntities,l=i.checkedKeys,c=i.halfCheckedKeys,s=e.props,u=s.checkStrictly,d=s.onCheck,f=n.key,p={event:"check",node:n,checked:r,nativeEvent:t.nativeEvent};if(u){var h=r?tr(l,f):tn(l,f);o={checked:h,halfChecked:tn(c,f)},p.checkedNodes=h.map(function(e){return a[e]}).filter(function(e){return e}).map(function(e){return e.node}),e.setUncontrolledState({checkedKeys:h})}else{var 
m=td([].concat((0,ec.Z)(l),[f]),!0,a),g=m.checkedKeys,v=m.halfCheckedKeys;if(!r){var y=new Set(g);y.delete(f);var b=td(Array.from(y),{checked:!1,halfCheckedKeys:v},a);g=b.checkedKeys,v=b.halfCheckedKeys}o=g,p.checkedNodes=[],p.checkedNodesPositions=[],p.halfCheckedKeys=v,g.forEach(function(e){var t=a[e];if(t){var n=t.node,r=t.pos;p.checkedNodes.push(n),p.checkedNodesPositions.push({node:n,pos:r})}}),e.setUncontrolledState({checkedKeys:g},!1,{halfCheckedKeys:v})}null==d||d(o,p)},e.onNodeLoad=function(t){var n=t.key,r=new Promise(function(r,o){e.setState(function(i){var a=i.loadedKeys,l=i.loadingKeys,c=void 0===l?[]:l,s=e.props,u=s.loadData,d=s.onLoad;return u&&-1===(void 0===a?[]:a).indexOf(n)&&-1===c.indexOf(n)?(u(t).then(function(){var o=tr(e.state.loadedKeys,n);null==d||d(o,{event:"load",node:t}),e.setUncontrolledState({loadedKeys:o}),e.setState(function(e){return{loadingKeys:tn(e.loadingKeys,n)}}),r()}).catch(function(t){if(e.setState(function(e){return{loadingKeys:tn(e.loadingKeys,n)}}),e.loadingRetryTimes[n]=(e.loadingRetryTimes[n]||0)+1,e.loadingRetryTimes[n]>=10){var i=e.state.loadedKeys;(0,I.ZP)(!1,"Retry for `loadData` many times but still failed. 
No more retry."),e.setUncontrolledState({loadedKeys:tr(i,n)}),r()}o(t)}),{loadingKeys:tr(c,n)}):null})});return r.catch(function(){}),r},e.onNodeMouseEnter=function(t,n){var r=e.props.onMouseEnter;null==r||r({event:t,node:n})},e.onNodeMouseLeave=function(t,n){var r=e.props.onMouseLeave;null==r||r({event:t,node:n})},e.onNodeContextMenu=function(t,n){var r=e.props.onRightClick;r&&(t.preventDefault(),r({event:t,node:n}))},e.onFocus=function(){var t=e.props.onFocus;e.setState({focused:!0});for(var n=arguments.length,r=Array(n),o=0;o1&&void 0!==arguments[1]&&arguments[1],r=arguments.length>2&&void 0!==arguments[2]?arguments[2]:null;if(!e.destroyed){var o=!1,i=!0,a={};Object.keys(t).forEach(function(n){if(n in e.props){i=!1;return}o=!0,a[n]=t[n]}),o&&(!n||i)&&e.setState((0,C.Z)((0,C.Z)({},a),r))}},e.scrollTo=function(t){e.listRef.current.scrollTo(t)},e}return(0,eK.Z)(n,[{key:"componentDidMount",value:function(){this.destroyed=!1,this.onUpdated()}},{key:"componentDidUpdate",value:function(){this.onUpdated()}},{key:"onUpdated",value:function(){var e=this.props,t=e.activeKey,n=e.itemScrollOffset;void 0!==t&&t!==this.state.activeKey&&(this.setState({activeKey:t}),null!==t&&this.scrollTo({key:t,offset:void 0===n?0:n}))}},{key:"componentWillUnmount",value:function(){window.removeEventListener("dragend",this.onWindowDragEnd),this.destroyed=!0}},{key:"resetDragState",value:function(){this.setState({dragOverNodeKey:null,dropPosition:null,dropLevelOffset:null,dropTargetKey:null,dropContainerKey:null,dropTargetPos:null,dropAllowed:!1})}},{key:"render",value:function(){var 
e,t,n=this.state,r=n.focused,o=n.flattenNodes,a=n.keyEntities,l=n.draggingNodeKey,c=n.activeKey,s=n.dropLevelOffset,u=n.dropContainerKey,d=n.dropTargetKey,f=n.dropPosition,p=n.dragOverNodeKey,h=n.indent,g=this.props,v=g.prefixCls,y=g.className,b=g.style,x=g.showLine,w=g.focusable,S=g.tabIndex,k=g.selectable,C=g.showIcon,j=g.icon,M=g.switcherIcon,N=g.draggable,I=g.checkable,R=g.checkStrictly,T=g.disabled,A=g.motion,_=g.loadData,D=g.filterTreeNode,Z=g.height,L=g.itemHeight,z=g.virtual,B=g.titleRender,F=g.dropIndicatorRender,H=g.onContextMenu,q=g.onScroll,W=g.direction,K=g.rootClassName,U=g.rootStyle,G=(0,V.Z)(this.props,{aria:!0,data:!0});return N&&(t="object"===(0,E.Z)(N)?N:"function"==typeof N?{nodeDraggable:N}:{}),i.createElement(eX.Provider,{value:{prefixCls:v,selectable:k,showIcon:C,icon:j,switcherIcon:M,draggable:t,draggingNodeKey:l,checkable:I,checkStrictly:R,disabled:T,keyEntities:a,dropLevelOffset:s,dropContainerKey:u,dropTargetKey:d,dropPosition:f,dragOverNodeKey:p,indent:h,direction:W,dropIndicatorRender:F,loadData:_,filterTreeNode:D,titleRender:B,onNodeClick:this.onNodeClick,onNodeDoubleClick:this.onNodeDoubleClick,onNodeExpand:this.onNodeExpand,onNodeSelect:this.onNodeSelect,onNodeCheck:this.onNodeCheck,onNodeLoad:this.onNodeLoad,onNodeMouseEnter:this.onNodeMouseEnter,onNodeMouseLeave:this.onNodeMouseLeave,onNodeContextMenu:this.onNodeContextMenu,onNodeDragStart:this.onNodeDragStart,onNodeDragEnter:this.onNodeDragEnter,onNodeDragOver:this.onNodeDragOver,onNodeDragLeave:this.onNodeDragLeave,onNodeDragEnd:this.onNodeDragEnd,onNodeDrop:this.onNodeDrop}},i.createElement("div",{role:"tree",className:P()(v,y,K,(e={},(0,O.Z)(e,"".concat(v,"-show-line"),x),(0,O.Z)(e,"".concat(v,"-focused"),r),(0,O.Z)(e,"".concat(v,"-active-focused"),null!==c),e)),style:U},i.createElement(nY,(0,m.Z)({ref:this.listRef,prefixCls:v,style:b,data:o,disabled:T,selectable:k,checkable:!!I,motion:A,dragging:null!==l,height:Z,itemHeight:L,virtual:z,focusable:w,focused:r,tabIndex:void 
0===S?0:S,activeItem:this.getActiveItem(),onFocus:this.onFocus,onBlur:this.onBlur,onKeyDown:this.onKeyDown,onActiveChange:this.onActiveChange,onListChangeStart:this.onListChangeStart,onListChangeEnd:this.onListChangeEnd,onContextMenu:H,onScroll:q},this.getTreeNodeRequiredProps(),G))))}}],[{key:"getDerivedStateFromProps",value:function(e,t){var n,r,o=t.prevProps,i={prevProps:e};function a(t){return!o&&t in e||o&&o[t]!==e[t]}var l=t.fieldNames;if(a("fieldNames")&&(l=e1(e.fieldNames),i.fieldNames=l),a("treeData")?n=e.treeData:a("children")&&((0,I.ZP)(!1,"`children` of Tree is deprecated. Please use `treeData` instead."),n=e2(e.children)),n){i.treeData=n;var c=e3(n,{fieldNames:l});i.keyEntities=(0,C.Z)((0,O.Z)({},nK,nV),c.keyEntities)}var s=i.keyEntities||t.keyEntities;if(a("expandedKeys")||o&&a("autoExpandParent"))i.expandedKeys=e.autoExpandParent||!o&&e.defaultExpandParent?tc(e.expandedKeys,s):e.expandedKeys;else if(!o&&e.defaultExpandAll){var u=(0,C.Z)({},s);delete u[nK],i.expandedKeys=Object.keys(u).map(function(e){return u[e].key})}else!o&&e.defaultExpandedKeys&&(i.expandedKeys=e.autoExpandParent||e.defaultExpandParent?tc(e.defaultExpandedKeys,s):e.defaultExpandedKeys);if(i.expandedKeys||delete i.expandedKeys,n||i.expandedKeys){var d=e6(n||t.treeData,i.expandedKeys||t.expandedKeys,l);i.flattenNodes=d}if(e.selectable&&(a("selectedKeys")?i.selectedKeys=ta(e.selectedKeys,e):!o&&e.defaultSelectedKeys&&(i.selectedKeys=ta(e.defaultSelectedKeys,e))),e.checkable&&(a("checkedKeys")?r=tl(e.checkedKeys)||{}:!o&&e.defaultCheckedKeys?r=tl(e.defaultCheckedKeys)||{}:n&&(r=tl(e.checkedKeys)||{checkedKeys:t.checkedKeys,halfCheckedKeys:t.halfCheckedKeys}),r)){var f=r,p=f.checkedKeys,h=void 0===p?[]:p,m=f.halfCheckedKeys,g=void 0===m?[]:m;if(!e.checkStrictly){var v=td(h,!0,s);h=v.checkedKeys,g=v.halfCheckedKeys}i.checkedKeys=h,i.halfCheckedKeys=g}return 
a("loadedKeys")&&(i.loadedKeys=e.loadedKeys),i}}]),n}(i.Component);nQ.defaultProps={prefixCls:"rc-tree",showLine:!1,showIcon:!0,selectable:!0,multiple:!1,checkable:!1,disabled:!1,checkStrictly:!1,draggable:!1,defaultExpandParent:!0,autoExpandParent:!1,defaultExpandAll:!1,defaultExpandedKeys:[],defaultCheckedKeys:[],defaultSelectedKeys:[],dropIndicatorRender:function(e){var t=e.dropPosition,n=e.dropLevelOffset,r=e.indent,o={pointerEvents:"none",position:"absolute",right:0,backgroundColor:"red",height:2};switch(t){case -1:o.top=0,o.left=-n*r;break;case 1:o.bottom=0,o.left=-n*r;break;case 0:o.bottom=0,o.left=r}return i.createElement("div",{style:o})},allowDrop:function(){return!0},expandAction:!1},nQ.TreeNode=tt;var nJ={icon:{tag:"svg",attrs:{viewBox:"64 64 896 896",focusable:"false"},children:[{tag:"path",attrs:{d:"M854.6 288.6L639.4 73.4c-6-6-14.1-9.4-22.6-9.4H192c-17.7 0-32 14.3-32 32v832c0 17.7 14.3 32 32 32h640c17.7 0 32-14.3 32-32V311.3c0-8.5-3.4-16.7-9.4-22.7zM790.2 326H602V137.8L790.2 326zm1.8 562H232V136h302v216a42 42 0 0042 42h216v494z"}}]},name:"file",theme:"outlined"},n0=i.forwardRef(function(e,t){return i.createElement(tY.Z,(0,m.Z)({},e,{ref:t,icon:nJ}))}),n1={icon:{tag:"svg",attrs:{viewBox:"64 64 896 896",focusable:"false"},children:[{tag:"path",attrs:{d:"M928 444H820V330.4c0-17.7-14.3-32-32-32H473L355.7 186.2a8.15 8.15 0 00-5.5-2.2H96c-17.7 0-32 14.3-32 32v592c0 17.7 14.3 32 32 32h698c13 0 24.8-7.9 29.7-20l134-332c1.5-3.8 2.3-7.9 2.3-12 0-17.7-14.3-32-32-32zM136 256h188.5l119.6 114.4H748V444H238c-13 0-24.8 7.9-29.7 20L136 643.2V256zm635.3 512H159l103.3-256h612.4L771.3 768z"}}]},name:"folder-open",theme:"outlined"},n2=i.forwardRef(function(e,t){return i.createElement(tY.Z,(0,m.Z)({},e,{ref:t,icon:n1}))}),n6={icon:{tag:"svg",attrs:{viewBox:"64 64 896 896",focusable:"false"},children:[{tag:"path",attrs:{d:"M880 298.4H521L403.7 186.2a8.15 8.15 0 00-5.5-2.2H144c-17.7 0-32 14.3-32 32v592c0 17.7 14.3 32 32 32h736c17.7 0 32-14.3 
32-32V330.4c0-17.7-14.3-32-32-32zM840 768H184V256h188.5l119.6 114.4H840V768z"}}]},name:"folder",theme:"outlined"},n3=i.forwardRef(function(e,t){return i.createElement(tY.Z,(0,m.Z)({},e,{ref:t,icon:n6}))}),n4={icon:{tag:"svg",attrs:{viewBox:"64 64 896 896",focusable:"false"},children:[{tag:"path",attrs:{d:"M300 276.5a56 56 0 1056-97 56 56 0 00-56 97zm0 284a56 56 0 1056-97 56 56 0 00-56 97zM640 228a56 56 0 10112 0 56 56 0 00-112 0zm0 284a56 56 0 10112 0 56 56 0 00-112 0zM300 844.5a56 56 0 1056-97 56 56 0 00-56 97zM640 796a56 56 0 10112 0 56 56 0 00-112 0z"}}]},name:"holder",theme:"outlined"},n5=i.forwardRef(function(e,t){return i.createElement(tY.Z,(0,m.Z)({},e,{ref:t,icon:n4}))}),n8=n(68710),n7=n(63074);let n9=new tS.E4("ant-tree-node-fx-do-not-use",{"0%":{opacity:0},"100%":{opacity:1}}),re=(e,t)=>({[".".concat(e,"-switcher-icon")]:{display:"inline-block",fontSize:10,verticalAlign:"baseline",svg:{transition:"transform ".concat(t.motionDurationSlow)}}}),rt=(e,t)=>({[".".concat(e,"-drop-indicator")]:{position:"absolute",zIndex:1,height:2,backgroundColor:t.colorPrimary,borderRadius:1,pointerEvents:"none","&:after":{position:"absolute",top:-3,insetInlineStart:-6,width:8,height:8,backgroundColor:"transparent",border:"".concat((0,tS.bf)(t.lineWidthBold)," solid ").concat(t.colorPrimary),borderRadius:"50%",content:'""'}}}),rn=(e,t)=>{let{treeCls:n,treeNodeCls:r,treeNodePadding:o,titleHeight:i,nodeSelectedBg:a,nodeHoverBg:l}=t,c=t.paddingXS;return{[n]:Object.assign(Object.assign({},(0,tk.Wf)(t)),{background:t.colorBgContainer,borderRadius:t.borderRadius,transition:"background-color 
".concat(t.motionDurationSlow),["&".concat(n,"-rtl")]:{["".concat(n,"-switcher")]:{"&_close":{["".concat(n,"-switcher-icon")]:{svg:{transform:"rotate(90deg)"}}}}},["&-focused:not(:hover):not(".concat(n,"-active-focused)")]:Object.assign({},(0,tk.oN)(t)),["".concat(n,"-list-holder-inner")]:{alignItems:"flex-start"},["&".concat(n,"-block-node")]:{["".concat(n,"-list-holder-inner")]:{alignItems:"stretch",["".concat(n,"-node-content-wrapper")]:{flex:"auto"},["".concat(r,".dragging")]:{position:"relative","&:after":{position:"absolute",top:0,insetInlineEnd:0,bottom:o,insetInlineStart:0,border:"1px solid ".concat(t.colorPrimary),opacity:0,animationName:n9,animationDuration:t.motionDurationSlow,animationPlayState:"running",animationFillMode:"forwards",content:'""',pointerEvents:"none"}}}},["".concat(r)]:{display:"flex",alignItems:"flex-start",padding:"0 0 ".concat((0,tS.bf)(o)," 0"),outline:"none","&-rtl":{direction:"rtl"},"&-disabled":{["".concat(n,"-node-content-wrapper")]:{color:t.colorTextDisabled,cursor:"not-allowed","&:hover":{background:"transparent"}}},["&-active ".concat(n,"-node-content-wrapper")]:{background:t.controlItemBgHover},["&:not(".concat(r,"-disabled).filter-node ").concat(n,"-title")]:{color:"inherit",fontWeight:500},"&-draggable":{cursor:"grab",["".concat(n,"-draggable-icon")]:{flexShrink:0,width:i,lineHeight:"".concat((0,tS.bf)(i)),textAlign:"center",visibility:"visible",opacity:.2,transition:"opacity ".concat(t.motionDurationSlow),["".concat(r,":hover 
&")]:{opacity:.45}},["&".concat(r,"-disabled")]:{["".concat(n,"-draggable-icon")]:{visibility:"hidden"}}}},["".concat(n,"-indent")]:{alignSelf:"stretch",whiteSpace:"nowrap",userSelect:"none","&-unit":{display:"inline-block",width:i}},["".concat(n,"-draggable-icon")]:{visibility:"hidden"},["".concat(n,"-switcher")]:Object.assign(Object.assign({},re(e,t)),{position:"relative",flex:"none",alignSelf:"stretch",width:i,margin:0,lineHeight:"".concat((0,tS.bf)(i)),textAlign:"center",cursor:"pointer",userSelect:"none",transition:"all ".concat(t.motionDurationSlow),borderRadius:t.borderRadius,"&-noop":{cursor:"unset"},["&:not(".concat(n,"-switcher-noop):hover")]:{backgroundColor:t.colorBgTextHover},"&_close":{["".concat(n,"-switcher-icon")]:{svg:{transform:"rotate(-90deg)"}}},"&-loading-icon":{color:t.colorPrimary},"&-leaf-line":{position:"relative",zIndex:1,display:"inline-block",width:"100%",height:"100%","&:before":{position:"absolute",top:0,insetInlineEnd:t.calc(i).div(2).equal(),bottom:t.calc(o).mul(-1).equal(),marginInlineStart:-1,borderInlineEnd:"1px solid ".concat(t.colorBorder),content:'""'},"&:after":{position:"absolute",width:t.calc(t.calc(i).div(2).equal()).mul(.8).equal(),height:t.calc(i).div(2).equal(),borderBottom:"1px solid ".concat(t.colorBorder),content:'""'}}}),["".concat(n,"-checkbox")]:{top:"initial",marginInlineEnd:c,alignSelf:"flex-start",marginTop:t.marginXXS},["".concat(n,"-node-content-wrapper, ").concat(n,"-checkbox + span")]:{position:"relative",zIndex:"auto",minHeight:i,margin:0,padding:"0 ".concat((0,tS.bf)(t.calc(t.paddingXS).div(2).equal())),color:"inherit",lineHeight:"".concat((0,tS.bf)(i)),background:"transparent",borderRadius:t.borderRadius,cursor:"pointer",transition:"all ".concat(t.motionDurationMid,", border 0s, line-height 0s, box-shadow 
0s"),"&:hover":{backgroundColor:l},["&".concat(n,"-node-selected")]:{backgroundColor:a},["".concat(n,"-iconEle")]:{display:"inline-block",width:i,height:i,lineHeight:"".concat((0,tS.bf)(i)),textAlign:"center",verticalAlign:"top","&:empty":{display:"none"}}},["".concat(n,"-unselectable ").concat(n,"-node-content-wrapper:hover")]:{backgroundColor:"transparent"},["".concat(n,"-node-content-wrapper")]:Object.assign({lineHeight:"".concat((0,tS.bf)(i)),userSelect:"none"},rt(e,t)),["".concat(r,".drop-container")]:{"> [draggable]":{boxShadow:"0 0 0 2px ".concat(t.colorPrimary)}},"&-show-line":{["".concat(n,"-indent")]:{"&-unit":{position:"relative",height:"100%","&:before":{position:"absolute",top:0,insetInlineEnd:t.calc(i).div(2).equal(),bottom:t.calc(o).mul(-1).equal(),borderInlineEnd:"1px solid ".concat(t.colorBorder),content:'""'},"&-end":{"&:before":{display:"none"}}}},["".concat(n,"-switcher")]:{background:"transparent","&-line-icon":{verticalAlign:"-0.15em"}}},["".concat(r,"-leaf-last")]:{["".concat(n,"-switcher")]:{"&-leaf-line":{"&:before":{top:"auto !important",bottom:"auto !important",height:"".concat((0,tS.bf)(t.calc(i).div(2).equal())," !important")}}}}})}},rr=e=>{let{treeCls:t,treeNodeCls:n,treeNodePadding:r,directoryNodeSelectedBg:o,directoryNodeSelectedColor:i}=e;return{["".concat(t).concat(t,"-directory")]:{[n]:{position:"relative","&:before":{position:"absolute",top:0,insetInlineEnd:0,bottom:r,insetInlineStart:0,transition:"background-color ".concat(e.motionDurationMid),content:'""',pointerEvents:"none"},"&:hover":{"&:before":{background:e.controlItemBgHover}},"> *":{zIndex:1},["".concat(t,"-switcher")]:{transition:"color ".concat(e.motionDurationMid)},["".concat(t,"-node-content-wrapper")]:{borderRadius:0,userSelect:"none","&:hover":{background:"transparent"},["&".concat(t,"-node-selected")]:{color:i,background:"transparent"}},"&-selected":{"\n &:hover::before,\n &::before\n 
":{background:o},["".concat(t,"-switcher")]:{color:i},["".concat(t,"-node-content-wrapper")]:{color:i,background:"transparent"}}}}}},ro=(e,t)=>{let n=".".concat(e),r=t.calc(t.paddingXS).div(2).equal(),o=(0,tE.TS)(t,{treeCls:n,treeNodeCls:"".concat(n,"-treenode"),treeNodePadding:r});return[rn(e,o),rr(o)]},ri=e=>{let{controlHeightSM:t}=e;return{titleHeight:t,nodeHoverBg:e.controlItemBgHover,nodeSelectedBg:e.controlItemBgActive}};var ra=(0,tC.I$)("Tree",(e,t)=>{let{prefixCls:n}=t;return[{[e.componentCls]:tj("".concat(n,"-checkbox"),e)},ro(n,e),(0,n7.Z)(e)]},e=>{let{colorTextLightSolid:t,colorPrimary:n}=e;return Object.assign(Object.assign({},ri(e)),{directoryNodeSelectedColor:t,directoryNodeSelectedBg:n})});function rl(e){let{dropPosition:t,dropLevelOffset:n,prefixCls:r,indent:o,direction:a="ltr"}=e,l="ltr"===a?"left":"right",c={[l]:-n*o+4,["ltr"===a?"right":"left"]:0};switch(t){case -1:c.top=-3;break;case 1:c.bottom=-3;break;default:c.bottom=-3,c[l]=o+4}return i.createElement("div",{style:c,className:"".concat(r,"-drop-indicator")})}var rc={icon:{tag:"svg",attrs:{viewBox:"0 0 1024 1024",focusable:"false"},children:[{tag:"path",attrs:{d:"M840.4 300H183.6c-19.7 0-30.7 20.8-18.5 35l328.4 380.8c9.4 10.9 27.5 10.9 37 0L858.9 335c12.2-14.2 1.2-35-18.5-35z"}}]},name:"caret-down",theme:"filled"},rs=i.forwardRef(function(e,t){return i.createElement(tY.Z,(0,m.Z)({},e,{ref:t,icon:rc}))}),ru=n(61935),rd={icon:{tag:"svg",attrs:{viewBox:"64 64 896 896",focusable:"false"},children:[{tag:"path",attrs:{d:"M328 544h368c4.4 0 8-3.6 8-8v-48c0-4.4-3.6-8-8-8H328c-4.4 0-8 3.6-8 8v48c0 4.4 3.6 8 8 8z"}},{tag:"path",attrs:{d:"M880 112H144c-17.7 0-32 14.3-32 32v736c0 17.7 14.3 32 32 32h736c17.7 0 32-14.3 32-32V144c0-17.7-14.3-32-32-32zm-40 728H184V184h656v656z"}}]},name:"minus-square",theme:"outlined"},rf=i.forwardRef(function(e,t){return i.createElement(tY.Z,(0,m.Z)({},e,{ref:t,icon:rd}))}),rp={icon:{tag:"svg",attrs:{viewBox:"64 64 896 
896",focusable:"false"},children:[{tag:"path",attrs:{d:"M328 544h152v152c0 4.4 3.6 8 8 8h48c4.4 0 8-3.6 8-8V544h152c4.4 0 8-3.6 8-8v-48c0-4.4-3.6-8-8-8H544V328c0-4.4-3.6-8-8-8h-48c-4.4 0-8 3.6-8 8v152H328c-4.4 0-8 3.6-8 8v48c0 4.4 3.6 8 8 8z"}},{tag:"path",attrs:{d:"M880 112H144c-17.7 0-32 14.3-32 32v736c0 17.7 14.3 32 32 32h736c17.7 0 32-14.3 32-32V144c0-17.7-14.3-32-32-32zm-40 728H184V184h656v656z"}}]},name:"plus-square",theme:"outlined"},rh=i.forwardRef(function(e,t){return i.createElement(tY.Z,(0,m.Z)({},e,{ref:t,icon:rp}))}),rm=n(19722),rg=e=>{let t;let{prefixCls:n,switcherIcon:r,treeNodeProps:o,showLine:a}=e,{isLeaf:l,expanded:c,loading:s}=o;if(s)return i.createElement(ru.Z,{className:"".concat(n,"-switcher-loading-icon")});if(a&&"object"==typeof a&&(t=a.showLeafIcon),l){if(!a)return null;if("boolean"!=typeof t&&t){let e="function"==typeof t?t(o):t;return(0,rm.l$)(e)?(0,rm.Tm)(e,{className:P()(e.props.className||"","".concat(n,"-switcher-line-custom-icon"))}):e}return t?i.createElement(n0,{className:"".concat(n,"-switcher-line-icon")}):i.createElement("span",{className:"".concat(n,"-switcher-leaf-line")})}let u="".concat(n,"-switcher-icon"),d="function"==typeof r?r(o):r;return(0,rm.l$)(d)?(0,rm.Tm)(d,{className:P()(d.props.className||"",u)}):void 0!==d?d:a?c?i.createElement(rf,{className:"".concat(n,"-switcher-line-icon")}):i.createElement(rh,{className:"".concat(n,"-switcher-line-icon")}):i.createElement(rs,{className:u})};let rv=i.forwardRef((e,t)=>{var 
n;let{getPrefixCls:r,direction:o,virtual:a,tree:l}=i.useContext(tv.E_),{prefixCls:c,className:s,showIcon:u=!1,showLine:d,switcherIcon:f,blockNode:p=!1,children:h,checkable:m=!1,selectable:g=!0,draggable:v,motion:y,style:b}=e,x=r("tree",c),w=r(),S=null!=y?y:Object.assign(Object.assign({},(0,n8.Z)(w)),{motionAppear:!1}),k=Object.assign(Object.assign({},e),{checkable:m,selectable:g,showIcon:u,motion:S,blockNode:p,showLine:!!d,dropIndicatorRender:rl}),[E,C,O]=ra(x),[,j]=(0,nw.ZP)(),M=j.paddingXS/2+((null===(n=j.Tree)||void 0===n?void 0:n.titleHeight)||j.controlHeightSM),N=i.useMemo(()=>{if(!v)return!1;let e={};switch(typeof v){case"function":e.nodeDraggable=v;break;case"object":e=Object.assign({},v)}return!1!==e.icon&&(e.icon=e.icon||i.createElement(n5,null)),e},[v]);return E(i.createElement(nQ,Object.assign({itemHeight:M,ref:t,virtual:a},k,{style:Object.assign(Object.assign({},null==l?void 0:l.style),b),prefixCls:x,className:P()({["".concat(x,"-icon-hide")]:!u,["".concat(x,"-block-node")]:p,["".concat(x,"-unselectable")]:!g,["".concat(x,"-rtl")]:"rtl"===o},null==l?void 0:l.className,s,C,O),direction:o,checkable:m?i.createElement("span",{className:"".concat(x,"-checkbox-inner")}):m,selectable:g,switcherIcon:e=>i.createElement(rg,{prefixCls:x,switcherIcon:f,treeNodeProps:e,showLine:d}),draggable:N}),h))});function ry(e,t,n){let{key:r,children:o}=n;e.forEach(function(e){let i=e[r],a=e[o];!1!==t(i,e)&&ry(a||[],t,n)})}(r=o||(o={}))[r.None=0]="None",r[r.Start=1]="Start",r[r.End=2]="End";var rb=function(e,t){var n={};for(var r in e)Object.prototype.hasOwnProperty.call(e,r)&&0>t.indexOf(r)&&(n[r]=e[r]);if(null!=e&&"function"==typeof Object.getOwnPropertySymbols)for(var o=0,r=Object.getOwnPropertySymbols(e);ot.indexOf(r[o])&&Object.prototype.propertyIsEnumerable.call(e,r[o])&&(n[r[o]]=e[r[o]]);return n};function rx(e){let{isLeaf:t,expanded:n}=e;return t?i.createElement(n0,null):n?i.createElement(n2,null):i.createElement(n3,null)}function 
rw(e){let{treeData:t,children:n}=e;return t||e2(n)}let rS=i.forwardRef((e,t)=>{var{defaultExpandAll:n,defaultExpandParent:r,defaultExpandedKeys:a}=e,l=rb(e,["defaultExpandAll","defaultExpandParent","defaultExpandedKeys"]);let c=i.useRef(),s=i.useRef(),u=()=>{let{keyEntities:e}=e3(rw(l));return n?Object.keys(e):r?tc(l.expandedKeys||a||[],e):l.expandedKeys||a},[d,f]=i.useState(l.selectedKeys||l.defaultSelectedKeys||[]),[p,h]=i.useState(()=>u());i.useEffect(()=>{"selectedKeys"in l&&f(l.selectedKeys)},[l.selectedKeys]),i.useEffect(()=>{"expandedKeys"in l&&h(l.expandedKeys)},[l.expandedKeys]);let{getPrefixCls:m,direction:g}=i.useContext(tv.E_),{prefixCls:v,className:y,showIcon:b=!0,expandAction:x="click"}=l,w=rb(l,["prefixCls","className","showIcon","expandAction"]),S=m("tree",v),k=P()("".concat(S,"-directory"),{["".concat(S,"-directory-rtl")]:"rtl"===g},y);return i.createElement(rv,Object.assign({icon:rx,ref:t,blockNode:!0},w,{showIcon:b,expandAction:x,prefixCls:S,className:k,expandedKeys:p,selectedKeys:d,onSelect:(e,t)=>{var n;let r;let{multiple:i,fieldNames:a}=l,{node:u,nativeEvent:d}=t,{key:h=""}=u,m=rw(l),g=Object.assign(Object.assign({},t),{selected:!0}),v=(null==d?void 0:d.ctrlKey)||(null==d?void 0:d.metaKey),y=null==d?void 0:d.shiftKey;i&&v?(r=e,c.current=h,s.current=r):i&&y?r=Array.from(new Set([].concat((0,ec.Z)(s.current||[]),(0,ec.Z)(function(e){let{treeData:t,expandedKeys:n,startKey:r,endKey:i,fieldNames:a}=e,l=[],c=o.None;return r&&r===i?[r]:r&&i?(ry(t,e=>{if(c===o.End)return!1;if(e===r||e===i){if(l.push(e),c===o.None)c=o.Start;else if(c===o.Start)return c=o.End,!1}else c===o.Start&&l.push(e);return n.includes(e)},e1(a)),l):[]}({treeData:m,expandedKeys:p,startKey:h,endKey:c.current,fieldNames:a}))))):(r=[h],c.current=h,s.current=r),g.selectedNodes=function(e,t,n){let r=(0,ec.Z)(t),o=[];return ry(e,(e,t)=>{let n=r.indexOf(e);return -1!==n&&(o.push(t),r.splice(n,1)),!!r.length},e1(n)),o}(m,r,a),null===(n=l.onSelect)||void 
0===n||n.call(l,r,g),"selectedKeys"in l||f(r)},onExpand:(e,t)=>{var n;return"expandedKeys"in l||h(e),null===(n=l.onExpand)||void 0===n?void 0:n.call(l,e,t)}}))});rv.DirectoryTree=rS,rv.TreeNode=tt;var rk=n(29436),rE=n(56632),rC=function(e){let{value:t,onChange:n,filterSearch:r,tablePrefixCls:o,locale:a}=e;return r?i.createElement("div",{className:"".concat(o,"-filter-dropdown-search")},i.createElement(rE.Z,{prefix:i.createElement(rk.Z,null),placeholder:a.filterSearchPlaceholder,onChange:n,value:t,htmlSize:1,className:"".concat(o,"-filter-dropdown-search-input")})):null};let rO=e=>{let{keyCode:t}=e;t===t6.Z.ENTER&&e.stopPropagation()},rj=i.forwardRef((e,t)=>i.createElement("div",{className:e.className,onClick:e=>e.stopPropagation(),onKeyDown:rO,ref:t},e.children));function rP(e){let t=[];return(e||[]).forEach(e=>{let{value:n,children:r}=e;t.push(n),r&&(t=[].concat((0,ec.Z)(t),(0,ec.Z)(rP(r))))}),t}function rM(e,t){return("string"==typeof t||"number"==typeof t)&&(null==t?void 0:t.toString().toLowerCase().includes(e.trim().toLowerCase()))}var rN=function(e){var t,n;let r,o;let{tablePrefixCls:a,prefixCls:l,column:c,dropdownPrefixCls:s,columnKey:u,filterMultiple:f,filterMode:p="menu",filterSearch:h=!1,filterState:m,triggerFilter:g,locale:v,children:y,getPopupContainer:b,rootClassName:x}=e,{filterDropdownOpen:w,onFilterDropdownOpenChange:S,filterResetToDefaultFilteredValue:k,defaultFilteredValue:E,filterDropdownVisible:C,onFilterDropdownVisibleChange:O}=c,[j,M]=i.useState(!1),N=!!(m&&((null===(t=m.filteredKeys)||void 0===t?void 0:t.length)||m.forceFiltered)),I=e=>{M(e),null==S||S(e),null==O||O(e)},R=null!==(n=null!=w?w:C)&&void 0!==n?n:j,T=null==m?void 0:m.filteredKeys,[A,_]=function(e){let t=i.useRef(e),n=(0,tU.Z)();return[()=>t.current,e=>{t.current=e,n()}]}(T||[]),D=e=>{let{selectedKeys:t}=e;_(t)};i.useEffect(()=>{j&&D({selectedKeys:T||[]})},[T]);let[Z,L]=i.useState([]),[z,B]=i.useState(""),F=e=>{let{value:t}=e.target;B(t)};i.useEffect(()=>{j||B("")},[j]);let 
H=e=>{let t=e&&e.length?e:null;if(null===t&&(!m||!m.filteredKeys)||(0,d.Z)(t,null==m?void 0:m.filteredKeys,!0))return null;g({column:c,key:u,filteredKeys:t})},q=()=>{I(!1),H(A())},W=function(){let{confirm:e,closeDropdown:t}=arguments.length>0&&void 0!==arguments[0]?arguments[0]:{confirm:!1,closeDropdown:!1};e&&H([]),t&&I(!1),B(""),k?_((E||[]).map(e=>String(e))):_([])},K=P()({["".concat(s,"-menu-without-submenu")]:!(c.filters||[]).some(e=>{let{children:t}=e;return t})}),U=e=>{let{filters:t}=e;return(t||[]).map((e,t)=>{let n=String(e.value),r={title:e.text,key:void 0!==e.value?n:String(t)};return e.children&&(r.children=U({filters:e.children})),r})},V=e=>{var t;return Object.assign(Object.assign({},e),{text:e.title,value:e.key,children:(null===(t=e.children)||void 0===t?void 0:t.map(e=>V(e)))||[]})};if("function"==typeof c.filterDropdown)r=c.filterDropdown({prefixCls:"".concat(s,"-custom"),setSelectedKeys:e=>D({selectedKeys:e}),selectedKeys:A(),confirm:function(){let{closeDropdown:e}=arguments.length>0&&void 0!==arguments[0]?arguments[0]:{closeDropdown:!0};e&&I(!1),H(A())},clearFilters:W,filters:c.filters,visible:R,close:()=>{I(!1)}});else if(c.filterDropdown)r=c.filterDropdown;else{let e=A()||[];r=i.createElement(i.Fragment,null,0===(c.filters||[]).length?i.createElement(nT.Z,{image:nT.Z.PRESENTED_IMAGE_SIMPLE,description:v.filterEmptyText,imageStyle:{height:24},style:{margin:0,padding:"16px 0"}}):"tree"===p?i.createElement(i.Fragment,null,i.createElement(rC,{filterSearch:h,value:z,onChange:F,tablePrefixCls:a,locale:v}),i.createElement("div",{className:"".concat(a,"-filter-dropdown-tree")},f?i.createElement(tN,{checked:e.length===rP(c.filters).length,indeterminate:e.length>0&&e.length{e.target.checked?_(rP(null==c?void 
0:c.filters).map(e=>String(e))):_([])}},v.filterCheckall):null,i.createElement(rv,{checkable:!0,selectable:!1,blockNode:!0,multiple:f,checkStrictly:!f,className:"".concat(s,"-menu"),onCheck:(e,t)=>{let{node:n,checked:r}=t;f?D({selectedKeys:e}):D({selectedKeys:r&&n.key?[n.key]:[]})},checkedKeys:e,selectedKeys:e,showIcon:!1,treeData:U({filters:c.filters}),autoExpandParent:!0,defaultExpandAll:!0,filterTreeNode:z.trim()?e=>"function"==typeof h?h(z,V(e)):rM(z,e.title):void 0}))):i.createElement(i.Fragment,null,i.createElement(rC,{filterSearch:h,value:z,onChange:F,tablePrefixCls:a,locale:v}),i.createElement(nA.Z,{selectable:!0,multiple:f,prefixCls:"".concat(s,"-menu"),className:K,onSelect:D,onDeselect:D,selectedKeys:e,getPopupContainer:b,openKeys:Z,onOpenChange:e=>{L(e)},items:function e(t){let{filters:n,prefixCls:r,filteredKeys:o,filterMultiple:a,searchValue:l,filterSearch:c}=t;return n.map((t,n)=>{let s=String(t.value);if(t.children)return{key:s||n,label:t.text,popupClassName:"".concat(r,"-dropdown-submenu"),children:e({filters:t.children,prefixCls:r,filteredKeys:o,filterMultiple:a,searchValue:l,filterSearch:c})};let u=a?tN:tA.ZP,d={key:void 0!==t.value?s:n,label:i.createElement(i.Fragment,null,i.createElement(u,{checked:o.includes(s)}),i.createElement("span",null,t.text))};return l.trim()?"function"==typeof c?c(l,t)?d:null:rM(l,t.text)?d:null:d})}({filters:c.filters||[],filterSearch:h,prefixCls:l,filteredKeys:A(),filterMultiple:f,searchValue:z})})),i.createElement("div",{className:"".concat(l,"-dropdown-btns")},i.createElement(nR.ZP,{type:"link",size:"small",disabled:k?(0,d.Z)((E||[]).map(e=>String(e)),e,!0):0===e.length,onClick:()=>W()},v.filterReset),i.createElement(nR.ZP,{type:"primary",size:"small",onClick:q},v.filterConfirm)))}c.filterDropdown&&(r=i.createElement(n_.J,{selectable:void 0},r)),o="function"==typeof c.filterIcon?c.filterIcon(N):c.filterIcon?c.filterIcon:i.createElement(nI,null);let{direction:G}=i.useContext(tv.E_);return 
i.createElement("div",{className:"".concat(l,"-column")},i.createElement("span",{className:"".concat(a,"-column-title")},y),i.createElement(tT.Z,{dropdownRender:()=>i.createElement(rj,{className:"".concat(l,"-dropdown")},r),trigger:["click"],open:R,onOpenChange:(e,t)=>{"trigger"===t.source&&(e&&void 0!==T&&_(T||[]),I(e),e||c.filterDropdown||q())},getPopupContainer:b,placement:"rtl"===G?"bottomLeft":"bottomRight",rootClassName:x},i.createElement("span",{role:"button",tabIndex:-1,className:P()("".concat(l,"-trigger"),{active:N}),onClick:e=>{e.stopPropagation()}},o)))};function rI(e,t,n){let r=[];return(e||[]).forEach((e,o)=>{var i;let a=nP(o,n);if(e.filters||"filterDropdown"in e||"onFilter"in e){if("filteredValue"in e){let t=e.filteredValue;"filterDropdown"in e||(t=null!==(i=null==t?void 0:t.map(String))&&void 0!==i?i:t),r.push({column:e,key:nj(e,a),filteredKeys:t,forceFiltered:e.filtered})}else r.push({column:e,key:nj(e,a),filteredKeys:t&&e.defaultFilteredValue?e.defaultFilteredValue:void 0,forceFiltered:e.filtered})}"children"in e&&(r=[].concat((0,ec.Z)(r),(0,ec.Z)(rI(e.children,t,a))))}),r}function rR(e){let t={};return e.forEach(e=>{let{key:n,filteredKeys:r,column:o}=e,{filters:i,filterDropdown:a}=o;if(a)t[n]=r||null;else if(Array.isArray(r)){let e=rP(i);t[n]=e.filter(e=>r.includes(String(e)))}else t[n]=null}),t}function rT(e,t){return t.reduce((e,t)=>{let{column:{onFilter:n,filters:r},filteredKeys:o}=t;return n&&o&&o.length?e.filter(e=>o.some(t=>{let o=rP(r),i=o.findIndex(e=>String(e)===String(t));return n(-1!==i?o[i]:t,e)})):e},e)}let rA=e=>e.flatMap(e=>"children"in e?[e].concat((0,ec.Z)(rA(e.children||[]))):[e]);var r_=function(e){let{prefixCls:t,dropdownPrefixCls:n,mergedColumns:r,onFilterChange:o,getPopupContainer:a,locale:l,rootClassName:c}=e;(0,tp.ln)("Table");let s=i.useMemo(()=>rA(r||[]),[r]),[u,d]=i.useState(()=>rI(s,!0)),f=i.useMemo(()=>{let e=rI(s,!1);if(0===e.length)return e;let t=!0;if(e.forEach(e=>{let{filteredKeys:n}=e;void 0!==n&&(t=!1)}),t){let 
e=(s||[]).map((e,t)=>nj(e,nP(t)));return u.filter(t=>{let{key:n}=t;return e.includes(n)}).map(t=>{let n=s[e.findIndex(e=>e===t.key)];return Object.assign(Object.assign({},t),{column:Object.assign(Object.assign({},t.column),n),forceFiltered:n.filtered})})}return e},[s,u]),p=i.useMemo(()=>rR(f),[f]),h=e=>{let t=f.filter(t=>{let{key:n}=t;return n!==e.key});t.push(e),d(t),o(rR(t),t)};return[e=>(function e(t,n,r,o,a,l,c,s,u){return r.map((r,d)=>{let f=nP(d,s),{filterMultiple:p=!0,filterMode:h,filterSearch:m}=r,g=r;if(g.filters||g.filterDropdown){let e=nj(g,f),s=o.find(t=>{let{key:n}=t;return e===n});g=Object.assign(Object.assign({},g),{title:o=>i.createElement(rN,{tablePrefixCls:t,prefixCls:"".concat(t,"-filter"),dropdownPrefixCls:n,column:g,columnKey:e,filterState:s,filterMultiple:p,filterMode:h,filterSearch:m,triggerFilter:l,locale:a,getPopupContainer:c,rootClassName:u},nM(r.title,o))})}return"children"in g&&(g=Object.assign(Object.assign({},g),{children:e(t,n,g.children,o,a,l,c,f,u)})),g})})(t,n,e,f,l,h,a,void 0,c),f,p]},rD=function(){let e=Object.assign({},arguments.length<=0?void 0:arguments[0]);for(let t=1;t{let r=n[t];void 0!==r&&(e[t]=r)})}return e},rZ=function(e,t){var n={};for(var r in e)Object.prototype.hasOwnProperty.call(e,r)&&0>t.indexOf(r)&&(n[r]=e[r]);if(null!=e&&"function"==typeof Object.getOwnPropertySymbols)for(var o=0,r=Object.getOwnPropertySymbols(e);ot.indexOf(r[o])&&Object.prototype.propertyIsEnumerable.call(e,r[o])&&(n[r[o]]=e[r[o]]);return n},rL=function(e,t,n){let r=n&&"object"==typeof n?n:{},{total:o=0}=r,a=rZ(r,["total"]),[l,c]=(0,i.useState)(()=>({current:"defaultCurrent"in a?a.defaultCurrent:1,pageSize:"defaultPageSize"in a?a.defaultPageSize:10})),s=rD(l,a,{total:o>0?o:e}),u=Math.ceil((o||e)/s.pageSize);s.current>u&&(s.current=u||1);let d=(e,t)=>{c({current:null!=e?e:1,pageSize:t||s.pageSize})};return!1===n?[{},()=>{}]:[Object.assign(Object.assign({},s),{onChange:(e,r)=>{var o;n&&(null===(o=n.onChange)||void 
0===o||o.call(n,e,r)),d(e,r),t(e,r||(null==s?void 0:s.pageSize))}}),d]},rz={icon:{tag:"svg",attrs:{viewBox:"0 0 1024 1024",focusable:"false"},children:[{tag:"path",attrs:{d:"M840.4 300H183.6c-19.7 0-30.7 20.8-18.5 35l328.4 380.8c9.4 10.9 27.5 10.9 37 0L858.9 335c12.2-14.2 1.2-35-18.5-35z"}}]},name:"caret-down",theme:"outlined"},rB=i.forwardRef(function(e,t){return i.createElement(tY.Z,(0,m.Z)({},e,{ref:t,icon:rz}))}),rF={icon:{tag:"svg",attrs:{viewBox:"0 0 1024 1024",focusable:"false"},children:[{tag:"path",attrs:{d:"M858.9 689L530.5 308.2c-9.4-10.9-27.5-10.9-37 0L165.1 689c-12.2 14.2-1.2 35 18.5 35h656.8c19.7 0 30.7-20.8 18.5-35z"}}]},name:"caret-up",theme:"outlined"},rH=i.forwardRef(function(e,t){return i.createElement(tY.Z,(0,m.Z)({},e,{ref:t,icon:rF}))}),rq=n(98074);let rW="ascend",rK="descend";function rU(e){return"object"==typeof e.sorter&&"number"==typeof e.sorter.multiple&&e.sorter.multiple}function rV(e){return"function"==typeof e?e:!!e&&"object"==typeof e&&!!e.compare&&e.compare}function rG(e,t,n){let r=[];function o(e,t){r.push({column:e,key:nj(e,t),multiplePriority:rU(e),sortOrder:e.sortOrder})}return(e||[]).forEach((e,i)=>{let a=nP(i,n);e.children?("sortOrder"in e&&o(e,a),r=[].concat((0,ec.Z)(r),(0,ec.Z)(rG(e.children,t,a)))):e.sorter&&("sortOrder"in e?o(e,a):t&&e.defaultSortOrder&&r.push({column:e,key:nj(e,a),multiplePriority:rU(e),sortOrder:e.defaultSortOrder}))}),r}function rX(e){let{column:t,sortOrder:n}=e;return{column:t,order:n,field:t.dataIndex,columnKey:t.key}}function r$(e){let t=e.filter(e=>{let{sortOrder:t}=e;return t}).map(rX);return 0===t.length&&e.length?Object.assign(Object.assign({},rX(e[e.length-1])),{column:void 0}):t.length<=1?t[0]||{}:t}function rY(e,t,n){let r=t.slice().sort((e,t)=>t.multiplePriority-e.multiplePriority),o=e.slice(),i=r.filter(e=>{let{column:{sorter:t},sortOrder:n}=e;return rV(t)&&n});return i.length?o.sort((e,t)=>{for(let n=0;n{let r=e[n];return r?Object.assign(Object.assign({},e),{[n]:rY(r,t,n)}):e}):o}var 
rQ=x(eR,(e,t)=>{let{_renderTimes:n}=e,{_renderTimes:r}=t;return n!==r}),rJ=x(eH,(e,t)=>{let{_renderTimes:n}=e,{_renderTimes:r}=t;return n!==r}),r0=n(36360),r1=e=>{let{componentCls:t,lineWidth:n,lineType:r,tableBorderColor:o,tableHeaderBg:i,tablePaddingVertical:a,tablePaddingHorizontal:l,calc:c}=e,s="".concat((0,tS.bf)(n)," ").concat(r," ").concat(o),u=(e,r,o)=>({["&".concat(t,"-").concat(e)]:{["> ".concat(t,"-container")]:{["> ".concat(t,"-content, > ").concat(t,"-body")]:{"\n > table > tbody > tr > th,\n > table > tbody > tr > td\n ":{["> ".concat(t,"-expanded-row-fixed")]:{margin:"".concat((0,tS.bf)(c(r).mul(-1).equal()),"\n ").concat((0,tS.bf)(c(c(o).add(n)).mul(-1).equal()))}}}}}});return{["".concat(t,"-wrapper")]:{["".concat(t).concat(t,"-bordered")]:Object.assign(Object.assign(Object.assign({["> ".concat(t,"-title")]:{border:s,borderBottom:0},["> ".concat(t,"-container")]:{borderInlineStart:s,borderTop:s,["\n > ".concat(t,"-content,\n > ").concat(t,"-header,\n > ").concat(t,"-body,\n > ").concat(t,"-summary\n ")]:{"> table":{"\n > thead > tr > th,\n > thead > tr > td,\n > tbody > tr > th,\n > tbody > tr > td,\n > tfoot > tr > th,\n > tfoot > tr > td\n ":{borderInlineEnd:s},"> thead":{"> tr:not(:last-child) > th":{borderBottom:s},"> tr > th::before":{backgroundColor:"transparent !important"}},"\n > thead > tr,\n > tbody > tr,\n > tfoot > tr\n ":{["> ".concat(t,"-cell-fix-right-first::after")]:{borderInlineEnd:s}},"\n > tbody > tr > th,\n > tbody > tr > td\n ":{["> ".concat(t,"-expanded-row-fixed")]:{margin:"".concat((0,tS.bf)(c(a).mul(-1).equal())," ").concat((0,tS.bf)(c(c(l).add(n)).mul(-1).equal())),"&::after":{position:"absolute",top:0,insetInlineEnd:n,bottom:0,borderInlineEnd:s,content:'""'}}}}}},["&".concat(t,"-scroll-horizontal")]:{["> ".concat(t,"-container > ").concat(t,"-body")]:{"> table > tbody":{["\n > tr".concat(t,"-expanded-row,\n > tr").concat(t,"-placeholder\n ")]:{"> th, > 
td":{borderInlineEnd:0}}}}}},u("middle",e.tablePaddingVerticalMiddle,e.tablePaddingHorizontalMiddle)),u("small",e.tablePaddingVerticalSmall,e.tablePaddingHorizontalSmall)),{["> ".concat(t,"-footer")]:{border:s,borderTop:0}}),["".concat(t,"-cell")]:{["".concat(t,"-container:first-child")]:{borderTop:0},"&-scrollbar:not([rowspan])":{boxShadow:"0 ".concat((0,tS.bf)(n)," 0 ").concat((0,tS.bf)(n)," ").concat(i)}},["".concat(t,"-bordered ").concat(t,"-cell-scrollbar")]:{borderInlineEnd:s}}}},r2=e=>{let{componentCls:t}=e;return{["".concat(t,"-wrapper")]:{["".concat(t,"-cell-ellipsis")]:Object.assign(Object.assign({},tk.vS),{wordBreak:"keep-all",["\n &".concat(t,"-cell-fix-left-last,\n &").concat(t,"-cell-fix-right-first\n ")]:{overflow:"visible",["".concat(t,"-cell-content")]:{display:"block",overflow:"hidden",textOverflow:"ellipsis"}},["".concat(t,"-column-title")]:{overflow:"hidden",textOverflow:"ellipsis",wordBreak:"keep-all"}})}}},r6=e=>{let{componentCls:t}=e;return{["".concat(t,"-wrapper")]:{["".concat(t,"-tbody > tr").concat(t,"-placeholder")]:{textAlign:"center",color:e.colorTextDisabled,"\n &:hover > th,\n &:hover > td,\n ":{background:e.colorBgContainer}}}}},r3=n(76122),r4=e=>{let{componentCls:t,antCls:n,motionDurationSlow:r,lineWidth:o,paddingXS:i,lineType:a,tableBorderColor:l,tableExpandIconBg:c,tableExpandColumnWidth:s,borderRadius:u,tablePaddingVertical:d,tablePaddingHorizontal:f,tableExpandedRowBg:p,paddingXXS:h,expandIconMarginTop:m,expandIconSize:g,expandIconHalfInner:v,expandIconScale:y,calc:b}=e,x="".concat((0,tS.bf)(o)," ").concat(a," 
").concat(l),w=b(h).sub(o).equal();return{["".concat(t,"-wrapper")]:{["".concat(t,"-expand-icon-col")]:{width:s},["".concat(t,"-row-expand-icon-cell")]:{textAlign:"center",["".concat(t,"-row-expand-icon")]:{display:"inline-flex",float:"none",verticalAlign:"sub"}},["".concat(t,"-row-indent")]:{height:1,float:"left"},["".concat(t,"-row-expand-icon")]:Object.assign(Object.assign({},(0,r3.N)(e)),{position:"relative",float:"left",boxSizing:"border-box",width:g,height:g,padding:0,color:"inherit",lineHeight:(0,tS.bf)(g),background:c,border:x,borderRadius:u,transform:"scale(".concat(y,")"),transition:"all ".concat(r),userSelect:"none","&:focus, &:hover, &:active":{borderColor:"currentcolor"},"&::before, &::after":{position:"absolute",background:"currentcolor",transition:"transform ".concat(r," ease-out"),content:'""'},"&::before":{top:v,insetInlineEnd:w,insetInlineStart:w,height:o},"&::after":{top:w,bottom:w,insetInlineStart:v,width:o,transform:"rotate(90deg)"},"&-collapsed::before":{transform:"rotate(-180deg)"},"&-collapsed::after":{transform:"rotate(0deg)"},"&-spaced":{"&::before, &::after":{display:"none",content:"none"},background:"transparent",border:0,visibility:"hidden"}}),["".concat(t,"-row-indent + ").concat(t,"-row-expand-icon")]:{marginTop:m,marginInlineEnd:i},["tr".concat(t,"-expanded-row")]:{"&, &:hover":{"> th, > td":{background:p}},["".concat(n,"-descriptions-view")]:{display:"flex",table:{flex:"auto",width:"auto"}}},["".concat(t,"-expanded-row-fixed")]:{position:"relative",margin:"".concat((0,tS.bf)(b(d).mul(-1).equal())," ").concat((0,tS.bf)(b(f).mul(-1).equal())),padding:"".concat((0,tS.bf)(d)," 
").concat((0,tS.bf)(f))}}}},r5=e=>{let{componentCls:t,antCls:n,iconCls:r,tableFilterDropdownWidth:o,tableFilterDropdownSearchWidth:i,paddingXXS:a,paddingXS:l,colorText:c,lineWidth:s,lineType:u,tableBorderColor:d,headerIconColor:f,fontSizeSM:p,tablePaddingHorizontal:h,borderRadius:m,motionDurationSlow:g,colorTextDescription:v,colorPrimary:y,tableHeaderFilterActiveBg:b,colorTextDisabled:x,tableFilterDropdownBg:w,tableFilterDropdownHeight:S,controlItemBgHover:k,controlItemBgActive:E,boxShadowSecondary:C,filterDropdownMenuBg:O,calc:j}=e,P="".concat(n,"-dropdown"),M="".concat(t,"-filter-dropdown"),N="".concat(n,"-tree"),I="".concat((0,tS.bf)(s)," ").concat(u," ").concat(d);return[{["".concat(t,"-wrapper")]:{["".concat(t,"-filter-column")]:{display:"flex",justifyContent:"space-between"},["".concat(t,"-filter-trigger")]:{position:"relative",display:"flex",alignItems:"center",marginBlock:j(a).mul(-1).equal(),marginInline:"".concat((0,tS.bf)(a)," ").concat((0,tS.bf)(j(h).div(2).mul(-1).equal())),padding:"0 ".concat((0,tS.bf)(a)),color:f,fontSize:p,borderRadius:m,cursor:"pointer",transition:"all ".concat(g),"&:hover":{color:v,background:b},"&.active":{color:y}}}},{["".concat(n,"-dropdown")]:{[M]:Object.assign(Object.assign({},(0,tk.Wf)(e)),{minWidth:o,backgroundColor:w,borderRadius:m,boxShadow:C,overflow:"hidden",["".concat(P,"-menu")]:{maxHeight:S,overflowX:"hidden",border:0,boxShadow:"none",borderRadius:"unset",backgroundColor:O,"&:empty::after":{display:"block",padding:"".concat((0,tS.bf)(l)," 0"),color:x,fontSize:p,textAlign:"center",content:'"Not Found"'}},["".concat(M,"-tree")]:{paddingBlock:"".concat((0,tS.bf)(l)," 0"),paddingInline:l,[N]:{padding:0},["".concat(N,"-treenode ").concat(N,"-node-content-wrapper:hover")]:{backgroundColor:k},["".concat(N,"-treenode-checkbox-checked ").concat(N,"-node-content-wrapper")]:{"&, 
&:hover":{backgroundColor:E}}},["".concat(M,"-search")]:{padding:l,borderBottom:I,"&-input":{input:{minWidth:i},[r]:{color:x}}},["".concat(M,"-checkall")]:{width:"100%",marginBottom:a,marginInlineStart:a},["".concat(M,"-btns")]:{display:"flex",justifyContent:"space-between",padding:"".concat((0,tS.bf)(j(l).sub(s).equal())," ").concat((0,tS.bf)(l)),overflow:"hidden",borderTop:I}})}},{["".concat(n,"-dropdown ").concat(M,", ").concat(M,"-submenu")]:{["".concat(n,"-checkbox-wrapper + span")]:{paddingInlineStart:l,color:c},"> ul":{maxHeight:"calc(100vh - 130px)",overflowX:"hidden",overflowY:"auto"}}}]},r8=e=>{let{componentCls:t,lineWidth:n,colorSplit:r,motionDurationSlow:o,zIndexTableFixed:i,tableBg:a,zIndexTableSticky:l,calc:c}=e;return{["".concat(t,"-wrapper")]:{["\n ".concat(t,"-cell-fix-left,\n ").concat(t,"-cell-fix-right\n ")]:{position:"sticky !important",zIndex:i,background:a},["\n ".concat(t,"-cell-fix-left-first::after,\n ").concat(t,"-cell-fix-left-last::after\n ")]:{position:"absolute",top:0,right:{_skip_check_:!0,value:0},bottom:c(n).mul(-1).equal(),width:30,transform:"translateX(100%)",transition:"box-shadow ".concat(o),content:'""',pointerEvents:"none"},["".concat(t,"-cell-fix-left-all::after")]:{display:"none"},["\n ".concat(t,"-cell-fix-right-first::after,\n ").concat(t,"-cell-fix-right-last::after\n ")]:{position:"absolute",top:0,bottom:c(n).mul(-1).equal(),left:{_skip_check_:!0,value:0},width:30,transform:"translateX(-100%)",transition:"box-shadow ".concat(o),content:'""',pointerEvents:"none"},["".concat(t,"-container")]:{position:"relative","&::before, &::after":{position:"absolute",top:0,bottom:0,zIndex:c(l).add(1).equal({unit:!1}),width:30,transition:"box-shadow ".concat(o),content:'""',pointerEvents:"none"},"&::before":{insetInlineStart:0},"&::after":{insetInlineEnd:0}},["".concat(t,"-ping-left")]:{["&:not(".concat(t,"-has-fix-left) ").concat(t,"-container::before")]:{boxShadow:"inset 10px 0 8px -8px ".concat(r)},["\n 
".concat(t,"-cell-fix-left-first::after,\n ").concat(t,"-cell-fix-left-last::after\n ")]:{boxShadow:"inset 10px 0 8px -8px ".concat(r)},["".concat(t,"-cell-fix-left-last::before")]:{backgroundColor:"transparent !important"}},["".concat(t,"-ping-right")]:{["&:not(".concat(t,"-has-fix-right) ").concat(t,"-container::after")]:{boxShadow:"inset -10px 0 8px -8px ".concat(r)},["\n ".concat(t,"-cell-fix-right-first::after,\n ").concat(t,"-cell-fix-right-last::after\n ")]:{boxShadow:"inset -10px 0 8px -8px ".concat(r)}}}}},r7=e=>{let{componentCls:t,antCls:n,margin:r}=e;return{["".concat(t,"-wrapper")]:{["".concat(t,"-pagination").concat(n,"-pagination")]:{margin:"".concat((0,tS.bf)(r)," 0")},["".concat(t,"-pagination")]:{display:"flex",flexWrap:"wrap",rowGap:e.paddingXS,"> *":{flex:"none"},"&-left":{justifyContent:"flex-start"},"&-center":{justifyContent:"center"},"&-right":{justifyContent:"flex-end"}}}}},r9=e=>{let{componentCls:t,tableRadius:n}=e;return{["".concat(t,"-wrapper")]:{[t]:{["".concat(t,"-title, ").concat(t,"-header")]:{borderRadius:"".concat((0,tS.bf)(n)," ").concat((0,tS.bf)(n)," 0 0")},["".concat(t,"-title + ").concat(t,"-container")]:{borderStartStartRadius:0,borderStartEndRadius:0,["".concat(t,"-header, table")]:{borderRadius:0},"table > thead > tr:first-child":{"th:first-child, th:last-child, td:first-child, td:last-child":{borderRadius:0}}},"&-container":{borderStartStartRadius:n,borderStartEndRadius:n,"table > thead > tr:first-child":{"> *:first-child":{borderStartStartRadius:n},"> *:last-child":{borderStartEndRadius:n}}},"&-footer":{borderRadius:"0 0 ".concat((0,tS.bf)(n)," 
").concat((0,tS.bf)(n))}}}}},oe=e=>{let{componentCls:t}=e;return{["".concat(t,"-wrapper-rtl")]:{direction:"rtl",table:{direction:"rtl"},["".concat(t,"-pagination-left")]:{justifyContent:"flex-end"},["".concat(t,"-pagination-right")]:{justifyContent:"flex-start"},["".concat(t,"-row-expand-icon")]:{float:"right","&::after":{transform:"rotate(-90deg)"},"&-collapsed::before":{transform:"rotate(180deg)"},"&-collapsed::after":{transform:"rotate(0deg)"}},["".concat(t,"-container")]:{"&::before":{insetInlineStart:"unset",insetInlineEnd:0},"&::after":{insetInlineStart:0,insetInlineEnd:"unset"},["".concat(t,"-row-indent")]:{float:"right"}}}}},ot=e=>{let{componentCls:t,antCls:n,iconCls:r,fontSizeIcon:o,padding:i,paddingXS:a,headerIconColor:l,headerIconHoverColor:c,tableSelectionColumnWidth:s,tableSelectedRowBg:u,tableSelectedRowHoverBg:d,tableRowHoverBg:f,tablePaddingHorizontal:p,calc:h}=e;return{["".concat(t,"-wrapper")]:{["".concat(t,"-selection-col")]:{width:s,["&".concat(t,"-selection-col-with-dropdown")]:{width:h(s).add(o).add(h(i).div(4)).equal()}},["".concat(t,"-bordered ").concat(t,"-selection-col")]:{width:h(s).add(h(a).mul(2)).equal(),["&".concat(t,"-selection-col-with-dropdown")]:{width:h(s).add(o).add(h(i).div(4)).add(h(a).mul(2)).equal()}},["\n table tr th".concat(t,"-selection-column,\n table tr td").concat(t,"-selection-column,\n ").concat(t,"-selection-column\n ")]:{paddingInlineEnd:e.paddingXS,paddingInlineStart:e.paddingXS,textAlign:"center",["".concat(n,"-radio-wrapper")]:{marginInlineEnd:0}},["table tr th".concat(t,"-selection-column").concat(t,"-cell-fix-left")]:{zIndex:e.zIndexTableFixed+1},["table tr th".concat(t,"-selection-column::after")]:{backgroundColor:"transparent !important"},["".concat(t,"-selection")]:{position:"relative",display:"inline-flex",flexDirection:"column"},["".concat(t,"-selection-extra")]:{position:"absolute",top:0,zIndex:1,cursor:"pointer",transition:"all 
".concat(e.motionDurationSlow),marginInlineStart:"100%",paddingInlineStart:(0,tS.bf)(h(p).div(4).equal()),[r]:{color:l,fontSize:o,verticalAlign:"baseline","&:hover":{color:c}}},["".concat(t,"-tbody")]:{["".concat(t,"-row")]:{["&".concat(t,"-row-selected")]:{["> ".concat(t,"-cell")]:{background:u,"&-row-hover":{background:d}}},["> ".concat(t,"-cell-row-hover")]:{background:f}}}}}},on=e=>{let{componentCls:t,tableExpandColumnWidth:n,calc:r}=e,o=(e,o,i,a)=>({["".concat(t).concat(t,"-").concat(e)]:{fontSize:a,["\n ".concat(t,"-title,\n ").concat(t,"-footer,\n ").concat(t,"-cell,\n ").concat(t,"-thead > tr > th,\n ").concat(t,"-tbody > tr > th,\n ").concat(t,"-tbody > tr > td,\n tfoot > tr > th,\n tfoot > tr > td\n ")]:{padding:"".concat((0,tS.bf)(o)," ").concat((0,tS.bf)(i))},["".concat(t,"-filter-trigger")]:{marginInlineEnd:(0,tS.bf)(r(i).div(2).mul(-1).equal())},["".concat(t,"-expanded-row-fixed")]:{margin:"".concat((0,tS.bf)(r(o).mul(-1).equal())," ").concat((0,tS.bf)(r(i).mul(-1).equal()))},["".concat(t,"-tbody")]:{["".concat(t,"-wrapper:only-child ").concat(t)]:{marginBlock:(0,tS.bf)(r(o).mul(-1).equal()),marginInline:"".concat((0,tS.bf)(r(n).sub(i).equal())," ").concat((0,tS.bf)(r(i).mul(-1).equal()))}},["".concat(t,"-selection-extra")]:{paddingInlineStart:(0,tS.bf)(r(i).div(4).equal())}}});return{["".concat(t,"-wrapper")]:Object.assign(Object.assign({},o("middle",e.tablePaddingVerticalMiddle,e.tablePaddingHorizontalMiddle,e.tableFontSizeMiddle)),o("small",e.tablePaddingVerticalSmall,e.tablePaddingHorizontalSmall,e.tableFontSizeSmall))}},or=e=>{let{componentCls:t,marginXXS:n,fontSizeIcon:r,headerIconColor:o,headerIconHoverColor:i}=e;return{["".concat(t,"-wrapper")]:{["".concat(t,"-thead th").concat(t,"-column-has-sorters")]:{outline:"none",cursor:"pointer",transition:"all ".concat(e.motionDurationSlow),"&:hover":{background:e.tableHeaderSortHoverBg,"&::before":{backgroundColor:"transparent !important"}},"&:focus-visible":{color:e.colorPrimary},["\n 
&".concat(t,"-cell-fix-left:hover,\n &").concat(t,"-cell-fix-right:hover\n ")]:{background:e.tableFixedHeaderSortActiveBg}},["".concat(t,"-thead th").concat(t,"-column-sort")]:{background:e.tableHeaderSortBg,"&::before":{backgroundColor:"transparent !important"}},["td".concat(t,"-column-sort")]:{background:e.tableBodySortBg},["".concat(t,"-column-title")]:{position:"relative",zIndex:1,flex:1},["".concat(t,"-column-sorters")]:{display:"flex",flex:"auto",alignItems:"center",justifyContent:"space-between","&::after":{position:"absolute",inset:0,width:"100%",height:"100%",content:'""'}},["".concat(t,"-column-sorter")]:{marginInlineStart:n,color:o,fontSize:0,transition:"color ".concat(e.motionDurationSlow),"&-inner":{display:"inline-flex",flexDirection:"column",alignItems:"center"},"&-up, &-down":{fontSize:r,"&.active":{color:e.colorPrimary}},["".concat(t,"-column-sorter-up + ").concat(t,"-column-sorter-down")]:{marginTop:"-0.3em"}},["".concat(t,"-column-sorters:hover ").concat(t,"-column-sorter")]:{color:i}}}},oo=e=>{let{componentCls:t,opacityLoading:n,tableScrollThumbBg:r,tableScrollThumbBgHover:o,tableScrollThumbSize:i,tableScrollBg:a,zIndexTableSticky:l,stickyScrollBarBorderRadius:c,lineWidth:s,lineType:u,tableBorderColor:d}=e,f="".concat((0,tS.bf)(s)," ").concat(u," ").concat(d);return{["".concat(t,"-wrapper")]:{["".concat(t,"-sticky")]:{"&-holder":{position:"sticky",zIndex:l,background:e.colorBgContainer},"&-scroll":{position:"sticky",bottom:0,height:"".concat((0,tS.bf)(i)," !important"),zIndex:l,display:"flex",alignItems:"center",background:a,borderTop:f,opacity:n,"&:hover":{transformOrigin:"center bottom"},"&-bar":{height:i,backgroundColor:r,borderRadius:c,transition:"all ".concat(e.motionDurationSlow,", transform none"),position:"absolute",bottom:0,"&:hover, &-active":{backgroundColor:o}}}}}}},oi=e=>{let{componentCls:t,lineWidth:n,tableBorderColor:r,calc:o}=e,i="".concat((0,tS.bf)(n)," ").concat(e.lineType," 
").concat(r);return{["".concat(t,"-wrapper")]:{["".concat(t,"-summary")]:{position:"relative",zIndex:e.zIndexTableFixed,background:e.tableBg,"> tr":{"> th, > td":{borderBottom:i}}},["div".concat(t,"-summary")]:{boxShadow:"0 ".concat((0,tS.bf)(o(n).mul(-1).equal())," 0 ").concat(r)}}}},oa=e=>{let{componentCls:t,motionDurationMid:n,lineWidth:r,lineType:o,tableBorderColor:i,calc:a}=e,l="".concat((0,tS.bf)(r)," ").concat(o," ").concat(i),c="".concat(t,"-expanded-row-cell");return{["".concat(t,"-wrapper")]:{["".concat(t,"-tbody-virtual")]:{["".concat(t,"-row")]:{display:"flex",boxSizing:"border-box",width:"100%"},["".concat(t,"-cell")]:{borderBottom:l,transition:"background ".concat(n)},["".concat(t,"-expanded-row")]:{["".concat(c).concat(c,"-fixed")]:{position:"sticky",insetInlineStart:0,overflow:"hidden",width:"calc(var(--virtual-width) - ".concat((0,tS.bf)(r),")"),borderInlineEnd:"none"}}},["".concat(t,"-bordered")]:{["".concat(t,"-tbody-virtual")]:{"&:after":{content:'""',insetInline:0,bottom:0,borderBottom:l,position:"absolute"},["".concat(t,"-cell")]:{borderInlineEnd:l,["&".concat(t,"-cell-fix-right-first:before")]:{content:'""',position:"absolute",insetBlock:0,insetInlineStart:a(r).mul(-1).equal(),borderInlineStart:l}}},["&".concat(t,"-virtual")]:{["".concat(t,"-placeholder ").concat(t,"-cell")]:{borderInlineEnd:l,borderBottom:l}}}}}};let ol=e=>{let{componentCls:t,fontWeightStrong:n,tablePaddingVertical:r,tablePaddingHorizontal:o,tableExpandColumnWidth:i,lineWidth:a,lineType:l,tableBorderColor:c,tableFontSize:s,tableBg:u,tableRadius:d,tableHeaderTextColor:f,motionDurationMid:p,tableHeaderBg:h,tableHeaderCellSplitColor:m,tableFooterTextColor:g,tableFooterBg:v,calc:y}=e,b="".concat((0,tS.bf)(a)," ").concat(l," ").concat(c);return{["".concat(t,"-wrapper")]:Object.assign(Object.assign({clear:"both",maxWidth:"100%"},(0,tk.dF)()),{[t]:Object.assign(Object.assign({},(0,tk.Wf)(e)),{fontSize:s,background:u,borderRadius:"".concat((0,tS.bf)(d)," ").concat((0,tS.bf)(d)," 0 
0")}),table:{width:"100%",textAlign:"start",borderRadius:"".concat((0,tS.bf)(d)," ").concat((0,tS.bf)(d)," 0 0"),borderCollapse:"separate",borderSpacing:0},["\n ".concat(t,"-cell,\n ").concat(t,"-thead > tr > th,\n ").concat(t,"-tbody > tr > th,\n ").concat(t,"-tbody > tr > td,\n tfoot > tr > th,\n tfoot > tr > td\n ")]:{position:"relative",padding:"".concat((0,tS.bf)(r)," ").concat((0,tS.bf)(o)),overflowWrap:"break-word"},["".concat(t,"-title")]:{padding:"".concat((0,tS.bf)(r)," ").concat((0,tS.bf)(o))},["".concat(t,"-thead")]:{"\n > tr > th,\n > tr > td\n ":{position:"relative",color:f,fontWeight:n,textAlign:"start",background:h,borderBottom:b,transition:"background ".concat(p," ease"),"&[colspan]:not([colspan='1'])":{textAlign:"center"},["&:not(:last-child):not(".concat(t,"-selection-column):not(").concat(t,"-row-expand-icon-cell):not([colspan])::before")]:{position:"absolute",top:"50%",insetInlineEnd:0,width:1,height:"1.6em",backgroundColor:m,transform:"translateY(-50%)",transition:"background-color ".concat(p),content:'""'}},"> tr:not(:last-child) > th[colspan]":{borderBottom:0}},["".concat(t,"-tbody")]:{"> tr":{"> th, > td":{transition:"background ".concat(p,", border-color ").concat(p),borderBottom:b,["\n > ".concat(t,"-wrapper:only-child,\n > ").concat(t,"-expanded-row-fixed > ").concat(t,"-wrapper:only-child\n ")]:{[t]:{marginBlock:(0,tS.bf)(y(r).mul(-1).equal()),marginInline:"".concat((0,tS.bf)(y(i).sub(o).equal()),"\n ").concat((0,tS.bf)(y(o).mul(-1).equal())),["".concat(t,"-tbody > tr:last-child > td")]:{borderBottom:0,"&:first-child, &:last-child":{borderRadius:0}}}}},"> th":{position:"relative",color:f,fontWeight:n,textAlign:"start",background:h,borderBottom:b,transition:"background ".concat(p," ease")}}},["".concat(t,"-footer")]:{padding:"".concat((0,tS.bf)(r)," ").concat((0,tS.bf)(o)),color:g,background:v}})}};var 
oc=(0,tC.I$)("Table",e=>{let{colorTextHeading:t,colorSplit:n,colorBgContainer:r,controlInteractiveSize:o,headerBg:i,headerColor:a,headerSortActiveBg:l,headerSortHoverBg:c,bodySortBg:s,rowHoverBg:u,rowSelectedBg:d,rowSelectedHoverBg:f,rowExpandedBg:p,cellPaddingBlock:h,cellPaddingInline:m,cellPaddingBlockMD:g,cellPaddingInlineMD:v,cellPaddingBlockSM:y,cellPaddingInlineSM:b,borderColor:x,footerBg:w,footerColor:S,headerBorderRadius:k,cellFontSize:E,cellFontSizeMD:C,cellFontSizeSM:O,headerSplitColor:j,fixedHeaderSortActiveBg:P,headerFilterHoverBg:M,filterDropdownBg:N,expandIconBg:I,selectionColumnWidth:R,stickyScrollBarBg:T,calc:A}=e,_=(0,tE.TS)(e,{tableFontSize:E,tableBg:r,tableRadius:k,tablePaddingVertical:h,tablePaddingHorizontal:m,tablePaddingVerticalMiddle:g,tablePaddingHorizontalMiddle:v,tablePaddingVerticalSmall:y,tablePaddingHorizontalSmall:b,tableBorderColor:x,tableHeaderTextColor:a,tableHeaderBg:i,tableFooterTextColor:S,tableFooterBg:w,tableHeaderCellSplitColor:j,tableHeaderSortBg:l,tableHeaderSortHoverBg:c,tableBodySortBg:s,tableFixedHeaderSortActiveBg:P,tableHeaderFilterActiveBg:M,tableFilterDropdownBg:N,tableRowHoverBg:u,tableSelectedRowBg:d,tableSelectedRowHoverBg:f,zIndexTableFixed:2,zIndexTableSticky:3,tableFontSizeMiddle:C,tableFontSizeSmall:O,tableSelectionColumnWidth:R,tableExpandIconBg:I,tableExpandColumnWidth:A(o).add(A(e.padding).mul(2)).equal(),tableExpandedRowBg:p,tableFilterDropdownWidth:120,tableFilterDropdownHeight:264,tableFilterDropdownSearchWidth:140,tableScrollThumbSize:8,tableScrollThumbBg:T,tableScrollThumbBgHover:t,tableScrollBg:n});return[ol(_),r7(_),oi(_),or(_),r5(_),r1(_),r9(_),r4(_),oi(_),r6(_),ot(_),r8(_),oo(_),r2(_),on(_),oe(_),oa(_)]},e=>{let{colorFillAlter:t,colorBgContainer:n,colorTextHeading:r,colorFillSecondary:o,colorFillContent:i,controlItemBgActive:a,controlItemBgActiveHover:l,padding:c,paddingSM:s,paddingXS:u,colorBorderSecondary:d,borderRadiusLG:f,controlHeight:p,colorTextPlaceholder:h,fontSize:m,fontSizeSM:g,lineHeight:
v,lineWidth:y,colorIcon:b,colorIconHover:x,opacityLoading:w,controlInteractiveSize:S}=e,k=new r0.C(o).onBackground(n).toHexShortString(),E=new r0.C(i).onBackground(n).toHexShortString(),C=new r0.C(t).onBackground(n).toHexShortString(),O=new r0.C(b),j=new r0.C(x),P=S/2-y,M=2*P+3*y;return{headerBg:C,headerColor:r,headerSortActiveBg:k,headerSortHoverBg:E,bodySortBg:C,rowHoverBg:C,rowSelectedBg:a,rowSelectedHoverBg:l,rowExpandedBg:t,cellPaddingBlock:c,cellPaddingInline:c,cellPaddingBlockMD:s,cellPaddingInlineMD:u,cellPaddingBlockSM:u,cellPaddingInlineSM:u,borderColor:d,headerBorderRadius:f,footerBg:C,footerColor:r,cellFontSize:m,cellFontSizeMD:m,cellFontSizeSM:m,headerSplitColor:d,fixedHeaderSortActiveBg:k,headerFilterHoverBg:i,filterDropdownMenuBg:n,filterDropdownBg:n,expandIconBg:n,selectionColumnWidth:p,stickyScrollBarBg:h,stickyScrollBarBorderRadius:100,expandIconMarginTop:(m*v-3*y)/2-Math.ceil((1.4*g-3*y)/2),headerIconColor:O.clone().setAlpha(O.getAlpha()*w).toRgbString(),headerIconHoverColor:j.clone().setAlpha(j.getAlpha()*w).toRgbString(),expandIconHalfInner:P,expandIconSize:M,expandIconScale:S/M}},{unitless:{expandIconScale:!0}});let os=[];var ou=i.forwardRef((e,t)=>{var n,r;let o,a,c;let{prefixCls:s,className:u,rootClassName:d,style:f,size:p,bordered:h,dropdownPrefixCls:m,dataSource:g,pagination:v,rowSelection:y,rowKey:b="key",rowClassName:x,columns:w,children:S,childrenColumnName:k,onChange:E,getPopupContainer:C,loading:O,expandIcon:j,expandable:M,expandedRowRender:N,expandIconColumnIndex:I,indentSize:R,scroll:T,sortDirections:A,locale:_,showSorterTooltip:D=!0,virtual:Z}=e;(0,tp.ln)("Table");let L=i.useMemo(()=>w||ey(S),[w,S]),z=tG(i.useMemo(()=>L.some(e=>e.responsive),[L])),B=i.useMemo(()=>{let e=new Set(Object.keys(z).filter(e=>z[e]));return 
L.filter(t=>!t.responsive||t.responsive.some(t=>e.has(t)))},[L,z]),F=(0,eY.Z)(e,["className","style","columns"]),{locale:H=tX.Z,direction:q,table:W,renderEmpty:K,getPrefixCls:U,getPopupContainer:V}=i.useContext(tv.E_),G=(0,tK.Z)(p),X=Object.assign(Object.assign({},H.Table),_),$=g||os,Y=U("table",s),Q=U("dropdown",m),[,J]=(0,nw.ZP)(),ee=(0,tb.Z)(Y),[et,en,er]=oc(Y,ee),eo=Object.assign({childrenColumnName:k,expandIconColumnIndex:I},M),{childrenColumnName:ei="children"}=eo,ea=i.useMemo(()=>$.some(e=>null==e?void 0:e[ei])?"nest":N||M&&M.expandedRowRender?"row":null,[$]),el={body:i.useRef()},es=i.useRef(null),eu=i.useRef(null);n=()=>Object.assign(Object.assign({},eu.current),{nativeElement:es.current}),(0,i.useImperativeHandle)(t,()=>{let e=n(),{nativeElement:t}=e;return"undefined"!=typeof Proxy?new Proxy(t,{get:(t,n)=>e[n]?e[n]:Reflect.get(t,n)}):(t._antProxy=t._antProxy||{},Object.keys(e).forEach(n=>{if(!(n in t._antProxy)){let r=t[n];t._antProxy[n]=r,t[n]=e[n]}}),t)});let ed=i.useMemo(()=>"function"==typeof b?b:e=>null==e?void 0:e[b],[b]),[ef]=function(e,t,n){let r=i.useRef({});return[function(o){if(!r.current||r.current.data!==e||r.current.childrenColumnName!==t||r.current.getRowKey!==n){let o=new Map;!function e(r){r.forEach((r,i)=>{let a=n(r,i);o.set(a,r),r&&"object"==typeof r&&t in r&&e(r[t]||[])})}(e),r.current={data:e,childrenColumnName:t,kvMap:o,getRowKey:n}}return r.current.kvMap.get(o)}]}($,ei,ed),ep={},eh=function(e,t){var n,r,o;let i=arguments.length>2&&void 0!==arguments[2]&&arguments[2],a=Object.assign(Object.assign({},ep),e);i&&(null===(n=ep.resetPagination)||void 0===n||n.call(ep),(null===(r=a.pagination)||void 0===r?void 0:r.current)&&(a.pagination.current=1),v&&v.onChange&&v.onChange(1,null===(o=a.pagination)||void 0===o?void 0:o.pageSize)),T&&!1!==T.scrollToFirstRowOnChange&&el.body.current&&function(e){let t=arguments.length>1&&void 0!==arguments[1]?arguments[1]:{},{getContainer:n=()=>window,callback:r,duration:o=450}=t,i=n(),a=function(e,t){var 
n,r;if("undefined"==typeof window)return 0;let o=t?"scrollTop":"scrollLeft",i=0;return tq(e)?i=e[t?"pageYOffset":"pageXOffset"]:e instanceof Document?i=e.documentElement[o]:e instanceof HTMLElement?i=e[o]:e&&(i=e[o]),e&&!tq(e)&&"number"!=typeof i&&(i=null===(r=(null!==(n=e.ownerDocument)&&void 0!==n?n:e).documentElement)||void 0===r?void 0:r[o]),i}(i,!0),l=Date.now(),c=()=>{let t=Date.now()-l,n=function(e,t,n,r){let o=n-t;return(e/=r/2)<1?o/2*e*e*e+t:o/2*((e-=2)*e*e+2)+t}(t>o?o:t,a,e,o);tq(i)?i.scrollTo(window.pageXOffset,n):i instanceof Document||"HTMLDocument"===i.constructor.name?i.documentElement.scrollTop=n:i.scrollTop=n,tel.body.current}),null==E||E(a.pagination,a.filters,a.sorter,{currentDataSource:rT(rY($,a.sorterStates,ei),a.filterStates),action:t})},[em,eg,ev,eb]=function(e){let{prefixCls:t,mergedColumns:n,onSorterChange:r,sortDirections:o,tableLocale:a,showSorterTooltip:l}=e,[c,s]=i.useState(rG(n,!0)),u=i.useMemo(()=>{let e=!0,t=rG(n,!1);if(!t.length)return c;let r=[];function o(t){e?r.push(t):r.push(Object.assign(Object.assign({},t),{sortOrder:null}))}let i=null;return t.forEach(t=>{null===i?(o(t),t.sortOrder&&(!1===t.multiplePriority?e=!1:i=!0)):(i&&!1!==t.multiplePriority||(e=!1),o(t))}),r},[n,c]),d=i.useMemo(()=>{let e=u.map(e=>{let{column:t,sortOrder:n}=e;return{column:t,order:n}});return{sortColumns:e,sortColumn:e[0]&&e[0].column,sortOrder:e[0]&&e[0].order}},[u]);function f(e){let t;s(t=!1!==e.multiplePriority&&u.length&&!1!==u[0].multiplePriority?[].concat((0,ec.Z)(u.filter(t=>{let{key:n}=t;return n!==e.key})),[e]):[e]),r(r$(t),t)}return[e=>(function e(t,n,r,o,a,l,c,s){return(n||[]).map((n,u)=>{let d=nP(u,s),f=n;if(f.sorter){let e;let s=f.sortDirections||a,u=void 0===f.showSorterTooltip?c:f.showSorterTooltip,p=nj(f,d),h=r.find(e=>{let{key:t}=e;return t===p}),m=h?h.sortOrder:null,g=m?s[s.indexOf(m)+1]:s[0];if(n.sortIcon)e=n.sortIcon({sortOrder:m});else{let 
n=s.includes(rW)&&i.createElement(rH,{className:P()("".concat(t,"-column-sorter-up"),{active:m===rW})}),r=s.includes(rK)&&i.createElement(rB,{className:P()("".concat(t,"-column-sorter-down"),{active:m===rK})});e=i.createElement("span",{className:P()("".concat(t,"-column-sorter"),{["".concat(t,"-column-sorter-full")]:!!(n&&r)})},i.createElement("span",{className:"".concat(t,"-column-sorter-inner"),"aria-hidden":"true"},n,r))}let{cancelSort:v,triggerAsc:y,triggerDesc:b}=l||{},x=v;g===rK?x=b:g===rW&&(x=y);let w="object"==typeof u?Object.assign({title:x},u):{title:x};f=Object.assign(Object.assign({},f),{className:P()(f.className,{["".concat(t,"-column-sort")]:m}),title:r=>{let o=i.createElement("div",{className:"".concat(t,"-column-sorters")},i.createElement("span",{className:"".concat(t,"-column-title")},nM(n.title,r)),e);return u?i.createElement(rq.Z,Object.assign({},w),o):o},onHeaderCell:e=>{let r=n.onHeaderCell&&n.onHeaderCell(e)||{},i=r.onClick,a=r.onKeyDown;r.onClick=e=>{o({column:n,key:p,sortOrder:g,multiplePriority:rU(n)}),null==i||i(e)},r.onKeyDown=e=>{e.keyCode===t6.Z.ENTER&&(o({column:n,key:p,sortOrder:g,multiplePriority:rU(n)}),null==a||a(e))};let l=function(e,t){let n=nM(e,t);return"[object Object]"===Object.prototype.toString.call(n)?"":n}(n.title,{}),c=null==l?void 0:l.toString();return m?r["aria-sort"]="ascend"===m?"ascending":"descending":r["aria-label"]=c||"",r.className=P()(r.className,"".concat(t,"-column-has-sorters")),r.tabIndex=0,n.ellipsis&&(r.title=(null!=l?l:"").toString()),r}})}return"children"in 
f&&(f=Object.assign(Object.assign({},f),{children:e(t,f.children,r,o,a,l,c,d)})),f})})(t,e,u,f,o,a,l),u,d,()=>r$(u)]}({prefixCls:Y,mergedColumns:B,onSorterChange:(e,t)=>{eh({sorter:e,sorterStates:t},"sort",!1)},sortDirections:A||["ascend","descend"],tableLocale:X,showSorterTooltip:D}),ex=i.useMemo(()=>rY($,eg,ei),[$,eg]);ep.sorter=eb(),ep.sorterStates=eg;let[ew,eS,ek]=r_({prefixCls:Y,locale:X,dropdownPrefixCls:Q,mergedColumns:B,onFilterChange:(e,t)=>{eh({filters:e,filterStates:t},"filter",!0)},getPopupContainer:C||V,rootClassName:P()(d,ee)}),eE=rT(ex,eS);ep.filters=ek,ep.filterStates=eS;let[eC]=(r=i.useMemo(()=>{let e={};return Object.keys(ek).forEach(t=>{null!==ek[t]&&(e[t]=ek[t])}),Object.assign(Object.assign({},ev),{filters:e})},[ev,ek]),[i.useCallback(e=>(function e(t,n){return t.map(t=>{let r=Object.assign({},t);return r.title=nM(t.title,n),"children"in r&&(r.children=e(r.children,n)),r})})(e,r),[r])]),[eO,ej]=rL(eE.length,(e,t)=>{eh({pagination:Object.assign(Object.assign({},ep.pagination),{current:e,pageSize:t})},"paginate")},v);ep.pagination=!1===v?{}:function(e,t){let n={current:e.current,pageSize:e.pageSize};return Object.keys(t&&"object"==typeof t?t:{}).forEach(t=>{let r=e[t];"function"!=typeof r&&(n[t]=r)}),n}(eO,v),ep.resetPagination=ej;let eP=i.useMemo(()=>{if(!1===v||!eO.pageSize)return eE;let{current:e=1,total:t,pageSize:n=10}=eO;return eE.lengthn?eE.slice((e-1)*n,e*n):eE:eE.slice((e-1)*n,e*n)},[!!v,eE,eO&&eO.current,eO&&eO.pageSize,eO&&eO.total]),[eM,eN]=tF({prefixCls:Y,data:eE,pageData:eP,getRowKey:ed,getRecordByKey:ef,expandType:ea,childrenColumnName:ei,locale:X,getPopupContainer:C||V},y);eo.__PARENT_RENDER_ICON__=eo.expandIcon,eo.expandIcon=eo.expandIcon||j||function(e){let{prefixCls:t,onExpand:n,record:r,expanded:o,expandable:a}=e,l="".concat(t,"-row-expand-icon");return 
i.createElement("button",{type:"button",onClick:e=>{n(r,e),e.stopPropagation()},className:P()(l,{["".concat(l,"-spaced")]:!a,["".concat(l,"-expanded")]:a&&o,["".concat(l,"-collapsed")]:a&&!o}),"aria-label":o?X.collapse:X.expand,"aria-expanded":o})},"nest"===ea&&void 0===eo.expandIconColumnIndex?eo.expandIconColumnIndex=y?1:0:eo.expandIconColumnIndex>0&&y&&(eo.expandIconColumnIndex-=1),"number"!=typeof eo.indentSize&&(eo.indentSize="number"==typeof R?R:15);let eI=i.useCallback(e=>eC(eM(ew(em(e)))),[em,ew,eM]);if(!1!==v&&(null==eO?void 0:eO.total)){let e;e=eO.size?eO.size:"small"===G||"middle"===G?"small":void 0;let t=t=>i.createElement(nC,Object.assign({},eO,{className:P()("".concat(Y,"-pagination ").concat(Y,"-pagination-").concat(t),eO.className),size:e})),n="rtl"===q?"left":"right",{position:r}=eO;if(null!==r&&Array.isArray(r)){let e=r.find(e=>e.includes("top")),i=r.find(e=>e.includes("bottom")),l=r.every(e=>"none"==="".concat(e));e||i||l||(a=t(n)),e&&(o=t(e.toLowerCase().replace("top",""))),i&&(a=t(i.toLowerCase().replace("bottom","")))}else a=t(n)}"boolean"==typeof O?c={spinning:O}:"object"==typeof O&&(c=Object.assign({spinning:!0},O));let eR=P()(er,ee,"".concat(Y,"-wrapper"),null==W?void 0:W.className,{["".concat(Y,"-wrapper-rtl")]:"rtl"===q},u,d,en),eT=Object.assign(Object.assign({},null==W?void 0:W.style),f),eA=_&&_.emptyText||(null==K?void 0:K("Table"))||i.createElement(tW.Z,{componentName:"Table"}),e_={},eD=i.useMemo(()=>{let{fontSize:e,lineHeight:t,padding:n,paddingXS:r,paddingSM:o}=J,i=Math.floor(e*t);switch(G){case"large":return 2*n+i;case"small":return 2*r+i;default:return 2*o+i}},[J,G]);return 
Z&&(e_.listItemHeight=eD),et(i.createElement("div",{ref:es,className:eR,style:eT},i.createElement(nO.Z,Object.assign({spinning:!1},c),o,i.createElement(Z?rJ:rQ,Object.assign({},e_,F,{ref:eu,columns:B,direction:q,expandable:eo,prefixCls:Y,className:P()({["".concat(Y,"-middle")]:"middle"===G,["".concat(Y,"-small")]:"small"===G,["".concat(Y,"-bordered")]:h,["".concat(Y,"-empty")]:0===$.length},er,ee,en),data:eP,rowKey:ed,rowClassName:(e,t,n)=>{let r;return r="function"==typeof x?P()(x(e,t,n)):P()(x),P()({["".concat(Y,"-row-selected")]:eN.has(ed(e,t))},r)},emptyText:eA,internalHooks:l,internalRefs:el,transformColumns:eI,getContainerWidth:(e,t)=>{let n=e.querySelector(".".concat(Y,"-container")),r=t;if(n){let e=getComputedStyle(n);r=t-parseInt(e.borderLeftWidth,10)-parseInt(e.borderRightWidth,10)}return r}})),a)))});let od=i.forwardRef((e,t)=>{let n=i.useRef(0);return n.current+=1,i.createElement(ou,Object.assign({},e,{ref:t,_renderTimes:n.current}))});od.SELECTION_COLUMN=t_,od.EXPAND_COLUMN=a,od.SELECTION_ALL=tD,od.SELECTION_INVERT=tZ,od.SELECTION_NONE=tL,od.Column=function(e){return null},od.ColumnGroup=function(e){return null},od.Summary=F;var of=od},93192:function(e,t,n){"use strict";n.d(t,{default:function(){return ef}});var r=n(2265),o=n(9738),i=n(23639),a=n(1119),l={icon:{tag:"svg",attrs:{viewBox:"64 64 896 896",focusable:"false"},children:[{tag:"path",attrs:{d:"M257.7 752c2 0 4-.2 6-.5L431.9 722c2-.4 3.9-1.3 5.3-2.8l423.9-423.9a9.96 9.96 0 000-14.1L694.9 114.9c-1.9-1.9-4.4-2.9-7.1-2.9s-5.2 1-7.1 2.9L256.8 538.8c-1.5 1.5-2.4 3.3-2.8 5.3l-29.5 168.2a33.5 33.5 0 009.4 29.8c6.6 6.4 14.9 9.9 23.8 9.9zm67.4-174.4L687.8 215l73.3 73.3-362.7 362.6-88.9 15.7 15.6-89zM880 836H144c-17.7 0-32 14.3-32 32v36c0 4.4 3.6 8 8 8h784c4.4 0 8-3.6 8-8v-36c0-17.7-14.3-32-32-32z"}}]},name:"edit",theme:"outlined"},c=n(55015),s=r.forwardRef(function(e,t){return 
r.createElement(c.Z,(0,a.Z)({},e,{ref:t,icon:l}))}),u=n(36760),d=n.n(u),f=n(49211),p=n.n(f),h=n(31474),m=n(45287),g=n(27380),v=n(50506),y=n(18694),b=n(28791),x=n(10281),w=n(95814),S=function(e,t){var n={};for(var r in e)Object.prototype.hasOwnProperty.call(e,r)&&0>t.indexOf(r)&&(n[r]=e[r]);if(null!=e&&"function"==typeof Object.getOwnPropertySymbols)for(var o=0,r=Object.getOwnPropertySymbols(e);ot.indexOf(r[o])&&Object.prototype.propertyIsEnumerable.call(e,r[o])&&(n[r[o]]=e[r[o]]);return n};let k={border:0,background:"transparent",padding:0,lineHeight:"inherit",display:"inline-block"},E=r.forwardRef((e,t)=>{let{style:n,noStyle:o,disabled:i}=e,a=S(e,["style","noStyle","disabled"]),l={};return o||(l=Object.assign({},k)),i&&(l.pointerEvents="none"),l=Object.assign(Object.assign({},l),n),r.createElement("div",Object.assign({role:"button",tabIndex:0,ref:t},a,{onKeyDown:e=>{let{keyCode:t}=e;t===w.Z.ENTER&&e.preventDefault()},onKeyUp:t=>{let{keyCode:n}=t,{onClick:r}=e;n===w.Z.ENTER&&r&&r()},style:l}))});var C=n(71744),O=n(55274),j=n(98074),P={icon:{tag:"svg",attrs:{viewBox:"64 64 896 896",focusable:"false"},children:[{tag:"path",attrs:{d:"M864 170h-60c-4.4 0-8 3.6-8 8v518H310v-73c0-6.7-7.8-10.5-13-6.3l-141.9 112a8 8 0 000 12.6l141.9 112c5.3 4.2 13 .4 13-6.3v-75h498c35.3 0 64-28.7 64-64V178c0-4.4-3.6-8-8-8z"}}]},name:"enter",theme:"outlined"},M=r.forwardRef(function(e,t){return r.createElement(c.Z,(0,a.Z)({},e,{ref:t,icon:P}))}),N=n(19722),I=n(90464),R=n(76122),T=n(80669),A=n(31373),_=n(352);let D=(e,t,n,r)=>{let{titleMarginBottom:o,fontWeightStrong:i}=r;return{marginBottom:o,color:n,fontWeight:i,fontSize:e,lineHeight:t}},Z=e=>{let t={};return[1,2,3,4,5].forEach(n=>{t["\n h".concat(n,"&,\n div&-h").concat(n,",\n div&-h").concat(n," > textarea,\n h").concat(n,"\n ")]=D(e["fontSizeHeading".concat(n)],e["lineHeightHeading".concat(n)],e.colorTextHeading,e)}),t},L=e=>{let{componentCls:t}=e;return{"a&, 
a":Object.assign(Object.assign({},(0,R.N)(e)),{textDecoration:e.linkDecoration,"&:active, &:hover":{textDecoration:e.linkHoverDecoration},["&[disabled], &".concat(t,"-disabled")]:{color:e.colorTextDisabled,cursor:"not-allowed","&:active, &:hover":{color:e.colorTextDisabled},"&:active":{pointerEvents:"none"}}})}},z=e=>({code:{margin:"0 0.2em",paddingInline:"0.4em",paddingBlock:"0.2em 0.1em",fontSize:"85%",fontFamily:e.fontFamilyCode,background:"rgba(150, 150, 150, 0.1)",border:"1px solid rgba(100, 100, 100, 0.2)",borderRadius:3},kbd:{margin:"0 0.2em",paddingInline:"0.4em",paddingBlock:"0.15em 0.1em",fontSize:"90%",fontFamily:e.fontFamilyCode,background:"rgba(150, 150, 150, 0.06)",border:"1px solid rgba(100, 100, 100, 0.2)",borderBottomWidth:2,borderRadius:3},mark:{padding:0,backgroundColor:A.EV[2]},"u, ins":{textDecoration:"underline",textDecorationSkipInk:"auto"},"s, del":{textDecoration:"line-through"},strong:{fontWeight:600},"ul, ol":{marginInline:0,marginBlock:"0 1em",padding:0,li:{marginInline:"20px 0",marginBlock:0,paddingInline:"4px 0",paddingBlock:0}},ul:{listStyleType:"circle",ul:{listStyleType:"disc"}},ol:{listStyleType:"decimal"},"pre, blockquote":{margin:"1em 0"},pre:{padding:"0.4em 0.6em",whiteSpace:"pre-wrap",wordWrap:"break-word",background:"rgba(150, 150, 150, 0.1)",border:"1px solid rgba(100, 100, 100, 0.2)",borderRadius:3,fontFamily:e.fontFamilyCode,code:{display:"inline",margin:0,padding:0,fontSize:"inherit",fontFamily:"inherit",background:"transparent",border:0}},blockquote:{paddingInline:"0.6em 0",paddingBlock:0,borderInlineStart:"4px solid rgba(100, 100, 100, 0.2)",opacity:.85}}),B=e=>{let{componentCls:t,paddingSM:n}=e;return{"&-edit-content":{position:"relative","div&":{insetInlineStart:e.calc(e.paddingSM).mul(-1).equal(),marginTop:e.calc(n).mul(-1).equal(),marginBottom:"calc(1em - 
".concat((0,_.bf)(n),")")},["".concat(t,"-edit-content-confirm")]:{position:"absolute",insetInlineEnd:e.calc(e.marginXS).add(2).equal(),insetBlockEnd:e.marginXS,color:e.colorTextDescription,fontWeight:"normal",fontSize:e.fontSize,fontStyle:"normal",pointerEvents:"none"},textarea:{margin:"0!important",MozTransition:"none",height:"1em"}}}},F=e=>({["".concat(e.componentCls,"-copy-success")]:{"\n &,\n &:hover,\n &:focus":{color:e.colorSuccess}},["".concat(e.componentCls,"-copy-icon-only")]:{marginInlineStart:0}}),H=()=>({"\n a&-ellipsis,\n span&-ellipsis\n ":{display:"inline-block",maxWidth:"100%"},"&-single-line":{whiteSpace:"nowrap"},"&-ellipsis-single-line":{overflow:"hidden",textOverflow:"ellipsis","a&, span&":{verticalAlign:"bottom"},"> code":{paddingBlock:0,maxWidth:"calc(100% - 1.2em)",display:"inline-block",overflow:"hidden",textOverflow:"ellipsis",verticalAlign:"bottom",boxSizing:"content-box"}},"&-ellipsis-multiple-line":{display:"-webkit-box",overflow:"hidden",WebkitLineClamp:3,WebkitBoxOrient:"vertical"}}),q=e=>{let{componentCls:t,titleMarginTop:n}=e;return{[t]:Object.assign(Object.assign(Object.assign(Object.assign(Object.assign(Object.assign(Object.assign(Object.assign(Object.assign({color:e.colorText,wordBreak:"break-word",lineHeight:e.lineHeight,["&".concat(t,"-secondary")]:{color:e.colorTextDescription},["&".concat(t,"-success")]:{color:e.colorSuccess},["&".concat(t,"-warning")]:{color:e.colorWarning},["&".concat(t,"-danger")]:{color:e.colorError,"a&:active, a&:focus":{color:e.colorErrorActive},"a&:hover":{color:e.colorErrorHover}},["&".concat(t,"-disabled")]:{color:e.colorTextDisabled,cursor:"not-allowed",userSelect:"none"},"\n div&,\n p\n ":{marginBottom:"1em"}},Z(e)),{["\n & + h1".concat(t,",\n & + h2").concat(t,",\n & + h3").concat(t,",\n & + h4").concat(t,",\n & + h5").concat(t,"\n ")]:{marginTop:n},"\n div,\n ul,\n li,\n p,\n h1,\n h2,\n h3,\n h4,\n h5":{"\n + h1,\n + h2,\n + h3,\n + h4,\n + h5\n ":{marginTop:n}}}),z(e)),L(e)),{["\n 
".concat(t,"-expand,\n ").concat(t,"-edit,\n ").concat(t,"-copy\n ")]:Object.assign(Object.assign({},(0,R.N)(e)),{marginInlineStart:e.marginXXS})}),B(e)),F(e)),H()),{"&-rtl":{direction:"rtl"}})}};var W=(0,T.I$)("Typography",e=>[q(e)],()=>({titleMarginTop:"1.2em",titleMarginBottom:"0.5em"})),K=e=>{let{prefixCls:t,"aria-label":n,className:o,style:i,direction:a,maxLength:l,autoSize:c=!0,value:s,onSave:u,onCancel:f,onEnd:p,component:h,enterIcon:m=r.createElement(M,null)}=e,g=r.useRef(null),v=r.useRef(!1),y=r.useRef(),[b,x]=r.useState(s);r.useEffect(()=>{x(s)},[s]),r.useEffect(()=>{if(g.current&&g.current.resizableTextArea){let{textArea:e}=g.current.resizableTextArea;e.focus();let{length:t}=e.value;e.setSelectionRange(t,t)}},[]);let S=()=>{u(b.trim())},k=h?"".concat(t,"-").concat(h):"",[E,C,O]=W(t),j=d()(t,"".concat(t,"-edit-content"),{["".concat(t,"-rtl")]:"rtl"===a},o,k,C,O);return E(r.createElement("div",{className:j,style:i},r.createElement(I.Z,{ref:g,maxLength:l,value:b,onChange:e=>{let{target:t}=e;x(t.value.replace(/[\n\r]/g,""))},onKeyDown:e=>{let{keyCode:t}=e;v.current||(y.current=t)},onKeyUp:e=>{let{keyCode:t,ctrlKey:n,altKey:r,metaKey:o,shiftKey:i}=e;y.current!==t||v.current||n||r||o||i||(t===w.Z.ENTER?(S(),null==p||p()):t===w.Z.ESC&&f())},onCompositionStart:()=>{v.current=!0},onCompositionEnd:()=>{v.current=!1},onBlur:()=>{S()},"aria-label":n,rows:1,autoSize:c}),null!==m?(0,N.Tm)(m,{className:"".concat(t,"-edit-content-confirm")}):null))};function U(e,t){return r.useMemo(()=>{let n=!!e;return[n,Object.assign(Object.assign({},t),n&&"object"==typeof e?e:null)]},[e])}var V=(e,t)=>{let n=r.useRef(!1);r.useEffect(()=>{n.current?e():n.current=!0},t)},G=function(e,t){var n={};for(var r in e)Object.prototype.hasOwnProperty.call(e,r)&&0>t.indexOf(r)&&(n[r]=e[r]);if(null!=e&&"function"==typeof Object.getOwnPropertySymbols)for(var o=0,r=Object.getOwnPropertySymbols(e);ot.indexOf(r[o])&&Object.prototype.propertyIsEnumerable.call(e,r[o])&&(n[r[o]]=e[r[o]]);return n};let 
X=r.forwardRef((e,t)=>{let{prefixCls:n,component:o="article",className:i,rootClassName:a,setContentRef:l,children:c,direction:s,style:u}=e,f=G(e,["prefixCls","component","className","rootClassName","setContentRef","children","direction","style"]),{getPrefixCls:p,direction:h,typography:m}=r.useContext(C.E_),g=t;l&&(g=(0,b.sQ)(t,l));let v=p("typography",n),[y,x,w]=W(v),S=d()(v,null==m?void 0:m.className,{["".concat(v,"-rtl")]:"rtl"===(null!=s?s:h)},i,a,x,w),k=Object.assign(Object.assign({},null==m?void 0:m.style),u);return y(r.createElement(o,Object.assign({className:S,style:k,ref:g},f),c))});function $(e){let t=typeof e;return"string"===t||"number"===t}function Y(e,t){let n=0,r=[];for(let o=0;ot){let e=t-n;return r.push(String(i).slice(0,e)),r}r.push(i),n=a}return e}var Q=e=>{let{enabledMeasure:t,children:n,text:o,width:i,fontSize:a,rows:l,onEllipsis:c}=e,[[s,u,d],f]=r.useState([0,0,0]),[p,h]=r.useState(0),[v,y]=r.useState(0),[b,x]=r.useState(0),w=r.useRef(null),S=r.useRef(null),k=r.useMemo(()=>(0,m.Z)(o),[o]),E=r.useMemo(()=>{let e;return e=0,k.forEach(t=>{$(t)?e+=String(t).length:e+=1}),e},[k]),C=r.useMemo(()=>t&&3===v?n(Y(k,u),u{t&&i&&a&&E&&(y(1),f([0,Math.ceil(E/2),E]))},[t,i,a,o,E,l]),(0,g.Z)(()=>{var e;1===v&&x((null===(e=w.current)||void 0===e?void 0:e.offsetHeight)||0)},[v]),(0,g.Z)(()=>{var e,t;if(b){if(1===v)((null===(e=S.current)||void 0===e?void 0:e.offsetHeight)||0)<=l*b?(y(4),c(!1)):y(2);else if(2===v){if(s!==d){let e=(null===(t=S.current)||void 0===t?void 0:t.offsetHeight)||0,n=l*b,r=s,o=d;s===d-1?o=s:e<=n?r=u:o=u;let i=Math.ceil((r+o)/2);f([r,i,o])}else y(3),h(u),c(!0)}}},[v,s,d,l,b]);let O=(e,t,n)=>r.createElement("span",{"aria-hidden":!0,ref:t,style:Object.assign({position:"fixed",display:"block",left:0,top:0,zIndex:-9999,visibility:"hidden",pointerEvents:"none",fontSize:2*Math.ceil(a/2)},n)},e);return 
r.createElement(r.Fragment,null,C,t&&3!==v&&4!==v&&r.createElement(r.Fragment,null,O("lg",w,{wordBreak:"keep-all",whiteSpace:"nowrap"}),O(1===v?n(k,!1):n(Y(k,u),!0),S,{width:i,whiteSpace:"normal",margin:0,padding:0})))},J=e=>{let{enabledEllipsis:t,isEllipsis:n,children:o,tooltipProps:i}=e;return(null==i?void 0:i.title)&&t?r.createElement(j.Z,Object.assign({open:!!n&&void 0},i),o):o},ee=function(e,t){var n={};for(var r in e)Object.prototype.hasOwnProperty.call(e,r)&&0>t.indexOf(r)&&(n[r]=e[r]);if(null!=e&&"function"==typeof Object.getOwnPropertySymbols)for(var o=0,r=Object.getOwnPropertySymbols(e);ot.indexOf(r[o])&&Object.prototype.propertyIsEnumerable.call(e,r[o])&&(n[r[o]]=e[r[o]]);return n};function et(e,t,n){return!0===e||void 0===e?t:e||n&&t}function en(e){return!1===e?[!1,!1]:Array.isArray(e)?e:[e]}let er=r.forwardRef((e,t)=>{var n,a,l;let{prefixCls:c,className:u,style:f,type:w,disabled:S,children:k,ellipsis:P,editable:M,copyable:N,component:I,title:R}=e,T=ee(e,["prefixCls","className","style","type","disabled","children","ellipsis","editable","copyable","component","title"]),{getPrefixCls:A,direction:_}=r.useContext(C.E_),[D]=(0,O.Z)("Text"),Z=r.useRef(null),L=r.useRef(null),z=A("typography",c),B=(0,y.Z)(T,["mark","code","delete","underline","strong","keyboard","italic"]),[F,H]=U(M),[q,W]=(0,v.Z)(!1,{value:H.editing}),{triggerType:G=["icon"]}=H,$=e=>{var t;e&&(null===(t=H.onStart)||void 0===t||t.call(H)),W(e)};V(()=>{var e;q||null===(e=L.current)||void 0===e||e.focus()},[q]);let Y=e=>{null==e||e.preventDefault(),$(!0)},[er,eo]=U(N),[ei,ea]=r.useState(!1),el=r.useRef(null),ec={};eo.format&&(ec.format=eo.format);let es=()=>{el.current&&clearTimeout(el.current)},eu=e=>{var t;null==e||e.preventDefault(),null==e||e.stopPropagation(),p()(eo.text||String(k)||"",ec),ea(!0),es(),el.current=setTimeout(()=>{ea(!1)},3e3),null===(t=eo.onCopy)||void 
0===t||t.call(eo,e)};r.useEffect(()=>es,[]);let[ed,ef]=r.useState(!1),[ep,eh]=r.useState(!1),[em,eg]=r.useState(!1),[ev,ey]=r.useState(!1),[eb,ex]=r.useState(!1),[ew,eS]=r.useState(!0),[ek,eE]=U(P,{expandable:!1}),eC=ek&&!em,{rows:eO=1}=eE,ej=r.useMemo(()=>!eC||void 0!==eE.suffix||eE.onEllipsis||eE.expandable||F||er,[eC,eE,F,er]);(0,g.Z)(()=>{ek&&!ej&&(ef((0,x.G)("webkitLineClamp")),eh((0,x.G)("textOverflow")))},[ej,ek]);let eP=r.useMemo(()=>!ej&&(1===eO?ep:ed),[ej,ep,ed]),eM=eC&&(eP?eb:ev),eN=eC&&1===eO&&eP,eI=eC&&eO>1&&eP,eR=e=>{var t;eg(!0),null===(t=eE.onExpand)||void 0===t||t.call(eE,e)},[eT,eA]=r.useState(0),[e_,eD]=r.useState(0),eZ=e=>{var t;ey(e),ev!==e&&(null===(t=eE.onEllipsis)||void 0===t||t.call(eE,e))};r.useEffect(()=>{let e=Z.current;if(ek&&eP&&e){let t=eI?e.offsetHeight{let e=Z.current;if("undefined"==typeof IntersectionObserver||!e||!eP||!eC)return;let t=new IntersectionObserver(()=>{eS(!!e.offsetParent)});return t.observe(e),()=>{t.disconnect()}},[eP,eC]);let eL={};eL=!0===eE.tooltip?{title:null!==(n=H.text)&&void 0!==n?n:k}:r.isValidElement(eE.tooltip)?{title:eE.tooltip}:"object"==typeof eE.tooltip?Object.assign({title:null!==(a=H.text)&&void 0!==a?a:k},eE.tooltip):{title:eE.tooltip};let ez=r.useMemo(()=>{let e=e=>["string","number"].includes(typeof e);return!ek||eP?void 0:e(H.text)?H.text:e(k)?k:e(R)?R:e(eL.title)?eL.title:void 0},[ek,eP,R,eL.title,eM]);if(q)return r.createElement(K,{value:null!==(l=H.text)&&void 0!==l?l:"string"==typeof k?k:"",onSave:e=>{var t;null===(t=H.onChange)||void 0===t||t.call(H,e),$(!1)},onCancel:()=>{var e;null===(e=H.onCancel)||void 0===e||e.call(H),$(!1)},onEnd:H.onEnd,prefixCls:z,className:u,style:f,direction:_,component:I,maxLength:H.maxLength,autoSize:H.autoSize,enterIcon:H.enterIcon});let eB=()=>{let e;let{expandable:t,symbol:n}=eE;return t?(e=n||(null==D?void 0:D.expand),r.createElement("a",{key:"expand",className:"".concat(z,"-expand"),onClick:eR,"aria-label":null==D?void 
0:D.expand},e)):null},eF=()=>{if(!F)return;let{icon:e,tooltip:t}=H,n=(0,m.Z)(t)[0]||(null==D?void 0:D.edit),o="string"==typeof n?n:"";return G.includes("icon")?r.createElement(j.Z,{key:"edit",title:!1===t?"":n},r.createElement(E,{ref:L,className:"".concat(z,"-edit"),onClick:Y,"aria-label":o},e||r.createElement(s,{role:"button"}))):null},eH=()=>{if(!er)return null;let{tooltips:e,icon:t}=eo,n=en(e),a=en(t),l=ei?et(n[1],null==D?void 0:D.copied):et(n[0],null==D?void 0:D.copy),c=ei?null==D?void 0:D.copied:null==D?void 0:D.copy,s="string"==typeof l?l:c;return r.createElement(j.Z,{key:"copy",title:l},r.createElement(E,{className:d()("".concat(z,"-copy"),{["".concat(z,"-copy-success")]:ei,["".concat(z,"-copy-icon-only")]:null==k}),onClick:eu,"aria-label":s},ei?et(a[1],r.createElement(o.Z,null),!0):et(a[0],r.createElement(i.Z,null),!0)))},eq=e=>[e&&eB(),eF(),eH()],eW=e=>[e&&r.createElement("span",{"aria-hidden":!0,key:"ellipsis"},"..."),eE.suffix,eq(e)];return r.createElement(h.Z,{onResize:(e,t)=>{var n;let{offsetWidth:r}=e;eA(r),eD(parseInt(null===(n=window.getComputedStyle)||void 0===n?void 0:n.call(window,t).fontSize,10)||0)},disabled:!eC},n=>r.createElement(J,{tooltipProps:eL,enabledEllipsis:eC,isEllipsis:eM},r.createElement(X,Object.assign({className:d()({["".concat(z,"-").concat(w)]:w,["".concat(z,"-disabled")]:S,["".concat(z,"-ellipsis")]:ek,["".concat(z,"-single-line")]:eC&&1===eO,["".concat(z,"-ellipsis-single-line")]:eN,["".concat(z,"-ellipsis-multiple-line")]:eI},u),prefixCls:c,style:Object.assign(Object.assign({},f),{WebkitLineClamp:eI?eO:void 0}),component:I,ref:(0,b.sQ)(n,Z,t),direction:_,onClick:G.includes("text")?Y:void 0,"aria-label":null==ez?void 0:ez.toString(),title:R},B),r.createElement(Q,{enabledMeasure:eC&&!eP,text:k,rows:eO,width:eT,fontSize:e_,onEllipsis:eZ},(t,n)=>{let o=t;return 
t.length&&n&&ez&&(o=r.createElement("span",{key:"show-content","aria-hidden":!0},o)),function(e,t){let{mark:n,code:o,underline:i,delete:a,strong:l,keyboard:c,italic:s}=e,u=t;function d(e,t){t&&(u=r.createElement(e,{},u))}return d("strong",l),d("u",i),d("del",a),d("code",o),d("mark",n),d("kbd",c),d("i",s),u}(e,r.createElement(r.Fragment,null,o,eW(n)))}))))});var eo=function(e,t){var n={};for(var r in e)Object.prototype.hasOwnProperty.call(e,r)&&0>t.indexOf(r)&&(n[r]=e[r]);if(null!=e&&"function"==typeof Object.getOwnPropertySymbols)for(var o=0,r=Object.getOwnPropertySymbols(e);ot.indexOf(r[o])&&Object.prototype.propertyIsEnumerable.call(e,r[o])&&(n[r[o]]=e[r[o]]);return n};let ei=r.forwardRef((e,t)=>{var{ellipsis:n,rel:o}=e,i=eo(e,["ellipsis","rel"]);let a=Object.assign(Object.assign({},i),{rel:void 0===o&&"_blank"===i.target?"noopener noreferrer":o});return delete a.navigate,r.createElement(er,Object.assign({},a,{ref:t,ellipsis:!!n,component:"a"}))}),ea=r.forwardRef((e,t)=>r.createElement(er,Object.assign({ref:t},e,{component:"div"})));var el=function(e,t){var n={};for(var r in e)Object.prototype.hasOwnProperty.call(e,r)&&0>t.indexOf(r)&&(n[r]=e[r]);if(null!=e&&"function"==typeof Object.getOwnPropertySymbols)for(var o=0,r=Object.getOwnPropertySymbols(e);ot.indexOf(r[o])&&Object.prototype.propertyIsEnumerable.call(e,r[o])&&(n[r[o]]=e[r[o]]);return n},ec=r.forwardRef((e,t)=>{var{ellipsis:n}=e,o=el(e,["ellipsis"]);let i=r.useMemo(()=>n&&"object"==typeof n?(0,y.Z)(n,["expandable","rows"]):n,[n]);return r.createElement(er,Object.assign({ref:t},o,{ellipsis:i,component:"span"}))}),es=function(e,t){var n={};for(var r in e)Object.prototype.hasOwnProperty.call(e,r)&&0>t.indexOf(r)&&(n[r]=e[r]);if(null!=e&&"function"==typeof Object.getOwnPropertySymbols)for(var o=0,r=Object.getOwnPropertySymbols(e);ot.indexOf(r[o])&&Object.prototype.propertyIsEnumerable.call(e,r[o])&&(n[r[o]]=e[r[o]]);return n};let eu=[1,2,3,4,5],ed=r.forwardRef((e,t)=>{let 
n;let{level:o=1}=e,i=es(e,["level"]);return n=eu.includes(o)?"h".concat(o):"h1",r.createElement(er,Object.assign({ref:t},i,{component:n}))});X.Text=ec,X.Link=ei,X.Title=ed,X.Paragraph=ea;var ef=X},30967:function(e,t,n){"use strict";n.d(t,{Z:function(){return ts}});var r=n(2265),o=n(83145),i=n(54887),a=n(36760),l=n.n(a),c=n(1119),s=n(76405),u=n(25049),d=n(63496),f=n(15354),p=n(15900),h=n(11993),m=n(31686),g=n(6989),v=n(73129),y=n(41154),b=n(54580),x=n(18242),w=n(32559),S=function(e,t){if(e&&t){var n=Array.isArray(t)?t:t.split(","),r=e.name||"",o=e.type||"",i=o.replace(/\/.*$/,"");return n.some(function(e){var t=e.trim();if(/^\*(\/\*)?$/.test(e))return!0;if("."===t.charAt(0)){var n=r.toLowerCase(),a=t.toLowerCase(),l=[a];return(".jpg"===a||".jpeg"===a)&&(l=[".jpg",".jpeg"]),l.some(function(e){return n.endsWith(e)})}return/\/\*$/.test(t)?i===t.replace(/\/.*$/,""):o===t||!!/^\w+$/.test(t)&&((0,w.ZP)(!1,"Upload takes an invalidate 'accept' type '".concat(t,"'.Skip for check.")),!0)})}return!0};function k(e){var t=e.responseText||e.response;if(!t)return t;try{return JSON.parse(t)}catch(e){return t}}var E=function(e,t,n){var r=function e(r,o){if(r){if(r.path=o||"",r.isFile)r.file(function(e){n(e)&&(r.fullPath&&!e.webkitRelativePath&&(Object.defineProperties(e,{webkitRelativePath:{writable:!0}}),e.webkitRelativePath=r.fullPath.replace(/^\//,""),Object.defineProperties(e,{webkitRelativePath:{writable:!1}})),t([e]))});else if(r.isDirectory){var i,a,l;i=function(t){t.forEach(function(t){e(t,"".concat(o).concat(r.name,"/"))})},a=r.createReader(),l=[],function e(){a.readEntries(function(t){var n=Array.prototype.slice.apply(t);l=l.concat(n),n.length?e():i(l)})}()}}};e.forEach(function(e){r(e.webkitGetAsEntry())})},C=+new Date,O=0;function j(){return"rc-upload-".concat(C,"-").concat(++O)}var 
P=["component","prefixCls","className","classNames","disabled","id","style","styles","multiple","accept","capture","children","directory","openFileDialogOnClick","onMouseEnter","onMouseLeave","hasControlInside"],M=function(e){(0,f.Z)(n,e);var t=(0,p.Z)(n);function n(){(0,s.Z)(this,n);for(var e,r,i=arguments.length,a=Array(i),l=0;l0&&(t.percent=t.loaded/t.total*100),e.onProgress(t)});var n=new FormData;e.data&&Object.keys(e.data).forEach(function(t){var r=e.data[t];if(Array.isArray(r)){r.forEach(function(e){n.append("".concat(t,"[]"),e)});return}n.append(t,r)}),e.file instanceof Blob?n.append(e.filename,e.file,e.file.name):n.append(e.filename,e.file),t.onerror=function(t){e.onError(t)},t.onload=function(){if(t.status<200||t.status>=300){var n;return e.onError(((n=Error("cannot ".concat(e.method," ").concat(e.action," ").concat(t.status,"'"))).status=t.status,n.method=e.method,n.url=e.action,n),k(t))}return e.onSuccess(k(t),t)},t.open(e.method,e.action,!0),e.withCredentials&&"withCredentials"in t&&(t.withCredentials=!0);var r=e.headers||{};return null!==r["X-Requested-With"]&&t.setRequestHeader("X-Requested-With","XMLHttpRequest"),Object.keys(r).forEach(function(e){null!==r[e]&&t.setRequestHeader(e,r[e])}),t.send(n),{abort:function(){t.abort()}}})({action:o,filename:s,data:n,file:i,headers:u,withCredentials:d,method:f||"post",onProgress:function(e){var n=t.props.onProgress;null==n||n(e,i)},onSuccess:function(e,n){var r=t.props.onSuccess;null==r||r(e,i,n),delete t.reqs[p]},onError:function(e,n){var r=t.props.onError;null==r||r(e,n,i),delete t.reqs[p]}})}}},{key:"reset",value:function(){this.setState({uid:j()})}},{key:"abort",value:function(e){var t=this.reqs;if(e){var n=e.uid?e.uid:e;t[n]&&t[n].abort&&t[n].abort(),delete t[n]}else Object.keys(t).forEach(function(e){t[e]&&t[e].abort&&t[e].abort(),delete t[e]})}},{key:"render",value:function(){var 
e,t=this.props,n=t.component,o=t.prefixCls,i=t.className,a=t.classNames,s=t.disabled,u=t.id,d=t.style,f=t.styles,p=t.multiple,v=t.accept,y=t.capture,b=t.children,w=t.directory,S=t.openFileDialogOnClick,k=t.onMouseEnter,E=t.onMouseLeave,C=t.hasControlInside,O=(0,g.Z)(t,P),j=l()((e={},(0,h.Z)(e,o,!0),(0,h.Z)(e,"".concat(o,"-disabled"),s),(0,h.Z)(e,i,i),e)),M=s?{}:{onClick:S?this.onClick:function(){},onKeyDown:S?this.onKeyDown:function(){},onMouseEnter:k,onMouseLeave:E,onDrop:this.onFileDrop,onDragOver:this.onFileDrop,tabIndex:C?void 0:"0"};return r.createElement(n,(0,c.Z)({},M,{className:j,role:C?void 0:"button",style:d}),r.createElement("input",(0,c.Z)({},(0,x.Z)(O,{aria:!0,data:!0}),{id:u,disabled:s,type:"file",ref:this.saveFileInput,onClick:function(e){return e.stopPropagation()},key:this.state.uid,style:(0,m.Z)({display:"none"},(void 0===f?{}:f).input),className:(void 0===a?{}:a).input,accept:v},w?{directory:"directory",webkitdirectory:"webkitdirectory"}:{},{multiple:p,onChange:this.onChange},null!=y?{capture:y}:{})),b)}}]),n}(r.Component);function N(){}var I=function(e){(0,f.Z)(n,e);var t=(0,p.Z)(n);function n(){var e;(0,s.Z)(this,n);for(var r=arguments.length,o=Array(r),i=0;i{let{componentCls:t,iconCls:n}=e;return{["".concat(t,"-wrapper")]:{["".concat(t,"-drag")]:{position:"relative",width:"100%",height:"100%",textAlign:"center",background:e.colorFillAlter,border:"".concat((0,F.bf)(e.lineWidth)," dashed ").concat(e.colorBorder),borderRadius:e.borderRadiusLG,cursor:"pointer",transition:"border-color ".concat(e.motionDurationSlow),[t]:{padding:e.padding},["".concat(t,"-btn")]:{display:"table",width:"100%",height:"100%",outline:"none",borderRadius:e.borderRadiusLG,"&:focus-visible":{outline:"".concat((0,F.bf)(e.lineWidthFocus)," solid ").concat(e.colorPrimaryBorder)}},["".concat(t,"-drag-container")]:{display:"table-cell",verticalAlign:"middle"},["\n &:not(".concat(t,"-disabled):hover,\n &-hover:not(").concat(t,"-disabled)\n 
")]:{borderColor:e.colorPrimaryHover},["p".concat(t,"-drag-icon")]:{marginBottom:e.margin,[n]:{color:e.colorPrimary,fontSize:e.uploadThumbnailSize}},["p".concat(t,"-text")]:{margin:"0 0 ".concat((0,F.bf)(e.marginXXS)),color:e.colorTextHeading,fontSize:e.fontSizeLG},["p".concat(t,"-hint")]:{color:e.colorTextDescription,fontSize:e.fontSize},["&".concat(t,"-disabled")]:{["p".concat(t,"-drag-icon ").concat(n,",\n p").concat(t,"-text,\n p").concat(t,"-hint\n ")]:{color:e.colorTextDisabled}}}}}},q=e=>{let{componentCls:t,antCls:n,iconCls:r,fontSize:o,lineHeight:i,calc:a}=e,l="".concat(t,"-list-item"),c="".concat(l,"-actions"),s="".concat(l,"-action"),u=e.fontHeightSM;return{["".concat(t,"-wrapper")]:{["".concat(t,"-list")]:Object.assign(Object.assign({},(0,Z.dF)()),{lineHeight:e.lineHeight,[l]:{position:"relative",height:a(e.lineHeight).mul(o).equal(),marginTop:e.marginXS,fontSize:o,display:"flex",alignItems:"center",transition:"background-color ".concat(e.motionDurationSlow),"&:hover":{backgroundColor:e.controlItemBgHover},["".concat(l,"-name")]:Object.assign(Object.assign({},Z.vS),{padding:"0 ".concat((0,F.bf)(e.paddingXS)),lineHeight:i,flex:"auto",transition:"all ".concat(e.motionDurationSlow)}),[c]:{[s]:{opacity:0},[r]:{color:e.actionsColor,transition:"all ".concat(e.motionDurationSlow)},["\n ".concat(s,":focus-visible,\n &.picture ").concat(s,"\n ")]:{opacity:1},["".concat(s).concat(n,"-btn")]:{height:u,border:0,lineHeight:1}},["".concat(t,"-icon ").concat(r)]:{color:e.colorTextDescription,fontSize:o},["".concat(l,"-progress")]:{position:"absolute",bottom:e.calc(e.uploadProgressOffset).mul(-1).equal(),width:"100%",paddingInlineStart:a(o).add(e.paddingXS).equal(),fontSize:o,lineHeight:0,pointerEvents:"none","> div":{margin:0}}},["".concat(l,":hover ").concat(s)]:{opacity:1},["".concat(l,"-error")]:{color:e.colorError,["".concat(l,"-name, ").concat(t,"-icon ").concat(r)]:{color:e.colorError},[c]:{["".concat(r,", 
").concat(r,":hover")]:{color:e.colorError},[s]:{opacity:1}}},["".concat(t,"-list-item-container")]:{transition:"opacity ".concat(e.motionDurationSlow,", height ").concat(e.motionDurationSlow),"&::before":{display:"table",width:0,height:0,content:'""'}}})}}},W=n(11699);let K=new F.E4("uploadAnimateInlineIn",{from:{width:0,height:0,margin:0,padding:0,opacity:0}}),U=new F.E4("uploadAnimateInlineOut",{to:{width:0,height:0,margin:0,padding:0,opacity:0}});var V=e=>{let{componentCls:t}=e,n="".concat(t,"-animate-inline");return[{["".concat(t,"-wrapper")]:{["".concat(n,"-appear, ").concat(n,"-enter, ").concat(n,"-leave")]:{animationDuration:e.motionDurationSlow,animationTimingFunction:e.motionEaseInOutCirc,animationFillMode:"forwards"},["".concat(n,"-appear, ").concat(n,"-enter")]:{animationName:K},["".concat(n,"-leave")]:{animationName:U}}},{["".concat(t,"-wrapper")]:(0,W.J$)(e)},K,U]},G=n(31373);let X=e=>{let{componentCls:t,iconCls:n,uploadThumbnailSize:r,uploadProgressOffset:o,calc:i}=e,a="".concat(t,"-list"),l="".concat(a,"-item");return{["".concat(t,"-wrapper")]:{["\n ".concat(a).concat(a,"-picture,\n ").concat(a).concat(a,"-picture-card,\n ").concat(a).concat(a,"-picture-circle\n ")]:{[l]:{position:"relative",height:i(r).add(i(e.lineWidth).mul(2)).add(i(e.paddingXS).mul(2)).equal(),padding:e.paddingXS,border:"".concat((0,F.bf)(e.lineWidth)," ").concat(e.lineType," ").concat(e.colorBorder),borderRadius:e.borderRadiusLG,"&:hover":{background:"transparent"},["".concat(l,"-thumbnail")]:Object.assign(Object.assign({},Z.vS),{width:r,height:r,lineHeight:(0,F.bf)(i(r).add(e.paddingSM).equal()),textAlign:"center",flex:"none",[n]:{fontSize:e.fontSizeHeading2,color:e.colorPrimary},img:{display:"block",width:"100%",height:"100%",overflow:"hidden"}}),["".concat(l,"-progress")]:{bottom:o,width:"calc(100% - 
".concat((0,F.bf)(i(e.paddingSM).mul(2).equal()),")"),marginTop:0,paddingInlineStart:i(r).add(e.paddingXS).equal()}},["".concat(l,"-error")]:{borderColor:e.colorError,["".concat(l,"-thumbnail ").concat(n)]:{["svg path[fill='".concat(G.iN[0],"']")]:{fill:e.colorErrorBg},["svg path[fill='".concat(G.iN.primary,"']")]:{fill:e.colorError}}},["".concat(l,"-uploading")]:{borderStyle:"dashed",["".concat(l,"-name")]:{marginBottom:o}}},["".concat(a).concat(a,"-picture-circle ").concat(l)]:{["&, &::before, ".concat(l,"-thumbnail")]:{borderRadius:"50%"}}}}},$=e=>{let{componentCls:t,iconCls:n,fontSizeLG:r,colorTextLightSolid:o,calc:i}=e,a="".concat(t,"-list"),l="".concat(a,"-item"),c=e.uploadPicCardSize;return{["\n ".concat(t,"-wrapper").concat(t,"-picture-card-wrapper,\n ").concat(t,"-wrapper").concat(t,"-picture-circle-wrapper\n ")]:Object.assign(Object.assign({},(0,Z.dF)()),{display:"inline-block",width:"100%",["".concat(t).concat(t,"-select")]:{width:c,height:c,marginInlineEnd:e.marginXS,marginBottom:e.marginXS,textAlign:"center",verticalAlign:"top",backgroundColor:e.colorFillAlter,border:"".concat((0,F.bf)(e.lineWidth)," dashed ").concat(e.colorBorder),borderRadius:e.borderRadiusLG,cursor:"pointer",transition:"border-color ".concat(e.motionDurationSlow),["> ".concat(t)]:{display:"flex",alignItems:"center",justifyContent:"center",height:"100%",textAlign:"center"},["&:not(".concat(t,"-disabled):hover")]:{borderColor:e.colorPrimary}},["".concat(a).concat(a,"-picture-card, ").concat(a).concat(a,"-picture-circle")]:{["".concat(a,"-item-container")]:{display:"inline-block",width:c,height:c,marginBlock:"0 ".concat((0,F.bf)(e.marginXS)),marginInline:"0 ".concat((0,F.bf)(e.marginXS)),verticalAlign:"top"},"&::after":{display:"none"},[l]:{height:"100%",margin:0,"&::before":{position:"absolute",zIndex:1,width:"calc(100% - ".concat((0,F.bf)(i(e.paddingXS).mul(2).equal()),")"),height:"calc(100% - 
".concat((0,F.bf)(i(e.paddingXS).mul(2).equal()),")"),backgroundColor:e.colorBgMask,opacity:0,transition:"all ".concat(e.motionDurationSlow),content:'" "'}},["".concat(l,":hover")]:{["&::before, ".concat(l,"-actions")]:{opacity:1}},["".concat(l,"-actions")]:{position:"absolute",insetInlineStart:0,zIndex:10,width:"100%",whiteSpace:"nowrap",textAlign:"center",opacity:0,transition:"all ".concat(e.motionDurationSlow),["\n ".concat(n,"-eye,\n ").concat(n,"-download,\n ").concat(n,"-delete\n ")]:{zIndex:10,width:r,margin:"0 ".concat((0,F.bf)(e.marginXXS)),fontSize:r,cursor:"pointer",transition:"all ".concat(e.motionDurationSlow),color:o,"&:hover":{color:o},svg:{verticalAlign:"baseline"}}},["".concat(l,"-thumbnail, ").concat(l,"-thumbnail img")]:{position:"static",display:"block",width:"100%",height:"100%",objectFit:"contain"},["".concat(l,"-name")]:{display:"none",textAlign:"center"},["".concat(l,"-file + ").concat(l,"-name")]:{position:"absolute",bottom:e.margin,display:"block",width:"calc(100% - ".concat((0,F.bf)(i(e.paddingXS).mul(2).equal()),")")},["".concat(l,"-uploading")]:{["&".concat(l)]:{backgroundColor:e.colorFillAlter},["&::before, ".concat(n,"-eye, ").concat(n,"-download, ").concat(n,"-delete")]:{display:"none"}},["".concat(l,"-progress")]:{bottom:e.marginXL,width:"calc(100% - ".concat((0,F.bf)(i(e.paddingXS).mul(2).equal()),")"),paddingInlineStart:0}}}),["".concat(t,"-wrapper").concat(t,"-picture-circle-wrapper")]:{["".concat(t).concat(t,"-select")]:{borderRadius:"50%"}}}};var Y=e=>{let{componentCls:t}=e;return{["".concat(t,"-rtl")]:{direction:"rtl"}}};let Q=e=>{let{componentCls:t,colorTextDisabled:n}=e;return{["".concat(t,"-wrapper")]:Object.assign(Object.assign({},(0,Z.Wf)(e)),{[t]:{outline:0,"input[type='file']":{cursor:"pointer"}},["".concat(t,"-select")]:{display:"inline-block"},["".concat(t,"-disabled")]:{color:n,cursor:"not-allowed"}})}};var 
J=(0,z.I$)("Upload",e=>{let{fontSizeHeading3:t,fontHeight:n,lineWidth:r,controlHeightLG:o,calc:i}=e,a=(0,B.TS)(e,{uploadThumbnailSize:i(t).mul(2).equal(),uploadProgressOffset:i(i(n).div(2)).add(r).equal(),uploadPicCardSize:i(o).mul(2.55).equal()});return[Q(a),H(a),X(a),$(a),q(a),V(a),Y(a),(0,L.Z)(a)]},e=>({actionsColor:e.colorTextDescription})),ee={icon:function(e,t){return{tag:"svg",attrs:{viewBox:"64 64 896 896",focusable:"false"},children:[{tag:"path",attrs:{d:"M534 352V136H232v752h560V394H576a42 42 0 01-42-42z",fill:t}},{tag:"path",attrs:{d:"M854.6 288.6L639.4 73.4c-6-6-14.1-9.4-22.6-9.4H192c-17.7 0-32 14.3-32 32v832c0 17.7 14.3 32 32 32h640c17.7 0 32-14.3 32-32V311.3c0-8.5-3.4-16.7-9.4-22.7zM602 137.8L790.2 326H602V137.8zM792 888H232V136h302v216a42 42 0 0042 42h216v494z",fill:e}}]}},name:"file",theme:"twotone"},et=n(55015),en=r.forwardRef(function(e,t){return r.createElement(et.Z,(0,c.Z)({},e,{ref:t,icon:ee}))}),er=n(61935),eo={icon:{tag:"svg",attrs:{viewBox:"64 64 896 896",focusable:"false"},children:[{tag:"path",attrs:{d:"M779.3 196.6c-94.2-94.2-247.6-94.2-341.7 0l-261 260.8c-1.7 1.7-2.6 4-2.6 6.4s.9 4.7 2.6 6.4l36.9 36.9a9 9 0 0012.7 0l261-260.8c32.4-32.4 75.5-50.2 121.3-50.2s88.9 17.8 121.2 50.2c32.4 32.4 50.2 75.5 50.2 121.2 0 45.8-17.8 88.8-50.2 121.2l-266 265.9-43.1 43.1c-40.3 40.3-105.8 40.3-146.1 0-19.5-19.5-30.2-45.4-30.2-73s10.7-53.5 30.2-73l263.9-263.8c6.7-6.6 15.5-10.3 24.9-10.3h.1c9.4 0 18.1 3.7 24.7 10.3 6.7 6.7 10.3 15.5 10.3 24.9 0 9.3-3.7 18.1-10.3 24.7L372.4 653c-1.7 1.7-2.6 4-2.6 6.4s.9 4.7 2.6 6.4l36.9 36.9a9 9 0 0012.7 0l215.6-215.6c19.9-19.9 30.8-46.3 30.8-74.4s-11-54.6-30.8-74.4c-41.1-41.1-107.9-41-149 0L463 364 224.8 602.1A172.22 172.22 0 00174 724.8c0 46.3 18.1 89.8 50.8 122.5 33.9 33.8 78.3 50.7 122.7 50.7 44.4 0 88.8-16.9 122.6-50.7l309.2-309C824.8 492.7 850 432 850 367.5c.1-64.6-25.1-125.3-70.7-170.9z"}}]},name:"paper-clip",theme:"outlined"},ei=r.forwardRef(function(e,t){return 
r.createElement(et.Z,(0,c.Z)({},e,{ref:t,icon:eo}))}),ea={icon:function(e,t){return{tag:"svg",attrs:{viewBox:"64 64 896 896",focusable:"false"},children:[{tag:"path",attrs:{d:"M928 160H96c-17.7 0-32 14.3-32 32v640c0 17.7 14.3 32 32 32h832c17.7 0 32-14.3 32-32V192c0-17.7-14.3-32-32-32zm-40 632H136v-39.9l138.5-164.3 150.1 178L658.1 489 888 761.6V792zm0-129.8L664.2 396.8c-3.2-3.8-9-3.8-12.2 0L424.6 666.4l-144-170.7c-3.2-3.8-9-3.8-12.2 0L136 652.7V232h752v430.2z",fill:e}},{tag:"path",attrs:{d:"M424.6 765.8l-150.1-178L136 752.1V792h752v-30.4L658.1 489z",fill:t}},{tag:"path",attrs:{d:"M136 652.7l132.4-157c3.2-3.8 9-3.8 12.2 0l144 170.7L652 396.8c3.2-3.8 9-3.8 12.2 0L888 662.2V232H136v420.7zM304 280a88 88 0 110 176 88 88 0 010-176z",fill:t}},{tag:"path",attrs:{d:"M276 368a28 28 0 1056 0 28 28 0 10-56 0z",fill:t}},{tag:"path",attrs:{d:"M304 456a88 88 0 100-176 88 88 0 000 176zm0-116c15.5 0 28 12.5 28 28s-12.5 28-28 28-28-12.5-28-28 12.5-28 28-28z",fill:e}}]}},name:"picture",theme:"twotone"},el=r.forwardRef(function(e,t){return r.createElement(et.Z,(0,c.Z)({},e,{ref:t,icon:ea}))}),ec=n(47970),es=n(51646),eu=n(68710),ed=n(19722),ef=n(73002);function ep(e){return Object.assign(Object.assign({},e),{lastModified:e.lastModified,lastModifiedDate:e.lastModifiedDate,name:e.name,size:e.size,type:e.type,uid:e.uid,percent:0,originFileObj:e})}function eh(e,t){let n=(0,o.Z)(t),r=n.findIndex(t=>{let{uid:n}=t;return n===e.uid});return -1===r?n.push(e):n[r]=e,n}function em(e,t){let n=void 0!==e.uid?"uid":"name";return t.filter(t=>t[n]===e[n])[0]}let eg=function(){let e=arguments.length>0&&void 0!==arguments[0]?arguments[0]:"",t=e.split("/"),n=t[t.length-1].split(/#|\?/)[0];return(/\.[^./\\]*$/.exec(n)||[""])[0]},ev=e=>0===e.indexOf("image/"),ey=e=>{if(e.type&&!e.thumbUrl)return ev(e.type);let t=e.thumbUrl||e.url||"",n=eg(t);return!!(/^data:image\//.test(t)||/(webp|svg|png|gif|jpg|jpeg|jfif|bmp|dpg|ico|heic|heif)$/i.test(n))||!/^data:/.test(t)&&!n};function eb(e){return new 
Promise(t=>{if(!e.type||!ev(e.type)){t("");return}let n=document.createElement("canvas");n.width=200,n.height=200,n.style.cssText="position: fixed; left: 0; top: 0; width: ".concat(200,"px; height: ").concat(200,"px; z-index: 9999; display: none;"),document.body.appendChild(n);let r=n.getContext("2d"),o=new Image;if(o.onload=()=>{let{width:e,height:i}=o,a=200,l=200,c=0,s=0;e>i?s=-((l=200/e*i)-a)/2:c=-((a=200/i*e)-l)/2,r.drawImage(o,c,s,a,l);let u=n.toDataURL();document.body.removeChild(n),window.URL.revokeObjectURL(o.src),t(u)},o.crossOrigin="anonymous",e.type.startsWith("image/svg+xml")){let t=new FileReader;t.onload=()=>{t.result&&(o.src=t.result)},t.readAsDataURL(e)}else if(e.type.startsWith("image/gif")){let n=new FileReader;n.onload=()=>{n.result&&t(n.result)},n.readAsDataURL(e)}else o.src=window.URL.createObjectURL(e)})}var ex={icon:{tag:"svg",attrs:{viewBox:"64 64 896 896",focusable:"false"},children:[{tag:"path",attrs:{d:"M360 184h-8c4.4 0 8-3.6 8-8v8h304v-8c0 4.4 3.6 8 8 8h-8v72h72v-80c0-35.3-28.7-64-64-64H352c-35.3 0-64 28.7-64 64v80h72v-72zm504 72H160c-17.7 0-32 14.3-32 32v32c0 4.4 3.6 8 8 8h60.4l24.7 523c1.6 34.1 29.8 61 63.9 61h454c34.2 0 62.3-26.8 63.9-61l24.7-523H888c4.4 0 8-3.6 8-8v-32c0-17.7-14.3-32-32-32zM731.3 840H292.7l-24.2-512h487l-24.2 512z"}}]},name:"delete",theme:"outlined"},ew=r.forwardRef(function(e,t){return r.createElement(et.Z,(0,c.Z)({},e,{ref:t,icon:ex}))}),eS=n(73879),ek=n(6520),eE=n(8900),eC=n(9738),eO=n(39725),ej=n(49638),eP=n(18694),eM={percent:0,prefixCls:"rc-progress",strokeColor:"#2db7f5",strokeLinecap:"round",strokeWidth:1,trailColor:"#D9D9D9",trailWidth:1,gapPosition:"bottom"},eN=function(){var e=(0,r.useRef)([]),t=(0,r.useRef)(null);return(0,r.useEffect)(function(){var n=Date.now(),r=!1;e.current.forEach(function(e){if(e){r=!0;var o=e.style;o.transitionDuration=".3s, .3s, .3s, .06s",t.current&&n-t.current<100&&(o.transitionDuration="0s, 
0s")}}),r&&(t.current=Date.now())}),e.current},eI=n(26365),eR=n(94981),eT=0,eA=(0,eR.Z)(),e_=function(e){var t=r.useState(),n=(0,eI.Z)(t,2),o=n[0],i=n[1];return r.useEffect(function(){var e;i("rc_progress_".concat((eA?(e=eT,eT+=1):e="TEST_OR_SSR",e)))},[]),e||o},eD=function(e){var t=e.bg,n=e.children;return r.createElement("div",{style:{width:"100%",height:"100%",background:t}},n)};function eZ(e,t){return Object.keys(e).map(function(n){var r=parseFloat(n);return"".concat(e[n]," ").concat("".concat(Math.floor(r*t),"%"))})}var eL=r.forwardRef(function(e,t){var n=e.prefixCls,o=e.color,i=e.gradientId,a=e.radius,l=e.style,c=e.ptg,s=e.strokeLinecap,u=e.strokeWidth,d=e.size,f=e.gapDegree,p=o&&"object"===(0,y.Z)(o),h=d/2,m=r.createElement("circle",{className:"".concat(n,"-circle-path"),r:a,cx:h,cy:h,stroke:p?"#FFF":void 0,strokeLinecap:s,strokeWidth:u,opacity:0===c?0:1,style:l,ref:t});if(!p)return m;var g="".concat(i,"-conic"),v=eZ(o,(360-f)/360),b=eZ(o,1),x="conic-gradient(from ".concat(f?"".concat(180+f/2,"deg"):"0deg",", ").concat(v.join(", "),")"),w="linear-gradient(to ".concat(f?"bottom":"top",", ").concat(b.join(", "),")");return r.createElement(r.Fragment,null,r.createElement("mask",{id:g},m),r.createElement("foreignObject",{x:0,y:0,width:d,height:d,mask:"url(#".concat(g,")")},r.createElement(eD,{bg:w},r.createElement(eD,{bg:x}))))}),ez=function(e,t,n,r,o,i,a,l,c,s){var u=arguments.length>10&&void 0!==arguments[10]?arguments[10]:0,d=(100-r)/100*t;return"round"===c&&100!==r&&(d+=s/2)>=t&&(d=t-.01),{stroke:"string"==typeof l?l:void 0,strokeDasharray:"".concat(t,"px ").concat(e),strokeDashoffset:d+u,transform:"rotate(".concat(o+n/100*360*((360-i)/360)+(0===i?0:({bottom:0,top:180,left:90,right:-90})[a]),"deg)"),transformOrigin:"".concat(50,"px ").concat(50,"px"),transition:"stroke-dashoffset .3s ease 0s, stroke-dasharray .3s ease 0s, stroke .3s, stroke-width .06s ease .3s, opacity .3s ease 
0s",fillOpacity:0}},eB=["id","prefixCls","steps","strokeWidth","trailWidth","gapDegree","gapPosition","trailColor","strokeLinecap","style","className","strokeColor","percent"];function eF(e){var t=null!=e?e:[];return Array.isArray(t)?t:[t]}var eH=function(e){var t,n,o,i,a=(0,m.Z)((0,m.Z)({},eM),e),s=a.id,u=a.prefixCls,d=a.steps,f=a.strokeWidth,p=a.trailWidth,h=a.gapDegree,v=void 0===h?0:h,b=a.gapPosition,x=a.trailColor,w=a.strokeLinecap,S=a.style,k=a.className,E=a.strokeColor,C=a.percent,O=(0,g.Z)(a,eB),j=e_(s),P="".concat(j,"-gradient"),M=50-f/2,N=2*Math.PI*M,I=v>0?90+v/2:-90,R=(360-v)/360*N,T="object"===(0,y.Z)(d)?d:{count:d,space:2},A=T.count,_=T.space,D=eF(C),Z=eF(E),L=Z.find(function(e){return e&&"object"===(0,y.Z)(e)}),z=L&&"object"===(0,y.Z)(L)?"butt":w,B=ez(N,R,0,100,I,v,b,x,z,f),F=eN();return r.createElement("svg",(0,c.Z)({className:l()("".concat(u,"-circle"),k),viewBox:"0 0 ".concat(100," ").concat(100),style:S,id:s,role:"presentation"},O),!A&&r.createElement("circle",{className:"".concat(u,"-circle-trail"),r:M,cx:50,cy:50,stroke:x,strokeLinecap:z,strokeWidth:p||f,style:B}),A?(t=Math.round(D[0]/100*A),n=100/A,o=0,Array(A).fill(null).map(function(e,i){var a=i<=t-1?Z[0]:x,l=a&&"object"===(0,y.Z)(a)?"url(#".concat(P,")"):void 0,c=ez(N,R,o,n,I,v,b,a,"butt",f,_);return o+=(R-c.strokeDashoffset+_)*100/R,r.createElement("circle",{key:i,className:"".concat(u,"-circle-path"),r:M,cx:50,cy:50,stroke:l,strokeWidth:f,opacity:1,style:c,ref:function(e){F[i]=e}})})):(i=0,D.map(function(e,t){var n=Z[t]||Z[Z.length-1],o=ez(N,R,i,e,I,v,b,n,z,f);return i+=e,r.createElement(eL,{key:t,color:n,ptg:e,radius:M,prefixCls:u,gradientId:P,style:o,strokeLinecap:z,strokeWidth:f,gapDegree:v,ref:function(e){F[t]=e},size:100})}).reverse()))},eq=n(98074);function eW(e){return!e||e<0?0:e>100?100:e}function eK(e){let{success:t,successPercent:n}=e,r=n;return t&&"progress"in t&&(r=t.progress),t&&"percent"in t&&(r=t.percent),r}let 
eU=e=>{let{percent:t,success:n,successPercent:r}=e,o=eW(eK({success:n,successPercent:r}));return[o,eW(eW(t)-o)]},eV=e=>{let{success:t={},strokeColor:n}=e,{strokeColor:r}=t;return[r||G.ez.green,n||null]},eG=(e,t,n)=>{var r,o,i,a;let l=-1,c=-1;if("step"===t){let t=n.steps,r=n.strokeWidth;"string"==typeof e||void 0===e?(l="small"===e?2:14,c=null!=r?r:8):"number"==typeof e?[l,c]=[e,e]:[l=14,c=8]=e,l*=t}else if("line"===t){let t=null==n?void 0:n.strokeWidth;"string"==typeof e||void 0===e?c=t||("small"===e?6:8):"number"==typeof e?[l,c]=[e,e]:[l=-1,c=8]=e}else("circle"===t||"dashboard"===t)&&("string"==typeof e||void 0===e?[l,c]="small"===e?[60,60]:[120,120]:"number"==typeof e?[l,c]=[e,e]:(l=null!==(o=null!==(r=e[0])&&void 0!==r?r:e[1])&&void 0!==o?o:120,c=null!==(a=null!==(i=e[0])&&void 0!==i?i:e[1])&&void 0!==a?a:120));return[l,c]},eX=e=>3/e*100;var e$=e=>{let{prefixCls:t,trailColor:n=null,strokeLinecap:o="round",gapPosition:i,gapDegree:a,width:c=120,type:s,children:u,success:d,size:f=c}=e,[p,h]=eG(f,"circle"),{strokeWidth:m}=e;void 0===m&&(m=Math.max(eX(p),6));let g=r.useMemo(()=>a||0===a?a:"dashboard"===s?75:void 0,[a,s]),v="[object Object]"===Object.prototype.toString.call(e.strokeColor),y=eV({success:d,strokeColor:e.strokeColor}),b=l()("".concat(t,"-inner"),{["".concat(t,"-circle-gradient")]:v}),x=r.createElement(eH,{percent:eU(e),strokeWidth:m,trailWidth:m,strokeColor:y,strokeLinecap:o,trailColor:n,prefixCls:t,gapDegree:g,gapPosition:i||"dashboard"===s&&"bottom"||void 0});return r.createElement("div",{className:b,style:{width:p,height:h,fontSize:.15*p+6}},p<=20?r.createElement(eq.Z,{title:u},r.createElement("span",null,x)):r.createElement(r.Fragment,null,x,u))};let eY="--progress-line-stroke-color",eQ="--progress-percent",eJ=e=>{let t=e?"100%":"-100%";return new F.E4("antProgress".concat(e?"RTL":"LTR","Active"),{"0%":{transform:"translateX(".concat(t,") scaleX(0)"),opacity:.1},"20%":{transform:"translateX(".concat(t,") 
scaleX(0)"),opacity:.5},to:{transform:"translateX(0) scaleX(1)",opacity:0}})},e0=e=>{let{componentCls:t,iconCls:n}=e;return{[t]:Object.assign(Object.assign({},(0,Z.Wf)(e)),{display:"inline-block","&-rtl":{direction:"rtl"},"&-line":{position:"relative",width:"100%",fontSize:e.fontSize,marginInlineEnd:e.marginXS,marginBottom:e.marginXS},["".concat(t,"-outer")]:{display:"inline-block",width:"100%"},["&".concat(t,"-show-info")]:{["".concat(t,"-outer")]:{marginInlineEnd:"calc(-2em - ".concat((0,F.bf)(e.marginXS),")"),paddingInlineEnd:"calc(2em + ".concat((0,F.bf)(e.paddingXS),")")}},["".concat(t,"-inner")]:{position:"relative",display:"inline-block",width:"100%",overflow:"hidden",verticalAlign:"middle",backgroundColor:e.remainingColor,borderRadius:e.lineBorderRadius},["".concat(t,"-inner:not(").concat(t,"-circle-gradient)")]:{["".concat(t,"-circle-path")]:{stroke:e.defaultColor}},["".concat(t,"-success-bg, ").concat(t,"-bg")]:{position:"relative",background:e.defaultColor,borderRadius:e.lineBorderRadius,transition:"all ".concat(e.motionDurationSlow," ").concat(e.motionEaseInOutCirc)},["".concat(t,"-bg")]:{overflow:"hidden","&::after":{content:'""',background:{_multi_value_:!0,value:["inherit","var(".concat(eY,")")]},height:"100%",width:"calc(1 / var(".concat(eQ,") * 
100%)"),display:"block"}},["".concat(t,"-success-bg")]:{position:"absolute",insetBlockStart:0,insetInlineStart:0,backgroundColor:e.colorSuccess},["".concat(t,"-text")]:{display:"inline-block",width:"2em",marginInlineStart:e.marginXS,color:e.colorText,lineHeight:1,whiteSpace:"nowrap",textAlign:"start",verticalAlign:"middle",wordBreak:"normal",[n]:{fontSize:e.fontSize}},["&".concat(t,"-status-active")]:{["".concat(t,"-bg::before")]:{position:"absolute",inset:0,backgroundColor:e.colorBgContainer,borderRadius:e.lineBorderRadius,opacity:0,animationName:eJ(),animationDuration:e.progressActiveMotionDuration,animationTimingFunction:e.motionEaseOutQuint,animationIterationCount:"infinite",content:'""'}},["&".concat(t,"-rtl").concat(t,"-status-active")]:{["".concat(t,"-bg::before")]:{animationName:eJ(!0)}},["&".concat(t,"-status-exception")]:{["".concat(t,"-bg")]:{backgroundColor:e.colorError},["".concat(t,"-text")]:{color:e.colorError}},["&".concat(t,"-status-exception ").concat(t,"-inner:not(").concat(t,"-circle-gradient)")]:{["".concat(t,"-circle-path")]:{stroke:e.colorError}},["&".concat(t,"-status-success")]:{["".concat(t,"-bg")]:{backgroundColor:e.colorSuccess},["".concat(t,"-text")]:{color:e.colorSuccess}},["&".concat(t,"-status-success ").concat(t,"-inner:not(").concat(t,"-circle-gradient)")]:{["".concat(t,"-circle-path")]:{stroke:e.colorSuccess}}})}},e1=e=>{let{componentCls:t,iconCls:n}=e;return{[t]:{["".concat(t,"-circle-trail")]:{stroke:e.remainingColor},["&".concat(t,"-circle ").concat(t,"-inner")]:{position:"relative",lineHeight:1,backgroundColor:"transparent"},["&".concat(t,"-circle 
").concat(t,"-text")]:{position:"absolute",insetBlockStart:"50%",insetInlineStart:0,width:"100%",margin:0,padding:0,color:e.circleTextColor,fontSize:e.circleTextFontSize,lineHeight:1,whiteSpace:"normal",textAlign:"center",transform:"translateY(-50%)",[n]:{fontSize:e.circleIconFontSize}},["".concat(t,"-circle&-status-exception")]:{["".concat(t,"-text")]:{color:e.colorError}},["".concat(t,"-circle&-status-success")]:{["".concat(t,"-text")]:{color:e.colorSuccess}}},["".concat(t,"-inline-circle")]:{lineHeight:1,["".concat(t,"-inner")]:{verticalAlign:"bottom"}}}},e2=e=>{let{componentCls:t}=e;return{[t]:{["".concat(t,"-steps")]:{display:"inline-block","&-outer":{display:"flex",flexDirection:"row",alignItems:"center"},"&-item":{flexShrink:0,minWidth:e.progressStepMinWidth,marginInlineEnd:e.progressStepMarginInlineEnd,backgroundColor:e.remainingColor,transition:"all ".concat(e.motionDurationSlow),"&-active":{backgroundColor:e.defaultColor}}}}}},e6=e=>{let{componentCls:t,iconCls:n}=e;return{[t]:{["".concat(t,"-small&-line, ").concat(t,"-small&-line ").concat(t,"-text ").concat(n)]:{fontSize:e.fontSizeSM}}}};var e3=(0,z.I$)("Progress",e=>{let t=e.calc(e.marginXXS).div(2).equal(),n=(0,B.TS)(e,{progressStepMarginInlineEnd:t,progressStepMinWidth:t,progressActiveMotionDuration:"2.4s"});return[e0(n),e1(n),e2(n),e6(n)]},e=>({circleTextColor:e.colorText,defaultColor:e.colorInfo,remainingColor:e.colorFillSecondary,lineBorderRadius:100,circleTextFontSize:"1em",circleIconFontSize:"".concat(e.fontSize/e.fontSizeSM,"em")})),e4=function(e,t){var n={};for(var r in e)Object.prototype.hasOwnProperty.call(e,r)&&0>t.indexOf(r)&&(n[r]=e[r]);if(null!=e&&"function"==typeof Object.getOwnPropertySymbols)for(var o=0,r=Object.getOwnPropertySymbols(e);ot.indexOf(r[o])&&Object.prototype.propertyIsEnumerable.call(e,r[o])&&(n[r[o]]=e[r[o]]);return n};let e5=e=>{let t=[];return Object.keys(e).forEach(n=>{let 
r=parseFloat(n.replace(/%/g,""));isNaN(r)||t.push({key:r,value:e[n]})}),(t=t.sort((e,t)=>e.key-t.key)).map(e=>{let{key:t,value:n}=e;return"".concat(n," ").concat(t,"%")}).join(", ")},e8=(e,t)=>{let{from:n=G.ez.blue,to:r=G.ez.blue,direction:o="rtl"===t?"to left":"to right"}=e,i=e4(e,["from","to","direction"]);if(0!==Object.keys(i).length){let e=e5(i),t="linear-gradient(".concat(o,", ").concat(e,")");return{background:t,[eY]:t}}let a="linear-gradient(".concat(o,", ").concat(n,", ").concat(r,")");return{background:a,[eY]:a}};var e7=e=>{let{prefixCls:t,direction:n,percent:o,size:i,strokeWidth:a,strokeColor:l,strokeLinecap:c="round",children:s,trailColor:u=null,success:d}=e,f=l&&"string"!=typeof l?e8(l,n):{[eY]:l,background:l},p="square"===c||"butt"===c?0:void 0,[h,m]=eG(null!=i?i:[-1,a||("small"===i?6:8)],"line",{strokeWidth:a}),g=Object.assign(Object.assign({width:"".concat(eW(o),"%"),height:m,borderRadius:p},f),{[eQ]:eW(o)/100}),v=eK(e),y={width:"".concat(eW(v),"%"),height:m,borderRadius:p,backgroundColor:null==d?void 0:d.strokeColor};return r.createElement(r.Fragment,null,r.createElement("div",{className:"".concat(t,"-outer"),style:{width:h<0?"100%":h,height:m}},r.createElement("div",{className:"".concat(t,"-inner"),style:{backgroundColor:u||void 0,borderRadius:p}},r.createElement("div",{className:"".concat(t,"-bg"),style:g}),void 0!==v?r.createElement("div",{className:"".concat(t,"-success-bg"),style:y}):null)),s)},e9=e=>{let{size:t,steps:n,percent:o=0,strokeWidth:i=8,strokeColor:a,trailColor:c=null,prefixCls:s,children:u}=e,d=Math.round(o/100*n),[f,p]=eG(null!=t?t:["small"===t?2:14,i],"step",{steps:n,strokeWidth:i}),h=f/n,m=Array(n);for(let e=0;et.indexOf(r)&&(n[r]=e[r]);if(null!=e&&"function"==typeof Object.getOwnPropertySymbols)for(var o=0,r=Object.getOwnPropertySymbols(e);ot.indexOf(r[o])&&Object.prototype.propertyIsEnumerable.call(e,r[o])&&(n[r[o]]=e[r[o]]);return n};let tt=["normal","exception","active","success"],tn=r.forwardRef((e,t)=>{let 
n;let{prefixCls:o,className:i,rootClassName:a,steps:c,strokeColor:s,percent:u=0,size:d="default",showInfo:f=!0,type:p="line",status:h,format:m,style:g}=e,v=te(e,["prefixCls","className","rootClassName","steps","strokeColor","percent","size","showInfo","type","status","format","style"]),y=r.useMemo(()=>{var t,n;let r=eK(e);return parseInt(void 0!==r?null===(t=null!=r?r:0)||void 0===t?void 0:t.toString():null===(n=null!=u?u:0)||void 0===n?void 0:n.toString(),10)},[u,e.success,e.successPercent]),b=r.useMemo(()=>!tt.includes(h)&&y>=100?"success":h||"normal",[h,y]),{getPrefixCls:x,direction:w,progress:S}=r.useContext(T.E_),k=x("progress",o),[E,C,O]=e3(k),j=r.useMemo(()=>{let t;if(!f)return null;let n=eK(e),o=m||(e=>"".concat(e,"%")),i="line"===p;return m||"exception"!==b&&"success"!==b?t=o(eW(u),eW(n)):"exception"===b?t=i?r.createElement(eO.Z,null):r.createElement(ej.Z,null):"success"===b&&(t=i?r.createElement(eE.Z,null):r.createElement(eC.Z,null)),r.createElement("span",{className:"".concat(k,"-text"),title:"string"==typeof t?t:void 0},t)},[f,u,y,b,p,k,m]),P=Array.isArray(s)?s[0]:s,M="string"==typeof s||Array.isArray(s)?s:void 0;"line"===p?n=c?r.createElement(e9,Object.assign({},e,{strokeColor:M,prefixCls:k,steps:c}),j):r.createElement(e7,Object.assign({},e,{strokeColor:P,prefixCls:k,direction:w}),j):("circle"===p||"dashboard"===p)&&(n=r.createElement(e$,Object.assign({},e,{strokeColor:P,prefixCls:k,progressStatus:b}),j));let N=l()(k,"".concat(k,"-status-").concat(b),"".concat(k,"-").concat("dashboard"===p&&"circle"||c&&"steps"||p),{["".concat(k,"-inline-circle")]:"circle"===p&&eG(d,"circle")[0]<=20,["".concat(k,"-show-info")]:f,["".concat(k,"-").concat(d)]:"string"==typeof d,["".concat(k,"-rtl")]:"rtl"===w},null==S?void 0:S.className,i,a,C,O);return E(r.createElement("div",Object.assign({ref:t,style:Object.assign(Object.assign({},null==S?void 
0:S.style),g),className:N,role:"progressbar","aria-valuenow":y},(0,eP.Z)(v,["trailColor","strokeWidth","width","gapDegree","gapPosition","strokeLinecap","success","successPercent"])),n))}),tr=r.forwardRef((e,t)=>{var n,o;let{prefixCls:i,className:a,style:c,locale:s,listType:u,file:d,items:f,progress:p,iconRender:h,actionIconRender:m,itemRender:g,isImgUrl:v,showPreviewIcon:y,showRemoveIcon:b,showDownloadIcon:x,previewIcon:w,removeIcon:S,downloadIcon:k,onPreview:E,onDownload:C,onClose:O}=e,{status:j}=d,[P,M]=r.useState(j);r.useEffect(()=>{"removed"!==j&&M(j)},[j]);let[N,I]=r.useState(!1);r.useEffect(()=>{let e=setTimeout(()=>{I(!0)},300);return()=>{clearTimeout(e)}},[]);let R=h(d),A=r.createElement("div",{className:"".concat(i,"-icon")},R);if("picture"===u||"picture-card"===u||"picture-circle"===u){if("uploading"!==P&&(d.thumbUrl||d.url)){let e=(null==v?void 0:v(d))?r.createElement("img",{src:d.thumbUrl||d.url,alt:d.name,className:"".concat(i,"-list-item-image"),crossOrigin:d.crossOrigin}):R,t=l()("".concat(i,"-list-item-thumbnail"),{["".concat(i,"-list-item-file")]:v&&!v(d)});A=r.createElement("a",{className:t,onClick:e=>E(d,e),href:d.url||d.thumbUrl,target:"_blank",rel:"noopener noreferrer"},e)}else{let e=l()("".concat(i,"-list-item-thumbnail"),{["".concat(i,"-list-item-file")]:"uploading"!==P});A=r.createElement("div",{className:e},R)}}let _=l()("".concat(i,"-list-item"),"".concat(i,"-list-item-").concat(P)),D="string"==typeof d.linkProps?JSON.parse(d.linkProps):d.linkProps,Z=b?m(("function"==typeof S?S(d):S)||r.createElement(ew,null),()=>O(d),i,s.removeFile,!0):null,L=x&&"done"===P?m(("function"==typeof k?k(d):k)||r.createElement(eS.Z,null),()=>C(d),i,s.downloadFile):null,z="picture-card"!==u&&"picture-circle"!==u&&r.createElement("span",{key:"download-delete",className:l()("".concat(i,"-list-item-actions"),{picture:"picture"===u})},L,Z),B=l()("".concat(i,"-list-item-name")),F=d.url?[r.createElement("a",Object.assign({key:"view",target:"_blank",rel:"noopener 
noreferrer",className:B,title:d.name},D,{href:d.url,onClick:e=>E(d,e)}),d.name),z]:[r.createElement("span",{key:"view",className:B,onClick:e=>E(d,e),title:d.name},d.name),z],H=y&&(d.url||d.thumbUrl)?r.createElement("a",{href:d.url||d.thumbUrl,target:"_blank",rel:"noopener noreferrer",onClick:e=>E(d,e),title:s.previewFile},"function"==typeof w?w(d):w||r.createElement(ek.Z,null)):null,q=("picture-card"===u||"picture-circle"===u)&&"uploading"!==P&&r.createElement("span",{className:"".concat(i,"-list-item-actions")},H,"done"===P&&L,Z),{getPrefixCls:W}=r.useContext(T.E_),K=W(),U=r.createElement("div",{className:_},A,F,q,N&&r.createElement(ec.ZP,{motionName:"".concat(K,"-fade"),visible:"uploading"===P,motionDeadline:2e3},e=>{let{className:t}=e,n="percent"in d?r.createElement(tn,Object.assign({},p,{type:"line",percent:d.percent,"aria-label":d["aria-label"],"aria-labelledby":d["aria-labelledby"]})):null;return r.createElement("div",{className:l()("".concat(i,"-list-item-progress"),t)},n)})),V=d.response&&"string"==typeof d.response?d.response:(null===(n=d.error)||void 0===n?void 0:n.statusText)||(null===(o=d.error)||void 0===o?void 0:o.message)||s.uploadError,G="error"===P?r.createElement(eq.Z,{title:V,getPopupContainer:e=>e.parentNode},U):U;return r.createElement("div",{className:l()("".concat(i,"-list-item-container"),a),style:c,ref:t},g?g(G,d,f,{download:C.bind(null,d),preview:E.bind(null,d),remove:O.bind(null,d)}):G)}),to=r.forwardRef((e,t)=>{let{listType:n="text",previewFile:i=eb,onPreview:a,onDownload:c,onRemove:s,locale:u,iconRender:d,isImageUrl:f=ey,prefixCls:p,items:h=[],showPreviewIcon:m=!0,showRemoveIcon:g=!0,showDownloadIcon:v=!1,removeIcon:y,previewIcon:b,downloadIcon:x,progress:w={size:[-1,2],showInfo:!1},appendAction:S,appendActionVisible:k=!0,itemRender:E,disabled:C}=e,O=(0,es.Z)(),[j,P]=r.useState(!1);r.useEffect(()=>{("picture"===n||"picture-card"===n||"picture-circle"===n)&&(h||[]).forEach(e=>{"undefined"!=typeof document&&"undefined"!=typeof 
window&&window.FileReader&&window.File&&(e.originFileObj instanceof File||e.originFileObj instanceof Blob)&&void 0===e.thumbUrl&&(e.thumbUrl="",i&&i(e.originFileObj).then(t=>{e.thumbUrl=t||"",O()}))})},[n,h,i]),r.useEffect(()=>{P(!0)},[]);let M=(e,t)=>{if(a)return null==t||t.preventDefault(),a(e)},N=e=>{"function"==typeof c?c(e):e.url&&window.open(e.url)},I=e=>{null==s||s(e)},R=e=>{if(d)return d(e,n);let t="uploading"===e.status,o=f&&f(e)?r.createElement(el,null):r.createElement(en,null),i=t?r.createElement(er.Z,null):r.createElement(ei,null);return"picture"===n?i=t?r.createElement(er.Z,null):o:("picture-card"===n||"picture-circle"===n)&&(i=t?u.uploading:o),i},A=(e,t,n,o,i)=>{let a={type:"text",size:"small",title:o,onClick:n=>{t(),(0,ed.l$)(e)&&e.props.onClick&&e.props.onClick(n)},className:"".concat(n,"-list-item-action")};if(i&&(a.disabled=C),(0,ed.l$)(e)){let t=(0,ed.Tm)(e,Object.assign(Object.assign({},e.props),{onClick:()=>{}}));return r.createElement(ef.ZP,Object.assign({},a,{icon:t}))}return r.createElement(ef.ZP,Object.assign({},a),r.createElement("span",null,e))};r.useImperativeHandle(t,()=>({handlePreview:M,handleDownload:N}));let{getPrefixCls:_}=r.useContext(T.E_),D=_("upload",p),Z=_(),L=l()("".concat(D,"-list"),"".concat(D,"-list-").concat(n)),z=(0,o.Z)(h.map(e=>({key:e.uid,file:e}))),B={motionDeadline:2e3,motionName:"".concat(D,"-").concat("picture-card"===n||"picture-circle"===n?"animate-inline":"animate"),keys:z,motionAppear:j},F=r.useMemo(()=>{let e=Object.assign({},(0,eu.Z)(Z));return delete e.onAppearEnd,delete e.onEnterEnd,delete e.onLeaveEnd,e},[Z]);return"picture-card"!==n&&"picture-circle"!==n&&(B=Object.assign(Object.assign({},F),B)),r.createElement("div",{className:L},r.createElement(ec.V4,Object.assign({},B,{component:!1}),e=>{let{key:t,file:o,className:i,style:a}=e;return 
r.createElement(tr,{key:t,locale:u,prefixCls:D,className:i,style:a,file:o,items:h,progress:w,listType:n,isImgUrl:f,showPreviewIcon:m,showRemoveIcon:g,showDownloadIcon:v,removeIcon:y,previewIcon:b,downloadIcon:x,iconRender:R,actionIconRender:A,itemRender:E,onPreview:M,onDownload:N,onClose:I})}),S&&r.createElement(ec.ZP,Object.assign({},B,{visible:k,forceRender:!0}),e=>{let{className:t,style:n}=e;return(0,ed.Tm)(S,e=>({className:l()(e.className,t),style:Object.assign(Object.assign(Object.assign({},n),{pointerEvents:t?"none":void 0}),e.style)}))}))}),ti="__LIST_IGNORE_".concat(Date.now(),"__"),ta=r.forwardRef((e,t)=>{let n;let{fileList:a,defaultFileList:c,onRemove:s,showUploadList:u=!0,listType:d="text",onPreview:f,onDownload:p,onChange:h,onDrop:m,previewFile:g,disabled:v,locale:y,iconRender:b,isImageUrl:x,progress:w,prefixCls:S,className:k,type:E="select",children:C,style:O,itemRender:j,maxCount:P,data:M={},multiple:N=!1,hasControlInside:Z=!0,action:L="",accept:z="",supportServerRender:B=!0,rootClassName:F}=e,H=r.useContext(A.Z),q=null!=v?v:H,[W,K]=(0,R.Z)(c||[],{value:a,postState:e=>null!=e?e:[]}),[U,V]=r.useState("drop"),G=r.useRef(null);r.useMemo(()=>{let e=Date.now();(a||[]).forEach((t,n)=>{t.uid||Object.isFrozen(t)||(t.uid="__AUTO__".concat(e,"_").concat(n,"__"))})},[a]);let X=(e,t,n)=>{let r=(0,o.Z)(t),a=!1;1===P?r=r.slice(-1):P&&(a=r.length>P,r=r.slice(0,P)),(0,i.flushSync)(()=>{K(r)});let l={file:e,fileList:r};n&&(l.event=n),(!a||r.some(t=>t.uid===e.uid))&&(0,i.flushSync)(()=>{null==h||h(l)})},$=e=>{let t=e.filter(e=>!e.file[ti]);if(!t.length)return;let n=t.map(e=>ep(e.file)),r=(0,o.Z)(W);n.forEach(e=>{r=eh(e,r)}),n.forEach((e,n)=>{let o=e;if(t[n].parsedFile)e.status="uploading";else{let t;let{originFileObj:n}=e;try{t=new File([n],n.name,{type:n.type})}catch(e){(t=new Blob([n],{type:n.type})).name=n.name,t.lastModifiedDate=new Date,t.lastModified=new Date().getTime()}t.uid=e.uid,o=t}X(o,r)})},Y=(e,t,n)=>{try{"string"==typeof 
e&&(e=JSON.parse(e))}catch(e){}if(!em(t,W))return;let r=ep(t);r.status="done",r.percent=100,r.response=e,r.xhr=n;let o=eh(r,W);X(r,o)},Q=(e,t)=>{if(!em(t,W))return;let n=ep(t);n.status="uploading",n.percent=e.percent;let r=eh(n,W);X(n,r,e)},ee=(e,t,n)=>{if(!em(n,W))return;let r=ep(n);r.error=e,r.response=t,r.status="error";let o=eh(r,W);X(r,o)},et=e=>{let t;Promise.resolve("function"==typeof s?s(e):s).then(n=>{var r;if(!1===n)return;let o=function(e,t){let n=void 0!==e.uid?"uid":"name",r=t.filter(t=>t[n]!==e[n]);return r.length===t.length?null:r}(e,W);o&&(t=Object.assign(Object.assign({},e),{status:"removed"}),null==W||W.forEach(e=>{let n=void 0!==t.uid?"uid":"name";e[n]!==t[n]||Object.isFrozen(e)||(e.status="removed")}),null===(r=G.current)||void 0===r||r.abort(t),X(t,o))})},en=e=>{V(e.type),"drop"===e.type&&(null==m||m(e))};r.useImperativeHandle(t,()=>({onBatchStart:$,onSuccess:Y,onProgress:Q,onError:ee,fileList:W,upload:G.current}));let{getPrefixCls:er,direction:eo,upload:ei}=r.useContext(T.E_),ea=er("upload",S),el=Object.assign(Object.assign({onBatchStart:$,onError:ee,onProgress:Q,onSuccess:Y},e),{data:M,multiple:N,action:L,accept:z,supportServerRender:B,prefixCls:ea,disabled:q,beforeUpload:(t,n)=>{var r,o,i,a;return r=void 0,o=void 0,i=void 0,a=function*(){let{beforeUpload:r,transformFile:o}=e,i=t;if(r){let e=yield r(t,n);if(!1===e)return!1;if(delete t[ti],e===ti)return Object.defineProperty(t,ti,{value:!0,configurable:!0}),!1;"object"==typeof e&&e&&(i=e)}return o&&(i=yield o(i)),i},new(i||(i=Promise))(function(e,t){function n(e){try{c(a.next(e))}catch(e){t(e)}}function l(e){try{c(a.throw(e))}catch(e){t(e)}}function c(t){var r;t.done?e(t.value):((r=t.value)instanceof i?r:new i(function(e){e(r)})).then(n,l)}c((a=a.apply(r,o||[])).next())})},onChange:void 0,hasControlInside:Z});delete el.className,delete el.style,(!C||q)&&delete el.id;let 
ec="".concat(ea,"-wrapper"),[es,eu,ed]=J(ea,ec),[ef]=(0,_.Z)("Upload",D.Z.Upload),{showRemoveIcon:eg,showPreviewIcon:ev,showDownloadIcon:ey,removeIcon:eb,previewIcon:ex,downloadIcon:ew}="boolean"==typeof u?{}:u,eS=void 0===eg?!q:!!eg,ek=(e,t)=>u?r.createElement(to,{prefixCls:ea,listType:d,items:W,previewFile:g,onPreview:f,onDownload:p,onRemove:et,showRemoveIcon:eS,showPreviewIcon:ev,showDownloadIcon:ey,removeIcon:eb,previewIcon:ex,downloadIcon:ew,iconRender:b,locale:Object.assign(Object.assign({},ef),y),isImageUrl:x,progress:w,appendAction:e,appendActionVisible:t,itemRender:j,disabled:q}):e,eE=l()(ec,k,F,eu,ed,null==ei?void 0:ei.className,{["".concat(ea,"-rtl")]:"rtl"===eo,["".concat(ea,"-picture-card-wrapper")]:"picture-card"===d,["".concat(ea,"-picture-circle-wrapper")]:"picture-circle"===d}),eC=Object.assign(Object.assign({},null==ei?void 0:ei.style),O);if("drag"===E){let e=l()(eu,ea,"".concat(ea,"-drag"),{["".concat(ea,"-drag-uploading")]:W.some(e=>"uploading"===e.status),["".concat(ea,"-drag-hover")]:"dragover"===U,["".concat(ea,"-disabled")]:q,["".concat(ea,"-rtl")]:"rtl"===eo});return es(r.createElement("span",{className:eE},r.createElement("div",{className:e,style:eC,onDrop:en,onDragOver:en,onDragLeave:en},r.createElement(I,Object.assign({},el,{ref:G,className:"".concat(ea,"-btn")}),r.createElement("div",{className:"".concat(ea,"-drag-container")},C))),ek()))}let eO=l()(ea,"".concat(ea,"-select"),{["".concat(ea,"-disabled")]:q}),ej=(n=C?void 0:{display:"none"},r.createElement("div",{className:eO,style:n},r.createElement(I,Object.assign({},el,{ref:G}))));return es("picture-card"===d||"picture-circle"===d?r.createElement("span",{className:eE},ek(ej,!!C)):r.createElement("span",{className:eE},ej,ek()))});var tl=function(e,t){var n={};for(var r in e)Object.prototype.hasOwnProperty.call(e,r)&&0>t.indexOf(r)&&(n[r]=e[r]);if(null!=e&&"function"==typeof Object.getOwnPropertySymbols)for(var 
o=0,r=Object.getOwnPropertySymbols(e);ot.indexOf(r[o])&&Object.prototype.propertyIsEnumerable.call(e,r[o])&&(n[r[o]]=e[r[o]]);return n};let tc=r.forwardRef((e,t)=>{var{style:n,height:o,hasControlInside:i=!1}=e,a=tl(e,["style","height","hasControlInside"]);return r.createElement(ta,Object.assign({ref:t,hasControlInside:i},a,{type:"drag",style:Object.assign(Object.assign({},n),{height:o})}))});ta.Dragger=tc,ta.LIST_IGNORE=ti;var ts=ta},49211:function(e,t,n){"use strict";var r=n(99623),o={"text/plain":"Text","text/html":"Url",default:"Text"};e.exports=function(e,t){var n,i,a,l,c,s,u,d,f=!1;t||(t={}),a=t.debug||!1;try{if(c=r(),s=document.createRange(),u=document.getSelection(),(d=document.createElement("span")).textContent=e,d.ariaHidden="true",d.style.all="unset",d.style.position="fixed",d.style.top=0,d.style.clip="rect(0, 0, 0, 0)",d.style.whiteSpace="pre",d.style.webkitUserSelect="text",d.style.MozUserSelect="text",d.style.msUserSelect="text",d.style.userSelect="text",d.addEventListener("copy",function(n){if(n.stopPropagation(),t.format){if(n.preventDefault(),void 0===n.clipboardData){a&&console.warn("unable to use e.clipboardData"),a&&console.warn("trying IE specific stuff"),window.clipboardData.clearData();var r=o[t.format]||o.default;window.clipboardData.setData(r,e)}else n.clipboardData.clearData(),n.clipboardData.setData(t.format,e)}t.onCopy&&(n.preventDefault(),t.onCopy(n.clipboardData))}),document.body.appendChild(d),s.selectNodeContents(d),u.addRange(s),!document.execCommand("copy"))throw Error("copy command was unsuccessful");f=!0}catch(r){a&&console.error("unable to copy using execCommand: ",r),a&&console.warn("trying IE specific stuff");try{window.clipboardData.setData(t.format||"text",e),t.onCopy&&t.onCopy(window.clipboardData),f=!0}catch(r){a&&console.error("unable to copy using clipboardData: ",r),a&&console.error("falling back to prompt"),n="message"in t?t.message:"Copy to clipboard: #{key}, Enter",i=(/mac os 
x/i.test(navigator.userAgent)?"⌘":"Ctrl")+"+C",l=n.replace(/#{\s*key\s*}/g,i),window.prompt(l,e)}}finally{u&&("function"==typeof u.removeRange?u.removeRange(s):u.removeAllRanges()),d&&document.body.removeChild(d),c()}return f}},7656:function(e,t,n){"use strict";function r(e,t){if(t.length1?"s":"")+" required, but only "+t.length+" present")}n.d(t,{Z:function(){return r}})},47869:function(e,t,n){"use strict";function r(e){if(null===e||!0===e||!1===e)return NaN;var t=Number(e);return isNaN(t)?t:t<0?Math.ceil(t):Math.floor(t)}n.d(t,{Z:function(){return r}})},7366:function(e,t,n){"use strict";n.d(t,{Z:function(){return s}});var r=n(41154),o=n(25721),i=n(55463),a=n(99735),l=n(7656),c=n(47869);function s(e,t){if((0,l.Z)(2,arguments),!t||"object"!==(0,r.Z)(t))return new Date(NaN);var n=t.years?(0,c.Z)(t.years):0,s=t.months?(0,c.Z)(t.months):0,u=t.weeks?(0,c.Z)(t.weeks):0,d=t.days?(0,c.Z)(t.days):0,f=t.hours?(0,c.Z)(t.hours):0,p=t.minutes?(0,c.Z)(t.minutes):0,h=t.seconds?(0,c.Z)(t.seconds):0,m=(0,a.Z)(e),g=s||n?(0,i.Z)(m,s+12*n):m;return new Date((d||u?(0,o.Z)(g,d+7*u):g).getTime()+1e3*(h+60*(p+60*f)))}},25721:function(e,t,n){"use strict";n.d(t,{Z:function(){return a}});var r=n(47869),o=n(99735),i=n(7656);function a(e,t){(0,i.Z)(2,arguments);var n=(0,o.Z)(e),a=(0,r.Z)(t);return isNaN(a)?new Date(NaN):(a&&n.setDate(n.getDate()+a),n)}},55463:function(e,t,n){"use strict";n.d(t,{Z:function(){return a}});var r=n(47869),o=n(99735),i=n(7656);function a(e,t){(0,i.Z)(2,arguments);var n=(0,o.Z)(e),a=(0,r.Z)(t);if(isNaN(a))return new Date(NaN);if(!a)return n;var l=n.getDate(),c=new Date(n.getTime());return(c.setMonth(n.getMonth()+a+1,0),l>=c.getDate())?c:(n.setFullYear(c.getFullYear(),c.getMonth(),l),n)}},99735:function(e,t,n){"use strict";n.d(t,{Z:function(){return i}});var r=n(41154),o=n(7656);function i(e){(0,o.Z)(1,arguments);var t=Object.prototype.toString.call(e);return e instanceof Date||"object"===(0,r.Z)(e)&&"[object Date]"===t?new Date(e.getTime()):"number"==typeof 
e||"[object Number]"===t?new Date(e):(("string"==typeof e||"[object String]"===t)&&"undefined"!=typeof console&&(console.warn("Starting with v2.0.0-beta.1 date-fns doesn't accept strings as date arguments. Please use `parseISO` to parse strings. See: https://github.com/date-fns/date-fns/blob/master/docs/upgradeGuide.md#string-arguments"),console.warn(Error().stack)),new Date(NaN))}},61134:function(e,t,n){var r;!function(o){"use strict";var i,a={precision:20,rounding:4,toExpNeg:-7,toExpPos:21,LN10:"2.302585092994045684017991454684364207601101488628772976033327900967572609677352480235997205089598298341967784042286"},l=!0,c="[DecimalError] ",s=c+"Invalid argument: ",u=c+"Exponent out of range: ",d=Math.floor,f=Math.pow,p=/^(\d+(\.\d*)?|\.\d+)(e[+-]?\d+)?$/i,h=d(1286742750677284.5),m={};function g(e,t){var n,r,o,i,a,c,s,u,d=e.constructor,f=d.precision;if(!e.s||!t.s)return t.s||(t=new d(e)),l?O(t,f):t;if(s=e.d,u=t.d,a=e.e,o=t.e,s=s.slice(),i=a-o){for(i<0?(r=s,i=-i,c=u.length):(r=u,o=a,c=s.length),i>(c=(a=Math.ceil(f/7))>c?a+1:c+1)&&(i=c,r.length=1),r.reverse();i--;)r.push(0);r.reverse()}for((c=s.length)-(i=u.length)<0&&(i=c,r=u,u=s,s=r),n=0;i;)n=(s[--i]=s[i]+u[i]+n)/1e7|0,s[i]%=1e7;for(n&&(s.unshift(n),++o),c=s.length;0==s[--c];)s.pop();return t.d=s,t.e=o,l?O(t,f):t}function v(e,t,n){if(e!==~~e||en)throw Error(s+e)}function y(e){var t,n,r,o=e.length-1,i="",a=e[0];if(o>0){for(i+=a,t=1;te.e^this.s<0?1:-1;for(t=0,n=(r=this.d.length)<(o=e.d.length)?r:o;te.d[t]^this.s<0?1:-1;return r===o?0:r>o^this.s<0?1:-1},m.decimalPlaces=m.dp=function(){var e=this.d.length-1,t=(e-this.e)*7;if(e=this.d[e])for(;e%10==0;e/=10)t--;return t<0?0:t},m.dividedBy=m.div=function(e){return b(this,new this.constructor(e))},m.dividedToIntegerBy=m.idiv=function(e){var t=this.constructor;return O(b(this,new t(e),0,1),t.precision)},m.equals=m.eq=function(e){return!this.cmp(e)},m.exponent=function(){return w(this)},m.greaterThan=m.gt=function(e){return 
this.cmp(e)>0},m.greaterThanOrEqualTo=m.gte=function(e){return this.cmp(e)>=0},m.isInteger=m.isint=function(){return this.e>this.d.length-2},m.isNegative=m.isneg=function(){return this.s<0},m.isPositive=m.ispos=function(){return this.s>0},m.isZero=function(){return 0===this.s},m.lessThan=m.lt=function(e){return 0>this.cmp(e)},m.lessThanOrEqualTo=m.lte=function(e){return 1>this.cmp(e)},m.logarithm=m.log=function(e){var t,n=this.constructor,r=n.precision,o=r+5;if(void 0===e)e=new n(10);else if((e=new n(e)).s<1||e.eq(i))throw Error(c+"NaN");if(this.s<1)throw Error(c+(this.s?"NaN":"-Infinity"));return this.eq(i)?new n(0):(l=!1,t=b(E(this,o),E(e,o),o),l=!0,O(t,r))},m.minus=m.sub=function(e){return e=new this.constructor(e),this.s==e.s?j(this,e):g(this,(e.s=-e.s,e))},m.modulo=m.mod=function(e){var t,n=this.constructor,r=n.precision;if(!(e=new n(e)).s)throw Error(c+"NaN");return this.s?(l=!1,t=b(this,e,0,1).times(e),l=!0,this.minus(t)):O(new n(this),r)},m.naturalExponential=m.exp=function(){return x(this)},m.naturalLogarithm=m.ln=function(){return E(this)},m.negated=m.neg=function(){var e=new this.constructor(this);return e.s=-e.s||0,e},m.plus=m.add=function(e){return e=new this.constructor(e),this.s==e.s?g(this,e):j(this,(e.s=-e.s,e))},m.precision=m.sd=function(e){var t,n,r;if(void 0!==e&&!!e!==e&&1!==e&&0!==e)throw Error(s+e);if(t=w(this)+1,n=7*(r=this.d.length-1)+1,r=this.d[r]){for(;r%10==0;r/=10)n--;for(r=this.d[0];r>=10;r/=10)n++}return e&&t>n?t:n},m.squareRoot=m.sqrt=function(){var e,t,n,r,o,i,a,s=this.constructor;if(this.s<1){if(!this.s)return new s(0);throw Error(c+"NaN")}for(e=w(this),l=!1,0==(o=Math.sqrt(+this))||o==1/0?(((t=y(this.d)).length+e)%2==0&&(t+="0"),o=Math.sqrt(t),e=d((e+1)/2)-(e<0||e%2),r=new s(t=o==1/0?"5e"+e:(t=o.toExponential()).slice(0,t.indexOf("e")+1)+e)):r=new 
s(o.toString()),o=a=(n=s.precision)+3;;)if(r=(i=r).plus(b(this,i,a+2)).times(.5),y(i.d).slice(0,a)===(t=y(r.d)).slice(0,a)){if(t=t.slice(a-3,a+1),o==a&&"4999"==t){if(O(i,n+1,0),i.times(i).eq(this)){r=i;break}}else if("9999"!=t)break;a+=4}return l=!0,O(r,n)},m.times=m.mul=function(e){var t,n,r,o,i,a,c,s,u,d=this.constructor,f=this.d,p=(e=new d(e)).d;if(!this.s||!e.s)return new d(0);for(e.s*=this.s,n=this.e+e.e,(s=f.length)<(u=p.length)&&(i=f,f=p,p=i,a=s,s=u,u=a),i=[],r=a=s+u;r--;)i.push(0);for(r=u;--r>=0;){for(t=0,o=s+r;o>r;)c=i[o]+p[r]*f[o-r-1]+t,i[o--]=c%1e7|0,t=c/1e7|0;i[o]=(i[o]+t)%1e7|0}for(;!i[--a];)i.pop();return t?++n:i.shift(),e.d=i,e.e=n,l?O(e,d.precision):e},m.toDecimalPlaces=m.todp=function(e,t){var n=this,r=n.constructor;return(n=new r(n),void 0===e)?n:(v(e,0,1e9),void 0===t?t=r.rounding:v(t,0,8),O(n,e+w(n)+1,t))},m.toExponential=function(e,t){var n,r=this,o=r.constructor;return void 0===e?n=P(r,!0):(v(e,0,1e9),void 0===t?t=o.rounding:v(t,0,8),n=P(r=O(new o(r),e+1,t),!0,e+1)),n},m.toFixed=function(e,t){var n,r,o=this.constructor;return void 0===e?P(this):(v(e,0,1e9),void 0===t?t=o.rounding:v(t,0,8),n=P((r=O(new o(this),e+w(this)+1,t)).abs(),!1,e+w(r)+1),this.isneg()&&!this.isZero()?"-"+n:n)},m.toInteger=m.toint=function(){var e=this.constructor;return O(new e(this),w(this)+1,e.rounding)},m.toNumber=function(){return+this},m.toPower=m.pow=function(e){var t,n,r,o,a,s,u=this,f=u.constructor,p=+(e=new f(e));if(!e.s)return new f(i);if(!(u=new f(u)).s){if(e.s<1)throw Error(c+"Infinity");return u}if(u.eq(i))return u;if(r=f.precision,e.eq(i))return O(u,r);if(s=(t=e.e)>=(n=e.d.length-1),a=u.s,s){if((n=p<0?-p:p)<=9007199254740991){for(o=new f(i),t=Math.ceil(r/7+4),l=!1;n%2&&M((o=o.times(u)).d,t),0!==(n=d(n/2));)M((u=u.times(u)).d,t);return l=!0,e.s<0?new f(i).div(o):O(o,r)}}else if(a<0)throw Error(c+"NaN");return a=a<0&&1&e.d[Math.max(t,n)]?-1:1,u.s=1,l=!1,o=e.times(E(u,r+12)),l=!0,(o=x(o)).s=a,o},m.toPrecision=function(e,t){var n,r,o=this,i=o.constructor;return 
void 0===e?(n=w(o),r=P(o,n<=i.toExpNeg||n>=i.toExpPos)):(v(e,1,1e9),void 0===t?t=i.rounding:v(t,0,8),n=w(o=O(new i(o),e,t)),r=P(o,e<=n||n<=i.toExpNeg,e)),r},m.toSignificantDigits=m.tosd=function(e,t){var n=this.constructor;return void 0===e?(e=n.precision,t=n.rounding):(v(e,1,1e9),void 0===t?t=n.rounding:v(t,0,8)),O(new n(this),e,t)},m.toString=m.valueOf=m.val=m.toJSON=function(){var e=w(this),t=this.constructor;return P(this,e<=t.toExpNeg||e>=t.toExpPos)};var b=function(){function e(e,t){var n,r=0,o=e.length;for(e=e.slice();o--;)n=e[o]*t+r,e[o]=n%1e7|0,r=n/1e7|0;return r&&e.unshift(r),e}function t(e,t,n,r){var o,i;if(n!=r)i=n>r?1:-1;else for(o=i=0;ot[o]?1:-1;break}return i}function n(e,t,n){for(var r=0;n--;)e[n]-=r,r=e[n]1;)e.shift()}return function(r,o,i,a){var l,s,u,d,f,p,h,m,g,v,y,b,x,S,k,E,C,j,P=r.constructor,M=r.s==o.s?1:-1,N=r.d,I=o.d;if(!r.s)return new P(r);if(!o.s)throw Error(c+"Division by zero");for(u=0,s=r.e-o.e,C=I.length,k=N.length,m=(h=new P(M)).d=[];I[u]==(N[u]||0);)++u;if(I[u]>(N[u]||0)&&--s,(b=null==i?i=P.precision:a?i+(w(r)-w(o))+1:i)<0)return new P(0);if(b=b/7+2|0,u=0,1==C)for(d=0,I=I[0],b++;(u1&&(I=e(I,d),N=e(N,d),C=I.length,k=N.length),S=C,v=(g=N.slice(0,C)).length;v=1e7/2&&++E;do d=0,(l=t(I,g,C,v))<0?(y=g[0],C!=v&&(y=1e7*y+(g[1]||0)),(d=y/E|0)>1?(d>=1e7&&(d=1e7-1),p=(f=e(I,d)).length,v=g.length,1==(l=t(f,g,p,v))&&(d--,n(f,C16)throw Error(u+w(e));if(!e.s)return new p(i);for(null==t?(l=!1,c=h):c=t,a=new p(.03125);e.abs().gte(.1);)e=e.times(a),d+=5;for(c+=Math.log(f(2,d))/Math.LN10*2+5|0,n=r=o=new p(i),p.precision=c;;){if(r=O(r.times(e),c),n=n.times(++s),y((a=o.plus(b(r,n,c))).d).slice(0,c)===y(o.d).slice(0,c)){for(;d--;)o=O(o.times(o),c);return p.precision=h,null==t?(l=!0,O(o,h)):o}o=a}}function w(e){for(var t=7*e.e,n=e.d[0];n>=10;n/=10)t++;return t}function S(e,t,n){if(t>e.LN10.sd())throw l=!0,n&&(e.precision=n),Error(c+"LN10 precision limit exceeded");return O(new e(e.LN10),t)}function k(e){for(var t="";e--;)t+="0";return t}function 
E(e,t){var n,r,o,a,s,u,d,f,p,h=1,m=e,g=m.d,v=m.constructor,x=v.precision;if(m.s<1)throw Error(c+(m.s?"NaN":"-Infinity"));if(m.eq(i))return new v(0);if(null==t?(l=!1,f=x):f=t,m.eq(10))return null==t&&(l=!0),S(v,f);if(f+=10,v.precision=f,r=(n=y(g)).charAt(0),!(15e14>Math.abs(a=w(m))))return d=S(v,f+2,x).times(a+""),m=E(new v(r+"."+n.slice(1)),f-10).plus(d),v.precision=x,null==t?(l=!0,O(m,x)):m;for(;r<7&&1!=r||1==r&&n.charAt(1)>3;)r=(n=y((m=m.times(e)).d)).charAt(0),h++;for(a=w(m),r>1?(m=new v("0."+n),a++):m=new v(r+"."+n.slice(1)),u=s=m=b(m.minus(i),m.plus(i),f),p=O(m.times(m),f),o=3;;){if(s=O(s.times(p),f),y((d=u.plus(b(s,new v(o),f))).d).slice(0,f)===y(u.d).slice(0,f))return u=u.times(2),0!==a&&(u=u.plus(S(v,f+2,x).times(a+""))),u=b(u,new v(h),f),v.precision=x,null==t?(l=!0,O(u,x)):u;u=d,o+=2}}function C(e,t){var n,r,o;for((n=t.indexOf("."))>-1&&(t=t.replace(".","")),(r=t.search(/e/i))>0?(n<0&&(n=r),n+=+t.slice(r+1),t=t.substring(0,r)):n<0&&(n=t.length),r=0;48===t.charCodeAt(r);)++r;for(o=t.length;48===t.charCodeAt(o-1);)--o;if(t=t.slice(r,o)){if(o-=r,n=n-r-1,e.e=d(n/7),e.d=[],r=(n+1)%7,n<0&&(r+=7),rh||e.e<-h))throw Error(u+n)}else e.s=0,e.e=0,e.d=[0];return e}function O(e,t,n){var r,o,i,a,c,s,p,m,g=e.d;for(a=1,i=g[0];i>=10;i/=10)a++;if((r=t-a)<0)r+=7,o=t,p=g[m=0];else{if((m=Math.ceil((r+1)/7))>=(i=g.length))return e;for(a=1,p=i=g[m];i>=10;i/=10)a++;r%=7,o=r-7+a}if(void 0!==n&&(c=p/(i=f(10,a-o-1))%10|0,s=t<0||void 0!==g[m+1]||p%i,s=n<4?(c||s)&&(0==n||n==(e.s<0?3:2)):c>5||5==c&&(4==n||s||6==n&&(r>0?o>0?p/f(10,a-o):0:g[m-1])%10&1||n==(e.s<0?8:7))),t<1||!g[0])return s?(i=w(e),g.length=1,t=t-i-1,g[0]=f(10,(7-t%7)%7),e.e=d(-t/7)||0):(g.length=1,g[0]=e.e=e.s=0),e;if(0==r?(g.length=m,i=1,m--):(g.length=m+1,i=f(10,7-r),g[m]=o>0?(p/f(10,a-o)%f(10,o)|0)*i:0),s)for(;;){if(0==m){1e7==(g[0]+=i)&&(g[0]=1,++e.e);break}if(g[m]+=i,1e7!=g[m])break;g[m--]=0,i=1}for(r=g.length;0===g[--r];)g.pop();if(l&&(e.e>h||e.e<-h))throw Error(u+w(e));return e}function j(e,t){var 
n,r,o,i,a,c,s,u,d,f,p=e.constructor,h=p.precision;if(!e.s||!t.s)return t.s?t.s=-t.s:t=new p(e),l?O(t,h):t;if(s=e.d,f=t.d,r=t.e,u=e.e,s=s.slice(),a=u-r){for((d=a<0)?(n=s,a=-a,c=f.length):(n=f,r=u,c=s.length),a>(o=Math.max(Math.ceil(h/7),c)+2)&&(a=o,n.length=1),n.reverse(),o=a;o--;)n.push(0);n.reverse()}else{for((d=(o=s.length)<(c=f.length))&&(c=o),o=0;o0;--o)s[c++]=0;for(o=f.length;o>a;){if(s[--o]0?i=i.charAt(0)+"."+i.slice(1)+k(r):a>1&&(i=i.charAt(0)+"."+i.slice(1)),i=i+(o<0?"e":"e+")+o):o<0?(i="0."+k(-o-1)+i,n&&(r=n-a)>0&&(i+=k(r))):o>=a?(i+=k(o+1-a),n&&(r=n-o-1)>0&&(i=i+"."+k(r))):((r=o+1)0&&(o+1===a&&(i+="."),i+=k(r))),e.s<0?"-"+i:i}function M(e,t){if(e.length>t)return e.length=t,!0}function N(e){if(!e||"object"!=typeof e)throw Error(c+"Object expected");var t,n,r,o=["precision",1,1e9,"rounding",0,8,"toExpNeg",-1/0,0,"toExpPos",0,1/0];for(t=0;t=o[t+1]&&r<=o[t+2])this[n]=r;else throw Error(s+n+": "+r)}if(void 0!==(r=e[n="LN10"])){if(r==Math.LN10)this[n]=new this(r);else throw Error(s+n+": "+r)}return this}(a=function e(t){var n,r,o;function i(e){if(!(this instanceof i))return new i(e);if(this.constructor=i,e instanceof i){this.s=e.s,this.e=e.e,this.d=(e=e.d)?e.slice():e;return}if("number"==typeof e){if(0*e!=0)throw Error(s+e);if(e>0)this.s=1;else if(e<0)e=-e,this.s=-1;else{this.s=0,this.e=0,this.d=[0];return}if(e===~~e&&e<1e7){this.e=0,this.d=[e];return}return C(this,e.toString())}if("string"!=typeof e)throw Error(s+e);if(45===e.charCodeAt(0)?(e=e.slice(1),this.s=-1):this.s=1,p.test(e))C(this,e);else throw Error(s+e)}if(i.prototype=m,i.ROUND_UP=0,i.ROUND_DOWN=1,i.ROUND_CEIL=2,i.ROUND_FLOOR=3,i.ROUND_HALF_UP=4,i.ROUND_HALF_DOWN=5,i.ROUND_HALF_EVEN=6,i.ROUND_HALF_CEIL=7,i.ROUND_HALF_FLOOR=8,i.clone=e,i.config=i.set=N,void 0===t&&(t={}),t)for(n=0,o=["precision","rounding","toExpNeg","toExpPos","LN10"];n-1}},56883:function(e){e.exports=function(e,t,n){for(var r=-1,o=null==e?0:e.length;++r0&&i(u)?n>1?e(u,n-1,i,a,l):r(l,u):a||(l[l.length]=u)}return 
l}},63321:function(e,t,n){var r=n(33023)();e.exports=r},98060:function(e,t,n){var r=n(63321),o=n(43228);e.exports=function(e,t){return e&&r(e,t,o)}},92167:function(e,t,n){var r=n(67906),o=n(70235);e.exports=function(e,t){t=r(t,e);for(var n=0,i=t.length;null!=e&&nt}},93012:function(e){e.exports=function(e,t){return null!=e&&t in Object(e)}},47909:function(e,t,n){var r=n(8235),o=n(31953),i=n(35281);e.exports=function(e,t,n){return t==t?i(e,t,n):r(e,o,n)}},90370:function(e,t,n){var r=n(54506),o=n(10303);e.exports=function(e){return o(e)&&"[object Arguments]"==r(e)}},56318:function(e,t,n){var r=n(6791),o=n(10303);e.exports=function e(t,n,i,a,l){return t===n||(null!=t&&null!=n&&(o(t)||o(n))?r(t,n,i,a,e,l):t!=t&&n!=n)}},6791:function(e,t,n){var r=n(85885),o=n(97638),i=n(88030),a=n(64974),l=n(81690),c=n(25614),s=n(98051),u=n(9792),d="[object Arguments]",f="[object Array]",p="[object Object]",h=Object.prototype.hasOwnProperty;e.exports=function(e,t,n,m,g,v){var y=c(e),b=c(t),x=y?f:l(e),w=b?f:l(t);x=x==d?p:x,w=w==d?p:w;var S=x==p,k=w==p,E=x==w;if(E&&s(e)){if(!s(t))return!1;y=!0,S=!1}if(E&&!S)return v||(v=new r),y||u(e)?o(e,t,n,m,g,v):i(e,t,x,n,m,g,v);if(!(1&n)){var C=S&&h.call(e,"__wrapped__"),O=k&&h.call(t,"__wrapped__");if(C||O){var j=C?e.value():e,P=O?t.value():t;return v||(v=new r),g(j,P,n,m,v)}}return!!E&&(v||(v=new r),a(e,t,n,m,g,v))}},62538:function(e,t,n){var r=n(85885),o=n(56318);e.exports=function(e,t,n,i){var a=n.length,l=a,c=!i;if(null==e)return!l;for(e=Object(e);a--;){var s=n[a];if(c&&s[2]?s[1]!==e[s[0]]:!(s[0]in e))return!1}for(;++ao?0:o+t),(n=n>o?o:n)<0&&(n+=o),o=t>n?0:n-t>>>0,t>>>=0;for(var i=Array(o);++r=200){var m=t?null:l(e);if(m)return c(m);f=!1,u=a,h=new r}else h=t?[]:p;e:for(;++s=o?e:r(e,t,n)}},1536:function(e,t,n){var r=n(78371);e.exports=function(e,t){if(e!==t){var n=void 0!==e,o=null===e,i=e==e,a=r(e),l=void 0!==t,c=null===t,s=t==t,u=r(t);if(!c&&!u&&!a&&e>t||a&&l&&s&&!c&&!u||o&&l&&s||!n&&s||!i)return 1;if(!o&&!a&&!u&&e=c)return s;return 
s*("desc"==n[o]?-1:1)}}return e.index-t.index}},92077:function(e,t,n){var r=n(74288)["__core-js_shared__"];e.exports=r},97930:function(e,t,n){var r=n(5629);e.exports=function(e,t){return function(n,o){if(null==n)return n;if(!r(n))return e(n,o);for(var i=n.length,a=t?i:-1,l=Object(n);(t?a--:++a-1?l[c?t[s]:s]:void 0}}},35464:function(e,t,n){var r=n(19608),o=n(49639),i=n(175);e.exports=function(e){return function(t,n,a){return a&&"number"!=typeof a&&o(t,n,a)&&(n=a=void 0),t=i(t),void 0===n?(n=t,t=0):n=i(n),a=void 0===a?tu))return!1;var f=c.get(e),p=c.get(t);if(f&&p)return f==t&&p==e;var h=-1,m=!0,g=2&n?new r:void 0;for(c.set(e,t),c.set(t,e);++h-1&&e%1==0&&e-1}},13368:function(e,t,n){var r=n(24457);e.exports=function(e,t){var n=this.__data__,o=r(n,e);return o<0?(++this.size,n.push([e,t])):n[o][1]=t,this}},38764:function(e,t,n){var r=n(9855),o=n(99078),i=n(88675);e.exports=function(){this.size=0,this.__data__={hash:new r,map:new(i||o),string:new r}}},78615:function(e,t,n){var r=n(1507);e.exports=function(e){var t=r(this,e).delete(e);return this.size-=t?1:0,t}},83391:function(e,t,n){var r=n(1507);e.exports=function(e){return r(this,e).get(e)}},53483:function(e,t,n){var r=n(1507);e.exports=function(e){return r(this,e).has(e)}},74724:function(e,t,n){var r=n(1507);e.exports=function(e,t){var n=r(this,e),o=n.size;return n.set(e,t),this.size+=n.size==o?0:1,this}},22523:function(e){e.exports=function(e){var t=-1,n=Array(e.size);return e.forEach(function(e,r){n[++t]=[r,e]}),n}},47073:function(e){e.exports=function(e,t){return function(n){return null!=n&&n[e]===t&&(void 0!==t||e in Object(n))}}},23787:function(e,t,n){var r=n(50967);e.exports=function(e){var t=r(e,function(e){return 500===n.size&&n.clear(),e}),n=t.cache;return t}},20453:function(e,t,n){var r=n(39866)(Object,"create");e.exports=r},77184:function(e,t,n){var r=n(45070)(Object.keys,Object);e.exports=r},39931:function(e,t,n){e=n.nmd(e);var 
r=n(17071),o=t&&!t.nodeType&&t,i=o&&e&&!e.nodeType&&e,a=i&&i.exports===o&&r.process,l=function(){try{var e=i&&i.require&&i.require("util").types;if(e)return e;return a&&a.binding&&a.binding("util")}catch(e){}}();e.exports=l},80910:function(e){var t=Object.prototype.toString;e.exports=function(e){return t.call(e)}},45070:function(e){e.exports=function(e,t){return function(n){return e(t(n))}}},49478:function(e,t,n){var r=n(60493),o=Math.max;e.exports=function(e,t,n){return t=o(void 0===t?e.length-1:t,0),function(){for(var i=arguments,a=-1,l=o(i.length-t,0),c=Array(l);++a0){if(++n>=800)return arguments[0]}else n=0;return e.apply(void 0,arguments)}}},84092:function(e,t,n){var r=n(99078);e.exports=function(){this.__data__=new r,this.size=0}},31663:function(e){e.exports=function(e){var t=this.__data__,n=t.delete(e);return this.size=t.size,n}},69135:function(e){e.exports=function(e){return this.__data__.get(e)}},39552:function(e){e.exports=function(e){return this.__data__.has(e)}},8381:function(e,t,n){var r=n(99078),o=n(88675),i=n(76219);e.exports=function(e,t){var n=this.__data__;if(n instanceof r){var a=n.__data__;if(!o||a.length<199)return a.push([e,t]),this.size=++n.size,this;n=this.__data__=new i(a)}return n.set(e,t),this.size=n.size,this}},35281:function(e){e.exports=function(e,t,n){for(var r=n-1,o=e.length;++r=t||n<0||g&&r>=u}function x(){var e,n,r,i=o();if(b(i))return w(i);f=setTimeout(x,(e=i-p,n=i-h,r=t-e,g?l(r,u-n):r))}function w(e){return(f=void 0,v&&c)?y(e):(c=s=void 0,d)}function S(){var e,n=o(),r=b(n);if(c=arguments,s=this,p=n,r){if(void 0===f)return h=e=p,f=setTimeout(x,t),m?y(e):d;if(g)return clearTimeout(f),f=setTimeout(x,t),y(p)}return void 0===f&&(f=setTimeout(x,t)),d}return t=i(t)||0,r(n)&&(m=!!n.leading,u=(g="maxWait"in n)?a(i(n.maxWait)||0,t):u,v="trailing"in n?!!n.trailing:v),S.cancel=function(){void 0!==f&&clearTimeout(f),h=0,c=p=s=f=void 0},S.flush=function(){return void 0===f?d:w(o())},S}},37560:function(e){e.exports=function(e,t){return 
e===t||e!=e&&t!=t}},32242:function(e,t,n){var r=n(78897),o=n(28935),i=n(88157),a=n(25614),l=n(49639);e.exports=function(e,t,n){var c=a(e)?r:o;return n&&l(e,t,n)&&(t=void 0),c(e,i(t,3))}},84173:function(e,t,n){var r=n(82602)(n(12152));e.exports=r},12152:function(e,t,n){var r=n(8235),o=n(88157),i=n(85759),a=Math.max;e.exports=function(e,t,n){var l=null==e?0:e.length;if(!l)return -1;var c=null==n?0:i(n);return c<0&&(c=a(l+c,0)),r(e,o(t,3),c)}},11314:function(e,t,n){var r=n(72569),o=n(89238);e.exports=function(e,t){return r(o(e,t),1)}},13735:function(e,t,n){var r=n(92167);e.exports=function(e,t,n){var o=null==e?void 0:r(e,t);return void 0===o?n:o}},17764:function(e,t,n){var r=n(93012),o=n(59592);e.exports=function(e,t){return null!=e&&o(e,t,r)}},79586:function(e){e.exports=function(e){return e}},56569:function(e,t,n){var r=n(90370),o=n(10303),i=Object.prototype,a=i.hasOwnProperty,l=i.propertyIsEnumerable,c=r(function(){return arguments}())?r:function(e){return o(e)&&a.call(e,"callee")&&!l.call(e,"callee")};e.exports=c},25614:function(e){var t=Array.isArray;e.exports=t},5629:function(e,t,n){var r=n(86757),o=n(13973);e.exports=function(e){return null!=e&&o(e.length)&&!r(e)}},24342:function(e,t,n){var r=n(54506),o=n(10303);e.exports=function(e){return!0===e||!1===e||o(e)&&"[object Boolean]"==r(e)}},98051:function(e,t,n){e=n.nmd(e);var r=n(74288),o=n(7406),i=t&&!t.nodeType&&t,a=i&&e&&!e.nodeType&&e,l=a&&a.exports===i?r.Buffer:void 0,c=l?l.isBuffer:void 0;e.exports=c||o},21652:function(e,t,n){var r=n(56318);e.exports=function(e,t){return r(e,t)}},86757:function(e,t,n){var r=n(54506),o=n(28302);e.exports=function(e){if(!o(e))return!1;var t=r(e);return"[object Function]"==t||"[object GeneratorFunction]"==t||"[object AsyncFunction]"==t||"[object Proxy]"==t}},13973:function(e){e.exports=function(e){return"number"==typeof e&&e>-1&&e%1==0&&e<=9007199254740991}},82559:function(e,t,n){var r=n(22345);e.exports=function(e){return 
r(e)&&e!=+e}},77571:function(e){e.exports=function(e){return null==e}},22345:function(e,t,n){var r=n(54506),o=n(10303);e.exports=function(e){return"number"==typeof e||o(e)&&"[object Number]"==r(e)}},28302:function(e){e.exports=function(e){var t=typeof e;return null!=e&&("object"==t||"function"==t)}},10303:function(e){e.exports=function(e){return null!=e&&"object"==typeof e}},90231:function(e,t,n){var r=n(54506),o=n(62602),i=n(10303),a=Object.prototype,l=Function.prototype.toString,c=a.hasOwnProperty,s=l.call(Object);e.exports=function(e){if(!i(e)||"[object Object]"!=r(e))return!1;var t=o(e);if(null===t)return!0;var n=c.call(t,"constructor")&&t.constructor;return"function"==typeof n&&n instanceof n&&l.call(n)==s}},42715:function(e,t,n){var r=n(54506),o=n(25614),i=n(10303);e.exports=function(e){return"string"==typeof e||!o(e)&&i(e)&&"[object String]"==r(e)}},78371:function(e,t,n){var r=n(54506),o=n(10303);e.exports=function(e){return"symbol"==typeof e||o(e)&&"[object Symbol]"==r(e)}},9792:function(e,t,n){var r=n(59332),o=n(23305),i=n(39931),a=i&&i.isTypedArray,l=a?o(a):r;e.exports=l},43228:function(e,t,n){var r=n(28579),o=n(4578),i=n(5629);e.exports=function(e){return i(e)?r(e):o(e)}},86185:function(e){e.exports=function(e){var t=null==e?0:e.length;return t?e[t-1]:void 0}},89238:function(e,t,n){var r=n(73819),o=n(88157),i=n(24240),a=n(25614);e.exports=function(e,t){return(a(e)?r:i)(e,o(t,3))}},41443:function(e,t,n){var r=n(83023),o=n(98060),i=n(88157);e.exports=function(e,t){var n={};return t=i(t,3),o(e,function(e,o,i){r(n,o,t(e,o,i))}),n}},95645:function(e,t,n){var r=n(67646),o=n(58905),i=n(79586);e.exports=function(e){return e&&e.length?r(e,i,o):void 0}},35802:function(e,t,n){var r=n(67646),o=n(58905),i=n(88157);e.exports=function(e,t){return e&&e.length?r(e,i(t,2),o):void 0}},50967:function(e,t,n){var r=n(76219);function o(e,t){if("function"!=typeof e||null!=t&&"function"!=typeof t)throw TypeError("Expected a function");var n=function(){var 
r=arguments,o=t?t.apply(this,r):r[0],i=n.cache;if(i.has(o))return i.get(o);var a=e.apply(this,r);return n.cache=i.set(o,a)||i,a};return n.cache=new(o.Cache||r),n}o.Cache=r,e.exports=o},99008:function(e,t,n){var r=n(67646),o=n(20121),i=n(79586);e.exports=function(e){return e&&e.length?r(e,i,o):void 0}},37891:function(e,t,n){var r=n(67646),o=n(88157),i=n(20121);e.exports=function(e,t){return e&&e.length?r(e,o(t,2),i):void 0}},93810:function(e){e.exports=function(){}},11121:function(e,t,n){var r=n(74288);e.exports=function(){return r.Date.now()}},22350:function(e,t,n){var r=n(18155),o=n(73584),i=n(67352),a=n(70235);e.exports=function(e){return i(e)?r(a(e)):o(e)}},99676:function(e,t,n){var r=n(35464)();e.exports=r},33645:function(e,t,n){var r=n(25253),o=n(88157),i=n(12327),a=n(25614),l=n(49639);e.exports=function(e,t,n){var c=a(e)?r:i;return n&&l(e,t,n)&&(t=void 0),c(e,o(t,3))}},34935:function(e,t,n){var r=n(72569),o=n(84046),i=n(44843),a=n(49639),l=i(function(e,t){if(null==e)return[];var n=t.length;return n>1&&a(e,t[0],t[1])?t=[]:n>2&&a(t[0],t[1],t[2])&&(t=[t[0]]),o(e,r(t,1),[])});e.exports=l},55716:function(e){e.exports=function(){return[]}},7406:function(e){e.exports=function(){return!1}},37065:function(e,t,n){var r=n(7310),o=n(28302);e.exports=function(e,t,n){var i=!0,a=!0;if("function"!=typeof e)throw TypeError("Expected a function");return o(n)&&(i="leading"in n?!!n.leading:i,a="trailing"in n?!!n.trailing:a),r(e,t,{leading:i,maxWait:t,trailing:a})}},175:function(e,t,n){var r=n(6660),o=1/0;e.exports=function(e){return e?(e=r(e))===o||e===-o?(e<0?-1:1)*17976931348623157e292:e==e?e:0:0===e?e:0}},85759:function(e,t,n){var r=n(175);e.exports=function(e){var t=r(e),n=t%1;return t==t?n?t-n:t:0}},6660:function(e,t,n){var r=n(41087),o=n(28302),i=n(78371),a=0/0,l=/^[-+]0x[0-9a-f]+$/i,c=/^0b[01]+$/i,s=/^0o[0-7]+$/i,u=parseInt;e.exports=function(e){if("number"==typeof e)return e;if(i(e))return a;if(o(e)){var t="function"==typeof 
e.valueOf?e.valueOf():e;e=o(t)?t+"":t}if("string"!=typeof e)return 0===e?e:+e;e=r(e);var n=c.test(e);return n||s.test(e)?u(e.slice(2),n?2:8):l.test(e)?a:+e}},3641:function(e,t,n){var r=n(65020);e.exports=function(e){return null==e?"":r(e)}},47230:function(e,t,n){var r=n(88157),o=n(13826);e.exports=function(e,t){return e&&e.length?o(e,r(t,2)):[]}},75551:function(e,t,n){var r=n(80675)("toUpperCase");e.exports=r},27648:function(e,t,n){"use strict";n.d(t,{default:function(){return o.a}});var r=n(72972),o=n.n(r)},55449:function(e,t,n){"use strict";Object.defineProperty(t,"__esModule",{value:!0}),Object.defineProperty(t,"addLocale",{enumerable:!0,get:function(){return r}}),n(33068);let r=function(e){for(var t=arguments.length,n=Array(t>1?t-1:0),r=1;ri?e.prefetch(t,o):e.prefetch(t,n,r))().catch(e=>{})}}function b(e){return"string"==typeof e?e:(0,c.formatUrl)(e)}let x=i.default.forwardRef(function(e,t){let n,r;let{href:c,as:v,children:x,prefetch:w=null,passHref:S,replace:k,shallow:E,scroll:C,locale:O,onClick:j,onMouseEnter:P,onTouchStart:M,legacyBehavior:N=!1,...I}=e;n=x,N&&("string"==typeof n||"number"==typeof n)&&(n=(0,o.jsx)("a",{children:n}));let R=i.default.useContext(d.RouterContext),T=i.default.useContext(f.AppRouterContext),A=null!=R?R:T,_=!R,D=!1!==w,Z=null===w?g.PrefetchKind.AUTO:g.PrefetchKind.FULL,{href:L,as:z}=i.default.useMemo(()=>{if(!R){let e=b(c);return{href:e,as:v?b(v):e}}let[e,t]=(0,a.resolveHref)(R,c,!0);return{href:e,as:v?(0,a.resolveHref)(R,v):t||e}},[R,c,v]),B=i.default.useRef(L),F=i.default.useRef(z);N&&(r=i.default.Children.only(n));let H=N?r&&"object"==typeof r&&r.ref:t,[q,W,K]=(0,p.useIntersection)({rootMargin:"200px"}),U=i.default.useCallback(e=>{(F.current!==z||B.current!==L)&&(K(),F.current=z,B.current=L),q(e),H&&("function"==typeof H?H(e):"object"==typeof H&&(H.current=e))},[z,H,L,K,q]);i.default.useEffect(()=>{A&&W&&D&&y(A,L,z,{locale:O},{kind:Z},_)},[z,L,W,O,D,null==R?void 0:R.locale,A,_,Z]);let V={ref:U,onClick(e){N||"function"!=typeof 
j||j(e),N&&r.props&&"function"==typeof r.props.onClick&&r.props.onClick(e),A&&!e.defaultPrevented&&function(e,t,n,r,o,a,c,s,u){let{nodeName:d}=e.currentTarget;if("A"===d.toUpperCase()&&(function(e){let t=e.currentTarget.getAttribute("target");return t&&"_self"!==t||e.metaKey||e.ctrlKey||e.shiftKey||e.altKey||e.nativeEvent&&2===e.nativeEvent.which}(e)||!u&&!(0,l.isLocalURL)(n)))return;e.preventDefault();let f=()=>{let e=null==c||c;"beforePopState"in t?t[o?"replace":"push"](n,r,{shallow:a,locale:s,scroll:e}):t[o?"replace":"push"](r||n,{scroll:e})};u?i.default.startTransition(f):f()}(e,A,L,z,k,E,C,O,_)},onMouseEnter(e){N||"function"!=typeof P||P(e),N&&r.props&&"function"==typeof r.props.onMouseEnter&&r.props.onMouseEnter(e),A&&(D||!_)&&y(A,L,z,{locale:O,priority:!0,bypassPrefetchedCheck:!0},{kind:Z},_)},onTouchStart:function(e){N||"function"!=typeof M||M(e),N&&r.props&&"function"==typeof r.props.onTouchStart&&r.props.onTouchStart(e),A&&(D||!_)&&y(A,L,z,{locale:O,priority:!0,bypassPrefetchedCheck:!0},{kind:Z},_)}};if((0,s.isAbsoluteUrl)(z))V.href=z;else if(!N||S||"a"===r.type&&!("href"in r.props)){let e=void 0!==O?O:null==R?void 0:R.locale,t=(null==R?void 0:R.isLocaleDomain)&&(0,h.getDomainLocale)(z,e,null==R?void 0:R.locales,null==R?void 0:R.domainLocales);V.href=t||(0,m.addBasePath)((0,u.addLocale)(z,e,null==R?void 0:R.defaultLocale))}return N?i.default.cloneElement(r,V):(0,o.jsx)("a",{...I,...V,children:n})});("function"==typeof t.default||"object"==typeof t.default&&null!==t.default)&&void 0===t.default.__esModule&&(Object.defineProperty(t.default,"__esModule",{value:!0}),Object.assign(t.default,t),e.exports=t.default)},63515:function(e,t){"use strict";Object.defineProperty(t,"__esModule",{value:!0}),function(e,t){for(var n in t)Object.defineProperty(e,n,{enumerable:!0,get:t[n]})}(t,{cancelIdleCallback:function(){return r},requestIdleCallback:function(){return n}});let n="undefined"!=typeof 
self&&self.requestIdleCallback&&self.requestIdleCallback.bind(window)||function(e){let t=Date.now();return self.setTimeout(function(){e({didTimeout:!1,timeRemaining:function(){return Math.max(0,50-(Date.now()-t))}})},1)},r="undefined"!=typeof self&&self.cancelIdleCallback&&self.cancelIdleCallback.bind(window)||function(e){return clearTimeout(e)};("function"==typeof t.default||"object"==typeof t.default&&null!==t.default)&&void 0===t.default.__esModule&&(Object.defineProperty(t.default,"__esModule",{value:!0}),Object.assign(t.default,t),e.exports=t.default)},25246:function(e,t,n){"use strict";Object.defineProperty(t,"__esModule",{value:!0}),Object.defineProperty(t,"resolveHref",{enumerable:!0,get:function(){return d}});let r=n(48637),o=n(57497),i=n(17053),a=n(3987),l=n(33068),c=n(53552),s=n(86279),u=n(37205);function d(e,t,n){let d;let f="string"==typeof t?t:(0,o.formatWithValidation)(t),p=f.match(/^[a-zA-Z]{1,}:\/\//),h=p?f.slice(p[0].length):f;if((h.split("?",1)[0]||"").match(/(\/\/|\\)/)){console.error("Invalid href '"+f+"' passed to next/router in page: '"+e.pathname+"'. 
Repeated forward-slashes (//) or backslashes \\ are not valid in the href.");let t=(0,a.normalizeRepeatedSlashes)(h);f=(p?p[0]:"")+t}if(!(0,c.isLocalURL)(f))return n?[f]:f;try{d=new URL(f.startsWith("#")?e.asPath:e.pathname,"http://n")}catch(e){d=new URL("/","http://n")}try{let e=new URL(f,d);e.pathname=(0,l.normalizePathTrailingSlash)(e.pathname);let t="";if((0,s.isDynamicRoute)(e.pathname)&&e.searchParams&&n){let n=(0,r.searchParamsToUrlQuery)(e.searchParams),{result:a,params:l}=(0,u.interpolateAs)(e.pathname,e.pathname,n);a&&(t=(0,o.formatWithValidation)({pathname:a,hash:e.hash,query:(0,i.omit)(n,l)}))}let a=e.origin===d.origin?e.href.slice(e.origin.length):e.href;return n?[a,t||a]:a}catch(e){return n?[f]:f}}("function"==typeof t.default||"object"==typeof t.default&&null!==t.default)&&void 0===t.default.__esModule&&(Object.defineProperty(t.default,"__esModule",{value:!0}),Object.assign(t.default,t),e.exports=t.default)},16081:function(e,t,n){"use strict";Object.defineProperty(t,"__esModule",{value:!0}),Object.defineProperty(t,"useIntersection",{enumerable:!0,get:function(){return c}});let r=n(2265),o=n(63515),i="function"==typeof IntersectionObserver,a=new Map,l=[];function c(e){let{rootRef:t,rootMargin:n,disabled:c}=e,s=c||!i,[u,d]=(0,r.useState)(!1),f=(0,r.useRef)(null),p=(0,r.useCallback)(e=>{f.current=e},[]);return(0,r.useEffect)(()=>{if(i){if(s||u)return;let e=f.current;if(e&&e.tagName)return function(e,t,n){let{id:r,observer:o,elements:i}=function(e){let t;let n={root:e.root||null,margin:e.rootMargin||""},r=l.find(e=>e.root===n.root&&e.margin===n.margin);if(r&&(t=a.get(r)))return t;let o=new Map;return t={id:n,observer:new IntersectionObserver(e=>{e.forEach(e=>{let t=o.get(e.target),n=e.isIntersecting||e.intersectionRatio>0;t&&n&&t(n)})},e),elements:o},l.push(n),a.set(n,t),t}(n);return i.set(e,t),o.observe(e),function(){if(i.delete(e),o.unobserve(e),0===i.size){o.disconnect(),a.delete(r);let 
e=l.findIndex(e=>e.root===r.root&&e.margin===r.margin);e>-1&&l.splice(e,1)}}}(e,e=>e&&d(e),{root:null==t?void 0:t.current,rootMargin:n})}else if(!u){let e=(0,o.requestIdleCallback)(()=>d(!0));return()=>(0,o.cancelIdleCallback)(e)}},[s,n,t,u,f.current]),[p,u,(0,r.useCallback)(()=>{d(!1)},[])]}("function"==typeof t.default||"object"==typeof t.default&&null!==t.default)&&void 0===t.default.__esModule&&(Object.defineProperty(t.default,"__esModule",{value:!0}),Object.assign(t.default,t),e.exports=t.default)},90042:function(e,t){"use strict";Object.defineProperty(t,"__esModule",{value:!0}),Object.defineProperty(t,"escapeStringRegexp",{enumerable:!0,get:function(){return o}});let n=/[|\\{}()[\]^$+*?.-]/,r=/[|\\{}()[\]^$+*?.-]/g;function o(e){return n.test(e)?e.replace(r,"\\$&"):e}},25523:function(e,t,n){"use strict";Object.defineProperty(t,"__esModule",{value:!0}),Object.defineProperty(t,"RouterContext",{enumerable:!0,get:function(){return r}});let r=n(47043)._(n(2265)).default.createContext(null)},57497:function(e,t,n){"use strict";Object.defineProperty(t,"__esModule",{value:!0}),function(e,t){for(var n in t)Object.defineProperty(e,n,{enumerable:!0,get:t[n]})}(t,{formatUrl:function(){return i},formatWithValidation:function(){return l},urlObjectKeys:function(){return a}});let r=n(53099)._(n(48637)),o=/https?|ftp|gopher|file/;function i(e){let{auth:t,hostname:n}=e,i=e.protocol||"",a=e.pathname||"",l=e.hash||"",c=e.query||"",s=!1;t=t?encodeURIComponent(t).replace(/%3A/i,":")+"@":"",e.host?s=t+e.host:n&&(s=t+(~n.indexOf(":")?"["+n+"]":n),e.port&&(s+=":"+e.port)),c&&"object"==typeof c&&(c=String(r.urlQueryToSearchParams(c)));let u=e.search||c&&"?"+c||"";return i&&!i.endsWith(":")&&(i+=":"),e.slashes||(!i||o.test(i))&&!1!==s?(s="//"+(s||""),a&&"/"!==a[0]&&(a="/"+a)):s||(s=""),l&&"#"!==l[0]&&(l="#"+l),u&&"?"!==u[0]&&(u="?"+u),""+i+s+(a=a.replace(/[?#]/g,encodeURIComponent))+(u=u.replace("#","%23"))+l}let 
a=["auth","hash","host","hostname","href","path","pathname","port","protocol","query","search","slashes"];function l(e){return i(e)}},86279:function(e,t,n){"use strict";Object.defineProperty(t,"__esModule",{value:!0}),function(e,t){for(var n in t)Object.defineProperty(e,n,{enumerable:!0,get:t[n]})}(t,{getSortedRoutes:function(){return r.getSortedRoutes},isDynamicRoute:function(){return o.isDynamicRoute}});let r=n(14777),o=n(38104)},37205:function(e,t,n){"use strict";Object.defineProperty(t,"__esModule",{value:!0}),Object.defineProperty(t,"interpolateAs",{enumerable:!0,get:function(){return i}});let r=n(4199),o=n(9964);function i(e,t,n){let i="",a=(0,o.getRouteRegex)(e),l=a.groups,c=(t!==e?(0,r.getRouteMatcher)(a)(t):"")||n;i=e;let s=Object.keys(l);return s.every(e=>{let t=c[e]||"",{repeat:n,optional:r}=l[e],o="["+(n?"...":"")+e+"]";return r&&(o=(t?"":"/")+"["+o+"]"),n&&!Array.isArray(t)&&(t=[t]),(r||e in c)&&(i=i.replace(o,n?t.map(e=>encodeURIComponent(e)).join("/"):encodeURIComponent(t))||"/")})||(i=""),{params:s,result:i}}},38104:function(e,t,n){"use strict";Object.defineProperty(t,"__esModule",{value:!0}),Object.defineProperty(t,"isDynamicRoute",{enumerable:!0,get:function(){return i}});let r=n(91182),o=/\/\[[^/]+?\](?=\/|$)/;function i(e){return(0,r.isInterceptionRouteAppPath)(e)&&(e=(0,r.extractInterceptionRouteInformation)(e).interceptedRoute),o.test(e)}},53552:function(e,t,n){"use strict";Object.defineProperty(t,"__esModule",{value:!0}),Object.defineProperty(t,"isLocalURL",{enumerable:!0,get:function(){return i}});let r=n(3987),o=n(11283);function i(e){if(!(0,r.isAbsoluteUrl)(e))return!0;try{let t=(0,r.getLocationOrigin)(),n=new URL(e,t);return n.origin===t&&(0,o.hasBasePath)(n.pathname)}catch(e){return!1}}},17053:function(e,t){"use strict";function n(e,t){let n={};return Object.keys(e).forEach(r=>{t.includes(r)||(n[r]=e[r])}),n}Object.defineProperty(t,"__esModule",{value:!0}),Object.defineProperty(t,"omit",{enumerable:!0,get:function(){return 
n}})},48637:function(e,t){"use strict";function n(e){let t={};return e.forEach((e,n)=>{void 0===t[n]?t[n]=e:Array.isArray(t[n])?t[n].push(e):t[n]=[t[n],e]}),t}function r(e){return"string"!=typeof e&&("number"!=typeof e||isNaN(e))&&"boolean"!=typeof e?"":String(e)}function o(e){let t=new URLSearchParams;return Object.entries(e).forEach(e=>{let[n,o]=e;Array.isArray(o)?o.forEach(e=>t.append(n,r(e))):t.set(n,r(o))}),t}function i(e){for(var t=arguments.length,n=Array(t>1?t-1:0),r=1;r{Array.from(t.keys()).forEach(t=>e.delete(t)),t.forEach((t,n)=>e.append(n,t))}),e}Object.defineProperty(t,"__esModule",{value:!0}),function(e,t){for(var n in t)Object.defineProperty(e,n,{enumerable:!0,get:t[n]})}(t,{assign:function(){return i},searchParamsToUrlQuery:function(){return n},urlQueryToSearchParams:function(){return o}})},4199:function(e,t,n){"use strict";Object.defineProperty(t,"__esModule",{value:!0}),Object.defineProperty(t,"getRouteMatcher",{enumerable:!0,get:function(){return o}});let r=n(3987);function o(e){let{re:t,groups:n}=e;return e=>{let o=t.exec(e);if(!o)return!1;let i=e=>{try{return decodeURIComponent(e)}catch(e){throw new r.DecodeError("failed to decode param")}},a={};return Object.keys(n).forEach(e=>{let t=n[e],r=o[t.pos];void 0!==r&&(a[e]=~r.indexOf("/")?r.split("/").map(e=>i(e)):t.repeat?[i(r)]:i(r))}),a}}},9964:function(e,t,n){"use strict";Object.defineProperty(t,"__esModule",{value:!0}),function(e,t){for(var n in t)Object.defineProperty(e,n,{enumerable:!0,get:t[n]})}(t,{getNamedMiddlewareRegex:function(){return f},getNamedRouteRegex:function(){return d},getRouteRegex:function(){return c},parseParameter:function(){return a}});let r=n(91182),o=n(90042),i=n(26674);function a(e){let t=e.startsWith("[")&&e.endsWith("]");t&&(e=e.slice(1,-1));let n=e.startsWith("...");return n&&(e=e.slice(3)),{key:e,repeat:n,optional:t}}function l(e){let t=(0,i.removeTrailingSlash)(e).slice(1).split("/"),n={},l=1;return{parameterizedRoute:t.map(e=>{let 
t=r.INTERCEPTION_ROUTE_MARKERS.find(t=>e.startsWith(t)),i=e.match(/\[((?:\[.*\])|.+)\]/);if(t&&i){let{key:e,optional:r,repeat:c}=a(i[1]);return n[e]={pos:l++,repeat:c,optional:r},"/"+(0,o.escapeStringRegexp)(t)+"([^/]+?)"}if(!i)return"/"+(0,o.escapeStringRegexp)(e);{let{key:e,repeat:t,optional:r}=a(i[1]);return n[e]={pos:l++,repeat:t,optional:r},t?r?"(?:/(.+?))?":"/(.+?)":"/([^/]+?)"}}).join(""),groups:n}}function c(e){let{parameterizedRoute:t,groups:n}=l(e);return{re:RegExp("^"+t+"(?:/)?$"),groups:n}}function s(e){let{interceptionMarker:t,getSafeRouteKey:n,segment:r,routeKeys:i,keyPrefix:l}=e,{key:c,optional:s,repeat:u}=a(r),d=c.replace(/\W/g,"");l&&(d=""+l+d);let f=!1;(0===d.length||d.length>30)&&(f=!0),isNaN(parseInt(d.slice(0,1)))||(f=!0),f&&(d=n()),l?i[d]=""+l+c:i[d]=c;let p=t?(0,o.escapeStringRegexp)(t):"";return u?s?"(?:/"+p+"(?<"+d+">.+?))?":"/"+p+"(?<"+d+">.+?)":"/"+p+"(?<"+d+">[^/]+?)"}function u(e,t){let n;let a=(0,i.removeTrailingSlash)(e).slice(1).split("/"),l=(n=0,()=>{let e="",t=++n;for(;t>0;)e+=String.fromCharCode(97+(t-1)%26),t=Math.floor((t-1)/26);return e}),c={};return{namedParameterizedRoute:a.map(e=>{let n=r.INTERCEPTION_ROUTE_MARKERS.some(t=>e.startsWith(t)),i=e.match(/\[((?:\[.*\])|.+)\]/);if(n&&i){let[n]=e.split(i[0]);return s({getSafeRouteKey:l,interceptionMarker:n,segment:i[1],routeKeys:c,keyPrefix:t?"nxtI":void 0})}return i?s({getSafeRouteKey:l,segment:i[1],routeKeys:c,keyPrefix:t?"nxtP":void 0}):"/"+(0,o.escapeStringRegexp)(e)}).join(""),routeKeys:c}}function d(e,t){let n=u(e,t);return{...c(e),namedRegex:"^"+n.namedParameterizedRoute+"(?:/)?$",routeKeys:n.routeKeys}}function f(e,t){let{parameterizedRoute:n}=l(e),{catchAll:r=!0}=t;if("/"===n)return{namedRegex:"^/"+(r?".*":"")+"$"};let{namedParameterizedRoute:o}=u(e,!1);return{namedRegex:"^"+o+(r?"(?:(/.*)?)":"")+"$"}}},14777:function(e,t){"use strict";Object.defineProperty(t,"__esModule",{value:!0}),Object.defineProperty(t,"getSortedRoutes",{enumerable:!0,get:function(){return r}});class 
n{insert(e){this._insert(e.split("/").filter(Boolean),[],!1)}smoosh(){return this._smoosh()}_smoosh(e){void 0===e&&(e="/");let t=[...this.children.keys()].sort();null!==this.slugName&&t.splice(t.indexOf("[]"),1),null!==this.restSlugName&&t.splice(t.indexOf("[...]"),1),null!==this.optionalRestSlugName&&t.splice(t.indexOf("[[...]]"),1);let n=t.map(t=>this.children.get(t)._smoosh(""+e+t+"/")).reduce((e,t)=>[...e,...t],[]);if(null!==this.slugName&&n.push(...this.children.get("[]")._smoosh(e+"["+this.slugName+"]/")),!this.placeholder){let t="/"===e?"/":e.slice(0,-1);if(null!=this.optionalRestSlugName)throw Error('You cannot define a route with the same specificity as a optional catch-all route ("'+t+'" and "'+t+"[[..."+this.optionalRestSlugName+']]").');n.unshift(t)}return null!==this.restSlugName&&n.push(...this.children.get("[...]")._smoosh(e+"[..."+this.restSlugName+"]/")),null!==this.optionalRestSlugName&&n.push(...this.children.get("[[...]]")._smoosh(e+"[[..."+this.optionalRestSlugName+"]]/")),n}_insert(e,t,r){if(0===e.length){this.placeholder=!1;return}if(r)throw Error("Catch-all must be the last part of the URL.");let o=e[0];if(o.startsWith("[")&&o.endsWith("]")){let n=o.slice(1,-1),a=!1;if(n.startsWith("[")&&n.endsWith("]")&&(n=n.slice(1,-1),a=!0),n.startsWith("...")&&(n=n.substring(3),r=!0),n.startsWith("[")||n.endsWith("]"))throw Error("Segment names may not start or end with extra brackets ('"+n+"').");if(n.startsWith("."))throw Error("Segment names may not start with erroneous periods ('"+n+"').");function i(e,n){if(null!==e&&e!==n)throw Error("You cannot use different slug names for the same dynamic path ('"+e+"' !== '"+n+"').");t.forEach(e=>{if(e===n)throw Error('You cannot have the same slug name "'+n+'" repeat within a single dynamic path');if(e.replace(/\W/g,"")===o.replace(/\W/g,""))throw Error('You cannot have the slug names "'+e+'" and "'+n+'" differ only by non-word symbols within a single dynamic 
path')}),t.push(n)}if(r){if(a){if(null!=this.restSlugName)throw Error('You cannot use both an required and optional catch-all route at the same level ("[...'+this.restSlugName+']" and "'+e[0]+'" ).');i(this.optionalRestSlugName,n),this.optionalRestSlugName=n,o="[[...]]"}else{if(null!=this.optionalRestSlugName)throw Error('You cannot use both an optional and required catch-all route at the same level ("[[...'+this.optionalRestSlugName+']]" and "'+e[0]+'").');i(this.restSlugName,n),this.restSlugName=n,o="[...]"}}else{if(a)throw Error('Optional route parameters are not yet supported ("'+e[0]+'").');i(this.slugName,n),this.slugName=n,o="[]"}}this.children.has(o)||this.children.set(o,new n),this.children.get(o)._insert(e.slice(1),t,r)}constructor(){this.placeholder=!0,this.children=new Map,this.slugName=null,this.restSlugName=null,this.optionalRestSlugName=null}}function r(e){let t=new n;return e.forEach(e=>t.insert(e)),t.smoosh()}},3987:function(e,t){"use strict";Object.defineProperty(t,"__esModule",{value:!0}),function(e,t){for(var n in t)Object.defineProperty(e,n,{enumerable:!0,get:t[n]})}(t,{DecodeError:function(){return h},MiddlewareNotFoundError:function(){return y},MissingStaticPage:function(){return v},NormalizeError:function(){return m},PageNotFoundError:function(){return g},SP:function(){return f},ST:function(){return p},WEB_VITALS:function(){return n},execOnce:function(){return r},getDisplayName:function(){return c},getLocationOrigin:function(){return a},getURL:function(){return l},isAbsoluteUrl:function(){return i},isResSent:function(){return s},loadGetInitialProps:function(){return d},normalizeRepeatedSlashes:function(){return u},stringifyError:function(){return b}});let n=["CLS","FCP","FID","INP","LCP","TTFB"];function r(e){let t,n=!1;return function(){for(var r=arguments.length,o=Array(r),i=0;io.test(e);function a(){let{protocol:e,hostname:t,port:n}=window.location;return e+"//"+t+(n?":"+n:"")}function l(){let{href:e}=window.location,t=a();return 
e.substring(t.length)}function c(e){return"string"==typeof e?e:e.displayName||e.name||"Unknown"}function s(e){return e.finished||e.headersSent}function u(e){let t=e.split("?");return t[0].replace(/\\/g,"/").replace(/\/\/+/g,"/")+(t[1]?"?"+t.slice(1).join("?"):"")}async function d(e,t){let n=t.res||t.ctx&&t.ctx.res;if(!e.getInitialProps)return t.ctx&&t.Component?{pageProps:await d(t.Component,t.ctx)}:{};let r=await e.getInitialProps(t);if(n&&s(n))return r;if(!r)throw Error('"'+c(e)+'.getInitialProps()" should resolve to an object. But found "'+r+'" instead.');return r}let f="undefined"!=typeof performance,p=f&&["mark","measure","getEntriesByName"].every(e=>"function"==typeof performance[e]);class h extends Error{}class m extends Error{}class g extends Error{constructor(e){super(),this.code="ENOENT",this.name="PageNotFoundError",this.message="Cannot find module for page: "+e}}class v extends Error{constructor(e,t){super(),this.message="Failed to load static file for page: "+e+" "+t}}class y extends Error{constructor(){super(),this.code="ENOENT",this.message="Cannot find the middleware module"}}function b(e){return JSON.stringify({message:e.message,stack:e.stack})}},15452:function(e,t){var n,r,o;r=[],void 0!==(o="function"==typeof(n=function e(){var t,n="undefined"!=typeof self?self:"undefined"!=typeof window?window:void 0!==n?n:{},r=!n.document&&!!n.postMessage,o=n.IS_PAPA_WORKER||!1,i={},a=0,l={};function c(e){this._handle=null,this._finished=!1,this._completed=!1,this._halted=!1,this._input=null,this._baseIndex=0,this._partialLine="",this._rowCount=0,this._start=0,this._nextChunk=null,this.isFirstChunk=!0,this._completeResults={data:[],errors:[],meta:{}},(function(e){var t=b(e);t.chunkSize=parseInt(t.chunkSize),e.step||e.chunk||(t.chunkSize=null),this._handle=new p(t),(this._handle.streamer=this)._config=t}).call(this,e),this.parseChunk=function(e,t){var 
r=parseInt(this._config.skipFirstNLines)||0;if(this.isFirstChunk&&0=this._config.preview,o)n.postMessage({results:i,workerId:l.WORKER_ID,finished:r});else if(w(this._config.chunk)&&!t){if(this._config.chunk(i,this._handle),this._handle.paused()||this._handle.aborted())return void(this._halted=!0);this._completeResults=i=void 0}return this._config.step||this._config.chunk||(this._completeResults.data=this._completeResults.data.concat(i.data),this._completeResults.errors=this._completeResults.errors.concat(i.errors),this._completeResults.meta=i.meta),this._completed||!r||!w(this._config.complete)||i&&i.meta.aborted||(this._config.complete(this._completeResults,this._input),this._completed=!0),r||i&&i.meta.paused||this._nextChunk(),i}this._halted=!0},this._sendError=function(e){w(this._config.error)?this._config.error(e):o&&this._config.error&&n.postMessage({workerId:l.WORKER_ID,error:e,finished:!1})}}function s(e){var t;(e=e||{}).chunkSize||(e.chunkSize=l.RemoteChunkSize),c.call(this,e),this._nextChunk=r?function(){this._readChunk(),this._chunkLoaded()}:function(){this._readChunk()},this.stream=function(e){this._input=e,this._nextChunk()},this._readChunk=function(){if(this._finished)this._chunkLoaded();else{if(t=new XMLHttpRequest,this._config.withCredentials&&(t.withCredentials=this._config.withCredentials),r||(t.onload=x(this._chunkLoaded,this),t.onerror=x(this._chunkError,this)),t.open(this._config.downloadRequestBody?"POST":"GET",this._input,!r),this._config.downloadRequestHeaders){var e,n,o=this._config.downloadRequestHeaders;for(n in o)t.setRequestHeader(n,o[n])}this._config.chunkSize&&(e=this._start+this._config.chunkSize-1,t.setRequestHeader("Range","bytes="+this._start+"-"+e));try{t.send(this._config.downloadRequestBody)}catch(e){this._chunkError(e.message)}r&&0===t.status&&this._chunkError()}},this._chunkLoaded=function(){let 
e;4===t.readyState&&(t.status<200||400<=t.status?this._chunkError():(this._start+=this._config.chunkSize||t.responseText.length,this._finished=!this._config.chunkSize||this._start>=(null!==(e=(e=t).getResponseHeader("Content-Range"))?parseInt(e.substring(e.lastIndexOf("/")+1)):-1),this.parseChunk(t.responseText)))},this._chunkError=function(e){e=t.statusText||e,this._sendError(Error(e))}}function u(e){(e=e||{}).chunkSize||(e.chunkSize=l.LocalChunkSize),c.call(this,e);var t,n,r="undefined"!=typeof FileReader;this.stream=function(e){this._input=e,n=e.slice||e.webkitSlice||e.mozSlice,r?((t=new FileReader).onload=x(this._chunkLoaded,this),t.onerror=x(this._chunkError,this)):t=new FileReaderSync,this._nextChunk()},this._nextChunk=function(){this._finished||this._config.preview&&!(this._rowCount=this._input.size,this.parseChunk(e.target.result)},this._chunkError=function(){this._sendError(t.error)}}function d(e){var t;c.call(this,e=e||{}),this.stream=function(e){return t=e,this._nextChunk()},this._nextChunk=function(){var e,n;if(!this._finished)return t=(e=this._config.chunkSize)?(n=t.substring(0,e),t.substring(e)):(n=t,""),this._finished=!t,this.parseChunk(n)}}function f(e){c.call(this,e=e||{});var t=[],n=!0,r=!1;this.pause=function(){c.prototype.pause.apply(this,arguments),this._input.pause()},this.resume=function(){c.prototype.resume.apply(this,arguments),this._input.resume()},this.stream=function(e){this._input=e,this._input.on("data",this._streamData),this._input.on("end",this._streamEnd),this._input.on("error",this._streamError)},this._checkIsFinished=function(){r&&1===t.length&&(this._finished=!0)},this._nextChunk=function(){this._checkIsFinished(),t.length?this.parseChunk(t.shift()):n=!0},this._streamData=x(function(e){try{t.push("string"==typeof 
e?e:e.toString(this._config.encoding)),n&&(n=!1,this._checkIsFinished(),this.parseChunk(t.shift()))}catch(e){this._streamError(e)}},this),this._streamError=x(function(e){this._streamCleanUp(),this._sendError(e)},this),this._streamEnd=x(function(){this._streamCleanUp(),r=!0,this._streamData("")},this),this._streamCleanUp=x(function(){this._input.removeListener("data",this._streamData),this._input.removeListener("end",this._streamEnd),this._input.removeListener("error",this._streamError)},this)}function p(e){var t,n,r,o,i=/^\s*-?(\d+\.?|\.\d+|\d+\.\d+)([eE][-+]?\d+)?\s*$/,a=/^((\d{4}-[01]\d-[0-3]\dT[0-2]\d:[0-5]\d:[0-5]\d\.\d+([+-][0-2]\d:[0-5]\d|Z))|(\d{4}-[01]\d-[0-3]\dT[0-2]\d:[0-5]\d:[0-5]\d([+-][0-2]\d:[0-5]\d|Z))|(\d{4}-[01]\d-[0-3]\dT[0-2]\d:[0-5]\d([+-][0-2]\d:[0-5]\d|Z)))$/,c=this,s=0,u=0,d=!1,f=!1,p=[],g={data:[],errors:[],meta:{}};function v(t){return"greedy"===e.skipEmptyLines?""===t.join("").trim():1===t.length&&0===t[0].length}function y(){if(g&&r&&(S("Delimiter","UndetectableDelimiter","Unable to auto-detect delimiting character; defaulted to '"+l.DefaultDelimiter+"'"),r=!1),e.skipEmptyLines&&(g.data=g.data.filter(function(e){return!v(e)})),x()){if(g){if(Array.isArray(g.data[0])){for(var t,n=0;x()&&n=p.length?"__parsed_extra":p[o]:l,s=c=e.transform?e.transform(c,l):c,(e.dynamicTypingFunction&&void 0===e.dynamicTyping[n]&&(e.dynamicTyping[n]=e.dynamicTypingFunction(n)),!0===(e.dynamicTyping[n]||e.dynamicTyping))?"true"===s||"TRUE"===s||"false"!==s&&"FALSE"!==s&&((e=>{if(i.test(e)&&-9007199254740992<(e=parseFloat(e))&&e<9007199254740992)return 1})(s)?parseFloat(s):a.test(s)?new Date(s):""===s?null:s):s);"__parsed_extra"===l?(r[l]=r[l]||[],r[l].push(c)):r[l]=c}return e.header&&(o>p.length?S("FieldMismatch","TooManyFields","Too many fields: expected "+p.length+" fields but parsed "+o,u+n):oe.preview?n.abort():(g.data=g.data[0],o(g,c))))}),this.parse=function(o,i,a){var 
c=e.quoteChar||'"',c=(e.newline||(e.newline=this.guessLineEndings(o,c)),r=!1,e.delimiter?w(e.delimiter)&&(e.delimiter=e.delimiter(o),g.meta.delimiter=e.delimiter):((c=((t,n,r,o,i)=>{var a,c,s,u;i=i||[","," ","|",";",l.RECORD_SEP,l.UNIT_SEP];for(var d=0;d=n.length/2?"\r\n":"\r"}}function h(e){return e.replace(/[.*+?^${}()|[\]\\]/g,"\\$&")}function m(e){var t=(e=e||{}).delimiter,n=e.newline,r=e.comments,o=e.step,i=e.preview,a=e.fastMode,c=null,s=!1,u=null==e.quoteChar?'"':e.quoteChar,d=u;if(void 0!==e.escapeChar&&(d=e.escapeChar),("string"!=typeof t||-1=i)return L(!0);break}E.push({type:"Quotes",code:"InvalidQuotes",message:"Trailing quote on quoted field is malformed",row:k.length,index:f}),R++}}else if(r&&0===C.length&&l.substring(f,f+x)===r){if(-1===N)return L();f=N+b,N=l.indexOf(n,f),M=l.indexOf(t,f)}else if(-1!==M&&(M=i)return L(!0)}return D();function A(e){k.push(e),O=f}function _(e){return -1!==e&&(e=l.substring(R+1,e))&&""===e.trim()?e.length:0}function D(e){return g||(void 0===e&&(e=l.substring(f)),C.push(e),f=v,A(C),S&&z()),L()}function Z(e){f=e,A(C),C=[],N=l.indexOf(n,f)}function L(r){if(e.header&&!m&&k.length&&!s){var o=k[0],i={},a=new Set(o);let t=!1;for(let n=0;n{if("object"==typeof t){if("string"!=typeof t.delimiter||l.BAD_DELIMITERS.filter(function(e){return -1!==t.delimiter.indexOf(e)}).length||(o=t.delimiter),("boolean"==typeof t.quotes||"function"==typeof t.quotes||Array.isArray(t.quotes))&&(n=t.quotes),"boolean"!=typeof t.skipEmptyLines&&"string"!=typeof t.skipEmptyLines||(s=t.skipEmptyLines),"string"==typeof t.newline&&(i=t.newline),"string"==typeof t.quoteChar&&(a=t.quoteChar),"boolean"==typeof t.header&&(r=t.header),Array.isArray(t.columns)){if(0===t.columns.length)throw Error("Option columns is empty");u=t.columns}void 0!==t.escapeChar&&(c=t.escapeChar+a),t.escapeFormulae instanceof RegExp?d=t.escapeFormulae:"boolean"==typeof t.escapeFormulae&&t.escapeFormulae&&(d=/^[=+\-@\t\r].*$/)}})(),RegExp(h(a),"g"));if("string"==typeof 
e&&(e=JSON.parse(e)),Array.isArray(e)){if(!e.length||Array.isArray(e[0]))return p(null,e,s);if("object"==typeof e[0])return p(u||Object.keys(e[0]),e,s)}else if("object"==typeof e)return"string"==typeof e.data&&(e.data=JSON.parse(e.data)),Array.isArray(e.data)&&(e.fields||(e.fields=e.meta&&e.meta.fields||u),e.fields||(e.fields=Array.isArray(e.data[0])?e.fields:"object"==typeof e.data[0]?Object.keys(e.data[0]):[]),Array.isArray(e.data[0])||"object"==typeof e.data[0]||(e.data=[e.data])),p(e.fields||[],e.data||[],s);throw Error("Unable to serialize unrecognized input");function p(e,t,n){var a="",l=("string"==typeof e&&(e=JSON.parse(e)),"string"==typeof t&&(t=JSON.parse(t)),Array.isArray(e)&&0{for(var n=0;nen;(0,s.useImperativeHandle)(t,function(){return{focus:K,blur:function(){var e;null===(e=W.current)||void 0===e||e.blur()},setSelectionRange:function(e,t,n){var r;null===(r=W.current)||void 0===r||r.setSelectionRange(e,t,n)},select:function(){var e;null===(e=W.current)||void 0===e||e.select()},input:W.current}}),(0,s.useEffect)(function(){H(function(e){return(!e||!C)&&e})},[C]);var ei=function(e,t,n){var r,o,i=t;if(!q.current&&et.exceedFormatter&&et.max&&et.strategy(t)>et.max)i=et.exceedFormatter(t,{max:et.max}),t!==i&&ee([(null===(r=W.current)||void 0===r?void 0:r.selectionStart)||0,(null===(o=W.current)||void 0===o?void 0:o.selectionEnd)||0]);else if("compositionEnd"===n.source)return;X(i),W.current&&(0,u.rJ)(W.current,e,l,i)};(0,s.useEffect)(function(){if(J){var e;null===(e=W.current)||void 0===e||e.setSelectionRange.apply(e,(0,f.Z)(J))}},[J]);var ea=eo&&"".concat(E,"-out-of-range");return s.createElement(d,(0,o.Z)({},L,{prefixCls:E,className:c()(j,ea),handleReset:function(e){X(""),K(),W.current&&(0,u.rJ)(W.current,e,l)},value:$,focused:F,triggerFocus:K,suffix:function(){var e=Number(en)>0;if(M||et.show){var t=et.showFormatter?et.showFormatter({value:$,count:er,maxLength:en}):"".concat(er).concat(e?" 
/ ".concat(en):"");return s.createElement(s.Fragment,null,et.show&&s.createElement("span",{className:c()("".concat(E,"-show-count-suffix"),(0,i.Z)({},"".concat(E,"-show-count-has-suffix"),!!M),null==A?void 0:A.count),style:(0,r.Z)({},null==_?void 0:_.count)},t),M)}return null}(),disabled:C,classes:T,classNames:A,styles:_}),(n=(0,g.Z)(e,["prefixCls","onPressEnter","addonBefore","addonAfter","prefix","suffix","allowClear","defaultValue","showCount","count","classes","htmlSize","styles","classNames"]),s.createElement("input",(0,o.Z)({autoComplete:a},n,{onChange:function(e){ei(e,e.target.value,{source:"change"})},onFocus:function(e){H(!0),null==b||b(e)},onBlur:function(e){H(!1),null==x||x(e)},onKeyDown:function(e){w&&"Enter"===e.key&&w(e),null==S||S(e)},className:c()(E,(0,i.Z)({},"".concat(E,"-disabled"),C),null==A?void 0:A.input),style:null==_?void 0:_.input,ref:W,size:O,type:void 0===R?"text":R,onCompositionStart:function(e){q.current=!0,null==D||D(e)},onCompositionEnd:function(e){q.current=!1,ei(e,e.currentTarget.value,{source:"compositionEnd"}),null==Z||Z(e)}}))))})},55041:function(e,t,n){"use strict";function r(e){return!!(e.addonBefore||e.addonAfter)}function o(e){return!!(e.prefix||e.suffix||e.allowClear)}function i(e,t,n,r){if(n){var o=t;if("click"===t.type){var i=e.cloneNode(!0);o=Object.create(t,{target:{value:i},currentTarget:{value:i}}),i.value="",n(o);return}if("file"!==e.type&&void 0!==r){var a=e.cloneNode(!0);o=Object.create(t,{target:{value:a},currentTarget:{value:a}}),a.value=r,n(o);return}n(o)}}function a(e,t){if(e){e.focus(t);var n=(t||{}).cursor;if(n){var r=e.value.length;switch(n){case"start":e.setSelectionRange(0,0);break;case"end":e.setSelectionRange(r,r);break;default:e.setSelectionRange(0,r)}}}}n.d(t,{He:function(){return r},X3:function(){return o},nH:function(){return a},rJ:function(){return i}})},33082:function(e,t,n){"use strict";n.d(t,{iz:function(){return eZ},ck:function(){return eh},BW:function(){return eD},sN:function(){return 
eh},Wd:function(){return eI},ZP:function(){return eH},Xl:function(){return j}});var r=n(1119),o=n(11993),i=n(31686),a=n(83145),l=n(26365),c=n(6989),s=n(36760),u=n.n(s),d=n(1699),f=n(50506),p=n(16671),h=n(32559),m=n(2265),g=n(54887),v=m.createContext(null);function y(e,t){return void 0===e?null:"".concat(e,"-").concat(t)}function b(e){return y(m.useContext(v),e)}var x=n(6397),w=["children","locked"],S=m.createContext(null);function k(e){var t=e.children,n=e.locked,r=(0,c.Z)(e,w),o=m.useContext(S),a=(0,x.Z)(function(){var e;return e=(0,i.Z)({},o),Object.keys(r).forEach(function(t){var n=r[t];void 0!==n&&(e[t]=n)}),e},[o,r],function(e,t){return!n&&(e[0]!==t[0]||!(0,p.Z)(e[1],t[1],!0))});return m.createElement(S.Provider,{value:a},t)}var E=m.createContext(null);function C(){return m.useContext(E)}var O=m.createContext([]);function j(e){var t=m.useContext(O);return m.useMemo(function(){return void 0!==e?[].concat((0,a.Z)(t),[e]):t},[t,e])}var P=m.createContext(null),M=m.createContext({}),N=n(2857);function I(e){var t=arguments.length>1&&void 0!==arguments[1]&&arguments[1];if((0,N.Z)(e)){var n=e.nodeName.toLowerCase(),r=["input","select","textarea","button"].includes(n)||e.isContentEditable||"a"===n&&!!e.getAttribute("href"),o=e.getAttribute("tabindex"),i=Number(o),a=null;return o&&!Number.isNaN(i)?a=i:r&&null===a&&(a=0),r&&e.disabled&&(a=null),null!==a&&(a>=0||t&&a<0)}return!1}var R=n(95814),T=n(53346),A=R.Z.LEFT,_=R.Z.RIGHT,D=R.Z.UP,Z=R.Z.DOWN,L=R.Z.ENTER,z=R.Z.ESC,B=R.Z.HOME,F=R.Z.END,H=[D,Z,A,_];function q(e,t){return(function(e){var t=arguments.length>1&&void 0!==arguments[1]&&arguments[1],n=(0,a.Z)(e.querySelectorAll("*")).filter(function(e){return I(e,t)});return I(e,t)&&n.unshift(e),n})(e,!0).filter(function(e){return t.has(e)})}function W(e,t,n){var r=arguments.length>3&&void 0!==arguments[3]?arguments[3]:1;if(!e)return null;var o=q(e,t),i=o.length,a=o.findIndex(function(e){return n===e});return r<0?-1===a?a=i-1:a-=1:r>0&&(a+=1),o[a=(a+i)%i]}var 
K=function(e,t){var n=new Set,r=new Map,o=new Map;return e.forEach(function(e){var i=document.querySelector("[data-menu-id='".concat(y(t,e),"']"));i&&(n.add(i),o.set(i,e),r.set(e,i))}),{elements:n,key2element:r,element2key:o}},U="__RC_UTIL_PATH_SPLIT__",V=function(e){return e.join(U)},G="rc-menu-more";function X(e){var t=m.useRef(e);t.current=e;var n=m.useCallback(function(){for(var e,n=arguments.length,r=Array(n),o=0;o1&&(E.motionAppear=!1);var C=E.onVisibleChanged;return(E.onVisibleChanged=function(e){return g.current||e||x(!0),null==C?void 0:C(e)},b)?null:m.createElement(k,{mode:c,locked:!g.current},m.createElement(eO.ZP,(0,r.Z)({visible:w},E,{forceRender:d,removeOnLeave:!1,leavedClassName:"".concat(u,"-hidden")}),function(e){var n=e.className,r=e.style;return m.createElement(eg,{id:t,className:n,style:r},a)}))}var eP=["style","className","title","eventKey","warnKey","disabled","internalPopupClose","children","itemIcon","expandIcon","popupClassName","popupOffset","popupStyle","onClick","onMouseEnter","onMouseLeave","onTitleClick","onTitleMouseEnter","onTitleMouseLeave"],eM=["active"],eN=function(e){var t,n=e.style,a=e.className,s=e.title,f=e.eventKey,p=(e.warnKey,e.disabled),h=e.internalPopupClose,g=e.children,v=e.itemIcon,y=e.expandIcon,x=e.popupClassName,w=e.popupOffset,E=e.popupStyle,C=e.onClick,O=e.onMouseEnter,N=e.onMouseLeave,I=e.onTitleClick,R=e.onTitleMouseEnter,T=e.onTitleMouseLeave,A=(0,c.Z)(e,eP),_=b(f),D=m.useContext(S),Z=D.prefixCls,L=D.mode,z=D.openKeys,B=D.disabled,F=D.overflowDisabled,H=D.activeKey,q=D.selectedKeys,W=D.itemIcon,K=D.expandIcon,U=D.onItemClick,V=D.onOpenChange,G=D.onActive,$=m.useContext(M)._internalRenderSubMenuItem,Y=m.useContext(P).isSubPathKey,Q=j(),J="".concat(Z,"-submenu"),ee=B||p,et=m.useRef(),en=m.useRef(),er=null!=y?y:K,el=z.includes(f),es=!F&&el,eu=Y(q,f),ed=eo(f,ee,R,T),ef=ed.active,ep=(0,c.Z)(ed,eM),eh=m.useState(!1),em=(0,l.Z)(eh,2),ev=em[0],ey=em[1],eb=function(e){ee||ey(e)},ex=m.useMemo(function(){return 
ef||"inline"!==L&&(ev||Y([H],f))},[L,ef,H,ev,f,Y]),ew=ei(Q.length),eS=X(function(e){null==C||C(ec(e)),U(e)}),ek=_&&"".concat(_,"-popup"),eE=m.createElement("div",(0,r.Z)({role:"menuitem",style:ew,className:"".concat(J,"-title"),tabIndex:ee?null:-1,ref:et,title:"string"==typeof s?s:null,"data-menu-id":F&&_?null:_,"aria-expanded":es,"aria-haspopup":!0,"aria-controls":ek,"aria-disabled":ee,onClick:function(e){ee||(null==I||I({key:f,domEvent:e}),"inline"===L&&V(f,!el))},onFocus:function(){G(f)}},ep),s,m.createElement(ea,{icon:"horizontal"!==L?er:void 0,props:(0,i.Z)((0,i.Z)({},e),{},{isOpen:es,isSubMenu:!0})},m.createElement("i",{className:"".concat(J,"-arrow")}))),eO=m.useRef(L);if("inline"!==L&&Q.length>1?eO.current="vertical":eO.current=L,!F){var eN=eO.current;eE=m.createElement(eC,{mode:eN,prefixCls:J,visible:!h&&es&&"inline"!==L,popupClassName:x,popupOffset:w,popupStyle:E,popup:m.createElement(k,{mode:"horizontal"===eN?"vertical":eN},m.createElement(eg,{id:ek,ref:en},g)),disabled:ee,onVisibleChange:function(e){"inline"!==L&&V(f,e)}},eE)}var eI=m.createElement(d.Z.Item,(0,r.Z)({role:"none"},A,{component:"li",style:n,className:u()(J,"".concat(J,"-").concat(L),a,(t={},(0,o.Z)(t,"".concat(J,"-open"),es),(0,o.Z)(t,"".concat(J,"-active"),ex),(0,o.Z)(t,"".concat(J,"-selected"),eu),(0,o.Z)(t,"".concat(J,"-disabled"),ee),t)),onMouseEnter:function(e){eb(!0),null==O||O({key:f,domEvent:e})},onMouseLeave:function(e){eb(!1),null==N||N({key:f,domEvent:e})}}),eE,!F&&m.createElement(ej,{id:ek,open:es,keyPath:Q},g));return $&&(eI=$(eI,e,{selected:eu,active:ex,open:es,disabled:ee})),m.createElement(k,{onItemClick:eS,mode:"horizontal"===L?"vertical":L,itemIcon:null!=v?v:W,expandIcon:er},eI)};function eI(e){var t,n=e.eventKey,r=e.children,o=j(n),i=ey(r,o),a=C();return m.useEffect(function(){if(a)return a.registerPath(n,o),function(){a.unregisterPath(n,o)}},[o]),t=a?i:m.createElement(eN,e,i),m.createElement(O.Provider,{value:o},t)}var 
eR=n(41154),eT=["className","title","eventKey","children"],eA=["children"],e_=function(e){var t=e.className,n=e.title,o=(e.eventKey,e.children),i=(0,c.Z)(e,eT),a=m.useContext(S).prefixCls,l="".concat(a,"-item-group");return m.createElement("li",(0,r.Z)({role:"presentation"},i,{onClick:function(e){return e.stopPropagation()},className:u()(l,t)}),m.createElement("div",{role:"presentation",className:"".concat(l,"-title"),title:"string"==typeof n?n:void 0},n),m.createElement("ul",{role:"group",className:"".concat(l,"-list")},o))};function eD(e){var t=e.children,n=(0,c.Z)(e,eA),r=ey(t,j(n.eventKey));return C()?r:m.createElement(e_,(0,en.Z)(n,["warnKey"]),r)}function eZ(e){var t=e.className,n=e.style,r=m.useContext(S).prefixCls;return C()?null:m.createElement("li",{role:"separator",className:u()("".concat(r,"-item-divider"),t),style:n})}var eL=["label","children","key","type"],ez=["prefixCls","rootClassName","style","className","tabIndex","items","children","direction","id","mode","inlineCollapsed","disabled","disabledOverflow","subMenuOpenDelay","subMenuCloseDelay","forceSubMenuRender","defaultOpenKeys","openKeys","activeKey","defaultActiveFirst","selectable","multiple","defaultSelectedKeys","selectedKeys","onSelect","onDeselect","inlineIndent","motion","defaultMotions","triggerSubMenuAction","builtinPlacements","itemIcon","expandIcon","overflowedIndicator","overflowedIndicatorPopupClassName","getPopupContainer","onClick","onOpenChange","onKeyDown","openAnimation","openTransitionName","_internalRenderMenuItem","_internalRenderSubMenuItem"],eB=[],eF=m.forwardRef(function(e,t){var n,s,h,y,b,x,w,S,C,O,j,N,I,R,Q,J,ee,et,en,er,eo,ei,ea,el,es,eu,ed,ef=e.prefixCls,ep=void 0===ef?"rc-menu":ef,em=e.rootClassName,eg=e.style,ev=e.className,eb=e.tabIndex,ex=e.items,ew=e.children,eS=e.direction,ek=e.id,eE=e.mode,eC=void 
0===eE?"vertical":eE,eO=e.inlineCollapsed,ej=e.disabled,eP=e.disabledOverflow,eM=e.subMenuOpenDelay,eN=e.subMenuCloseDelay,eT=e.forceSubMenuRender,eA=e.defaultOpenKeys,e_=e.openKeys,eF=e.activeKey,eH=e.defaultActiveFirst,eq=e.selectable,eW=void 0===eq||eq,eK=e.multiple,eU=void 0!==eK&&eK,eV=e.defaultSelectedKeys,eG=e.selectedKeys,eX=e.onSelect,e$=e.onDeselect,eY=e.inlineIndent,eQ=e.motion,eJ=e.defaultMotions,e0=e.triggerSubMenuAction,e1=e.builtinPlacements,e2=e.itemIcon,e6=e.expandIcon,e3=e.overflowedIndicator,e4=void 0===e3?"...":e3,e5=e.overflowedIndicatorPopupClassName,e8=e.getPopupContainer,e7=e.onClick,e9=e.onOpenChange,te=e.onKeyDown,tt=(e.openAnimation,e.openTransitionName,e._internalRenderMenuItem),tn=e._internalRenderSubMenuItem,tr=(0,c.Z)(e,ez),to=m.useMemo(function(){var e;return e=ew,ex&&(e=function e(t){return(t||[]).map(function(t,n){if(t&&"object"===(0,eR.Z)(t)){var o=t.label,i=t.children,a=t.key,l=t.type,s=(0,c.Z)(t,eL),u=null!=a?a:"tmp-".concat(n);return i||"group"===l?"group"===l?m.createElement(eD,(0,r.Z)({key:u},s,{title:o}),e(i)):m.createElement(eI,(0,r.Z)({key:u},s,{title:o}),e(i)):"divider"===l?m.createElement(eZ,(0,r.Z)({key:u},s)):m.createElement(eh,(0,r.Z)({key:u},s),o)}return null}).filter(function(e){return e})}(ex)),ey(e,eB)},[ew,ex]),ti=m.useState(!1),ta=(0,l.Z)(ti,2),tl=ta[0],tc=ta[1],ts=m.useRef(),tu=(n=(0,f.Z)(ek,{value:ek}),h=(s=(0,l.Z)(n,2))[0],y=s[1],m.useEffect(function(){Y+=1;var e="".concat($,"-").concat(Y);y("rc-menu-uuid-".concat(e))},[]),h),td="rtl"===eS,tf=(0,f.Z)(eA,{value:e_,postState:function(e){return e||eB}}),tp=(0,l.Z)(tf,2),th=tp[0],tm=tp[1],tg=function(e){var t=arguments.length>1&&void 0!==arguments[1]&&arguments[1];function 
n(){tm(e),null==e9||e9(e)}t?(0,g.flushSync)(n):n()},tv=m.useState(th),ty=(0,l.Z)(tv,2),tb=ty[0],tx=ty[1],tw=m.useRef(!1),tS=m.useMemo(function(){return("inline"===eC||"vertical"===eC)&&eO?["vertical",eO]:[eC,!1]},[eC,eO]),tk=(0,l.Z)(tS,2),tE=tk[0],tC=tk[1],tO="inline"===tE,tj=m.useState(tE),tP=(0,l.Z)(tj,2),tM=tP[0],tN=tP[1],tI=m.useState(tC),tR=(0,l.Z)(tI,2),tT=tR[0],tA=tR[1];m.useEffect(function(){tN(tE),tA(tC),tw.current&&(tO?tm(tb):tg(eB))},[tE,tC]);var t_=m.useState(0),tD=(0,l.Z)(t_,2),tZ=tD[0],tL=tD[1],tz=tZ>=to.length-1||"horizontal"!==tM||eP;m.useEffect(function(){tO&&tx(th)},[th]),m.useEffect(function(){return tw.current=!0,function(){tw.current=!1}},[]);var tB=(b=m.useState({}),x=(0,l.Z)(b,2)[1],w=(0,m.useRef)(new Map),S=(0,m.useRef)(new Map),C=m.useState([]),j=(O=(0,l.Z)(C,2))[0],N=O[1],I=(0,m.useRef)(0),R=(0,m.useRef)(!1),Q=function(){R.current||x({})},J=(0,m.useCallback)(function(e,t){var n,r=V(t);S.current.set(r,e),w.current.set(e,r),I.current+=1;var o=I.current;n=function(){o===I.current&&Q()},Promise.resolve().then(n)},[]),ee=(0,m.useCallback)(function(e,t){var n=V(t);S.current.delete(n),w.current.delete(e)},[]),et=(0,m.useCallback)(function(e){N(e)},[]),en=(0,m.useCallback)(function(e,t){var n=(w.current.get(e)||"").split(U);return t&&j.includes(n[0])&&n.unshift(G),n},[j]),er=(0,m.useCallback)(function(e,t){return e.some(function(e){return en(e,!0).includes(t)})},[en]),eo=(0,m.useCallback)(function(e){var t="".concat(w.current.get(e)).concat(U),n=new Set;return(0,a.Z)(S.current.keys()).forEach(function(e){e.startsWith(t)&&n.add(S.current.get(e))}),n},[]),m.useEffect(function(){return function(){R.current=!0}},[]),{registerPath:J,unregisterPath:ee,refreshOverflowKeys:et,isSubPathKey:er,getKeyPath:en,getKeys:function(){var e=(0,a.Z)(w.current.keys());return 
j.length&&e.push(G),e},getSubPathKeys:eo}),tF=tB.registerPath,tH=tB.unregisterPath,tq=tB.refreshOverflowKeys,tW=tB.isSubPathKey,tK=tB.getKeyPath,tU=tB.getKeys,tV=tB.getSubPathKeys,tG=m.useMemo(function(){return{registerPath:tF,unregisterPath:tH}},[tF,tH]),tX=m.useMemo(function(){return{isSubPathKey:tW}},[tW]);m.useEffect(function(){tq(tz?eB:to.slice(tZ+1).map(function(e){return e.key}))},[tZ,tz]);var t$=(0,f.Z)(eF||eH&&(null===(eu=to[0])||void 0===eu?void 0:eu.key),{value:eF}),tY=(0,l.Z)(t$,2),tQ=tY[0],tJ=tY[1],t0=X(function(e){tJ(e)}),t1=X(function(){tJ(void 0)});(0,m.useImperativeHandle)(t,function(){return{list:ts.current,focus:function(e){var t,n,r=K(tU(),tu),o=r.elements,i=r.key2element,a=r.element2key,l=q(ts.current,o),c=null!=tQ?tQ:l[0]?a.get(l[0]):null===(t=to.find(function(e){return!e.props.disabled}))||void 0===t?void 0:t.key,s=i.get(c);c&&s&&(null==s||null===(n=s.focus)||void 0===n||n.call(s,e))}}});var t2=(0,f.Z)(eV||[],{value:eG,postState:function(e){return Array.isArray(e)?e:null==e?eB:[e]}}),t6=(0,l.Z)(t2,2),t3=t6[0],t4=t6[1],t5=function(e){if(eW){var t,n=e.key,r=t3.includes(n);t4(t=eU?r?t3.filter(function(e){return e!==n}):[].concat((0,a.Z)(t3),[n]):[n]);var o=(0,i.Z)((0,i.Z)({},e),{},{selectedKeys:t});r?null==e$||e$(o):null==eX||eX(o)}!eU&&th.length&&"inline"!==tM&&tg(eB)},t8=X(function(e){null==e7||e7(ec(e)),t5(e)}),t7=X(function(e,t){var n=th.filter(function(t){return t!==e});if(t)n.push(e);else if("inline"!==tM){var r=tV(e);n=n.filter(function(e){return!r.has(e)})}(0,p.Z)(th,n,!0)||tg(n,!0)}),t9=(ei=function(e,t){var n=null!=t?t:!th.includes(e);t7(e,n)},ea=m.useRef(),(el=m.useRef()).current=tQ,es=function(){T.Z.cancel(ea.current)},m.useEffect(function(){return function(){es()}},[]),function(e){var t=e.which;if([].concat(H,[L,z,B,F]).includes(t)){var n=tU(),r=K(n,tu),i=r,a=i.elements,l=i.key2element,c=i.element2key,s=function(e,t){for(var n=e||document.activeElement;n;){if(t.has(n))return n;n=n.parentElement}return 
null}(l.get(tQ),a),u=c.get(s),d=function(e,t,n,r){var i,a,l,c,s="prev",u="next",d="children",f="parent";if("inline"===e&&r===L)return{inlineTrigger:!0};var p=(i={},(0,o.Z)(i,D,s),(0,o.Z)(i,Z,u),i),h=(a={},(0,o.Z)(a,A,n?u:s),(0,o.Z)(a,_,n?s:u),(0,o.Z)(a,Z,d),(0,o.Z)(a,L,d),a),m=(l={},(0,o.Z)(l,D,s),(0,o.Z)(l,Z,u),(0,o.Z)(l,L,d),(0,o.Z)(l,z,f),(0,o.Z)(l,A,n?d:f),(0,o.Z)(l,_,n?f:d),l);switch(null===(c=({inline:p,horizontal:h,vertical:m,inlineSub:p,horizontalSub:m,verticalSub:m})["".concat(e).concat(t?"":"Sub")])||void 0===c?void 0:c[r]){case s:return{offset:-1,sibling:!0};case u:return{offset:1,sibling:!0};case f:return{offset:-1,sibling:!1};case d:return{offset:1,sibling:!1};default:return null}}(tM,1===tK(u,!0).length,td,t);if(!d&&t!==B&&t!==F)return;(H.includes(t)||[B,F].includes(t))&&e.preventDefault();var f=function(e){if(e){var t=e,n=e.querySelector("a");null!=n&&n.getAttribute("href")&&(t=n);var r=c.get(e);tJ(r),es(),ea.current=(0,T.Z)(function(){el.current===r&&t.focus()})}};if([B,F].includes(t)||d.sibling||!s){var p,h=q(p=s&&"inline"!==tM?function(e){for(var t=e;t;){if(t.getAttribute("data-menu-list"))return t;t=t.parentElement}return null}(s):ts.current,a);f(t===B?h[0]:t===F?h[h.length-1]:W(p,a,s,d.offset))}else if(d.inlineTrigger)ei(u);else if(d.offset>0)ei(u,!0),es(),ea.current=(0,T.Z)(function(){r=K(n,tu);var e=s.getAttribute("aria-controls");f(W(document.getElementById(e),r.elements))},5);else if(d.offset<0){var m=tK(u,!0),g=m[m.length-2],v=l.get(g);ei(g,!1),f(v)}}null==te||te(e)});m.useEffect(function(){tc(!0)},[]);var ne=m.useMemo(function(){return{_internalRenderMenuItem:tt,_internalRenderSubMenuItem:tn}},[tt,tn]),nt="horizontal"!==tM||eP?to:to.map(function(e,t){return 
m.createElement(k,{key:e.key,overflowDisabled:t>tZ},e)}),nn=m.createElement(d.Z,(0,r.Z)({id:ek,ref:ts,prefixCls:"".concat(ep,"-overflow"),component:"ul",itemComponent:eh,className:u()(ep,"".concat(ep,"-root"),"".concat(ep,"-").concat(tM),ev,(ed={},(0,o.Z)(ed,"".concat(ep,"-inline-collapsed"),tT),(0,o.Z)(ed,"".concat(ep,"-rtl"),td),ed),em),dir:eS,style:eg,role:"menu",tabIndex:void 0===eb?0:eb,data:nt,renderRawItem:function(e){return e},renderRawRest:function(e){var t=e.length,n=t?to.slice(-t):null;return m.createElement(eI,{eventKey:G,title:e4,disabled:tz,internalPopupClose:0===t,popupClassName:e5},n)},maxCount:"horizontal"!==tM||eP?d.Z.INVALIDATE:d.Z.RESPONSIVE,ssr:"full","data-menu-list":!0,onVisibleChange:function(e){tL(e)},onKeyDown:t9},tr));return m.createElement(M.Provider,{value:ne},m.createElement(v.Provider,{value:tu},m.createElement(k,{prefixCls:ep,rootClassName:em,mode:tM,openKeys:th,rtl:td,disabled:ej,motion:tl?eQ:null,defaultMotions:tl?eJ:null,activeKey:tQ,onActive:t0,onInactive:t1,selectedKeys:t3,inlineIndent:void 0===eY?24:eY,subMenuOpenDelay:void 0===eM?.1:eM,subMenuCloseDelay:void 0===eN?.1:eN,forceSubMenuRender:eT,builtinPlacements:e1,triggerSubMenuAction:void 0===e0?"hover":e0,getPopupContainer:e8,itemIcon:e2,expandIcon:e6,onItemClick:t8,onOpenChange:t7},m.createElement(P.Provider,{value:tX},nn),m.createElement("div",{style:{display:"none"},"aria-hidden":!0},m.createElement(E.Provider,{value:tG},to)))))});eF.Item=eh,eF.SubMenu=eI,eF.ItemGroup=eD,eF.Divider=eZ;var eH=eF},1699:function(e,t,n){"use strict";n.d(t,{Z:function(){return M}});var r=n(1119),o=n(31686),i=n(26365),a=n(6989),l=n(2265),c=n(36760),s=n.n(c),u=n(31474),d=n(27380),f=["prefixCls","invalidate","item","renderItem","responsive","responsiveDisabled","registerSize","itemKey","className","style","children","display","order","component"],p=void 0,h=l.forwardRef(function(e,t){var 
n,i=e.prefixCls,c=e.invalidate,d=e.item,h=e.renderItem,m=e.responsive,g=e.responsiveDisabled,v=e.registerSize,y=e.itemKey,b=e.className,x=e.style,w=e.children,S=e.display,k=e.order,E=e.component,C=(0,a.Z)(e,f),O=m&&!S;l.useEffect(function(){return function(){v(y,null)}},[]);var j=h&&d!==p?h(d):w;c||(n={opacity:O?0:1,height:O?0:p,overflowY:O?"hidden":p,order:m?k:p,pointerEvents:O?"none":p,position:O?"absolute":p});var P={};O&&(P["aria-hidden"]=!0);var M=l.createElement(void 0===E?"div":E,(0,r.Z)({className:s()(!c&&i,b),style:(0,o.Z)((0,o.Z)({},n),x)},P,C,{ref:t}),j);return m&&(M=l.createElement(u.Z,{onResize:function(e){v(y,e.offsetWidth)},disabled:g},M)),M});h.displayName="Item";var m=n(58525),g=n(54887),v=n(53346);function y(e,t){var n=l.useState(t),r=(0,i.Z)(n,2),o=r[0],a=r[1];return[o,(0,m.Z)(function(t){e(function(){a(t)})})]}var b=l.createContext(null),x=["component"],w=["className"],S=["className"],k=l.forwardRef(function(e,t){var n=l.useContext(b);if(!n){var o=e.component,i=(0,a.Z)(e,x);return l.createElement(void 0===o?"div":o,(0,r.Z)({},i,{ref:t}))}var c=n.className,u=(0,a.Z)(n,w),d=e.className,f=(0,a.Z)(e,S);return l.createElement(b.Provider,{value:null},l.createElement(h,(0,r.Z)({ref:t,className:s()(c,d)},u,f)))});k.displayName="RawItem";var E=["prefixCls","data","renderItem","renderRawItem","itemKey","itemWidth","ssr","style","className","maxCount","renderRest","renderRawRest","suffix","component","itemComponent","onVisibleChange"],C="responsive",O="invalidate";function j(e){return"+ ".concat(e.length," ...")}var P=l.forwardRef(function(e,t){var n,c,f=e.prefixCls,p=void 0===f?"rc-overflow":f,m=e.data,x=void 0===m?[]:m,w=e.renderItem,S=e.renderRawItem,k=e.itemKey,P=e.itemWidth,M=void 0===P?10:P,N=e.ssr,I=e.style,R=e.className,T=e.maxCount,A=e.renderRest,_=e.renderRawRest,D=e.suffix,Z=e.component,L=e.itemComponent,z=e.onVisibleChange,B=(0,a.Z)(e,E),F="full"===N,H=(n=l.useRef(null),function(e){n.current||(n.current=[],function(e){if("undefined"==typeof 
MessageChannel)(0,v.Z)(e);else{var t=new MessageChannel;t.port1.onmessage=function(){return e()},t.port2.postMessage(void 0)}}(function(){(0,g.unstable_batchedUpdates)(function(){n.current.forEach(function(e){e()}),n.current=null})})),n.current.push(e)}),q=y(H,null),W=(0,i.Z)(q,2),K=W[0],U=W[1],V=K||0,G=y(H,new Map),X=(0,i.Z)(G,2),$=X[0],Y=X[1],Q=y(H,0),J=(0,i.Z)(Q,2),ee=J[0],et=J[1],en=y(H,0),er=(0,i.Z)(en,2),eo=er[0],ei=er[1],ea=y(H,0),el=(0,i.Z)(ea,2),ec=el[0],es=el[1],eu=(0,l.useState)(null),ed=(0,i.Z)(eu,2),ef=ed[0],ep=ed[1],eh=(0,l.useState)(null),em=(0,i.Z)(eh,2),eg=em[0],ev=em[1],ey=l.useMemo(function(){return null===eg&&F?Number.MAX_SAFE_INTEGER:eg||0},[eg,K]),eb=(0,l.useState)(!1),ex=(0,i.Z)(eb,2),ew=ex[0],eS=ex[1],ek="".concat(p,"-item"),eE=Math.max(ee,eo),eC=T===C,eO=x.length&&eC,ej=T===O,eP=eO||"number"==typeof T&&x.length>T,eM=(0,l.useMemo)(function(){var e=x;return eO?e=null===K&&F?x:x.slice(0,Math.min(x.length,V/M)):"number"==typeof T&&(e=x.slice(0,T)),e},[x,M,K,T,eO]),eN=(0,l.useMemo)(function(){return eO?x.slice(ey+1):x.slice(eM.length)},[x,eM,eO,ey]),eI=(0,l.useCallback)(function(e,t){var n;return"function"==typeof k?k(e):null!==(n=k&&(null==e?void 0:e[k]))&&void 0!==n?n:t},[k]),eR=(0,l.useCallback)(w||function(e){return e},[w]);function eT(e,t,n){(eg!==e||void 0!==t&&t!==ef)&&(ev(e),n||(eS(eV){eT(r-1,e-o-ec+eo);break}}D&&e_(0)+ec>V&&ep(null)}},[V,$,eo,ec,eI,eM]);var eD=ew&&!!eN.length,eZ={};null!==ef&&eO&&(eZ={position:"absolute",left:ef,top:0});var eL={prefixCls:ek,responsive:eO,component:L,invalidate:ej},ez=S?function(e,t){var n=eI(e,t);return l.createElement(b.Provider,{key:n,value:(0,o.Z)((0,o.Z)({},eL),{},{order:t,item:e,itemKey:n,registerSize:eA,display:t<=ey})},S(e,t))}:function(e,t){var n=eI(e,t);return 
l.createElement(h,(0,r.Z)({},eL,{order:t,key:n,item:e,renderItem:eR,itemKey:n,registerSize:eA,display:t<=ey}))},eB={order:eD?ey:Number.MAX_SAFE_INTEGER,className:"".concat(ek,"-rest"),registerSize:function(e,t){ei(t),et(eo)},display:eD};if(_)_&&(c=l.createElement(b.Provider,{value:(0,o.Z)((0,o.Z)({},eL),eB)},_(eN)));else{var eF=A||j;c=l.createElement(h,(0,r.Z)({},eL,eB),"function"==typeof eF?eF(eN):eF)}var eH=l.createElement(void 0===Z?"div":Z,(0,r.Z)({className:s()(!ej&&p,R),style:I,ref:t},B),eM.map(ez),eP?c:null,D&&l.createElement(h,(0,r.Z)({},eL,{responsive:eC,responsiveDisabled:!eO,order:ey,className:"".concat(ek,"-suffix"),registerSize:function(e,t){es(t)},display:!0,style:eZ}),D));return eC&&(eH=l.createElement(u.Z,{onResize:function(e,t){U(t.clientWidth)},disabled:!eO},eH)),eH});P.displayName="Overflow",P.Item=k,P.RESPONSIVE=C,P.INVALIDATE=O;var M=P},10281:function(e,t,n){"use strict";n.d(t,{G:function(){return a}});var r=n(94981),o=function(e){if((0,r.Z)()&&window.document.documentElement){var t=Array.isArray(e)?e:[e],n=window.document.documentElement;return t.some(function(e){return e in n.style})}return!1},i=function(e,t){if(!o(e))return!1;var n=document.createElement("div"),r=n.style[e];return n.style[e]=t,n.style[e]!==r};function a(e,t){return Array.isArray(e)||void 0===t?o(e):i(e,t)}},48625:function(e,t,n){"use strict";n.d(t,{Z:function(){return T}});var r=n(1119),o=n(31686),i=n(41154),a=n(26365),l=n(11993),c=n(6989),s=n(2265),u=n(54887),d=n(36760),f=n.n(d),p=n(31474),h=s.forwardRef(function(e,t){var n,i=e.height,a=e.offsetY,c=e.offsetX,u=e.children,d=e.prefixCls,h=e.onInnerResize,m=e.innerProps,g=e.rtl,v=e.extra,y={},b={display:"flex",flexDirection:"column"};return void 
0!==a&&(y={height:i,position:"relative",overflow:"hidden"},b=(0,o.Z)((0,o.Z)({},b),{},(n={transform:"translateY(".concat(a,"px)")},(0,l.Z)(n,g?"marginRight":"marginLeft",-c),(0,l.Z)(n,"position","absolute"),(0,l.Z)(n,"left",0),(0,l.Z)(n,"right",0),(0,l.Z)(n,"top",0),n))),s.createElement("div",{style:y},s.createElement(p.Z,{onResize:function(e){e.offsetHeight&&h&&h()}},s.createElement("div",(0,r.Z)({style:b,className:f()((0,l.Z)({},"".concat(d,"-holder-inner"),d)),ref:t},m),u,v)))});h.displayName="Filler";var m=n(53346);function g(e,t){return("touches"in e?e.touches[0]:e)[t?"pageX":"pageY"]}var v=s.forwardRef(function(e,t){var n,r=e.prefixCls,i=e.rtl,c=e.scrollOffset,u=e.scrollRange,d=e.onStartMove,p=e.onStopMove,h=e.onScroll,v=e.horizontal,y=e.spinSize,b=e.containerSize,x=e.style,w=e.thumbStyle,S=s.useState(!1),k=(0,a.Z)(S,2),E=k[0],C=k[1],O=s.useState(null),j=(0,a.Z)(O,2),P=j[0],M=j[1],N=s.useState(null),I=(0,a.Z)(N,2),R=I[0],T=I[1],A=!i,_=s.useRef(),D=s.useRef(),Z=s.useState(!1),L=(0,a.Z)(Z,2),z=L[0],B=L[1],F=s.useRef(),H=function(){clearTimeout(F.current),B(!0),F.current=setTimeout(function(){B(!1)},3e3)},q=u-b||0,W=b-y||0,K=s.useMemo(function(){return 0===c||0===q?0:c/q*W},[c,q,W]),U=s.useRef({top:K,dragging:E,pageY:P,startTop:R});U.current={top:K,dragging:E,pageY:P,startTop:R};var V=function(e){C(!0),M(g(e,v)),T(U.current.top),d(),e.stopPropagation(),e.preventDefault()};s.useEffect(function(){var e=function(e){e.preventDefault()},t=_.current,n=D.current;return t.addEventListener("touchstart",e),n.addEventListener("touchstart",V),function(){t.removeEventListener("touchstart",e),n.removeEventListener("touchstart",V)}},[]);var G=s.useRef();G.current=q;var X=s.useRef();X.current=W,s.useEffect(function(){if(E){var e,t=function(t){var n=U.current,r=n.dragging,o=n.pageY,i=n.startTop;if(m.Z.cancel(e),r){var a=g(t,v)-o,l=i;!A&&v?l-=a:l+=a;var 
c=G.current,s=X.current,u=Math.ceil((s?l/s:0)*c);u=Math.min(u=Math.max(u,0),c),e=(0,m.Z)(function(){h(u,v)})}},n=function(){C(!1),p()};return window.addEventListener("mousemove",t),window.addEventListener("touchmove",t),window.addEventListener("mouseup",n),window.addEventListener("touchend",n),function(){window.removeEventListener("mousemove",t),window.removeEventListener("touchmove",t),window.removeEventListener("mouseup",n),window.removeEventListener("touchend",n),m.Z.cancel(e)}}},[E]),s.useEffect(function(){H()},[c]),s.useImperativeHandle(t,function(){return{delayHidden:H}});var $="".concat(r,"-scrollbar"),Y={position:"absolute",visibility:z&&q>0?null:"hidden"},Q={position:"absolute",background:"rgba(0, 0, 0, 0.5)",borderRadius:99,cursor:"pointer",userSelect:"none"};return v?(Y.height=8,Y.left=0,Y.right=0,Y.bottom=0,Q.height="100%",Q.width=y,A?Q.left=K:Q.right=K):(Y.width=8,Y.top=0,Y.bottom=0,A?Y.right=0:Y.left=0,Q.width="100%",Q.height=y,Q.top=K),s.createElement("div",{ref:_,className:f()($,(n={},(0,l.Z)(n,"".concat($,"-horizontal"),v),(0,l.Z)(n,"".concat($,"-vertical"),!v),(0,l.Z)(n,"".concat($,"-visible"),z),n)),style:(0,o.Z)((0,o.Z)({},Y),x),onMouseDown:function(e){e.stopPropagation(),e.preventDefault()},onMouseMove:H},s.createElement("div",{ref:D,className:f()("".concat($,"-thumb"),(0,l.Z)({},"".concat($,"-thumb-moving"),E)),style:(0,o.Z)((0,o.Z)({},Q),w),onMouseDown:V}))});function y(e){var t=e.children,n=e.setRef,r=s.useCallback(function(e){n(e)},[]);return s.cloneElement(t,{ref:r})}var b=n(2868),x=n(76405),w=n(25049),S=function(){function e(){(0,x.Z)(this,e),this.maps=void 0,this.id=0,this.maps=Object.create(null)}return(0,w.Z)(e,[{key:"set",value:function(e,t){this.maps[e]=t,this.id+=1}},{key:"get",value:function(e){return this.maps[e]}}]),e}(),k=n(27380),E=n(74126),C=("undefined"==typeof navigator?"undefined":(0,i.Z)(navigator))==="object"&&/Firefox/i.test(navigator.userAgent),O=function(e,t){var 
n=(0,s.useRef)(!1),r=(0,s.useRef)(null),o=(0,s.useRef)({top:e,bottom:t});return o.current.top=e,o.current.bottom=t,function(e){var t=arguments.length>1&&void 0!==arguments[1]&&arguments[1],i=e<0&&o.current.top||e>0&&o.current.bottom;return t&&i?(clearTimeout(r.current),n.current=!1):(!i||n.current)&&(clearTimeout(r.current),n.current=!0,r.current=setTimeout(function(){n.current=!1},50)),!n.current&&i}},j=14/15;function P(){var e=arguments.length>0&&void 0!==arguments[0]?arguments[0]:0,t=arguments.length>1&&void 0!==arguments[1]?arguments[1]:0,n=e/t*100;return isNaN(n)&&(n=0),Math.floor(n=Math.min(n=Math.max(n,20),e/2))}var M=["prefixCls","className","height","itemHeight","fullHeight","style","data","children","itemKey","virtual","direction","scrollWidth","component","onScroll","onVirtualScroll","onVisibleChange","innerProps","extraRender","styles"],N=[],I={overflowY:"auto",overflowAnchor:"none"},R=s.forwardRef(function(e,t){var n,d,g,x,w,R,T,A,_,D,Z,L,z,B,F,H,q,W,K,U,V,G,X,$,Y,Q,J,ee,et,en,er,eo,ei,ea,el,ec=e.prefixCls,es=void 0===ec?"rc-virtual-list":ec,eu=e.className,ed=e.height,ef=e.itemHeight,ep=e.fullHeight,eh=e.style,em=e.data,eg=e.children,ev=e.itemKey,ey=e.virtual,eb=e.direction,ex=e.scrollWidth,ew=e.component,eS=e.onScroll,ek=e.onVirtualScroll,eE=e.onVisibleChange,eC=e.innerProps,eO=e.extraRender,ej=e.styles,eP=(0,c.Z)(e,M),eM=!!(!1!==ey&&ed&&ef),eN=eM&&em&&(ef*em.length>ed||!!ex),eI="rtl"===eb,eR=f()(es,(0,l.Z)({},"".concat(es,"-rtl"),eI),eu),eT=em||N,eA=(0,s.useRef)(),e_=(0,s.useRef)(),eD=(0,s.useState)(0),eZ=(0,a.Z)(eD,2),eL=eZ[0],ez=eZ[1],eB=(0,s.useState)(0),eF=(0,a.Z)(eB,2),eH=eF[0],eq=eF[1],eW=(0,s.useState)(!1),eK=(0,a.Z)(eW,2),eU=eK[0],eV=eK[1],eG=function(){eV(!0)},eX=function(){eV(!1)},e$=s.useCallback(function(e){return"function"==typeof ev?ev(e):null==e?void 0:e[ev]},[ev]);function eY(e){ez(function(t){var n,r=(n="function"==typeof e?e(t):e,Number.isNaN(tf.current)||(n=Math.min(n,tf.current)),n=Math.max(n,0));return 
eA.current.scrollTop=r,r})}var eQ=(0,s.useRef)({start:0,end:eT.length}),eJ=(0,s.useRef)(),e0=(n=s.useState(eT),g=(d=(0,a.Z)(n,2))[0],x=d[1],w=s.useState(null),T=(R=(0,a.Z)(w,2))[0],A=R[1],s.useEffect(function(){var e=function(e,t,n){var r,o,i=e.length,a=t.length;if(0===i&&0===a)return null;i0&&void 0!==arguments[0]&&arguments[0];f();var t=function(){c.current.forEach(function(e,t){if(e&&e.offsetParent){var n=(0,b.Z)(e),r=n.offsetHeight;u.current.get(t)!==r&&u.current.set(t,n.offsetHeight)}}),l(function(e){return e+1})};e?t():d.current=(0,m.Z)(t)}return(0,s.useEffect)(function(){return f},[]),[function(r,o){var i=e(r),a=c.current.get(i);o?(c.current.set(i,o),p()):c.current.delete(i),!a!=!o&&(o?null==t||t(r):null==n||n(r))},p,u.current,i]}(e$,null,null),e6=(0,a.Z)(e2,4),e3=e6[0],e4=e6[1],e5=e6[2],e8=e6[3],e7=s.useMemo(function(){if(!eM)return{scrollHeight:void 0,start:0,end:eT.length-1,offset:void 0};if(!eN)return{scrollHeight:(null===(e=e_.current)||void 0===e?void 0:e.offsetHeight)||0,start:0,end:eT.length-1,offset:void 0};for(var e,t,n,r,o=0,i=eT.length,a=0;a=eL&&void 0===t&&(t=a,n=o),s>eL+ed&&void 0===r&&(r=a),o=s}return void 0===t&&(t=0,n=0,r=Math.ceil(ed/ef)),void 0===r&&(r=eT.length-1),{scrollHeight:o,start:t,end:r=Math.min(r+1,eT.length-1),offset:n}},[eN,eM,eL,eT,e8,ed]),e9=e7.scrollHeight,te=e7.start,tt=e7.end,tn=e7.offset;eQ.current.start=te,eQ.current.end=tt;var tr=s.useState({width:0,height:ed}),to=(0,a.Z)(tr,2),ti=to[0],ta=to[1],tl=(0,s.useRef)(),tc=(0,s.useRef)(),ts=s.useMemo(function(){return P(ti.width,ex)},[ti.width,ex]),tu=s.useMemo(function(){return P(ti.height,e9)},[ti.height,e9]),td=e9-ed,tf=(0,s.useRef)(td);tf.current=td;var tp=eL<=0,th=eL>=td,tm=O(tp,th),tg=function(){return{x:eI?-eH:eH,y:eL}},tv=(0,s.useRef)(tg()),ty=(0,E.zX)(function(){if(ek){var e=tg();(tv.current.x!==e.x||tv.current.y!==e.y)&&(ek(e),tv.current=e)}});function tb(e,t){t?((0,u.flushSync)(function(){eq(e)}),ty()):eY(e)}var tx=function(e){var t=e,n=ex-ti.width;return 
Math.min(t=Math.max(t,0),n)},tw=(0,E.zX)(function(e,t){t?((0,u.flushSync)(function(){eq(function(t){return tx(t+(eI?-e:e))})}),ty()):eY(function(t){return t+e})}),tS=(_=!!ex,D=(0,s.useRef)(0),Z=(0,s.useRef)(null),L=(0,s.useRef)(null),z=(0,s.useRef)(!1),B=O(tp,th),F=(0,s.useRef)(null),H=(0,s.useRef)(null),[function(e){if(eM){m.Z.cancel(H.current),H.current=(0,m.Z)(function(){F.current=null},2);var t,n=e.deltaX,r=e.deltaY,o=e.shiftKey,i=n,a=r;("sx"===F.current||!F.current&&o&&r&&!n)&&(i=r,a=0,F.current="sx");var l=Math.abs(i),c=Math.abs(a);(null===F.current&&(F.current=_&&l>c?"x":"y"),"y"===F.current)?(t=a,m.Z.cancel(Z.current),D.current+=t,L.current=t,B(t)||(C||e.preventDefault(),Z.current=(0,m.Z)(function(){var e=z.current?10:1;tw(D.current*e),D.current=0}))):(tw(i,!0),C||e.preventDefault())}},function(e){eM&&(z.current=e.detail===L.current)}]),tk=(0,a.Z)(tS,2),tE=tk[0],tC=tk[1];q=function(e,t){return!tm(e,t)&&(tE({preventDefault:function(){},deltaY:e}),!0)},K=(0,s.useRef)(!1),U=(0,s.useRef)(0),V=(0,s.useRef)(null),G=(0,s.useRef)(null),X=function(e){if(K.current){var t=Math.ceil(e.touches[0].pageY),n=U.current-t;U.current=t,q(n)&&e.preventDefault(),clearInterval(G.current),G.current=setInterval(function(){(!q(n*=j,!0)||.1>=Math.abs(n))&&clearInterval(G.current)},16)}},$=function(){K.current=!1,W()},Y=function(e){W(),1!==e.touches.length||K.current||(K.current=!0,U.current=Math.ceil(e.touches[0].pageY),V.current=e.target,V.current.addEventListener("touchmove",X),V.current.addEventListener("touchend",$))},W=function(){V.current&&(V.current.removeEventListener("touchmove",X),V.current.removeEventListener("touchend",$))},(0,k.Z)(function(){return eM&&eA.current.addEventListener("touchstart",Y),function(){var e;null===(e=eA.current)||void 0===e||e.removeEventListener("touchstart",Y),W(),clearInterval(G.current)}},[eM]),(0,k.Z)(function(){function e(e){eM&&e.preventDefault()}var t=eA.current;return 
t.addEventListener("wheel",tE),t.addEventListener("DOMMouseScroll",tC),t.addEventListener("MozMousePixelScroll",e),function(){t.removeEventListener("wheel",tE),t.removeEventListener("DOMMouseScroll",tC),t.removeEventListener("MozMousePixelScroll",e)}},[eM]),(0,k.Z)(function(){ex&&eq(function(e){return tx(e)})},[ti.width,ex]);var tO=function(){var e,t;null===(e=tl.current)||void 0===e||e.delayHidden(),null===(t=tc.current)||void 0===t||t.delayHidden()},tj=(Q=function(){return e4(!0)},J=s.useRef(),ee=s.useState(null),en=(et=(0,a.Z)(ee,2))[0],er=et[1],(0,k.Z)(function(){if(en&&en.times<10){if(!eA.current){er(function(e){return(0,o.Z)({},e)});return}Q();var e=en.targetAlign,t=en.originAlign,n=en.index,r=en.offset,i=eA.current.clientHeight,a=!1,l=e,c=null;if(i){for(var s=e||t,u=0,d=0,f=0,p=Math.min(eT.length-1,n),h=0;h<=p;h+=1){var m=e$(eT[h]);d=u;var g=e5.get(m);u=f=d+(void 0===g?ef:g)}for(var v="top"===s?r:i-r,y=p;y>=0;y-=1){var b=e$(eT[y]),x=e5.get(b);if(void 0===x){a=!0;break}if((v-=x)<=0)break}switch(s){case"top":c=d-r;break;case"bottom":c=f-i+r;break;default:var w=eA.current.scrollTop;dw+i&&(l="bottom")}null!==c&&eY(c),c!==en.lastTop&&(a=!0)}a&&er((0,o.Z)((0,o.Z)({},en),{},{times:en.times+1,targetAlign:l,lastTop:c}))}},[en,eA.current]),function(e){if(null==e){tO();return}if(m.Z.cancel(J.current),"number"==typeof e)eY(e);else if(e&&"object"===(0,i.Z)(e)){var t,n=e.align;t="index"in e?e.index:eT.findIndex(function(t){return e$(t)===e.key});var r=e.offset;er({times:0,index:t,offset:void 0===r?0:r,originAlign:n})}});s.useImperativeHandle(t,function(){return{getScrollInfo:tg,scrollTo:function(e){e&&"object"===(0,i.Z)(e)&&("left"in e||"top"in e)?(void 0!==e.left&&eq(tx(e.left)),tj(e.top)):tj(e)}}}),(0,k.Z)(function(){eE&&eE(eT.slice(te,tt+1),eT)},[te,tt,eT]);var tP=(eo=s.useMemo(function(){return[new Map,[]]},[eT,e5.id,ef]),ea=(ei=(0,a.Z)(eo,2))[0],el=ei[1],function(e){var t=arguments.length>1&&void 0!==arguments[1]?arguments[1]:e,n=ea.get(e),r=ea.get(t);if(void 
0===n||void 0===r)for(var o=eT.length,i=el.length;ied&&s.createElement(v,{ref:tl,prefixCls:es,scrollOffset:eL,scrollRange:e9,rtl:eI,onScroll:tb,onStartMove:eG,onStopMove:eX,spinSize:tu,containerSize:ti.height,style:null==ej?void 0:ej.verticalScrollBar,thumbStyle:null==ej?void 0:ej.verticalScrollBarThumb}),eN&&ex&&s.createElement(v,{ref:tc,prefixCls:es,scrollOffset:eH,scrollRange:ex,rtl:eI,onScroll:tb,onStartMove:eG,onStopMove:eX,spinSize:ts,containerSize:ti.width,horizontal:!0,style:null==ej?void 0:ej.horizontalScrollBar,thumbStyle:null==ej?void 0:ej.horizontalScrollBarThumb}))});R.displayName="List";var T=R},6337:function(e,t,n){"use strict";function r(e){return(r="function"==typeof Symbol&&"symbol"==typeof Symbol.iterator?function(e){return typeof e}:function(e){return e&&"function"==typeof Symbol&&e.constructor===Symbol&&e!==Symbol.prototype?"symbol":typeof e})(e)}Object.defineProperty(t,"__esModule",{value:!0}),t.CopyToClipboard=void 0;var o=l(n(2265)),i=l(n(49211)),a=["text","onCopy","options","children"];function l(e){return e&&e.__esModule?e:{default:e}}function c(e,t){var n=Object.keys(e);if(Object.getOwnPropertySymbols){var r=Object.getOwnPropertySymbols(e);t&&(r=r.filter(function(t){return Object.getOwnPropertyDescriptor(e,t).enumerable})),n.push.apply(n,r)}return n}function s(e){for(var t=1;t=0||(o[n]=e[n]);return o}(e,t);if(Object.getOwnPropertySymbols){var i=Object.getOwnPropertySymbols(e);for(r=0;r=0)&&Object.prototype.propertyIsEnumerable.call(e,n)&&(o[n]=e[n])}return o}(e,a),r=o.default.Children.only(t);return o.default.cloneElement(r,s(s({},n),{},{onClick:this.onClick}))}}],function(e,t){for(var n=0;n0;)if(!n.equals(e[r],t[r],r,r,e,t,n))return!1;return!0}function y(e,t){return p(e.getTime(),t.getTime())}function b(e,t,n){if(e.size!==t.size)return!1;for(var r,o,i={},a=e.entries(),l=0;(r=a.next())&&!r.done;){for(var c=t.entries(),s=!1,u=0;(o=c.next())&&!o.done;){var 
d=r.value,f=d[0],p=d[1],h=o.value,m=h[0],g=h[1];!s&&!i[u]&&(s=n.equals(f,m,l,u,e,t,n)&&n.equals(p,g,f,m,e,t,n))&&(i[u]=!0),u++}if(!s)return!1;l++}return!0}function x(e,t,n){var r,o=g(e),i=o.length;if(g(t).length!==i)return!1;for(;i-- >0;)if((r=o[i])===h&&(e.$$typeof||t.$$typeof)&&e.$$typeof!==t.$$typeof||!f(t,r)||!n.equals(e[r],t[r],r,r,e,t,n))return!1;return!0}function w(e,t,n){var r,o,i,a=d(e),l=a.length;if(d(t).length!==l)return!1;for(;l-- >0;)if((r=a[l])===h&&(e.$$typeof||t.$$typeof)&&e.$$typeof!==t.$$typeof||!f(t,r)||!n.equals(e[r],t[r],r,r,e,t,n)||(o=m(e,r),i=m(t,r),(o||i)&&(!o||!i||o.configurable!==i.configurable||o.enumerable!==i.enumerable||o.writable!==i.writable)))return!1;return!0}function S(e,t){return p(e.valueOf(),t.valueOf())}function k(e,t){return e.source===t.source&&e.flags===t.flags}function E(e,t,n){if(e.size!==t.size)return!1;for(var r,o,i={},a=e.values();(r=a.next())&&!r.done;){for(var l=t.values(),c=!1,s=0;(o=l.next())&&!o.done;)!c&&!i[s]&&(c=n.equals(r.value,o.value,r.value,o.value,e,t,n))&&(i[s]=!0),s++;if(!c)return!1}return!0}function C(e,t){var n=e.length;if(t.length!==n)return!1;for(;n-- >0;)if(e[n]!==t[n])return!1;return!0}var O=Array.isArray,j="function"==typeof ArrayBuffer&&ArrayBuffer.isView?ArrayBuffer.isView:null,P=Object.assign,M=Object.prototype.toString.call.bind(Object.prototype.toString),N=I();function I(e){void 0===e&&(e={});var t,n,r,o,i,a,l,c,d,f=e.circular,p=e.createInternalComparator,h=e.createState,m=e.strict,g=(n=(t=function(e){var t=e.circular,n=e.createCustomConfig,r=e.strict,o={areArraysEqual:r?w:v,areDatesEqual:y,areMapsEqual:r?s(b,w):b,areObjectsEqual:r?w:x,arePrimitiveWrappersEqual:S,areRegExpsEqual:k,areSetsEqual:r?s(E,w):E,areTypedArraysEqual:r?w:C};if(n&&(o=P({},o,n(o))),t){var i=u(o.areArraysEqual),a=u(o.areMapsEqual),l=u(o.areObjectsEqual),c=u(o.areSetsEqual);o=P({},o,{areArraysEqual:i,areMapsEqual:a,areObjectsEqual:l,areSetsEqual:c})}return 
o}(e)).areArraysEqual,r=t.areDatesEqual,o=t.areMapsEqual,i=t.areObjectsEqual,a=t.arePrimitiveWrappersEqual,l=t.areRegExpsEqual,c=t.areSetsEqual,d=t.areTypedArraysEqual,function(e,t,s){if(e===t)return!0;if(null==e||null==t||"object"!=typeof e||"object"!=typeof t)return e!=e&&t!=t;var u=e.constructor;if(u!==t.constructor)return!1;if(u===Object)return i(e,t,s);if(O(e))return n(e,t,s);if(null!=j&&j(e))return d(e,t,s);if(u===Date)return r(e,t,s);if(u===RegExp)return l(e,t,s);if(u===Map)return o(e,t,s);if(u===Set)return c(e,t,s);var f=M(e);return"[object Date]"===f?r(e,t,s):"[object RegExp]"===f?l(e,t,s):"[object Map]"===f?o(e,t,s):"[object Set]"===f?c(e,t,s):"[object Object]"===f?"function"!=typeof e.then&&"function"!=typeof t.then&&i(e,t,s):"[object Arguments]"===f?i(e,t,s):("[object Boolean]"===f||"[object Number]"===f||"[object String]"===f)&&a(e,t,s)}),N=p?p(g):function(e,t,n,r,o,i,a){return g(e,t,a)};return function(e){var t=e.circular,n=e.comparator,r=e.createState,o=e.equals,i=e.strict;if(r)return function(e,a){var l=r(),c=l.cache;return n(e,a,{cache:void 0===c?t?new WeakMap:void 0:c,equals:o,meta:l.meta,strict:i})};if(t)return function(e,t){return n(e,t,{cache:new WeakMap,equals:o,meta:void 0,strict:i})};var a={cache:void 0,equals:o,meta:void 0,strict:i};return function(e,t){return n(e,t,a)}}({circular:void 0!==f&&f,comparator:g,createState:h,equals:N,strict:void 0!==m&&m})}function R(e){var t=arguments.length>1&&void 0!==arguments[1]?arguments[1]:0,n=-1;requestAnimationFrame(function r(o){if(n<0&&(n=o),o-n>t)e(o),n=-1;else{var i;i=r,"undefined"!=typeof requestAnimationFrame&&requestAnimationFrame(i)}})}function T(e){return(T="function"==typeof Symbol&&"symbol"==typeof Symbol.iterator?function(e){return typeof e}:function(e){return e&&"function"==typeof Symbol&&e.constructor===Symbol&&e!==Symbol.prototype?"symbol":typeof e})(e)}function A(e,t){(null==t||t>e.length)&&(t=e.length);for(var n=0,r=Array(t);ne.length)&&(t=e.length);for(var 
n=0,r=Array(t);n=0&&e<=1}),"[configBezier]: arguments should be x1, y1, x2, y2 of [0, 1] instead received %s",r);var f=Q(i,l),p=Q(a,c),h=(e=i,t=l,function(n){var r;return Y([].concat(function(e){if(Array.isArray(e))return X(e)}(r=$(e,t).map(function(e,t){return e*t}).slice(1))||function(e){if("undefined"!=typeof Symbol&&null!=e[Symbol.iterator]||null!=e["@@iterator"])return Array.from(e)}(r)||G(r)||function(){throw TypeError("Invalid attempt to spread non-iterable instance.\nIn order to be iterable, non-array objects must have a [Symbol.iterator]() method.")}(),[0]),n)}),m=function(e){for(var t=e>1?1:e,n=t,r=0;r<8;++r){var o,i=f(n)-t,a=h(n);if(1e-4>Math.abs(i-t)||a<1e-4)break;n=(o=n-i/a)>1?1:o<0?0:o}return p(n)};return m.isStepper=!1,m},ee=function(){var e=arguments.length>0&&void 0!==arguments[0]?arguments[0]:{},t=e.stiff,n=void 0===t?100:t,r=e.damping,o=void 0===r?8:r,i=e.dt,a=void 0===i?17:i,l=function(e,t,r){var i=r+(-(e-t)*n-r*o)*a/1e3,l=r*a/1e3+e;return 1e-4>Math.abs(l-t)&&1e-4>Math.abs(i)?[t,0]:[l,i]};return l.isStepper=!0,l.dt=a,l},et=function(){for(var e=arguments.length,t=Array(e),n=0;ne.length)&&(t=e.length);for(var n=0,r=Array(t);ne.length)&&(t=e.length);for(var n=0,r=Array(t);n0?n[o-1]:r,f=s||Object.keys(c);if("function"==typeof l||"spring"===l)return[].concat(em(e),[t.runJSAnimation.bind(t,{from:d.style,to:c,duration:i,easing:l}),i]);var p=U(f,i,l),h=ey(ey(ey({},d.style),c),{},{transition:p});return[].concat(em(e),[h,i,u]).filter(H)},[a,Math.max(void 0===l?0:l,r)])),[e.onAnimationEnd]))}},{key:"runAnimation",value:function(e){if(!this.manager){var t,n,r;this.manager=(t=function(){return null},n=!1,r=function e(r){if(!n){if(Array.isArray(r)){if(!r.length)return;var o=function(e){if(Array.isArray(e))return e}(r)||function(e){if("undefined"!=typeof Symbol&&null!=e[Symbol.iterator]||null!=e["@@iterator"])return Array.from(e)}(r)||function(e,t){if(e){if("string"==typeof e)return A(e,void 0);var 
n=Object.prototype.toString.call(e).slice(8,-1);if("Object"===n&&e.constructor&&(n=e.constructor.name),"Map"===n||"Set"===n)return Array.from(e);if("Arguments"===n||/^(?:Ui|I)nt(?:8|16|32)(?:Clamped)?Array$/.test(n))return A(e,void 0)}}(r)||function(){throw TypeError("Invalid attempt to destructure non-iterable instance.\nIn order to be iterable, non-array objects must have a [Symbol.iterator]() method.")}(),i=o[0],a=o.slice(1);if("number"==typeof i){R(e.bind(null,a),i);return}e(i),R(e.bind(null,a));return}"object"===T(r)&&t(r),"function"==typeof r&&r()}},{stop:function(){n=!0},start:function(e){n=!1,r(e)},subscribe:function(e){return t=e,function(){t=function(){return null}}}})}var o=e.begin,i=e.duration,a=e.attributeName,l=e.to,c=e.easing,s=e.onAnimationStart,u=e.onAnimationEnd,d=e.steps,f=e.children,p=this.manager;if(this.unSubscribe=p.subscribe(this.handleStyleChange),"function"==typeof c||"function"==typeof f||"spring"===c){this.runJSAnimation(e);return}if(d.length>1){this.runStepAnimation(e);return}var h=a?eb({},a,l):l,m=U(Object.keys(h),i,c);p.start([s,o,ey(ey({},h),{},{transition:m}),i,u])}},{key:"render",value:function(){var e=this.props,t=e.children,n=(e.begin,e.duration),o=(e.attributeName,e.easing,e.isActive),i=(e.steps,e.from,e.to,e.canBegin,e.onAnimationEnd,e.shouldReAnimate,e.onAnimationReStart,function(e,t){if(null==e)return{};var n,r,o=function(e,t){if(null==e)return{};var n,r,o={},i=Object.keys(e);for(r=0;r=0||(o[n]=e[n]);return o}(e,t);if(Object.getOwnPropertySymbols){var i=Object.getOwnPropertySymbols(e);for(r=0;r=0)&&Object.prototype.propertyIsEnumerable.call(e,n)&&(o[n]=e[n])}return o}(e,eh)),a=r.Children.count(t),l=K(this.state.style);if("function"==typeof t)return t(l);if(!o||0===a||n<=0)return t;var c=function(e){var t=e.props,n=t.style,o=t.className;return(0,r.cloneElement)(e,ey(ey({},i),{},{style:ey(ey({},void 0===n?{}:n),l),className:o}))};return 1===a?c(r.Children.only(t)):r.createElement("div",null,r.Children.map(t,function(e){return 
c(e)}))}}],function(e,t){for(var n=0;n0&&void 0!==arguments[0]?arguments[0]:{},t=e.steps,n=e.duration;return t&&t.length?t.reduce(function(e,t){return e+(Number.isFinite(t.duration)&&t.duration>0?t.duration:0)},0):Number.isFinite(n)?n:0},eL=function(e){!function(e,t){if("function"!=typeof t&&null!==t)throw TypeError("Super expression must either be null or a function");e.prototype=Object.create(t&&t.prototype,{constructor:{value:e,writable:!0,configurable:!0}}),Object.defineProperty(e,"prototype",{writable:!1}),t&&eR(e,t)}(i,e);var t,n,o=(t=function(){if("undefined"==typeof Reflect||!Reflect.construct||Reflect.construct.sham)return!1;if("function"==typeof Proxy)return!0;try{return Boolean.prototype.valueOf.call(Reflect.construct(Boolean,[],function(){})),!0}catch(e){return!1}}(),function(){var e,n=eA(i);return e=t?Reflect.construct(n,arguments,eA(this).constructor):n.apply(this,arguments),function(e,t){if(t&&("object"===eP(t)||"function"==typeof t))return t;if(void 0!==t)throw TypeError("Derived constructors may only return object or undefined");return eT(e)}(this,e)});function i(){var e;return!function(e,t){if(!(e instanceof t))throw TypeError("Cannot call a class as a function")}(this,i),e_(eT(e=o.call(this)),"handleEnter",function(t,n){var r=e.props,o=r.appearOptions,i=r.enterOptions;e.handleStyleActive(n?o:i)}),e_(eT(e),"handleExit",function(){var t=e.props.leaveOptions;e.handleStyleActive(t)}),e.state={isActive:!1},e}return n=[{key:"handleStyleActive",value:function(e){if(e){var t=e.onAnimationEnd?function(){e.onAnimationEnd()}:null;this.setState(eI(eI({},e),{},{onAnimationEnd:t,isActive:!0}))}}},{key:"parseTimeout",value:function(){var e=this.props,t=e.appearOptions,n=e.enterOptions,r=e.leaveOptions;return eZ(t)+eZ(n)+eZ(r)}},{key:"render",value:function(){var e=this,t=this.props,n=t.children,o=(t.appearOptions,t.enterOptions,t.leaveOptions,function(e,t){if(null==e)return{};var n,r,o=function(e,t){if(null==e)return{};var 
n,r,o={},i=Object.keys(e);for(r=0;r=0||(o[n]=e[n]);return o}(e,t);if(Object.getOwnPropertySymbols){var i=Object.getOwnPropertySymbols(e);for(r=0;r=0)&&Object.prototype.propertyIsEnumerable.call(e,n)&&(o[n]=e[n])}return o}(t,ej));return r.createElement(eO.Transition,eM({},o,{onEnter:this.handleEnter,onExit:this.handleExit,timeout:this.parseTimeout()}),function(){return r.createElement(eC,e.state,r.Children.only(n))})}}],function(e,t){for(var n=0;n code':{position:"relative",zIndex:"1",borderLeft:"10px solid #358ccb",boxShadow:"-1px 0px 0px 0px #358ccb, 0px 0px 0px 1px #dfdfdf",backgroundColor:"#fdfdfd",backgroundImage:"linear-gradient(transparent 50%, rgba(69, 142, 209, 0.04) 50%)",backgroundSize:"3em 3em",backgroundOrigin:"content-box",backgroundAttachment:"local"},':not(pre) > code[class*="language-"]':{backgroundColor:"#fdfdfd",WebkitBoxSizing:"border-box",MozBoxSizing:"border-box",boxSizing:"border-box",marginBottom:"1em",position:"relative",padding:".2em",borderRadius:"0.3em",color:"#c92c2c",border:"1px solid rgba(0, 0, 0, 0.1)",display:"inline",whiteSpace:"normal"},'pre[class*="language-"]:before':{content:"''",display:"block",position:"absolute",bottom:"0.75em",left:"0.18em",width:"40%",height:"20%",maxHeight:"13em",boxShadow:"0px 13px 8px #979797",WebkitTransform:"rotate(-2deg)",MozTransform:"rotate(-2deg)",msTransform:"rotate(-2deg)",OTransform:"rotate(-2deg)",transform:"rotate(-2deg)"},'pre[class*="language-"]:after':{content:"''",display:"block",position:"absolute",bottom:"0.75em",left:"auto",width:"40%",height:"20%",maxHeight:"13em",boxShadow:"0px 13px 8px 
#979797",WebkitTransform:"rotate(2deg)",MozTransform:"rotate(2deg)",msTransform:"rotate(2deg)",OTransform:"rotate(2deg)",transform:"rotate(2deg)",right:"0.75em"},comment:{color:"#7D8B99"},"block-comment":{color:"#7D8B99"},prolog:{color:"#7D8B99"},doctype:{color:"#7D8B99"},cdata:{color:"#7D8B99"},punctuation:{color:"#5F6364"},property:{color:"#c92c2c"},tag:{color:"#c92c2c"},boolean:{color:"#c92c2c"},number:{color:"#c92c2c"},"function-name":{color:"#c92c2c"},constant:{color:"#c92c2c"},symbol:{color:"#c92c2c"},deleted:{color:"#c92c2c"},selector:{color:"#2f9c0a"},"attr-name":{color:"#2f9c0a"},string:{color:"#2f9c0a"},char:{color:"#2f9c0a"},function:{color:"#2f9c0a"},builtin:{color:"#2f9c0a"},inserted:{color:"#2f9c0a"},operator:{color:"#a67f59",background:"rgba(255, 255, 255, 0.5)"},entity:{color:"#a67f59",background:"rgba(255, 255, 255, 0.5)",cursor:"help"},url:{color:"#a67f59",background:"rgba(255, 255, 255, 0.5)"},variable:{color:"#a67f59",background:"rgba(255, 255, 255, 0.5)"},atrule:{color:"#1990b8"},"attr-value":{color:"#1990b8"},keyword:{color:"#1990b8"},"class-name":{color:"#1990b8"},regex:{color:"#e90"},important:{color:"#e90",fontWeight:"normal"},".language-css .token.string":{color:"#a67f59",background:"rgba(255, 255, 255, 0.5)"},".style .token.string":{color:"#a67f59",background:"rgba(255, 255, 255, 0.5)"},bold:{fontWeight:"bold"},italic:{fontStyle:"italic"},namespace:{Opacity:".7"},'pre[class*="language-"].line-numbers.line-numbers':{paddingLeft:"0"},'pre[class*="language-"].line-numbers.line-numbers code':{paddingLeft:"3.8em"},'pre[class*="language-"].line-numbers.line-numbers .line-numbers-rows':{left:"0"},'pre[class*="language-"][data-line]':{paddingTop:"0",paddingBottom:"0",paddingLeft:"0"},"pre[data-line] code":{position:"relative",paddingLeft:"4em"},"pre .line-highlight":{marginTop:"0"}}},33664:function(e,t,n){"use strict";t.__esModule=!0,t.default=void 0,function(e){if(!e||!e.__esModule){var t={};if(null!=e){for(var n in 
e)if(Object.prototype.hasOwnProperty.call(e,n)){var r=Object.defineProperty&&Object.getOwnPropertyDescriptor?Object.getOwnPropertyDescriptor(e,n):{};r.get||r.set?Object.defineProperty(t,n,r):t[n]=e[n]}}t.default=e}}(n(40718));var r=l(n(84617)),o=l(n(67973)),i=l(n(2265)),a=l(n(20536));function l(e){return e&&e.__esModule?e:{default:e}}function c(){return(c=Object.assign||function(e){for(var t=1;t=0||(o[n]=e[n]);return o}(e,["children","in"]),a=r.default.Children.toArray(t),l=a[0],c=a[1];return delete o.onEnter,delete o.onEntering,delete o.onEntered,delete o.onExit,delete o.onExiting,delete o.onExited,r.default.createElement(i.default,o,n?r.default.cloneElement(l,{key:"first",onEnter:this.handleEnter,onEntering:this.handleEntering,onEntered:this.handleEntered}):r.default.cloneElement(c,{key:"second",onEnter:this.handleExit,onEntering:this.handleExiting,onEntered:this.handleExited}))},t}(r.default.Component);l.propTypes={},t.default=l,e.exports=t.default},20536:function(e,t,n){"use strict";t.__esModule=!0,t.default=t.EXITING=t.ENTERED=t.ENTERING=t.EXITED=t.UNMOUNTED=void 0;var r=function(e){if(e&&e.__esModule)return e;var t={};if(null!=e){for(var n in e)if(Object.prototype.hasOwnProperty.call(e,n)){var r=Object.defineProperty&&Object.getOwnPropertyDescriptor?Object.getOwnPropertyDescriptor(e,n):{};r.get||r.set?Object.defineProperty(t,n,r):t[n]=e[n]}}return t.default=e,t}(n(40718)),o=l(n(2265)),i=l(n(54887)),a=n(52181);function l(e){return e&&e.__esModule?e:{default:e}}n(32601);var c="unmounted";t.UNMOUNTED=c;var s="exited";t.EXITED=s;var u="entering";t.ENTERING=u;var d="entered";t.ENTERED=d;var f="exiting";t.EXITING=f;var p=function(e){function t(t,n){r=e.call(this,t,n)||this;var r,o,i=n.transitionGroup,a=i&&!i.isMounting?t.enter:t.appear;return r.appearStatus=null,t.in?a?(o=s,r.appearStatus=u):o=d:o=t.unmountOnExit||t.mountOnEnter?c:s,r.state={status:o},r.nextCallback=null,r}t.prototype=Object.create(e.prototype),t.prototype.constructor=t,t.__proto__=e;var 
n=t.prototype;return n.getChildContext=function(){return{transitionGroup:null}},t.getDerivedStateFromProps=function(e,t){return e.in&&t.status===c?{status:s}:null},n.componentDidMount=function(){this.updateStatus(!0,this.appearStatus)},n.componentDidUpdate=function(e){var t=null;if(e!==this.props){var n=this.state.status;this.props.in?n!==u&&n!==d&&(t=u):(n===u||n===d)&&(t=f)}this.updateStatus(!1,t)},n.componentWillUnmount=function(){this.cancelNextCallback()},n.getTimeouts=function(){var e,t,n,r=this.props.timeout;return e=t=n=r,null!=r&&"number"!=typeof r&&(e=r.exit,t=r.enter,n=void 0!==r.appear?r.appear:t),{exit:e,enter:t,appear:n}},n.updateStatus=function(e,t){if(void 0===e&&(e=!1),null!==t){this.cancelNextCallback();var n=i.default.findDOMNode(this);t===u?this.performEnter(n,e):this.performExit(n)}else this.props.unmountOnExit&&this.state.status===s&&this.setState({status:c})},n.performEnter=function(e,t){var n=this,r=this.props.enter,o=this.context.transitionGroup?this.context.transitionGroup.isMounting:t,i=this.getTimeouts(),a=o?i.appear:i.enter;if(!t&&!r){this.safeSetState({status:d},function(){n.props.onEntered(e)});return}this.props.onEnter(e,o),this.safeSetState({status:u},function(){n.props.onEntering(e,o),n.onTransitionEnd(e,a,function(){n.safeSetState({status:d},function(){n.props.onEntered(e,o)})})})},n.performExit=function(e){var t=this,n=this.props.exit,r=this.getTimeouts();if(!n){this.safeSetState({status:s},function(){t.props.onExited(e)});return}this.props.onExit(e),this.safeSetState({status:f},function(){t.props.onExiting(e),t.onTransitionEnd(e,r.exit,function(){t.safeSetState({status:s},function(){t.props.onExited(e)})})})},n.cancelNextCallback=function(){null!==this.nextCallback&&(this.nextCallback.cancel(),this.nextCallback=null)},n.safeSetState=function(e,t){t=this.setNextCallback(t),this.setState(e,t)},n.setNextCallback=function(e){var t=this,n=!0;return 
this.nextCallback=function(r){n&&(n=!1,t.nextCallback=null,e(r))},this.nextCallback.cancel=function(){n=!1},this.nextCallback},n.onTransitionEnd=function(e,t,n){this.setNextCallback(n);var r=null==t&&!this.props.addEndListener;if(!e||r){setTimeout(this.nextCallback,0);return}this.props.addEndListener&&this.props.addEndListener(e,this.nextCallback),null!=t&&setTimeout(this.nextCallback,t)},n.render=function(){var e=this.state.status;if(e===c)return null;var t=this.props,n=t.children,r=function(e,t){if(null==e)return{};var n,r,o={},i=Object.keys(e);for(r=0;r=0||(o[n]=e[n]);return o}(t,["children"]);if(delete r.in,delete r.mountOnEnter,delete r.unmountOnExit,delete r.appear,delete r.enter,delete r.exit,delete r.timeout,delete r.addEndListener,delete r.onEnter,delete r.onEntering,delete r.onEntered,delete r.onExit,delete r.onExiting,delete r.onExited,"function"==typeof n)return n(e,r);var i=o.default.Children.only(n);return o.default.cloneElement(i,r)},t}(o.default.Component);function h(){}p.contextTypes={transitionGroup:r.object},p.childContextTypes={transitionGroup:function(){}},p.propTypes={},p.defaultProps={in:!1,mountOnEnter:!1,unmountOnExit:!1,appear:!1,enter:!0,exit:!0,onEnter:h,onEntering:h,onEntered:h,onExit:h,onExiting:h,onExited:h},p.UNMOUNTED=0,p.EXITED=1,p.ENTERING=2,p.ENTERED=3,p.EXITING=4;var m=(0,a.polyfill)(p);t.default=m},38244:function(e,t,n){"use strict";t.__esModule=!0,t.default=void 0;var r=l(n(40718)),o=l(n(2265)),i=n(52181),a=n(28710);function l(e){return e&&e.__esModule?e:{default:e}}function c(){return(c=Object.assign||function(e){for(var t=1;t=0||(o[n]=e[n]);return o}(e,["component","childFactory"]),i=u(this.state.children).map(n);return(delete r.appear,delete r.enter,delete r.exit,null===t)?i:o.default.createElement(t,r,i)},t}(o.default.Component);d.childContextTypes={transitionGroup:r.default.object.isRequired},d.propTypes={},d.defaultProps={component:"div",childFactory:function(e){return e}};var 
f=(0,i.polyfill)(d);t.default=f,e.exports=t.default},30719:function(e,t,n){"use strict";var r=l(n(33664)),o=l(n(31601)),i=l(n(38244)),a=l(n(20536));function l(e){return e&&e.__esModule?e:{default:e}}e.exports={Transition:a.default,TransitionGroup:i.default,ReplaceTransition:o.default,CSSTransition:r.default}},28710:function(e,t,n){"use strict";t.__esModule=!0,t.getChildMapping=o,t.mergeChildMappings=i,t.getInitialChildMapping=function(e,t){return o(e.children,function(n){return(0,r.cloneElement)(n,{onExited:t.bind(null,n),in:!0,appear:a(n,"appear",e),enter:a(n,"enter",e),exit:a(n,"exit",e)})})},t.getNextChildMapping=function(e,t,n){var l=o(e.children),c=i(t,l);return Object.keys(c).forEach(function(o){var i=c[o];if((0,r.isValidElement)(i)){var s=o in t,u=o in l,d=t[o],f=(0,r.isValidElement)(d)&&!d.props.in;u&&(!s||f)?c[o]=(0,r.cloneElement)(i,{onExited:n.bind(null,i),in:!0,exit:a(i,"exit",e),enter:a(i,"enter",e)}):u||!s||f?u&&s&&(0,r.isValidElement)(d)&&(c[o]=(0,r.cloneElement)(i,{onExited:n.bind(null,i),in:d.props.in,exit:a(i,"exit",e),enter:a(i,"enter",e)})):c[o]=(0,r.cloneElement)(i,{in:!1})}}),c};var r=n(2265);function o(e,t){var n=Object.create(null);return e&&r.Children.map(e,function(e){return e}).forEach(function(e){n[e.key]=t&&(0,r.isValidElement)(e)?t(e):e}),n}function i(e,t){function n(n){return n in t?t[n]:e[n]}e=e||{},t=t||{};var r,o=Object.create(null),i=[];for(var a in e)a in t?i.length&&(o[a]=i,i=[]):i.push(a);var l={};for(var c in t){if(o[c])for(r=0;r=0||(o[n]=e[n]);return o}(e,t);if(Object.getOwnPropertySymbols){var i=Object.getOwnPropertySymbols(e);for(r=0;r=0)&&Object.prototype.propertyIsEnumerable.call(e,n)&&(o[n]=e[n])}return o}(e,w),i=parseInt("".concat(n),10),a=parseInt("".concat(r),10),l=parseInt("".concat(t.height||o.height),10),c=parseInt("".concat(t.width||o.width),10);return E(E(E(E(E({},t),o),i?{x:i}:{}),a?{y:a}:{}),{},{height:l,width:c,name:t.name,radius:t.radius})}function O(e){return 
r.createElement(b.bn,S({shapeType:"rectangle",propTransformer:C,activeClassName:"recharts-active-bar"},e))}var j=["value","background"];function P(e){return(P="function"==typeof Symbol&&"symbol"==typeof Symbol.iterator?function(e){return typeof e}:function(e){return e&&"function"==typeof Symbol&&e.constructor===Symbol&&e!==Symbol.prototype?"symbol":typeof e})(e)}function M(){return(M=Object.assign?Object.assign.bind():function(e){for(var t=1;t=0||(o[n]=e[n]);return o}(e,t);if(Object.getOwnPropertySymbols){var i=Object.getOwnPropertySymbols(e);for(r=0;r=0)&&Object.prototype.propertyIsEnumerable.call(e,n)&&(o[n]=e[n])}return o}(t,j);if(!l)return null;var s=I(I(I(I(I({},c),{},{fill:"#eee"},l),a),(0,y.bw)(e.props,t,n)),{},{onAnimationStart:e.handleAnimationStart,onAnimationEnd:e.handleAnimationEnd,dataKey:o,index:n,key:"background-bar-".concat(n),className:"recharts-bar-background-rectangle"});return r.createElement(O,M({option:e.props.background,isActive:n===i},s))})}},{key:"renderErrorBar",value:function(e,t){if(this.props.isAnimationActive&&!this.state.isAnimationFinished)return null;var n=this.props,o=n.data,i=n.xAxis,a=n.yAxis,l=n.layout,c=n.children,s=(0,m.NN)(c,d.W);if(!s)return null;var f="vertical"===l?o[0].height/2:o[0].width/2,p=function(e,t){var n=Array.isArray(e.value)?e.value[1]:e.value;return{x:e.x,y:e.y,value:n,errorVal:(0,v.F$)(e,t)}};return r.createElement(u.m,{clipPath:e?"url(#clipPath-".concat(t,")"):null},s.map(function(e){return r.cloneElement(e,{key:"error-bar-".concat(t,"-").concat(e.props.dataKey),data:o,xAxis:i,yAxis:a,layout:l,offset:f,dataPointFormatter:p})}))}},{key:"render",value:function(){var e=this.props,t=e.hide,n=e.data,i=e.className,a=e.xAxis,l=e.yAxis,c=e.left,d=e.top,f=e.width,h=e.height,m=e.isAnimationActive,g=e.background,v=e.id;if(t||!n||!n.length)return null;var y=this.state.isAnimationFinished,b=(0,o.Z)("recharts-bar",i),x=a&&a.allowDataOverflow,w=l&&l.allowDataOverflow,S=x||w,k=s()(v)?this.id:v;return 
r.createElement(u.m,{className:b},x||w?r.createElement("defs",null,r.createElement("clipPath",{id:"clipPath-".concat(k)},r.createElement("rect",{x:x?c:c-f/2,y:w?d:d-h/2,width:x?f:2*f,height:w?h:2*h}))):null,r.createElement(u.m,{className:"recharts-bar-rectangles",clipPath:S?"url(#clipPath-".concat(k,")"):null},g?this.renderBackground():null,this.renderRectangles()),this.renderErrorBar(S,k),(!m||y)&&p.e.renderCallByParent(this.props,n))}}],a=[{key:"getDerivedStateFromProps",value:function(e,t){return e.animationId!==t.prevAnimationId?{prevAnimationId:e.animationId,curData:e.data,prevData:t.curData}:e.data!==t.curData?{curData:e.data}:null}}],n&&R(f.prototype,n),a&&R(f,a),Object.defineProperty(f,"prototype",{writable:!1}),f}(r.PureComponent);D(L,"displayName","Bar"),D(L,"defaultProps",{xAxisId:0,yAxisId:0,legendType:"rect",minPointSize:0,hide:!1,data:[],layout:"vertical",activeBar:!0,isAnimationActive:!g.x.isSsr,animationBegin:0,animationDuration:400,animationEasing:"ease"}),D(L,"getComposedData",function(e){var t=e.props,n=e.item,r=e.barPosition,o=e.bandSize,i=e.xAxis,a=e.yAxis,l=e.xAxisTicks,c=e.yAxisTicks,s=e.stackedData,u=e.dataStartIndex,d=e.displayedData,p=e.offset,g=(0,v.Bu)(r,n);if(!g)return null;var y=t.layout,b=n.props,x=b.dataKey,w=b.children,S=b.minPointSize,k="horizontal"===y?a:i,E=s?k.scale.domain():null,C=(0,v.Yj)({numericAxis:k}),O=(0,m.NN)(w,f.b),j=d.map(function(e,t){var r,d,f,p,m,b;if(s?r=(0,v.Vv)(s[u+t],E):Array.isArray(r=(0,v.F$)(e,x))||(r=[C,r]),"horizontal"===y){var w,k=[a.scale(r[0]),a.scale(r[1])],j=k[0],P=k[1];d=(0,v.Fy)({axis:i,ticks:l,bandSize:o,offset:g.offset,entry:e,index:t}),f=null!==(w=null!=P?P:j)&&void 0!==w?w:void 0,p=g.size;var M=j-P;if(m=Number.isNaN(M)?0:M,b={x:d,y:a.y,width:p,height:a.height},Math.abs(S)>0&&Math.abs(m)0&&Math.abs(p)=0||(o[n]=e[n]);return o}(e,t);if(Object.getOwnPropertySymbols){var i=Object.getOwnPropertySymbols(e);for(r=0;r=0)&&Object.prototype.propertyIsEnumerable.call(e,n)&&(o[n]=e[n])}return o}function 
C(e,t){for(var n=0;n0?this.props:h)),o<=0||a<=0||!m||!m.length)?null:r.createElement(u.m,{className:(0,c.Z)("recharts-cartesian-axis",s),ref:function(t){e.layerReference=t}},n&&this.renderAxisLine(),this.renderTicks(m,this.state.fontSize,this.state.letterSpacing),f._.renderCallByParent(this.props))}}],o=[{key:"renderTickItem",value:function(e,t,n){return r.isValidElement(e)?r.cloneElement(e,t):i()(e)?e(t):r.createElement(d.x,w({},t,{className:"recharts-cartesian-axis-tick-value"}),n)}}],n&&C(S.prototype,n),o&&C(S,o),Object.defineProperty(S,"prototype",{writable:!1}),S}(r.Component);P(N,"displayName","CartesianAxis"),P(N,"defaultProps",{x:0,y:0,width:0,height:0,viewBox:{x:0,y:0,width:0,height:0},orientation:"bottom",ticks:[],stroke:"#666",tickLine:!0,axisLine:!0,tick:!0,mirror:!1,minTickGap:5,tickSize:6,tickMargin:2,interval:"preserveEnd"})},56940:function(e,t,n){"use strict";n.d(t,{q:function(){return M}});var r=n(2265),o=n(86757),i=n.n(o),a=n(1175),l=n(16630),c=n(82944),s=n(85355),u=n(78242),d=n(80285),f=n(25739),p=["x1","y1","x2","y2","key"],h=["offset"];function m(e){return(m="function"==typeof Symbol&&"symbol"==typeof Symbol.iterator?function(e){return typeof e}:function(e){return e&&"function"==typeof Symbol&&e.constructor===Symbol&&e!==Symbol.prototype?"symbol":typeof e})(e)}function g(e,t){var n=Object.keys(e);if(Object.getOwnPropertySymbols){var r=Object.getOwnPropertySymbols(e);t&&(r=r.filter(function(t){return Object.getOwnPropertyDescriptor(e,t).enumerable})),n.push.apply(n,r)}return n}function v(e){for(var t=1;t=0||(o[n]=e[n]);return o}(e,t);if(Object.getOwnPropertySymbols){var i=Object.getOwnPropertySymbols(e);for(r=0;r=0)&&Object.prototype.propertyIsEnumerable.call(e,n)&&(o[n]=e[n])}return o}var x=function(e){var t=e.fill;if(!t||"none"===t)return null;var n=e.fillOpacity,o=e.x,i=e.y,a=e.width,l=e.height;return r.createElement("rect",{x:o,y:i,width:a,height:l,stroke:"none",fill:t,fillOpacity:n,className:"recharts-cartesian-grid-bg"})};function 
w(e,t){var n;if(r.isValidElement(e))n=r.cloneElement(e,t);else if(i()(e))n=e(t);else{var o=t.x1,a=t.y1,l=t.x2,s=t.y2,u=t.key,d=b(t,p),f=(0,c.L6)(d,!1),m=(f.offset,b(f,h));n=r.createElement("line",y({},m,{x1:o,y1:a,x2:l,y2:s,fill:"none",key:u}))}return n}function S(e){var t=e.x,n=e.width,o=e.horizontal,i=void 0===o||o,a=e.horizontalPoints;if(!i||!a||!a.length)return null;var l=a.map(function(r,o){return w(i,v(v({},e),{},{x1:t,y1:r,x2:t+n,y2:r,key:"line-".concat(o),index:o}))});return r.createElement("g",{className:"recharts-cartesian-grid-horizontal"},l)}function k(e){var t=e.y,n=e.height,o=e.vertical,i=void 0===o||o,a=e.verticalPoints;if(!i||!a||!a.length)return null;var l=a.map(function(r,o){return w(i,v(v({},e),{},{x1:r,y1:t,x2:r,y2:t+n,key:"line-".concat(o),index:o}))});return r.createElement("g",{className:"recharts-cartesian-grid-vertical"},l)}function E(e){var t=e.horizontalFill,n=e.fillOpacity,o=e.x,i=e.y,a=e.width,l=e.height,c=e.horizontalPoints,s=e.horizontal;if(!(void 0===s||s)||!t||!t.length)return null;var u=c.map(function(e){return Math.round(e+i-i)}).sort(function(e,t){return e-t});i!==u[0]&&u.unshift(0);var d=u.map(function(e,c){var s=u[c+1]?u[c+1]-e:i+l-e;if(s<=0)return null;var d=c%t.length;return r.createElement("rect",{key:"react-".concat(c),y:e,x:o,height:s,width:a,stroke:"none",fill:t[d],fillOpacity:n,className:"recharts-cartesian-grid-bg"})});return r.createElement("g",{className:"recharts-cartesian-gridstripes-horizontal"},d)}function C(e){var t=e.vertical,n=e.verticalFill,o=e.fillOpacity,i=e.x,a=e.y,l=e.width,c=e.height,s=e.verticalPoints;if(!(void 0===t||t)||!n||!n.length)return null;var u=s.map(function(e){return Math.round(e+i-i)}).sort(function(e,t){return e-t});i!==u[0]&&u.unshift(0);var d=u.map(function(e,t){var s=u[t+1]?u[t+1]-e:i+l-e;if(s<=0)return null;var d=t%n.length;return r.createElement("rect",{key:"react-".concat(t),x:e,y:a,width:s,height:c,stroke:"none",fill:n[d],fillOpacity:o,className:"recharts-cartesian-grid-bg"})});return 
r.createElement("g",{className:"recharts-cartesian-gridstripes-vertical"},d)}var O=function(e,t){var n=e.xAxis,r=e.width,o=e.height,i=e.offset;return(0,s.Rf)((0,u.f)(v(v(v({},d.O.defaultProps),n),{},{ticks:(0,s.uY)(n,!0),viewBox:{x:0,y:0,width:r,height:o}})),i.left,i.left+i.width,t)},j=function(e,t){var n=e.yAxis,r=e.width,o=e.height,i=e.offset;return(0,s.Rf)((0,u.f)(v(v(v({},d.O.defaultProps),n),{},{ticks:(0,s.uY)(n,!0),viewBox:{x:0,y:0,width:r,height:o}})),i.top,i.top+i.height,t)},P={horizontal:!0,vertical:!0,stroke:"#ccc",fill:"none",verticalFill:[],horizontalFill:[]};function M(e){var t,n,o,c,s,u,d=(0,f.zn)(),p=(0,f.Mw)(),h=(0,f.qD)(),g=v(v({},e),{},{stroke:null!==(t=e.stroke)&&void 0!==t?t:P.stroke,fill:null!==(n=e.fill)&&void 0!==n?n:P.fill,horizontal:null!==(o=e.horizontal)&&void 0!==o?o:P.horizontal,horizontalFill:null!==(c=e.horizontalFill)&&void 0!==c?c:P.horizontalFill,vertical:null!==(s=e.vertical)&&void 0!==s?s:P.vertical,verticalFill:null!==(u=e.verticalFill)&&void 0!==u?u:P.verticalFill}),b=g.x,w=g.y,M=g.width,N=g.height,I=g.xAxis,R=g.yAxis,T=g.syncWithTicks,A=g.horizontalValues,_=g.verticalValues;if(!(0,l.hj)(M)||M<=0||!(0,l.hj)(N)||N<=0||!(0,l.hj)(b)||b!==+b||!(0,l.hj)(w)||w!==+w)return null;var D=g.verticalCoordinatesGenerator||O,Z=g.horizontalCoordinatesGenerator||j,L=g.horizontalPoints,z=g.verticalPoints;if((!L||!L.length)&&i()(Z)){var B=A&&A.length,F=Z({yAxis:R?v(v({},R),{},{ticks:B?A:R.ticks}):void 0,width:d,height:p,offset:h},!!B||T);(0,a.Z)(Array.isArray(F),"horizontalCoordinatesGenerator should return Array but instead it returned [".concat(m(F),"]")),Array.isArray(F)&&(L=F)}if((!z||!z.length)&&i()(D)){var H=_&&_.length,q=D({xAxis:I?v(v({},I),{},{ticks:H?_:I.ticks}):void 0,width:d,height:p,offset:h},!!H||T);(0,a.Z)(Array.isArray(q),"verticalCoordinatesGenerator should return Array but instead it returned [".concat(m(q),"]")),Array.isArray(q)&&(z=q)}return 
r.createElement("g",{className:"recharts-cartesian-grid"},r.createElement(x,{fill:g.fill,fillOpacity:g.fillOpacity,x:g.x,y:g.y,width:g.width,height:g.height}),r.createElement(S,y({},g,{offset:h,horizontalPoints:L})),r.createElement(k,y({},g,{offset:h,verticalPoints:z})),r.createElement(E,y({},g,{horizontalPoints:L})),r.createElement(C,y({},g,{verticalPoints:z})))}M.displayName="CartesianGrid"},13137:function(e,t,n){"use strict";n.d(t,{W:function(){return u}});var r=n(2265),o=n(69398),i=n(9841),a=n(82944),l=["offset","layout","width","dataKey","data","dataPointFormatter","xAxis","yAxis"];function c(){return(c=Object.assign?Object.assign.bind():function(e){for(var t=1;te.length)&&(t=e.length);for(var n=0,r=Array(t);n=0||(o[n]=e[n]);return o}(e,t);if(Object.getOwnPropertySymbols){var i=Object.getOwnPropertySymbols(e);for(r=0;r=0)&&Object.prototype.propertyIsEnumerable.call(e,n)&&(o[n]=e[n])}return o}(e,l),v=(0,a.L6)(g,!1);"x"===e.direction&&"number"!==h.type&&(0,o.Z)(!1);var y=f.map(function(e){var o,a,l=p(e,d),f=l.x,g=l.y,y=l.value,b=l.errorVal;if(!b)return null;var x=[];if(Array.isArray(b)){var w=function(e){if(Array.isArray(e))return e}(b)||function(e,t){var n=null==e?null:"undefined"!=typeof Symbol&&e[Symbol.iterator]||e["@@iterator"];if(null!=n){var r,o,i,a,l=[],c=!0,s=!1;try{for(i=(n=n.call(e)).next;!(c=(r=i.call(n)).done)&&(l.push(r.value),2!==l.length);c=!0);}catch(e){s=!0,o=e}finally{try{if(!c&&null!=n.return&&(a=n.return(),Object(a)!==a))return}finally{if(s)throw o}}return l}}(b,2)||function(e,t){if(e){if("string"==typeof e)return s(e,2);var n=Object.prototype.toString.call(e).slice(8,-1);if("Object"===n&&e.constructor&&(n=e.constructor.name),"Map"===n||"Set"===n)return Array.from(e);if("Arguments"===n||/^(?:Ui|I)nt(?:8|16|32)(?:Clamped)?Array$/.test(n))return s(e,2)}}(b,2)||function(){throw TypeError("Invalid attempt to destructure non-iterable instance.\nIn order to be iterable, non-array objects must have a [Symbol.iterator]() 
method.")}();o=w[0],a=w[1]}else o=a=b;if("vertical"===n){var S=h.scale,k=g+t,E=k+u,C=k-u,O=S(y-o),j=S(y+a);x.push({x1:j,y1:E,x2:j,y2:C}),x.push({x1:O,y1:k,x2:j,y2:k}),x.push({x1:O,y1:E,x2:O,y2:C})}else if("horizontal"===n){var P=m.scale,M=f+t,N=M-u,I=M+u,R=P(y-o),T=P(y+a);x.push({x1:N,y1:T,x2:I,y2:T}),x.push({x1:M,y1:R,x2:M,y2:T}),x.push({x1:N,y1:R,x2:I,y2:R})}return r.createElement(i.m,c({className:"recharts-errorBar",key:"bar-".concat(x.map(function(e){return"".concat(e.x1,"-").concat(e.x2,"-").concat(e.y1,"-").concat(e.y2)}))},v),x.map(function(e){return r.createElement("line",c({},e,{key:"line-".concat(e.x1,"-").concat(e.x2,"-").concat(e.y1,"-").concat(e.y2)}))}))});return r.createElement(i.m,{className:"recharts-errorBars"},y)}u.defaultProps={stroke:"black",strokeWidth:1.5,width:5,offset:0,layout:"horizontal"},u.displayName="ErrorBar"},97059:function(e,t,n){"use strict";n.d(t,{K:function(){return s}});var r=n(2265),o=n(61994),i=n(25739),a=n(80285),l=n(85355);function c(){return(c=Object.assign?Object.assign.bind():function(e){for(var t=1;te*o)return!1;var i=n();return e*(t-e*i/2-r)>=0&&e*(t+e*i/2-o)<=0}function d(e){return(d="function"==typeof Symbol&&"symbol"==typeof Symbol.iterator?function(e){return typeof e}:function(e){return e&&"function"==typeof Symbol&&e.constructor===Symbol&&e!==Symbol.prototype?"symbol":typeof e})(e)}function f(e,t){var n=Object.keys(e);if(Object.getOwnPropertySymbols){var r=Object.getOwnPropertySymbols(e);t&&(r=r.filter(function(t){return Object.getOwnPropertyDescriptor(e,t).enumerable})),n.push.apply(n,r)}return n}function p(e){for(var t=1;t=2?(0,i.uY)(v[1].coordinate-v[0].coordinate):1,M=(r="width"===C,d=y.x,f=y.y,h=y.width,m=y.height,1===P?{start:r?d:f,end:r?d+h:f+m}:{start:r?d+h:f+m,end:r?d:f});return"equidistantPreserveStart"===w?function(e,t,n,r,o){for(var i,a=(r||[]).slice(),l=t.start,c=t.end,d=0,f=1,p=l;f<=a.length;)if(i=function(){var t,i=null==r?void 0:r[d];if(void 0===i)return{v:s(r,f)};var a=d,h=function(){return void 
0===t&&(t=n(i,a)),t},m=i.coordinate,g=0===d||u(e,m,h,p,c);g||(d=0,p=l,f+=1),g&&(p=m+e*(h()/2+o),d+=f)}())return i.v;return[]}(P,M,j,v,b):("preserveStart"===w||"preserveStartEnd"===w?function(e,t,n,r,o,i){var a=(r||[]).slice(),l=a.length,c=t.start,s=t.end;if(i){var d=r[l-1],f=n(d,l-1),h=e*(d.coordinate+e*f/2-s);a[l-1]=d=p(p({},d),{},{tickCoord:h>0?d.coordinate-h*e:d.coordinate}),u(e,d.tickCoord,function(){return f},c,s)&&(s=d.tickCoord-e*(f/2+o),a[l-1]=p(p({},d),{},{isShow:!0}))}for(var m=i?l-1:l,g=function(t){var r,i=a[t],l=function(){return void 0===r&&(r=n(i,t)),r};if(0===t){var d=e*(i.coordinate-e*l()/2-c);a[t]=i=p(p({},i),{},{tickCoord:d<0?i.coordinate-d*e:i.coordinate})}else a[t]=i=p(p({},i),{},{tickCoord:i.coordinate});u(e,i.tickCoord,l,c,s)&&(c=i.tickCoord+e*(l()/2+o),a[t]=p(p({},i),{},{isShow:!0}))},v=0;v0?s.coordinate-f*e:s.coordinate})}else i[t]=s=p(p({},s),{},{tickCoord:s.coordinate});u(e,s.tickCoord,d,l,c)&&(c=s.tickCoord-e*(d()/2+o),i[t]=p(p({},s),{},{isShow:!0}))},d=a-1;d>=0;d--)s(d);return i}(P,M,j,v,b)).filter(function(e){return e.isShow})}},93765:function(e,t,n){"use strict";n.d(t,{z:function(){return tx}});var r=n(2265),o=n(77571),i=n.n(o),a=n(86757),l=n.n(a),c=n(99676),s=n.n(c),u=n(13735),d=n.n(u),f=n(34935),p=n.n(f),h=n(37065),m=n.n(h),g=n(84173),v=n.n(g),y=n(32242),b=n.n(y),x=n(61994),w=n(69398),S=n(48777),k=n(9841),E=n(8147),C=n(22190),O=n(81889),j=n(73649),P=n(82944),M=n(55284),N=n(58811),I=n(85355),R=n(16630);function T(e){return(T="function"==typeof Symbol&&"symbol"==typeof Symbol.iterator?function(e){return typeof e}:function(e){return e&&"function"==typeof Symbol&&e.constructor===Symbol&&e!==Symbol.prototype?"symbol":typeof e})(e)}function A(e,t){var n=Object.keys(e);if(Object.getOwnPropertySymbols){var r=Object.getOwnPropertySymbols(e);t&&(r=r.filter(function(t){return Object.getOwnPropertyDescriptor(e,t).enumerable})),n.push.apply(n,r)}return n}function _(e){for(var 
t=1;t0&&t.handleDrag(e.changedTouches[0])}),V(K(t),"handleDragEnd",function(){t.setState({isTravellerMoving:!1,isSlideMoving:!1},function(){var e=t.props,n=e.endIndex,r=e.onDragEnd,o=e.startIndex;null==r||r({endIndex:n,startIndex:o})}),t.detachDragEndListener()}),V(K(t),"handleLeaveWrapper",function(){(t.state.isTravellerMoving||t.state.isSlideMoving)&&(t.leaveTimer=window.setTimeout(t.handleDragEnd,t.props.leaveTimeOut))}),V(K(t),"handleEnterSlideOrTraveller",function(){t.setState({isTextActive:!0})}),V(K(t),"handleLeaveSlideOrTraveller",function(){t.setState({isTextActive:!1})}),V(K(t),"handleSlideDragStart",function(e){var n=$(e)?e.changedTouches[0]:e;t.setState({isTravellerMoving:!1,isSlideMoving:!0,slideMoveStartX:n.pageX}),t.attachDragEndListener()}),t.travellerDragStartHandlers={startX:t.handleTravellerDragStart.bind(K(t),"startX"),endX:t.handleTravellerDragStart.bind(K(t),"endX")},t.state={},t}return n=[{key:"componentWillUnmount",value:function(){this.leaveTimer&&(clearTimeout(this.leaveTimer),this.leaveTimer=null),this.detachDragEndListener()}},{key:"getIndex",value:function(e){var t=e.startX,n=e.endX,r=this.state.scaleValues,o=this.props,i=o.gap,l=o.data.length-1,c=a.getIndexInRange(r,Math.min(t,n)),s=a.getIndexInRange(r,Math.max(t,n));return{startIndex:c-c%i,endIndex:s===l?l:s-s%i}}},{key:"getTextOfTick",value:function(e){var t=this.props,n=t.data,r=t.tickFormatter,o=t.dataKey,i=(0,I.F$)(n[e],o,e);return l()(r)?r(i,e):i}},{key:"attachDragEndListener",value:function(){window.addEventListener("mouseup",this.handleDragEnd,!0),window.addEventListener("touchend",this.handleDragEnd,!0),window.addEventListener("mousemove",this.handleDrag,!0)}},{key:"detachDragEndListener",value:function(){window.removeEventListener("mouseup",this.handleDragEnd,!0),window.removeEventListener("touchend",this.handleDragEnd,!0),window.removeEventListener("mousemove",this.handleDrag,!0)}},{key:"handleSlideDrag",value:function(e){var 
t=this.state,n=t.slideMoveStartX,r=t.startX,o=t.endX,i=this.props,a=i.x,l=i.width,c=i.travellerWidth,s=i.startIndex,u=i.endIndex,d=i.onChange,f=e.pageX-n;f>0?f=Math.min(f,a+l-c-o,a+l-c-r):f<0&&(f=Math.max(f,a-r,a-o));var p=this.getIndex({startX:r+f,endX:o+f});(p.startIndex!==s||p.endIndex!==u)&&d&&d(p),this.setState({startX:r+f,endX:o+f,slideMoveStartX:e.pageX})}},{key:"handleTravellerDragStart",value:function(e,t){var n=$(t)?t.changedTouches[0]:t;this.setState({isSlideMoving:!1,isTravellerMoving:!0,movingTravellerId:e,brushMoveStartX:n.pageX}),this.attachDragEndListener()}},{key:"handleTravellerMove",value:function(e){var t,n=this.state,r=n.brushMoveStartX,o=n.movingTravellerId,i=n.endX,a=n.startX,l=this.state[o],c=this.props,s=c.x,u=c.width,d=c.travellerWidth,f=c.onChange,p=c.gap,h=c.data,m={startX:this.state.startX,endX:this.state.endX},g=e.pageX-r;g>0?g=Math.min(g,s+u-d-l):g<0&&(g=Math.max(g,s-l)),m[o]=l+g;var v=this.getIndex(m),y=v.startIndex,b=v.endIndex,x=function(){var e=h.length-1;return"startX"===o&&(i>a?y%p==0:b%p==0)||ia?b%p==0:y%p==0)||i>a&&b===e};this.setState((V(t={},o,l+g),V(t,"brushMoveStartX",e.pageX),t),function(){f&&x()&&f(v)})}},{key:"handleTravellerMoveKeyboard",value:function(e,t){var n=this,r=this.state,o=r.scaleValues,i=r.startX,a=r.endX,l=this.state[t],c=o.indexOf(l);if(-1!==c){var s=c+e;if(-1!==s&&!(s>=o.length)){var u=o[s];"startX"===t&&u>=a||"endX"===t&&u<=i||this.setState(V({},t,u),function(){n.props.onChange(n.getIndex({startX:n.state.startX,endX:n.state.endX}))})}}}},{key:"renderBackground",value:function(){var e=this.props,t=e.x,n=e.y,o=e.width,i=e.height,a=e.fill,l=e.stroke;return r.createElement("rect",{stroke:l,fill:a,x:t,y:n,width:o,height:i})}},{key:"renderPanorama",value:function(){var e=this.props,t=e.x,n=e.y,o=e.width,i=e.height,a=e.data,l=e.children,c=e.padding,s=r.Children.only(l);return s?r.cloneElement(s,{x:t,y:n,width:o,height:i,margin:c,compact:!0,data:a}):null}},{key:"renderTravellerLayer",value:function(e,t){var 
n=this,o=this.props,i=o.y,l=o.travellerWidth,c=o.height,s=o.traveller,u=o.ariaLabel,d=o.data,f=o.startIndex,p=o.endIndex,h=Math.max(e,this.props.x),m=H(H({},(0,P.L6)(this.props,!1)),{},{x:h,y:i,width:l,height:c}),g=u||"Min value: ".concat(d[f].name,", Max value: ").concat(d[p].name);return r.createElement(k.m,{tabIndex:0,role:"slider","aria-label":g,"aria-valuenow":e,className:"recharts-brush-traveller",onMouseEnter:this.handleEnterSlideOrTraveller,onMouseLeave:this.handleLeaveSlideOrTraveller,onMouseDown:this.travellerDragStartHandlers[t],onTouchStart:this.travellerDragStartHandlers[t],onKeyDown:function(e){["ArrowLeft","ArrowRight"].includes(e.key)&&(e.preventDefault(),e.stopPropagation(),n.handleTravellerMoveKeyboard("ArrowRight"===e.key?1:-1,t))},onFocus:function(){n.setState({isTravellerFocused:!0})},onBlur:function(){n.setState({isTravellerFocused:!1})},style:{cursor:"col-resize"}},a.renderTraveller(s,m))}},{key:"renderSlide",value:function(e,t){var n=this.props,o=n.y,i=n.height,a=n.stroke,l=n.travellerWidth;return r.createElement("rect",{className:"recharts-brush-slide",onMouseEnter:this.handleEnterSlideOrTraveller,onMouseLeave:this.handleLeaveSlideOrTraveller,onMouseDown:this.handleSlideDragStart,onTouchStart:this.handleSlideDragStart,style:{cursor:"move"},stroke:"none",fill:a,fillOpacity:.2,x:Math.min(e,t)+l,y:o,width:Math.max(Math.abs(t-e)-l,0),height:i})}},{key:"renderText",value:function(){var e=this.props,t=e.startIndex,n=e.endIndex,o=e.y,i=e.height,a=e.travellerWidth,l=e.stroke,c=this.state,s=c.startX,u=c.endX,d={pointerEvents:"none",fill:l};return r.createElement(k.m,{className:"recharts-brush-texts"},r.createElement(N.x,B({textAnchor:"end",verticalAnchor:"middle",x:Math.min(s,u)-5,y:o+i/2},d),this.getTextOfTick(t)),r.createElement(N.x,B({textAnchor:"start",verticalAnchor:"middle",x:Math.max(s,u)+a+5,y:o+i/2},d),this.getTextOfTick(n)))}},{key:"render",value:function(){var 
e=this.props,t=e.data,n=e.className,o=e.children,i=e.x,a=e.y,l=e.width,c=e.height,s=e.alwaysShowText,u=this.state,d=u.startX,f=u.endX,p=u.isTextActive,h=u.isSlideMoving,m=u.isTravellerMoving,g=u.isTravellerFocused;if(!t||!t.length||!(0,R.hj)(i)||!(0,R.hj)(a)||!(0,R.hj)(l)||!(0,R.hj)(c)||l<=0||c<=0)return null;var v=(0,x.Z)("recharts-brush",n),y=1===r.Children.count(o),b=L("userSelect","none");return r.createElement(k.m,{className:v,onMouseLeave:this.handleLeaveWrapper,onTouchMove:this.handleTouchMove,style:b},this.renderBackground(),y&&this.renderPanorama(),this.renderSlide(d,f),this.renderTravellerLayer(d,"startX"),this.renderTravellerLayer(f,"endX"),(p||h||m||g||s)&&this.renderText())}}],o=[{key:"renderDefaultTraveller",value:function(e){var t=e.x,n=e.y,o=e.width,i=e.height,a=e.stroke,l=Math.floor(n+i/2)-1;return r.createElement(r.Fragment,null,r.createElement("rect",{x:t,y:n,width:o,height:i,fill:a,stroke:"none"}),r.createElement("line",{x1:t+1,y1:l,x2:t+o-1,y2:l,fill:"none",stroke:"#fff"}),r.createElement("line",{x1:t+1,y1:l+2,x2:t+o-1,y2:l+2,fill:"none",stroke:"#fff"}))}},{key:"renderTraveller",value:function(e,t){return r.isValidElement(e)?r.cloneElement(e,t):l()(e)?e(t):a.renderDefaultTraveller(t)}},{key:"getDerivedStateFromProps",value:function(e,t){var n=e.data,r=e.width,o=e.x,i=e.travellerWidth,a=e.updateId,l=e.startIndex,c=e.endIndex;if(n!==t.prevData||a!==t.prevUpdateId)return H({prevData:n,prevTravellerWidth:i,prevUpdateId:a,prevX:o,prevWidth:r},n&&n.length?X({data:n,width:r,x:o,travellerWidth:i,startIndex:l,endIndex:c}):{scale:null,scaleValues:null});if(t.scale&&(r!==t.prevWidth||o!==t.prevX||i!==t.prevTravellerWidth)){t.scale.range([o,o+r-i]);var s=t.scale.domain().map(function(e){return t.scale(e)});return{prevData:n,prevTravellerWidth:i,prevUpdateId:a,prevX:o,prevWidth:r,startX:t.scale(e.startIndex),endX:t.scale(e.endIndex),scaleValues:s}}return null}},{key:"getIndexInRange",value:function(e,t){for(var n=e.length,r=0,o=n-1;o-r>1;){var 
i=Math.floor((r+o)/2);e[i]>t?o=i:r=i}return t>=e[o]?o:r}}],n&&q(a.prototype,n),o&&q(a,o),Object.defineProperty(a,"prototype",{writable:!1}),a}(r.PureComponent);V(Y,"displayName","Brush"),V(Y,"defaultProps",{height:40,travellerWidth:5,gap:1,fill:"#fff",stroke:"#666",padding:{top:1,right:1,bottom:1,left:1},leaveTimeOut:1e3,alwaysShowText:!1});var Q=n(4094),J=n(38569),ee=n(26680),et=function(e,t){var n=e.alwaysShow,r=e.ifOverflow;return n&&(r="extendDomain"),r===t},en=n(25311),er=n(1175);function eo(e){return(eo="function"==typeof Symbol&&"symbol"==typeof Symbol.iterator?function(e){return typeof e}:function(e){return e&&"function"==typeof Symbol&&e.constructor===Symbol&&e!==Symbol.prototype?"symbol":typeof e})(e)}function ei(){return(ei=Object.assign?Object.assign.bind():function(e){for(var t=1;te.length)&&(t=e.length);for(var n=0,r=Array(t);ne.length)&&(t=e.length);for(var n=0,r=Array(t);n=0||(o[n]=e[n]);return o}(e,t);if(Object.getOwnPropertySymbols){var i=Object.getOwnPropertySymbols(e);for(r=0;r=0)&&Object.prototype.propertyIsEnumerable.call(e,n)&&(o[n]=e[n])}return o}(e,eH));return(0,R.hj)(n)&&(0,R.hj)(i)&&(0,R.hj)(d)&&(0,R.hj)(p)&&(0,R.hj)(l)&&(0,R.hj)(s)?r.createElement("path",eq({},(0,P.L6)(m,!0),{className:(0,x.Z)("recharts-cross",h),d:"M".concat(n,",").concat(l,"v").concat(p,"M").concat(s,",").concat(i,"h").concat(d)})):null};function eU(e){var t=e.cx,n=e.cy,r=e.radius,o=e.startAngle,i=e.endAngle;return{points:[(0,eM.op)(t,n,r,o),(0,eM.op)(t,n,r,i)],cx:t,cy:n,radius:r,startAngle:o,endAngle:i}}var eV=n(60474);function eG(e){return(eG="function"==typeof Symbol&&"symbol"==typeof Symbol.iterator?function(e){return typeof e}:function(e){return e&&"function"==typeof Symbol&&e.constructor===Symbol&&e!==Symbol.prototype?"symbol":typeof e})(e)}function eX(e,t){var n=Object.keys(e);if(Object.getOwnPropertySymbols){var r=Object.getOwnPropertySymbols(e);t&&(r=r.filter(function(t){return Object.getOwnPropertyDescriptor(e,t).enumerable})),n.push.apply(n,r)}return 
n}function e$(e){for(var t=1;t=0||(o[n]=e[n]);return o}(e,t);if(Object.getOwnPropertySymbols){var i=Object.getOwnPropertySymbols(e);for(r=0;r=0)&&Object.prototype.propertyIsEnumerable.call(e,n)&&(o[n]=e[n])}return o}function e3(e,t){return(e3=Object.setPrototypeOf?Object.setPrototypeOf.bind():function(e,t){return e.__proto__=t,e})(e,t)}function e4(e){if(void 0===e)throw ReferenceError("this hasn't been initialised - super() hasn't been called");return e}function e5(e){return(e5=Object.setPrototypeOf?Object.getPrototypeOf.bind():function(e){return e.__proto__||Object.getPrototypeOf(e)})(e)}function e8(e){return function(e){if(Array.isArray(e))return e9(e)}(e)||function(e){if("undefined"!=typeof Symbol&&null!=e[Symbol.iterator]||null!=e["@@iterator"])return Array.from(e)}(e)||e7(e)||function(){throw TypeError("Invalid attempt to spread non-iterable instance.\nIn order to be iterable, non-array objects must have a [Symbol.iterator]() method.")}()}function e7(e,t){if(e){if("string"==typeof e)return e9(e,t);var n=Object.prototype.toString.call(e).slice(8,-1);if("Object"===n&&e.constructor&&(n=e.constructor.name),"Map"===n||"Set"===n)return Array.from(e);if("Arguments"===n||/^(?:Ui|I)nt(?:8|16|32)(?:Clamped)?Array$/.test(n))return e9(e,t)}}function e9(e,t){(null==t||t>e.length)&&(t=e.length);for(var n=0,r=Array(t);n0?i:e&&e.length&&(0,R.hj)(r)&&(0,R.hj)(o)?e.slice(r,o+1):[]};function tu(e){return"number"===e?[0,"auto"]:void 0}var td=function(e,t,n,r){var o=e.graphicalItems,i=e.tooltipAxis,a=ts(t,e);return n<0||!o||!o.length||n>=a.length?null:o.reduce(function(o,l){var c,s,u=null!==(c=l.props.data)&&void 0!==c?c:t;if(u&&e.dataStartIndex+e.dataEndIndex!==0&&(u=u.slice(e.dataStartIndex,e.dataEndIndex+1)),i.dataKey&&!i.allowDuplicatedCategory){var d=void 0===u?a:u;s=(0,R.Ap)(d,i.dataKey,r)}else s=u&&u[n]||a[n];return s?[].concat(e8(o),[(0,I.Qo)(l,s)]):o},[])},tf=function(e,t,n,r){var 
o=r||{x:e.chartX,y:e.chartY},i="horizontal"===n?o.x:"vertical"===n?o.y:"centric"===n?o.angle:o.radius,a=e.orderedTooltipTicks,l=e.tooltipAxis,c=e.tooltipTicks,s=(0,I.VO)(i,a,c,l);if(s>=0&&c){var u=c[s]&&c[s].value,d=td(e,t,s,u),f=tc(n,a,s,o);return{activeTooltipIndex:s,activeLabel:u,activePayload:d,activeCoordinate:f}}return null},tp=function(e,t){var n=t.axes,r=t.graphicalItems,o=t.axisType,a=t.axisIdKey,l=t.stackGroups,c=t.dataStartIndex,u=t.dataEndIndex,d=e.layout,f=e.children,p=e.stackOffset,h=(0,I.NA)(d,o);return n.reduce(function(t,n){var m=n.props,g=m.type,v=m.dataKey,y=m.allowDataOverflow,b=m.allowDuplicatedCategory,x=m.scale,w=m.ticks,S=m.includeHidden,k=n.props[a];if(t[k])return t;var E=ts(e.data,{graphicalItems:r.filter(function(e){return e.props[a]===k}),dataStartIndex:c,dataEndIndex:u}),C=E.length;(function(e,t,n){if("number"===n&&!0===t&&Array.isArray(e)){var r=null==e?void 0:e[0],o=null==e?void 0:e[1];if(r&&o&&(0,R.hj)(r)&&(0,R.hj)(o))return!0}return!1})(n.props.domain,y,g)&&(P=(0,I.LG)(n.props.domain,null,y),h&&("number"===g||"auto"!==x)&&(N=(0,I.gF)(E,v,"category")));var O=tu(g);if(!P||0===P.length){var j,P,M,N,T,A=null!==(T=n.props.domain)&&void 0!==T?T:O;if(v){if(P=(0,I.gF)(E,v,g),"category"===g&&h){var _=(0,R.bv)(P);b&&_?(M=P,P=s()(0,C)):b||(P=(0,I.ko)(A,P,n).reduce(function(e,t){return e.indexOf(t)>=0?e:[].concat(e8(e),[t])},[]))}else if("category"===g)P=b?P.filter(function(e){return""!==e&&!i()(e)}):(0,I.ko)(A,P,n).reduce(function(e,t){return e.indexOf(t)>=0||""===t||i()(t)?e:[].concat(e8(e),[t])},[]);else if("number"===g){var D=(0,I.ZI)(E,r.filter(function(e){return e.props[a]===k&&(S||!e.props.hide)}),v,o,d);D&&(P=D)}h&&("number"===g||"auto"!==x)&&(N=(0,I.gF)(E,v,"category"))}else P=h?s()(0,C):l&&l[k]&&l[k].hasStack&&"number"===g?"expand"===p?[0,1]:(0,I.EB)(l[k].stackGroups,c,u):(0,I.s6)(E,r.filter(function(e){return 
e.props[a]===k&&(S||!e.props.hide)}),g,d,!0);"number"===g?(P=eP(f,P,k,o,w),A&&(P=(0,I.LG)(A,P,y))):"category"===g&&A&&P.every(function(e){return A.indexOf(e)>=0})&&(P=A)}return tt(tt({},t),{},tn({},k,tt(tt({},n.props),{},{axisType:o,domain:P,categoricalDomain:N,duplicateDomain:M,originalDomain:null!==(j=n.props.domain)&&void 0!==j?j:O,isCategorical:h,layout:d})))},{})},th=function(e,t){var n=t.graphicalItems,r=t.Axis,o=t.axisType,i=t.axisIdKey,a=t.stackGroups,l=t.dataStartIndex,c=t.dataEndIndex,u=e.layout,f=e.children,p=ts(e.data,{graphicalItems:n,dataStartIndex:l,dataEndIndex:c}),h=p.length,m=(0,I.NA)(u,o),g=-1;return n.reduce(function(e,t){var v,y=t.props[i],b=tu("number");return e[y]?e:(g++,v=m?s()(0,h):a&&a[y]&&a[y].hasStack?eP(f,v=(0,I.EB)(a[y].stackGroups,l,c),y,o):eP(f,v=(0,I.LG)(b,(0,I.s6)(p,n.filter(function(e){return e.props[i]===y&&!e.props.hide}),"number",u),r.defaultProps.allowDataOverflow),y,o),tt(tt({},e),{},tn({},y,tt(tt({axisType:o},r.defaultProps),{},{hide:!0,orientation:d()(to,"".concat(o,".").concat(g%2),null),domain:v,originalDomain:b,isCategorical:m,layout:u}))))},{})},tm=function(e,t){var n=t.axisType,r=void 0===n?"xAxis":n,o=t.AxisComp,i=t.graphicalItems,a=t.stackGroups,l=t.dataStartIndex,c=t.dataEndIndex,s=e.children,u="".concat(r,"Id"),d=(0,P.NN)(s,o),f={};return d&&d.length?f=tp(e,{axes:d,graphicalItems:i,axisType:r,axisIdKey:u,stackGroups:a,dataStartIndex:l,dataEndIndex:c}):i&&i.length&&(f=th(e,{Axis:o,graphicalItems:i,axisType:r,axisIdKey:u,stackGroups:a,dataStartIndex:l,dataEndIndex:c})),f},tg=function(e){var t=(0,R.Kt)(e),n=(0,I.uY)(t,!1,!0);return{tooltipTicks:n,orderedTooltipTicks:p()(n,function(e){return e.coordinate}),tooltipAxis:t,tooltipAxisBandSize:(0,I.zT)(t,n)}},tv=function(e){var t=e.children,n=e.defaultShowTooltip,r=(0,P.sP)(t,Y),o=0,i=0;return 
e.data&&0!==e.data.length&&(i=e.data.length-1),r&&r.props&&(r.props.startIndex>=0&&(o=r.props.startIndex),r.props.endIndex>=0&&(i=r.props.endIndex)),{chartX:0,chartY:0,dataStartIndex:o,dataEndIndex:i,activeTooltipIndex:-1,isTooltipActive:!!n}},ty=function(e){return"horizontal"===e?{numericAxisName:"yAxis",cateAxisName:"xAxis"}:"vertical"===e?{numericAxisName:"xAxis",cateAxisName:"yAxis"}:"centric"===e?{numericAxisName:"radiusAxis",cateAxisName:"angleAxis"}:{numericAxisName:"angleAxis",cateAxisName:"radiusAxis"}},tb=function(e,t){var n=e.props,r=e.graphicalItems,o=e.xAxisMap,i=void 0===o?{}:o,a=e.yAxisMap,l=void 0===a?{}:a,c=n.width,s=n.height,u=n.children,f=n.margin||{},p=(0,P.sP)(u,Y),h=(0,P.sP)(u,C.D),m=Object.keys(l).reduce(function(e,t){var n=l[t],r=n.orientation;return n.mirror||n.hide?e:tt(tt({},e),{},tn({},r,e[r]+n.width))},{left:f.left||0,right:f.right||0}),g=Object.keys(i).reduce(function(e,t){var n=i[t],r=n.orientation;return n.mirror||n.hide?e:tt(tt({},e),{},tn({},r,d()(e,"".concat(r))+n.height))},{top:f.top||0,bottom:f.bottom||0}),v=tt(tt({},g),m),y=v.bottom;p&&(v.bottom+=p.props.height||Y.defaultProps.height),h&&t&&(v=(0,I.By)(v,r,n,t));var b=c-v.left-v.right,x=s-v.top-v.bottom;return tt(tt({brushBottom:y},v),{},{width:Math.max(b,0),height:Math.max(x,0)})},tx=function(e){var t,n=e.chartName,o=e.GraphicalChild,a=e.defaultTooltipEventType,c=void 0===a?"axis":a,s=e.validateTooltipEventTypes,u=void 0===s?["axis"]:s,f=e.axisComponents,p=e.legendContent,h=e.formatAxisMap,g=e.defaultProps,y=function(e,t){var n=t.graphicalItems,r=t.stackGroups,o=t.offset,a=t.updateId,l=t.dataStartIndex,c=t.dataEndIndex,s=e.barSize,u=e.layout,d=e.barGap,p=e.barCategoryGap,h=e.maxBarSize,m=ty(u),g=m.numericAxisName,v=m.cateAxisName,y=!!n&&!!n.length&&n.some(function(e){var t=(0,P.Gf)(e&&e.type);return t&&t.indexOf("Bar")>=0})&&(0,I.pt)({barSize:s,stackGroups:r}),b=[];return n.forEach(function(n,s){var 
m,x=ts(e.data,{graphicalItems:[n],dataStartIndex:l,dataEndIndex:c}),S=n.props,k=S.dataKey,E=S.maxBarSize,C=n.props["".concat(g,"Id")],O=n.props["".concat(v,"Id")],j=f.reduce(function(e,r){var o,i=t["".concat(r.axisType,"Map")],a=n.props["".concat(r.axisType,"Id")];i&&i[a]||"zAxis"===r.axisType||(0,w.Z)(!1);var l=i[a];return tt(tt({},e),{},(tn(o={},r.axisType,l),tn(o,"".concat(r.axisType,"Ticks"),(0,I.uY)(l)),o))},{}),M=j[v],N=j["".concat(v,"Ticks")],R=r&&r[C]&&r[C].hasStack&&(0,I.O3)(n,r[C].stackGroups),T=(0,P.Gf)(n.type).indexOf("Bar")>=0,A=(0,I.zT)(M,N),_=[];if(T){var D,Z,L=i()(E)?h:E,z=null!==(D=null!==(Z=(0,I.zT)(M,N,!0))&&void 0!==Z?Z:L)&&void 0!==D?D:0;_=(0,I.qz)({barGap:d,barCategoryGap:p,bandSize:z!==A?z:A,sizeList:y[O],maxBarSize:L}),z!==A&&(_=_.map(function(e){return tt(tt({},e),{},{position:tt(tt({},e.position),{},{offset:e.position.offset-z/2})})}))}var B=n&&n.type&&n.type.getComposedData;B&&b.push({props:tt(tt({},B(tt(tt({},j),{},{displayedData:x,props:e,dataKey:k,item:n,bandSize:A,barPosition:_,offset:o,stackedData:R,layout:u,dataStartIndex:l,dataEndIndex:c}))),{},(tn(m={key:n.key||"item-".concat(s)},g,j[g]),tn(m,v,j[v]),tn(m,"animationId",a),m)),childIndex:(0,P.$R)(n,e.children),item:n})}),b},C=function(e,t){var r=e.props,i=e.dataStartIndex,a=e.dataEndIndex,l=e.updateId;if(!(0,P.TT)({props:r}))return null;var c=r.children,s=r.layout,u=r.stackOffset,d=r.data,p=r.reverseStackOrder,m=ty(s),g=m.numericAxisName,v=m.cateAxisName,b=(0,P.NN)(c,o),x=(0,I.wh)(d,b,"".concat(g,"Id"),"".concat(v,"Id"),u,p),w=f.reduce(function(e,t){var n="".concat(t.axisType,"Map");return tt(tt({},e),{},tn({},n,tm(r,tt(tt({},t),{},{graphicalItems:b,stackGroups:t.axisType===g&&x,dataStartIndex:i,dataEndIndex:a}))))},{}),S=tb(tt(tt({},w),{},{props:r,graphicalItems:b}),null==t?void 0:t.legendBBox);Object.keys(w).forEach(function(e){w[e]=h(r,w[e],S,e.replace("Map",""),n)});var 
k=tg(w["".concat(v,"Map")]),E=y(r,tt(tt({},w),{},{dataStartIndex:i,dataEndIndex:a,updateId:l,graphicalItems:b,stackGroups:x,offset:S}));return tt(tt({formattedGraphicalItems:E,graphicalItems:b,offset:S,stackGroups:x},k),w)};return t=function(e){(function(e,t){if("function"!=typeof t&&null!==t)throw TypeError("Super expression must either be null or a function");e.prototype=Object.create(t&&t.prototype,{constructor:{value:e,writable:!0,configurable:!0}}),Object.defineProperty(e,"prototype",{writable:!1}),t&&e3(e,t)})(s,e);var t,o,a=(t=function(){if("undefined"==typeof Reflect||!Reflect.construct||Reflect.construct.sham)return!1;if("function"==typeof Proxy)return!0;try{return Boolean.prototype.valueOf.call(Reflect.construct(Boolean,[],function(){})),!0}catch(e){return!1}}(),function(){var e,n=e5(s);return e=t?Reflect.construct(n,arguments,e5(this).constructor):n.apply(this,arguments),function(e,t){if(t&&("object"===e0(t)||"function"==typeof t))return t;if(void 0!==t)throw TypeError("Derived constructors may only return object or undefined");return e4(e)}(this,e)});function s(e){var t,o,c;return function(e,t){if(!(e instanceof t))throw TypeError("Cannot call a class as a function")}(this,s),tn(e4(c=a.call(this,e)),"eventEmitterSymbol",Symbol("rechartsEventEmitter")),tn(e4(c),"accessibilityManager",new eL),tn(e4(c),"handleLegendBBoxUpdate",function(e){if(e){var t=c.state,n=t.dataStartIndex,r=t.dataEndIndex,o=t.updateId;c.setState(tt({legendBBox:e},C({props:c.props,dataStartIndex:n,dataEndIndex:r,updateId:o},tt(tt({},c.state),{},{legendBBox:e}))))}}),tn(e4(c),"handleReceiveSyncEvent",function(e,t,n){c.props.syncId===e&&(n!==c.eventEmitterSymbol||"function"==typeof c.props.syncMethod)&&c.applySyncEvent(t)}),tn(e4(c),"handleBrushChange",function(e){var t=e.startIndex,n=e.endIndex;if(t!==c.state.dataStartIndex||n!==c.state.dataEndIndex){var r=c.state.updateId;c.setState(function(){return 
tt({dataStartIndex:t,dataEndIndex:n},C({props:c.props,dataStartIndex:t,dataEndIndex:n,updateId:r},c.state))}),c.triggerSyncEvent({dataStartIndex:t,dataEndIndex:n})}}),tn(e4(c),"handleMouseEnter",function(e){var t=c.getMouseInfo(e);if(t){var n=tt(tt({},t),{},{isTooltipActive:!0});c.setState(n),c.triggerSyncEvent(n);var r=c.props.onMouseEnter;l()(r)&&r(n,e)}}),tn(e4(c),"triggeredAfterMouseMove",function(e){var t=c.getMouseInfo(e),n=t?tt(tt({},t),{},{isTooltipActive:!0}):{isTooltipActive:!1};c.setState(n),c.triggerSyncEvent(n);var r=c.props.onMouseMove;l()(r)&&r(n,e)}),tn(e4(c),"handleItemMouseEnter",function(e){c.setState(function(){return{isTooltipActive:!0,activeItem:e,activePayload:e.tooltipPayload,activeCoordinate:e.tooltipPosition||{x:e.cx,y:e.cy}}})}),tn(e4(c),"handleItemMouseLeave",function(){c.setState(function(){return{isTooltipActive:!1}})}),tn(e4(c),"handleMouseMove",function(e){e.persist(),c.throttleTriggeredAfterMouseMove(e)}),tn(e4(c),"handleMouseLeave",function(e){var t={isTooltipActive:!1};c.setState(t),c.triggerSyncEvent(t);var n=c.props.onMouseLeave;l()(n)&&n(t,e)}),tn(e4(c),"handleOuterEvent",function(e){var t,n=(0,P.Bh)(e),r=d()(c.props,"".concat(n));n&&l()(r)&&r(null!==(t=/.*touch.*/i.test(n)?c.getMouseInfo(e.changedTouches[0]):c.getMouseInfo(e))&&void 0!==t?t:{},e)}),tn(e4(c),"handleClick",function(e){var t=c.getMouseInfo(e);if(t){var n=tt(tt({},t),{},{isTooltipActive:!0});c.setState(n),c.triggerSyncEvent(n);var r=c.props.onClick;l()(r)&&r(n,e)}}),tn(e4(c),"handleMouseDown",function(e){var t=c.props.onMouseDown;l()(t)&&t(c.getMouseInfo(e),e)}),tn(e4(c),"handleMouseUp",function(e){var 
t=c.props.onMouseUp;l()(t)&&t(c.getMouseInfo(e),e)}),tn(e4(c),"handleTouchMove",function(e){null!=e.changedTouches&&e.changedTouches.length>0&&c.throttleTriggeredAfterMouseMove(e.changedTouches[0])}),tn(e4(c),"handleTouchStart",function(e){null!=e.changedTouches&&e.changedTouches.length>0&&c.handleMouseDown(e.changedTouches[0])}),tn(e4(c),"handleTouchEnd",function(e){null!=e.changedTouches&&e.changedTouches.length>0&&c.handleMouseUp(e.changedTouches[0])}),tn(e4(c),"triggerSyncEvent",function(e){void 0!==c.props.syncId&&eR.emit(eT,c.props.syncId,e,c.eventEmitterSymbol)}),tn(e4(c),"applySyncEvent",function(e){var t=c.props,n=t.layout,r=t.syncMethod,o=c.state.updateId,i=e.dataStartIndex,a=e.dataEndIndex;if(void 0!==e.dataStartIndex||void 0!==e.dataEndIndex)c.setState(tt({dataStartIndex:i,dataEndIndex:a},C({props:c.props,dataStartIndex:i,dataEndIndex:a,updateId:o},c.state)));else if(void 0!==e.activeTooltipIndex){var l=e.chartX,s=e.chartY,u=e.activeTooltipIndex,d=c.state,f=d.offset,p=d.tooltipTicks;if(!f)return;if("function"==typeof r)u=r(p,e);else if("value"===r){u=-1;for(var h=0;h=0){if(u.dataKey&&!u.allowDuplicatedCategory){var j="function"==typeof u.dataKey?function(e){return"function"==typeof u.dataKey?u.dataKey(e.payload):null}:"payload.".concat(u.dataKey.toString());N=(0,R.Ap)(g,j,f),T=v&&y&&(0,R.Ap)(y,j,f)}else N=null==g?void 0:g[d],T=v&&y&&y[d];if(k||S){var M=void 0!==e.props.activeIndex?e.props.activeIndex:d;return[(0,r.cloneElement)(e,tt(tt(tt({},o.props),C),{},{activeIndex:M})),null,null]}if(!i()(N))return[O].concat(e8(c.renderActivePoints({item:o,activePoint:N,basePoint:T,childIndex:d,isRange:v})))}else{var N,T,A,_=(null!==(A=c.getItemByXY(c.state.activeCoordinate))&&void 0!==A?A:{graphicalItem:O}).graphicalItem,D=_.item,Z=void 0===D?e:D,L=_.childIndex,z=tt(tt(tt({},o.props),C),{},{activeIndex:L});return[(0,r.cloneElement)(Z,z),null,null]}}return 
v?[O,null,null]:[O,null]}),tn(e4(c),"renderCustomized",function(e,t,n){return(0,r.cloneElement)(e,tt(tt({key:"recharts-customized-".concat(n)},c.props),c.state))}),tn(e4(c),"renderMap",{CartesianGrid:{handler:c.renderGrid,once:!0},ReferenceArea:{handler:c.renderReferenceElement},ReferenceLine:{handler:tl},ReferenceDot:{handler:c.renderReferenceElement},XAxis:{handler:tl},YAxis:{handler:tl},Brush:{handler:c.renderBrush,once:!0},Bar:{handler:c.renderGraphicChild},Line:{handler:c.renderGraphicChild},Area:{handler:c.renderGraphicChild},Radar:{handler:c.renderGraphicChild},RadialBar:{handler:c.renderGraphicChild},Scatter:{handler:c.renderGraphicChild},Pie:{handler:c.renderGraphicChild},Funnel:{handler:c.renderGraphicChild},Tooltip:{handler:c.renderCursor,once:!0},PolarGrid:{handler:c.renderPolarGrid,once:!0},PolarAngleAxis:{handler:c.renderPolarAxis},PolarRadiusAxis:{handler:c.renderPolarAxis},Customized:{handler:c.renderCustomized}}),c.clipPathId="".concat(null!==(t=e.id)&&void 0!==t?t:(0,R.EL)("recharts"),"-clip"),c.throttleTriggeredAfterMouseMove=m()(c.triggeredAfterMouseMove,null!==(o=e.throttleDelay)&&void 0!==o?o:1e3/60),c.state={},c}return o=[{key:"componentDidMount",value:function(){var e,t;this.addListener(),this.accessibilityManager.setDetails({container:this.container,offset:{left:null!==(e=this.props.margin.left)&&void 0!==e?e:0,top:null!==(t=this.props.margin.top)&&void 0!==t?t:0},coordinateList:this.state.tooltipTicks,mouseHandlerCallback:this.triggeredAfterMouseMove,layout:this.props.layout}),this.displayDefaultTooltip()}},{key:"displayDefaultTooltip",value:function(){var e=this.props,t=e.children,n=e.data,r=e.height,o=e.layout,i=(0,P.sP)(t,E.u);if(i){var a=i.props.defaultIndex;if("number"==typeof a&&!(a<0)&&!(a>this.state.tooltipTicks.length)){var 
l=this.state.tooltipTicks[a]&&this.state.tooltipTicks[a].value,c=td(this.state,n,a,l),s=this.state.tooltipTicks[a].coordinate,u=(this.state.offset.top+r)/2,d="horizontal"===o?{x:s,y:u}:{y:s,x:u},f=this.state.formattedGraphicalItems.find(function(e){return"Scatter"===e.item.type.name});f&&(d=tt(tt({},d),f.props.points[a].tooltipPosition),c=f.props.points[a].tooltipPayload);var p={activeTooltipIndex:a,isTooltipActive:!0,activeLabel:l,activePayload:c,activeCoordinate:d};this.setState(p),this.renderCursor(i),this.accessibilityManager.setIndex(a)}}}},{key:"getSnapshotBeforeUpdate",value:function(e,t){if(!this.props.accessibilityLayer)return null;if(this.state.tooltipTicks!==t.tooltipTicks&&this.accessibilityManager.setDetails({coordinateList:this.state.tooltipTicks}),this.props.layout!==e.layout&&this.accessibilityManager.setDetails({layout:this.props.layout}),this.props.margin!==e.margin){var n,r;this.accessibilityManager.setDetails({offset:{left:null!==(n=this.props.margin.left)&&void 0!==n?n:0,top:null!==(r=this.props.margin.top)&&void 0!==r?r:0}})}return null}},{key:"componentDidUpdate",value:function(e){(0,P.rL)([(0,P.sP)(e.children,E.u)],[(0,P.sP)(this.props.children,E.u)])||this.displayDefaultTooltip()}},{key:"componentWillUnmount",value:function(){this.removeListener(),this.throttleTriggeredAfterMouseMove.cancel()}},{key:"getTooltipEventType",value:function(){var e=(0,P.sP)(this.props.children,E.u);if(e&&"boolean"==typeof e.props.shared){var t=e.props.shared?"axis":"item";return u.indexOf(t)>=0?t:c}return c}},{key:"getMouseInfo",value:function(e){if(!this.container)return null;var t=this.container,n=t.getBoundingClientRect(),r=(0,Q.os)(n),o={chartX:Math.round(e.pageX-r.left),chartY:Math.round(e.pageY-r.top)},i=n.width/t.offsetWidth||1,a=this.inRange(o.chartX,o.chartY,i);if(!a)return null;var l=this.state,c=l.xAxisMap,s=l.yAxisMap;if("axis"!==this.getTooltipEventType()&&c&&s){var 
u=(0,R.Kt)(c).scale,d=(0,R.Kt)(s).scale,f=u&&u.invert?u.invert(o.chartX):null,p=d&&d.invert?d.invert(o.chartY):null;return tt(tt({},o),{},{xValue:f,yValue:p})}var h=tf(this.state,this.props.data,this.props.layout,a);return h?tt(tt({},o),h):null}},{key:"inRange",value:function(e,t){var n=arguments.length>2&&void 0!==arguments[2]?arguments[2]:1,r=this.props.layout,o=e/n,i=t/n;if("horizontal"===r||"vertical"===r){var a=this.state.offset;return o>=a.left&&o<=a.left+a.width&&i>=a.top&&i<=a.top+a.height?{x:o,y:i}:null}var l=this.state,c=l.angleAxisMap,s=l.radiusAxisMap;if(c&&s){var u=(0,R.Kt)(c);return(0,eM.z3)({x:o,y:i},u)}return null}},{key:"parseEventsOfWrapper",value:function(){var e=this.props.children,t=this.getTooltipEventType(),n=(0,P.sP)(e,E.u),r={};return n&&"axis"===t&&(r="click"===n.props.trigger?{onClick:this.handleClick}:{onMouseEnter:this.handleMouseEnter,onMouseMove:this.handleMouseMove,onMouseLeave:this.handleMouseLeave,onTouchMove:this.handleTouchMove,onTouchStart:this.handleTouchStart,onTouchEnd:this.handleTouchEnd}),tt(tt({},(0,eA.Ym)(this.props,this.handleOuterEvent)),r)}},{key:"addListener",value:function(){eR.on(eT,this.handleReceiveSyncEvent)}},{key:"removeListener",value:function(){eR.removeListener(eT,this.handleReceiveSyncEvent)}},{key:"filterFormatItem",value:function(e,t,n){for(var r=this.state.formattedGraphicalItems,o=0,i=r.length;oe.length)&&(t=e.length);for(var n=0,r=Array(t);n=0?1:-1;"insideStart"===l?(o=y+E*s,a=w):"insideEnd"===l?(o=b-E*s,a=!w):"end"===l&&(o=b+E*s,a=w),a=k<=0?a:!a;var C=(0,h.op)(f,m,S,o),O=(0,h.op)(f,m,S,o+(a?1:-1)*359),j="M".concat(C.x,",").concat(C.y,"\n A").concat(S,",").concat(S,",0,1,").concat(a?0:1,",\n ").concat(O.x,",").concat(O.y),P=i()(e.id)?(0,p.EL)("recharts-radial-line-"):e.id;return 
r.createElement("text",x({},n,{dominantBaseline:"central",className:(0,u.Z)("recharts-radial-bar-label",d)}),r.createElement("defs",null,r.createElement("path",{id:P,d:j})),r.createElement("textPath",{xlinkHref:"#".concat(P)},t))},k=function(e){var t=e.viewBox,n=e.offset,r=e.position,o=t.cx,i=t.cy,a=t.innerRadius,l=t.outerRadius,c=(t.startAngle+t.endAngle)/2;if("outside"===r){var s=(0,h.op)(o,i,l+n,c),u=s.x;return{x:u,y:s.y,textAnchor:u>=o?"start":"end",verticalAnchor:"middle"}}if("center"===r)return{x:o,y:i,textAnchor:"middle",verticalAnchor:"middle"};if("centerTop"===r)return{x:o,y:i,textAnchor:"middle",verticalAnchor:"start"};if("centerBottom"===r)return{x:o,y:i,textAnchor:"middle",verticalAnchor:"end"};var d=(0,h.op)(o,i,(a+l)/2,c);return{x:d.x,y:d.y,textAnchor:"middle",verticalAnchor:"middle"}},E=function(e){var t=e.viewBox,n=e.parentViewBox,r=e.offset,o=e.position,i=t.x,a=t.y,l=t.width,c=t.height,u=c>=0?1:-1,d=u*r,f=u>0?"end":"start",h=u>0?"start":"end",m=l>=0?1:-1,g=m*r,v=m>0?"end":"start",y=m>0?"start":"end";if("top"===o)return b(b({},{x:i+l/2,y:a-u*r,textAnchor:"middle",verticalAnchor:f}),n?{height:Math.max(a-n.y,0),width:l}:{});if("bottom"===o)return b(b({},{x:i+l/2,y:a+c+d,textAnchor:"middle",verticalAnchor:h}),n?{height:Math.max(n.y+n.height-(a+c),0),width:l}:{});if("left"===o){var x={x:i-g,y:a+c/2,textAnchor:v,verticalAnchor:"middle"};return b(b({},x),n?{width:Math.max(x.x-n.x,0),height:c}:{})}if("right"===o){var w={x:i+l+g,y:a+c/2,textAnchor:y,verticalAnchor:"middle"};return b(b({},w),n?{width:Math.max(n.x+n.width-w.x,0),height:c}:{})}var 
S=n?{width:l,height:c}:{};return"insideLeft"===o?b({x:i+g,y:a+c/2,textAnchor:y,verticalAnchor:"middle"},S):"insideRight"===o?b({x:i+l-g,y:a+c/2,textAnchor:v,verticalAnchor:"middle"},S):"insideTop"===o?b({x:i+l/2,y:a+d,textAnchor:"middle",verticalAnchor:h},S):"insideBottom"===o?b({x:i+l/2,y:a+c-d,textAnchor:"middle",verticalAnchor:f},S):"insideTopLeft"===o?b({x:i+g,y:a+d,textAnchor:y,verticalAnchor:h},S):"insideTopRight"===o?b({x:i+l-g,y:a+d,textAnchor:v,verticalAnchor:h},S):"insideBottomLeft"===o?b({x:i+g,y:a+c-d,textAnchor:y,verticalAnchor:f},S):"insideBottomRight"===o?b({x:i+l-g,y:a+c-d,textAnchor:v,verticalAnchor:f},S):s()(o)&&((0,p.hj)(o.x)||(0,p.hU)(o.x))&&((0,p.hj)(o.y)||(0,p.hU)(o.y))?b({x:i+(0,p.h1)(o.x,l),y:a+(0,p.h1)(o.y,c),textAnchor:"end",verticalAnchor:"end"},S):b({x:i+l/2,y:a+c/2,textAnchor:"middle",verticalAnchor:"middle"},S)};function C(e){var t,n=e.offset,o=b({offset:void 0===n?5:n},function(e,t){if(null==e)return{};var n,r,o=function(e,t){if(null==e)return{};var n,r,o={},i=Object.keys(e);for(r=0;r=0||(o[n]=e[n]);return o}(e,t);if(Object.getOwnPropertySymbols){var i=Object.getOwnPropertySymbols(e);for(r=0;r=0)&&Object.prototype.propertyIsEnumerable.call(e,n)&&(o[n]=e[n])}return o}(e,g)),a=o.viewBox,c=o.position,s=o.value,h=o.children,m=o.content,v=o.className,y=o.textBreakAll;if(!a||i()(s)&&i()(h)&&!(0,r.isValidElement)(m)&&!l()(m))return null;if((0,r.isValidElement)(m))return(0,r.cloneElement)(m,o);if(l()(m)){if(t=(0,r.createElement)(m,o),(0,r.isValidElement)(t))return t}else t=w(o);var C="cx"in a&&(0,p.hj)(a.cx),O=(0,f.L6)(o,!0);if(C&&("insideStart"===c||"insideEnd"===c||"end"===c))return S(o,t,O);var j=C?k(o):E(o);return r.createElement(d.x,x({className:(0,u.Z)("recharts-label",void 0===v?"":v)},O,j,{breakAll:y}),t)}C.displayName="Label";var O=function(e){var 
t=e.cx,n=e.cy,r=e.angle,o=e.startAngle,i=e.endAngle,a=e.r,l=e.radius,c=e.innerRadius,s=e.outerRadius,u=e.x,d=e.y,f=e.top,h=e.left,m=e.width,g=e.height,v=e.clockWise,y=e.labelViewBox;if(y)return y;if((0,p.hj)(m)&&(0,p.hj)(g)){if((0,p.hj)(u)&&(0,p.hj)(d))return{x:u,y:d,width:m,height:g};if((0,p.hj)(f)&&(0,p.hj)(h))return{x:f,y:h,width:m,height:g}}return(0,p.hj)(u)&&(0,p.hj)(d)?{x:u,y:d,width:0,height:0}:(0,p.hj)(t)&&(0,p.hj)(n)?{cx:t,cy:n,startAngle:o||r||0,endAngle:i||r||0,innerRadius:c||0,outerRadius:s||l||a||0,clockWise:v}:e.viewBox?e.viewBox:{}};C.parseViewBox=O,C.renderCallByParent=function(e,t){var n,o,i=!(arguments.length>2)||void 0===arguments[2]||arguments[2];if(!e||!e.children&&i&&!e.label)return null;var a=e.children,c=O(e),u=(0,f.NN)(a,C).map(function(e,n){return(0,r.cloneElement)(e,{viewBox:t||c,key:"label-".concat(n)})});return i?[(n=e.label,o=t||c,n?!0===n?r.createElement(C,{key:"label-implicit",viewBox:o}):(0,p.P2)(n)?r.createElement(C,{key:"label-implicit",viewBox:o,value:n}):(0,r.isValidElement)(n)?n.type===C?(0,r.cloneElement)(n,{key:"label-implicit",viewBox:o}):r.createElement(C,{key:"label-implicit",content:n,viewBox:o}):l()(n)?r.createElement(C,{key:"label-implicit",content:n,viewBox:o}):s()(n)?r.createElement(C,x({viewBox:o},n,{key:"label-implicit"})):null:null)].concat(function(e){if(Array.isArray(e))return v(e)}(u)||function(e){if("undefined"!=typeof Symbol&&null!=e[Symbol.iterator]||null!=e["@@iterator"])return Array.from(e)}(u)||function(e,t){if(e){if("string"==typeof e)return v(e,void 0);var n=Object.prototype.toString.call(e).slice(8,-1);if("Object"===n&&e.constructor&&(n=e.constructor.name),"Map"===n||"Set"===n)return Array.from(e);if("Arguments"===n||/^(?:Ui|I)nt(?:8|16|32)(?:Clamped)?Array$/.test(n))return v(e,void 0)}}(u)||function(){throw TypeError("Invalid attempt to spread non-iterable instance.\nIn order to be iterable, non-array objects must have a [Symbol.iterator]() method.")}()):u}},58772:function(e,t,n){"use 
strict";n.d(t,{e:function(){return C}});var r=n(2265),o=n(77571),i=n.n(o),a=n(28302),l=n.n(a),c=n(86757),s=n.n(c),u=n(86185),d=n.n(u),f=n(26680),p=n(9841),h=n(82944),m=n(85355);function g(e){return(g="function"==typeof Symbol&&"symbol"==typeof Symbol.iterator?function(e){return typeof e}:function(e){return e&&"function"==typeof Symbol&&e.constructor===Symbol&&e!==Symbol.prototype?"symbol":typeof e})(e)}var v=["valueAccessor"],y=["data","dataKey","clockWise","id","textBreakAll"];function b(e,t){(null==t||t>e.length)&&(t=e.length);for(var n=0,r=Array(t);n=0||(o[n]=e[n]);return o}(e,t);if(Object.getOwnPropertySymbols){var i=Object.getOwnPropertySymbols(e);for(r=0;r=0)&&Object.prototype.propertyIsEnumerable.call(e,n)&&(o[n]=e[n])}return o}var E=function(e){return Array.isArray(e.value)?d()(e.value):e.value};function C(e){var t=e.valueAccessor,n=void 0===t?E:t,o=k(e,v),a=o.data,l=o.dataKey,c=o.clockWise,s=o.id,u=o.textBreakAll,d=k(o,y);return a&&a.length?r.createElement(p.m,{className:"recharts-label-list"},a.map(function(e,t){var o=i()(l)?n(e,t):(0,m.F$)(e&&e.payload,l),a=i()(s)?{}:{id:"".concat(s,"-").concat(t)};return r.createElement(f._,x({},(0,h.L6)(e,!0),d,a,{parentViewBox:e.parentViewBox,value:o,textBreakAll:u,viewBox:f._.parseViewBox(i()(c)?e:S(S({},e),{},{clockWise:c})),key:"label-".concat(t),index:t}))})):null}C.displayName="LabelList",C.renderCallByParent=function(e,t){var n,o=!(arguments.length>2)||void 0===arguments[2]||arguments[2];if(!e||!e.children&&o&&!e.label)return null;var i=e.children,a=(0,h.NN)(i,C).map(function(e,n){return(0,r.cloneElement)(e,{data:t,key:"labelList-".concat(n)})});return o?[(n=e.label)?!0===n?r.createElement(C,{key:"labelList-implicit",data:t}):r.isValidElement(n)||s()(n)?r.createElement(C,{key:"labelList-implicit",data:t,content:n}):l()(n)?r.createElement(C,x({data:t},n,{key:"labelList-implicit"})):null:null].concat(function(e){if(Array.isArray(e))return b(e)}(a)||function(e){if("undefined"!=typeof 
Symbol&&null!=e[Symbol.iterator]||null!=e["@@iterator"])return Array.from(e)}(a)||function(e,t){if(e){if("string"==typeof e)return b(e,void 0);var n=Object.prototype.toString.call(e).slice(8,-1);if("Object"===n&&e.constructor&&(n=e.constructor.name),"Map"===n||"Set"===n)return Array.from(e);if("Arguments"===n||/^(?:Ui|I)nt(?:8|16|32)(?:Clamped)?Array$/.test(n))return b(e,void 0)}}(a)||function(){throw TypeError("Invalid attempt to spread non-iterable instance.\nIn order to be iterable, non-array objects must have a [Symbol.iterator]() method.")}()):a}},22190:function(e,t,n){"use strict";n.d(t,{D:function(){return R}});var r=n(2265),o=n(86757),i=n.n(o),a=n(61994),l=n(1175),c=n(48777),s=n(14870),u=n(41637);function d(e){return(d="function"==typeof Symbol&&"symbol"==typeof Symbol.iterator?function(e){return typeof e}:function(e){return e&&"function"==typeof Symbol&&e.constructor===Symbol&&e!==Symbol.prototype?"symbol":typeof e})(e)}function f(){return(f=Object.assign?Object.assign.bind():function(e){for(var t=1;t');var w=t.inactive?p:t.color;return r.createElement("li",f({className:b,style:m,key:"legend-item-".concat(n)},(0,u.bw)(e.props,t,n)),r.createElement(c.T,{width:o,height:o,viewBox:h,style:v},e.renderIcon(t)),r.createElement("span",{className:"recharts-legend-item-text",style:{color:w}},y?y(x,t,n):x))})}},{key:"render",value:function(){var e=this.props,t=e.payload,n=e.layout,o=e.align;return t&&t.length?r.createElement("ul",{className:"recharts-default-legend",style:{padding:0,margin:0,textAlign:"horizontal"===n?o:"left"}},this.renderItems()):null}}],function(e,t){for(var n=0;n1||Math.abs(t.height-this.lastBoundingBox.height)>1)&&(this.lastBoundingBox.width=t.width,this.lastBoundingBox.height=t.height,e&&e(t))}else(-1!==this.lastBoundingBox.width||-1!==this.lastBoundingBox.height)&&(this.lastBoundingBox.width=-1,this.lastBoundingBox.height=-1,e&&e(null))}},{key:"getBBoxSnapshot",value:function(){return 
this.lastBoundingBox.width>=0&&this.lastBoundingBox.height>=0?E({},this.lastBoundingBox):{width:0,height:0}}},{key:"getDefaultPosition",value:function(e){var t,n,r=this.props,o=r.layout,i=r.align,a=r.verticalAlign,l=r.margin,c=r.chartWidth,s=r.chartHeight;return e&&(void 0!==e.left&&null!==e.left||void 0!==e.right&&null!==e.right)||(t="center"===i&&"vertical"===o?{left:((c||0)-this.getBBoxSnapshot().width)/2}:"right"===i?{right:l&&l.right||0}:{left:l&&l.left||0}),e&&(void 0!==e.top&&null!==e.top||void 0!==e.bottom&&null!==e.bottom)||(n="middle"===a?{top:((s||0)-this.getBBoxSnapshot().height)/2}:"bottom"===a?{bottom:l&&l.bottom||0}:{top:l&&l.top||0}),E(E({},t),n)}},{key:"render",value:function(){var e=this,t=this.props,n=t.content,o=t.width,i=t.height,a=t.wrapperStyle,l=t.payloadUniqBy,c=t.payload,s=E(E({position:"absolute",width:o||"auto",height:i||"auto"},this.getDefaultPosition(a)),a);return r.createElement("div",{className:"recharts-legend-wrapper",style:s,ref:function(t){e.wrapperNode=t}},function(e,t){if(r.isValidElement(e))return r.cloneElement(e,t);if("function"==typeof e)return r.createElement(e,t);t.ref;var n=function(e,t){if(null==e)return{};var n,r,o=function(e,t){if(null==e)return{};var n,r,o={},i=Object.keys(e);for(r=0;r=0||(o[n]=e[n]);return o}(e,t);if(Object.getOwnPropertySymbols){var i=Object.getOwnPropertySymbols(e);for(r=0;r=0)&&Object.prototype.propertyIsEnumerable.call(e,n)&&(o[n]=e[n])}return o}(t,S);return r.createElement(y,n)}(n,E(E({},this.props),{},{payload:(0,x.z)(c,l,I)})))}}],o=[{key:"getWithHeight",value:function(e,t){var n=e.props.layout;return"vertical"===n&&(0,b.hj)(e.props.height)?{height:e.props.height}:"horizontal"===n?{width:e.props.width||t}:null}}],n&&C(a.prototype,n),o&&C(a,o),Object.defineProperty(a,"prototype",{writable:!1}),a}(r.PureComponent);M(R,"displayName","Legend"),M(R,"defaultProps",{iconSize:14,layout:"horizontal",align:"center",verticalAlign:"bottom"})},47625:function(e,t,n){"use strict";n.d(t,{h:function(){return 
m}});var r=n(61994),o=n(2265),i=n(37065),a=n.n(i),l=n(82558),c=n(16630),s=n(1175),u=n(82944);function d(e){return(d="function"==typeof Symbol&&"symbol"==typeof Symbol.iterator?function(e){return typeof e}:function(e){return e&&"function"==typeof Symbol&&e.constructor===Symbol&&e!==Symbol.prototype?"symbol":typeof e})(e)}function f(e,t){var n=Object.keys(e);if(Object.getOwnPropertySymbols){var r=Object.getOwnPropertySymbols(e);t&&(r=r.filter(function(t){return Object.getOwnPropertyDescriptor(e,t).enumerable})),n.push.apply(n,r)}return n}function p(e){for(var t=1;te.length)&&(t=e.length);for(var n=0,r=Array(t);n0&&(e=a()(e,C,{trailing:!0,leading:!1}));var t=new ResizeObserver(e),n=N.current.getBoundingClientRect();return _(n.width,n.height),t.observe(N.current),function(){t.disconnect()}},[_,C]);var D=(0,o.useMemo)(function(){var e=T.containerWidth,t=T.containerHeight;if(e<0||t<0)return null;(0,s.Z)((0,c.hU)(g)||(0,c.hU)(y),"The width(%s) and height(%s) are both fixed numbers,\n maybe you don't need to use a ResponsiveContainer.",g,y),(0,s.Z)(!i||i>0,"The aspect(%s) must be greater than zero.",i);var n=(0,c.hU)(g)?e:g,r=(0,c.hU)(y)?t:y;i&&i>0&&(n?r=n/i:r&&(n=r*i),S&&r>S&&(r=S)),(0,s.Z)(n>0||r>0,"The width(%s) and height(%s) of chart should be greater than 0,\n please check the style of container, or the props width(%s) and height(%s),\n or add a minWidth(%s) or minHeight(%s) or use aspect(%s) to control the\n height and width.",n,r,g,y,x,w,i);var a=!Array.isArray(k)&&(0,l.isElement)(k)&&(0,u.Gf)(k.type).endsWith("Chart");return o.Children.map(k,function(e){return(0,l.isElement)(e)?(0,o.cloneElement)(e,p({width:n,height:r},a?{style:p({height:"100%",width:"100%",maxHeight:r,maxWidth:n},e.props.style)}:{})):e})},[i,k,y,S,w,x,T,g]);return o.createElement("div",{id:O?"".concat(O):void 0,className:(0,r.Z)("recharts-responsive-container",j),style:p(p({},void 0===M?{}:M),{},{width:g,height:y,minWidth:x,minHeight:w,maxHeight:S}),ref:N},D)})},58811:function(e,t,n){"use 
strict";n.d(t,{x:function(){return Z}});var r=n(2265),o=n(77571),i=n.n(o),a=n(61994),l=n(16630),c=n(34067),s=n(82944),u=n(4094);function d(e){return(d="function"==typeof Symbol&&"symbol"==typeof Symbol.iterator?function(e){return typeof e}:function(e){return e&&"function"==typeof Symbol&&e.constructor===Symbol&&e!==Symbol.prototype?"symbol":typeof e})(e)}function f(e,t){return function(e){if(Array.isArray(e))return e}(e)||function(e,t){var n=null==e?null:"undefined"!=typeof Symbol&&e[Symbol.iterator]||e["@@iterator"];if(null!=n){var r,o,i,a,l=[],c=!0,s=!1;try{if(i=(n=n.call(e)).next,0===t){if(Object(n)!==n)return;c=!1}else for(;!(c=(r=i.call(n)).done)&&(l.push(r.value),l.length!==t);c=!0);}catch(e){s=!0,o=e}finally{try{if(!c&&null!=n.return&&(a=n.return(),Object(a)!==a))return}finally{if(s)throw o}}return l}}(e,t)||function(e,t){if(e){if("string"==typeof e)return p(e,t);var n=Object.prototype.toString.call(e).slice(8,-1);if("Object"===n&&e.constructor&&(n=e.constructor.name),"Map"===n||"Set"===n)return Array.from(e);if("Arguments"===n||/^(?:Ui|I)nt(?:8|16|32)(?:Clamped)?Array$/.test(n))return p(e,t)}}(e,t)||function(){throw TypeError("Invalid attempt to destructure non-iterable instance.\nIn order to be iterable, non-array objects must have a [Symbol.iterator]() method.")}()}function p(e,t){(null==t||t>e.length)&&(t=e.length);for(var n=0,r=Array(t);n=0||(o[n]=e[n]);return o}(e,t);if(Object.getOwnPropertySymbols){var i=Object.getOwnPropertySymbols(e);for(r=0;r=0)&&Object.prototype.propertyIsEnumerable.call(e,n)&&(o[n]=e[n])}return o}function M(e,t){return function(e){if(Array.isArray(e))return e}(e)||function(e,t){var n=null==e?null:"undefined"!=typeof Symbol&&e[Symbol.iterator]||e["@@iterator"];if(null!=n){var r,o,i,a,l=[],c=!0,s=!1;try{if(i=(n=n.call(e)).next,0===t){if(Object(n)!==n)return;c=!1}else 
for(;!(c=(r=i.call(n)).done)&&(l.push(r.value),l.length!==t);c=!0);}catch(e){s=!0,o=e}finally{try{if(!c&&null!=n.return&&(a=n.return(),Object(a)!==a))return}finally{if(s)throw o}}return l}}(e,t)||function(e,t){if(e){if("string"==typeof e)return N(e,t);var n=Object.prototype.toString.call(e).slice(8,-1);if("Object"===n&&e.constructor&&(n=e.constructor.name),"Map"===n||"Set"===n)return Array.from(e);if("Arguments"===n||/^(?:Ui|I)nt(?:8|16|32)(?:Clamped)?Array$/.test(n))return N(e,t)}}(e,t)||function(){throw TypeError("Invalid attempt to destructure non-iterable instance.\nIn order to be iterable, non-array objects must have a [Symbol.iterator]() method.")}()}function N(e,t){(null==t||t>e.length)&&(t=e.length);for(var n=0,r=Array(t);n0&&void 0!==arguments[0]?arguments[0]:[];return e.reduce(function(e,t){var i=t.word,a=t.width,l=e[e.length-1];return l&&(null==r||o||l.width+a+na||t.reduce(function(e,t){return e.width>t.width?e:t}).width>Number(r),t]},m=0,g=c.length-1,v=0;m<=g&&v<=c.length-1;){var y=Math.floor((m+g)/2),b=M(h(y-1),2),x=b[0],w=b[1],S=M(h(y),1)[0];if(x||S||(m=y+1),x&&S&&(g=y-1),!x&&S){i=w;break}v++}return i||p},A=function(e){return[{words:i()(e)?[]:e.toString().split(I)}]},_=function(e){var t=e.width,n=e.scaleToFit,r=e.children,o=e.style,i=e.breakAll,a=e.maxLines;if((t||n)&&!c.x.isSsr){var l=R({breakAll:i,children:r,style:o});return l?T({breakAll:i,children:r,maxLines:a,style:o},l.wordsWithComputedWidth,l.spaceWidth,t,n):A(r)}return A(r)},D="#808080",Z=function(e){var t,n=e.x,o=void 0===n?0:n,i=e.y,c=void 0===i?0:i,u=e.lineHeight,d=void 0===u?"1em":u,f=e.capHeight,p=void 0===f?"0.71em":f,h=e.scaleToFit,m=void 0!==h&&h,g=e.textAnchor,v=e.verticalAnchor,y=e.fill,b=void 0===y?D:y,x=P(e,C),w=(0,r.useMemo)(function(){return 
_({breakAll:x.breakAll,children:x.children,maxLines:x.maxLines,scaleToFit:m,style:x.style,width:x.width})},[x.breakAll,x.children,x.maxLines,m,x.style,x.width]),S=x.dx,k=x.dy,M=x.angle,N=x.className,I=x.breakAll,R=P(x,O);if(!(0,l.P2)(o)||!(0,l.P2)(c))return null;var T=o+((0,l.hj)(S)?S:0),A=c+((0,l.hj)(k)?k:0);switch(void 0===v?"end":v){case"start":t=E("calc(".concat(p,")"));break;case"middle":t=E("calc(".concat((w.length-1)/2," * -").concat(d," + (").concat(p," / 2))"));break;default:t=E("calc(".concat(w.length-1," * -").concat(d,")"))}var Z=[];if(m){var L=w[0].width,z=x.width;Z.push("scale(".concat(((0,l.hj)(z)?z/L:1)/L,")"))}return M&&Z.push("rotate(".concat(M,", ").concat(T,", ").concat(A,")")),Z.length&&(R.transform=Z.join(" ")),r.createElement("text",j({},(0,s.L6)(R,!0),{x:T,y:A,className:(0,a.Z)("recharts-text",N),textAnchor:void 0===g?"start":g,fill:b.includes("url")?D:b}),w.map(function(e,n){var o=e.words.join(I?"":" ");return r.createElement("tspan",{x:T,dy:0===n?t:d,key:o},o)}))}},8147:function(e,t,n){"use strict";n.d(t,{u:function(){return F}});var r=n(2265),o=n(34935),i=n.n(o),a=n(77571),l=n.n(a),c=n(61994),s=n(16630);function u(e){return(u="function"==typeof Symbol&&"symbol"==typeof Symbol.iterator?function(e){return typeof e}:function(e){return e&&"function"==typeof Symbol&&e.constructor===Symbol&&e!==Symbol.prototype?"symbol":typeof e})(e)}function d(e,t){(null==t||t>e.length)&&(t=e.length);for(var n=0,r=Array(t);nc[r]+u?Math.max(d,c[r]):Math.max(f,c[r])}function S(e){return(S="function"==typeof Symbol&&"symbol"==typeof Symbol.iterator?function(e){return typeof e}:function(e){return e&&"function"==typeof Symbol&&e.constructor===Symbol&&e!==Symbol.prototype?"symbol":typeof e})(e)}function k(e,t){var n=Object.keys(e);if(Object.getOwnPropertySymbols){var r=Object.getOwnPropertySymbols(e);t&&(r=r.filter(function(t){return Object.getOwnPropertyDescriptor(e,t).enumerable})),n.push.apply(n,r)}return n}function E(e){for(var 
t=1;t1||Math.abs(e.height-this.lastBoundingBox.height)>1)&&(this.lastBoundingBox.width=e.width,this.lastBoundingBox.height=e.height)}else(-1!==this.lastBoundingBox.width||-1!==this.lastBoundingBox.height)&&(this.lastBoundingBox.width=-1,this.lastBoundingBox.height=-1)}},{key:"componentDidMount",value:function(){document.addEventListener("keydown",this.handleKeyDown),this.updateBBox()}},{key:"componentWillUnmount",value:function(){document.removeEventListener("keydown",this.handleKeyDown)}},{key:"componentDidUpdate",value:function(){var e,t;this.props.active&&this.updateBBox(),this.state.dismissed&&((null===(e=this.props.coordinate)||void 0===e?void 0:e.x)!==this.state.dismissedAtCoordinate.x||(null===(t=this.props.coordinate)||void 0===t?void 0:t.y)!==this.state.dismissedAtCoordinate.y)&&(this.state.dismissed=!1)}},{key:"render",value:function(){var e,t,n,o,i,a,l,u,d,f,p,h,m,v,S,k,C,O,j,P,M,N=this,I=this.props,R=I.active,T=I.allowEscapeViewBox,A=I.animationDuration,_=I.animationEasing,D=I.children,Z=I.coordinate,L=I.hasPayload,z=I.isAnimationActive,B=I.offset,F=I.position,H=I.reverseDirection,q=I.useTranslate3d,W=I.viewBox,K=I.wrapperStyle,U=(v=(e={allowEscapeViewBox:T,coordinate:Z,offsetTopLeft:B,position:F,reverseDirection:H,tooltipBox:{height:this.lastBoundingBox.height,width:this.lastBoundingBox.width},useTranslate3d:q,viewBox:W}).allowEscapeViewBox,S=e.coordinate,k=e.offsetTopLeft,C=e.position,O=e.reverseDirection,j=e.tooltipBox,P=e.useTranslate3d,M=e.viewBox,j.height>0&&j.width>0&&S?(n=(t={translateX:h=w({allowEscapeViewBox:v,coordinate:S,key:"x",offsetTopLeft:k,position:C,reverseDirection:O,tooltipDimension:j.width,viewBox:M,viewBoxDimension:M.width}),translateY:m=w({allowEscapeViewBox:v,coordinate:S,key:"y",offsetTopLeft:k,position:C,reverseDirection:O,tooltipDimension:j.height,viewBox:M,viewBoxDimension:M.height}),useTranslate3d:P}).translateX,o=t.translateY,i=t.useTranslate3d,p=(0,g.bO)({transform:i?"translate3d(".concat(n,"px, ").concat(o,"px, 
0)"):"translate(".concat(n,"px, ").concat(o,"px)")})):p=x,{cssProperties:p,cssClasses:(u=(a={translateX:h,translateY:m,coordinate:S}).coordinate,d=a.translateX,f=a.translateY,(0,c.Z)(b,(y(l={},"".concat(b,"-right"),(0,s.hj)(d)&&u&&(0,s.hj)(u.x)&&d>=u.x),y(l,"".concat(b,"-left"),(0,s.hj)(d)&&u&&(0,s.hj)(u.x)&&d=u.y),y(l,"".concat(b,"-top"),(0,s.hj)(f)&&u&&(0,s.hj)(u.y)&&f0;return r.createElement(N,{allowEscapeViewBox:i,animationDuration:a,animationEasing:l,isAnimationActive:d,active:o,coordinate:s,hasPayload:S,offset:f,position:g,reverseDirection:v,useTranslate3d:y,viewBox:b,wrapperStyle:x},(e=_(_({},this.props),{},{payload:w}),r.isValidElement(c)?r.cloneElement(c,e):"function"==typeof c?r.createElement(c,e):r.createElement(m,e)))}}],function(e,t){for(var n=0;n=0||(o[n]=e[n]);return o}(e,t);if(Object.getOwnPropertySymbols){var i=Object.getOwnPropertySymbols(e);for(r=0;r=0)&&Object.prototype.propertyIsEnumerable.call(e,n)&&(o[n]=e[n])}return o}(e,a),u=(0,o.Z)("recharts-layer",c);return r.createElement("g",l({className:u},(0,i.L6)(s,!0),{ref:t}),n)})},48777:function(e,t,n){"use strict";n.d(t,{T:function(){return c}});var r=n(2265),o=n(61994),i=n(82944),a=["children","width","height","viewBox","className","style","title","desc"];function l(){return(l=Object.assign?Object.assign.bind():function(e){for(var t=1;t=0||(o[n]=e[n]);return o}(e,t);if(Object.getOwnPropertySymbols){var i=Object.getOwnPropertySymbols(e);for(r=0;r=0)&&Object.prototype.propertyIsEnumerable.call(e,n)&&(o[n]=e[n])}return o}(e,a),m=s||{width:n,height:c,x:0,y:0},g=(0,o.Z)("recharts-surface",u);return r.createElement("svg",l({},(0,i.L6)(h,!0,"svg"),{className:g,width:n,height:c,style:d,viewBox:"".concat(m.x," ").concat(m.y," ").concat(m.width," ").concat(m.height)}),r.createElement("title",null,f),r.createElement("desc",null,p),t)}},25739:function(e,t,n){"use strict";n.d(t,{br:function(){return h},Mw:function(){return w},zn:function(){return x},sp:function(){return m},qD:function(){return 
b},d2:function(){return y},bH:function(){return g},Ud:function(){return v}});var r=n(2265),o=n(69398),i=n(50967),a=n.n(i)()(function(e){return{x:e.left,y:e.top,width:e.width,height:e.height}},function(e){return["l",e.left,"t",e.top,"w",e.width,"h",e.height].join("")}),l=(0,r.createContext)(void 0),c=(0,r.createContext)(void 0),s=(0,r.createContext)(void 0),u=(0,r.createContext)({}),d=(0,r.createContext)(void 0),f=(0,r.createContext)(0),p=(0,r.createContext)(0),h=function(e){var t=e.state,n=t.xAxisMap,o=t.yAxisMap,i=t.offset,h=e.clipPathId,m=e.children,g=e.width,v=e.height,y=a(i);return r.createElement(l.Provider,{value:n},r.createElement(c.Provider,{value:o},r.createElement(u.Provider,{value:i},r.createElement(s.Provider,{value:y},r.createElement(d.Provider,{value:h},r.createElement(f.Provider,{value:v},r.createElement(p.Provider,{value:g},m)))))))},m=function(){return(0,r.useContext)(d)},g=function(e){var t=(0,r.useContext)(l);null!=t||(0,o.Z)(!1);var n=t[e];return null!=n||(0,o.Z)(!1),n},v=function(e){var t=(0,r.useContext)(c);null!=t||(0,o.Z)(!1);var n=t[e];return null!=n||(0,o.Z)(!1),n},y=function(){return(0,r.useContext)(s)},b=function(){return(0,r.useContext)(u)},x=function(){return(0,r.useContext)(p)},w=function(){return(0,r.useContext)(f)}},57165:function(e,t,n){"use strict";n.d(t,{H:function(){return V}});var r=n(2265);function o(){}function i(e,t,n){e._context.bezierCurveTo((2*e._x0+e._x1)/3,(2*e._y0+e._y1)/3,(e._x0+2*e._x1)/3,(e._y0+2*e._y1)/3,(e._x0+4*e._x1+t)/6,(e._y0+4*e._y1+n)/6)}function a(e){this._context=e}function l(e){this._context=e}function c(e){this._context=e}a.prototype={areaStart:function(){this._line=0},areaEnd:function(){this._line=NaN},lineStart:function(){this._x0=this._x1=this._y0=this._y1=NaN,this._point=0},lineEnd:function(){switch(this._point){case 3:i(this,this._x1,this._y1);case 
2:this._context.lineTo(this._x1,this._y1)}(this._line||0!==this._line&&1===this._point)&&this._context.closePath(),this._line=1-this._line},point:function(e,t){switch(e=+e,t=+t,this._point){case 0:this._point=1,this._line?this._context.lineTo(e,t):this._context.moveTo(e,t);break;case 1:this._point=2;break;case 2:this._point=3,this._context.lineTo((5*this._x0+this._x1)/6,(5*this._y0+this._y1)/6);default:i(this,e,t)}this._x0=this._x1,this._x1=e,this._y0=this._y1,this._y1=t}},l.prototype={areaStart:o,areaEnd:o,lineStart:function(){this._x0=this._x1=this._x2=this._x3=this._x4=this._y0=this._y1=this._y2=this._y3=this._y4=NaN,this._point=0},lineEnd:function(){switch(this._point){case 1:this._context.moveTo(this._x2,this._y2),this._context.closePath();break;case 2:this._context.moveTo((this._x2+2*this._x3)/3,(this._y2+2*this._y3)/3),this._context.lineTo((this._x3+2*this._x2)/3,(this._y3+2*this._y2)/3),this._context.closePath();break;case 3:this.point(this._x2,this._y2),this.point(this._x3,this._y3),this.point(this._x4,this._y4)}},point:function(e,t){switch(e=+e,t=+t,this._point){case 0:this._point=1,this._x2=e,this._y2=t;break;case 1:this._point=2,this._x3=e,this._y3=t;break;case 2:this._point=3,this._x4=e,this._y4=t,this._context.moveTo((this._x0+4*this._x1+e)/6,(this._y0+4*this._y1+t)/6);break;default:i(this,e,t)}this._x0=this._x1,this._x1=e,this._y0=this._y1,this._y1=t}},c.prototype={areaStart:function(){this._line=0},areaEnd:function(){this._line=NaN},lineStart:function(){this._x0=this._x1=this._y0=this._y1=NaN,this._point=0},lineEnd:function(){(this._line||0!==this._line&&3===this._point)&&this._context.closePath(),this._line=1-this._line},point:function(e,t){switch(e=+e,t=+t,this._point){case 0:this._point=1;break;case 1:this._point=2;break;case 2:this._point=3;var n=(this._x0+4*this._x1+e)/6,r=(this._y0+4*this._y1+t)/6;this._line?this._context.lineTo(n,r):this._context.moveTo(n,r);break;case 
3:this._point=4;default:i(this,e,t)}this._x0=this._x1,this._x1=e,this._y0=this._y1,this._y1=t}};class s{constructor(e,t){this._context=e,this._x=t}areaStart(){this._line=0}areaEnd(){this._line=NaN}lineStart(){this._point=0}lineEnd(){(this._line||0!==this._line&&1===this._point)&&this._context.closePath(),this._line=1-this._line}point(e,t){switch(e=+e,t=+t,this._point){case 0:this._point=1,this._line?this._context.lineTo(e,t):this._context.moveTo(e,t);break;case 1:this._point=2;default:this._x?this._context.bezierCurveTo(this._x0=(this._x0+e)/2,this._y0,this._x0,t,e,t):this._context.bezierCurveTo(this._x0,this._y0=(this._y0+t)/2,e,this._y0,e,t)}this._x0=e,this._y0=t}}function u(e){this._context=e}function d(e){this._context=e}function f(e){return new d(e)}function p(e,t,n){var r=e._x1-e._x0,o=t-e._x1,i=(e._y1-e._y0)/(r||o<0&&-0),a=(n-e._y1)/(o||r<0&&-0);return((i<0?-1:1)+(a<0?-1:1))*Math.min(Math.abs(i),Math.abs(a),.5*Math.abs((i*o+a*r)/(r+o)))||0}function h(e,t){var n=e._x1-e._x0;return n?(3*(e._y1-e._y0)/n-t)/2:t}function m(e,t,n){var r=e._x0,o=e._y0,i=e._x1,a=e._y1,l=(i-r)/3;e._context.bezierCurveTo(r+l,o+l*t,i-l,a-l*n,i,a)}function g(e){this._context=e}function v(e){this._context=new y(e)}function y(e){this._context=e}function b(e){this._context=e}function x(e){var t,n,r=e.length-1,o=Array(r),i=Array(r),a=Array(r);for(o[0]=0,i[0]=2,a[0]=e[0]+2*e[1],t=1;t=0;--t)o[t]=(a[t]-o[t+1])/i[t];for(t=0,i[r-1]=(e[r]+o[r-1])/2;t=0&&(this._t=1-this._t,this._line=1-this._line)},point:function(e,t){switch(e=+e,t=+t,this._point){case 0:this._point=1,this._line?this._context.lineTo(e,t):this._context.moveTo(e,t);break;case 1:this._point=2;default:if(this._t<=0)this._context.lineTo(this._x,t),this._context.lineTo(e,t);else{var n=this._x*(1-this._t)+e*this._t;this._context.lineTo(n,this._y),this._context.lineTo(n,t)}}this._x=e,this._y=t}};var S=n(22516),k=n(76115),E=n(67790);function C(e){return e[0]}function O(e){return e[1]}function j(e,t){var 
n=(0,k.Z)(!0),r=null,o=f,i=null,a=(0,E.d)(l);function l(l){var c,s,u,d=(l=(0,S.Z)(l)).length,f=!1;for(null==r&&(i=o(u=a())),c=0;c<=d;++c)!(c=d;--f)l.point(v[f],y[f]);l.lineEnd(),l.areaEnd()}}g&&(v[u]=+e(p,u,s),y[u]=+t(p,u,s),l.point(r?+r(p,u,s):v[u],n?+n(p,u,s):y[u]))}if(h)return l=null,h+""||null}function u(){return j().defined(o).curve(a).context(i)}return e="function"==typeof e?e:void 0===e?C:(0,k.Z)(+e),t="function"==typeof t?t:void 0===t?(0,k.Z)(0):(0,k.Z)(+t),n="function"==typeof n?n:void 0===n?O:(0,k.Z)(+n),s.x=function(t){return arguments.length?(e="function"==typeof t?t:(0,k.Z)(+t),r=null,s):e},s.x0=function(t){return arguments.length?(e="function"==typeof t?t:(0,k.Z)(+t),s):e},s.x1=function(e){return arguments.length?(r=null==e?null:"function"==typeof e?e:(0,k.Z)(+e),s):r},s.y=function(e){return arguments.length?(t="function"==typeof e?e:(0,k.Z)(+e),n=null,s):t},s.y0=function(e){return arguments.length?(t="function"==typeof e?e:(0,k.Z)(+e),s):t},s.y1=function(e){return arguments.length?(n=null==e?null:"function"==typeof e?e:(0,k.Z)(+e),s):n},s.lineX0=s.lineY0=function(){return u().x(e).y(t)},s.lineY1=function(){return u().x(e).y(n)},s.lineX1=function(){return u().x(r).y(t)},s.defined=function(e){return arguments.length?(o="function"==typeof e?e:(0,k.Z)(!!e),s):o},s.curve=function(e){return arguments.length?(a=e,null!=i&&(l=a(i)),s):a},s.context=function(e){return arguments.length?(null==e?i=l=null:l=a(i=e),s):i},s}var M=n(75551),N=n.n(M),I=n(86757),R=n.n(I),T=n(61994),A=n(41637),_=n(82944),D=n(16630);function Z(e){return(Z="function"==typeof Symbol&&"symbol"==typeof Symbol.iterator?function(e){return typeof e}:function(e){return e&&"function"==typeof Symbol&&e.constructor===Symbol&&e!==Symbol.prototype?"symbol":typeof e})(e)}function L(){return(L=Object.assign?Object.assign.bind():function(e){for(var t=1;te.length)&&(t=e.length);for(var n=0,r=Array(t);n=0?1:-1,c=n>=0?1:-1,s=r>=0&&n>=0||r<0&&n<0?1:0;if(a>0&&o instanceof Array){for(var 
u=[0,0,0,0],d=0;d<4;d++)u[d]=o[d]>a?a:o[d];i="M".concat(e,",").concat(t+l*u[0]),u[0]>0&&(i+="A ".concat(u[0],",").concat(u[0],",0,0,").concat(s,",").concat(e+c*u[0],",").concat(t)),i+="L ".concat(e+n-c*u[1],",").concat(t),u[1]>0&&(i+="A ".concat(u[1],",").concat(u[1],",0,0,").concat(s,",\n ").concat(e+n,",").concat(t+l*u[1])),i+="L ".concat(e+n,",").concat(t+r-l*u[2]),u[2]>0&&(i+="A ".concat(u[2],",").concat(u[2],",0,0,").concat(s,",\n ").concat(e+n-c*u[2],",").concat(t+r)),i+="L ".concat(e+c*u[3],",").concat(t+r),u[3]>0&&(i+="A ".concat(u[3],",").concat(u[3],",0,0,").concat(s,",\n ").concat(e,",").concat(t+r-l*u[3])),i+="Z"}else if(a>0&&o===+o&&o>0){var f=Math.min(a,o);i="M ".concat(e,",").concat(t+l*f,"\n A ").concat(f,",").concat(f,",0,0,").concat(s,",").concat(e+c*f,",").concat(t,"\n L ").concat(e+n-c*f,",").concat(t,"\n A ").concat(f,",").concat(f,",0,0,").concat(s,",").concat(e+n,",").concat(t+l*f,"\n L ").concat(e+n,",").concat(t+r-l*f,"\n A ").concat(f,",").concat(f,",0,0,").concat(s,",").concat(e+n-c*f,",").concat(t+r,"\n L ").concat(e+c*f,",").concat(t+r,"\n A ").concat(f,",").concat(f,",0,0,").concat(s,",").concat(e,",").concat(t+r-l*f," Z")}else i="M ".concat(e,",").concat(t," h ").concat(n," v ").concat(r," h ").concat(-n," Z");return i},p=function(e,t){if(!e||!t)return!1;var n=e.x,r=e.y,o=t.x,i=t.y,a=t.width,l=t.height;return!!(Math.abs(a)>0&&Math.abs(l)>0)&&n>=Math.min(o,o+a)&&n<=Math.max(o,o+a)&&r>=Math.min(i,i+l)&&r<=Math.max(i,i+l)},h={x:0,y:0,width:0,height:0,radius:0,isAnimationActive:!1,isUpdateAnimationActive:!1,animationBegin:0,animationDuration:1500,animationEasing:"ease"},m=function(e){var t,n=d(d({},h),e),l=(0,r.useRef)(),u=function(e){if(Array.isArray(e))return e}(t=(0,r.useState)(-1))||function(e,t){var n=null==e?null:"undefined"!=typeof Symbol&&e[Symbol.iterator]||e["@@iterator"];if(null!=n){var 
r,o,i,a,l=[],c=!0,s=!1;try{for(i=(n=n.call(e)).next;!(c=(r=i.call(n)).done)&&(l.push(r.value),2!==l.length);c=!0);}catch(e){s=!0,o=e}finally{try{if(!c&&null!=n.return&&(a=n.return(),Object(a)!==a))return}finally{if(s)throw o}}return l}}(t,2)||function(e,t){if(e){if("string"==typeof e)return s(e,2);var n=Object.prototype.toString.call(e).slice(8,-1);if("Object"===n&&e.constructor&&(n=e.constructor.name),"Map"===n||"Set"===n)return Array.from(e);if("Arguments"===n||/^(?:Ui|I)nt(?:8|16|32)(?:Clamped)?Array$/.test(n))return s(e,2)}}(t,2)||function(){throw TypeError("Invalid attempt to destructure non-iterable instance.\nIn order to be iterable, non-array objects must have a [Symbol.iterator]() method.")}(),p=u[0],m=u[1];(0,r.useEffect)(function(){if(l.current&&l.current.getTotalLength)try{var e=l.current.getTotalLength();e&&m(e)}catch(e){}},[]);var g=n.x,v=n.y,y=n.width,b=n.height,x=n.radius,w=n.className,S=n.animationEasing,k=n.animationDuration,E=n.animationBegin,C=n.isAnimationActive,O=n.isUpdateAnimationActive;if(g!==+g||v!==+v||y!==+y||b!==+b||0===y||0===b)return null;var j=(0,o.Z)("recharts-rectangle",w);return O?r.createElement(i.ZP,{canBegin:p>0,from:{width:y,height:b,x:g,y:v},to:{width:y,height:b,x:g,y:v},duration:k,animationEasing:S,isActive:O},function(e){var t=e.width,o=e.height,s=e.x,u=e.y;return r.createElement(i.ZP,{canBegin:p>0,from:"0px ".concat(-1===p?1:p,"px"),to:"".concat(p,"px 0px"),attributeName:"strokeDasharray",begin:E,duration:k,isActive:C,easing:S},r.createElement("path",c({},(0,a.L6)(n,!0),{className:j,d:f(s,u,t,o,x),ref:l})))}):r.createElement("path",c({},(0,a.L6)(n,!0),{className:j,d:f(g,v,y,b,x)}))}},60474:function(e,t,n){"use strict";n.d(t,{L:function(){return g}});var r=n(2265),o=n(61994),i=n(82944),a=n(39206),l=n(16630);function c(e){return(c="function"==typeof Symbol&&"symbol"==typeof Symbol.iterator?function(e){return typeof e}:function(e){return e&&"function"==typeof 
Symbol&&e.constructor===Symbol&&e!==Symbol.prototype?"symbol":typeof e})(e)}function s(){return(s=Object.assign?Object.assign.bind():function(e){for(var t=1;t180),",").concat(+(c>u),",\n ").concat(f.x,",").concat(f.y,"\n ");if(o>0){var h=(0,a.op)(n,r,o,c),m=(0,a.op)(n,r,o,u);p+="L ".concat(m.x,",").concat(m.y,"\n A ").concat(o,",").concat(o,",0,\n ").concat(+(Math.abs(s)>180),",").concat(+(c<=u),",\n ").concat(h.x,",").concat(h.y," Z")}else p+="L ".concat(n,",").concat(r," Z");return p},h=function(e){var t=e.cx,n=e.cy,r=e.innerRadius,o=e.outerRadius,i=e.cornerRadius,a=e.forceCornerRadius,c=e.cornerIsExternal,s=e.startAngle,u=e.endAngle,d=(0,l.uY)(u-s),h=f({cx:t,cy:n,radius:o,angle:s,sign:d,cornerRadius:i,cornerIsExternal:c}),m=h.circleTangency,g=h.lineTangency,v=h.theta,y=f({cx:t,cy:n,radius:o,angle:u,sign:-d,cornerRadius:i,cornerIsExternal:c}),b=y.circleTangency,x=y.lineTangency,w=y.theta,S=c?Math.abs(s-u):Math.abs(s-u)-v-w;if(S<0)return a?"M ".concat(g.x,",").concat(g.y,"\n a").concat(i,",").concat(i,",0,0,1,").concat(2*i,",0\n a").concat(i,",").concat(i,",0,0,1,").concat(-(2*i),",0\n "):p({cx:t,cy:n,innerRadius:r,outerRadius:o,startAngle:s,endAngle:u});var k="M ".concat(g.x,",").concat(g.y,"\n A").concat(i,",").concat(i,",0,0,").concat(+(d<0),",").concat(m.x,",").concat(m.y,"\n A").concat(o,",").concat(o,",0,").concat(+(S>180),",").concat(+(d<0),",").concat(b.x,",").concat(b.y,"\n A").concat(i,",").concat(i,",0,0,").concat(+(d<0),",").concat(x.x,",").concat(x.y,"\n ");if(r>0){var E=f({cx:t,cy:n,radius:r,angle:s,sign:d,isExternal:!0,cornerRadius:i,cornerIsExternal:c}),C=E.circleTangency,O=E.lineTangency,j=E.theta,P=f({cx:t,cy:n,radius:r,angle:u,sign:-d,isExternal:!0,cornerRadius:i,cornerIsExternal:c}),M=P.circleTangency,N=P.lineTangency,I=P.theta,R=c?Math.abs(s-u):Math.abs(s-u)-j-I;if(R<0&&0===i)return"".concat(k,"L").concat(t,",").concat(n,"Z");k+="L".concat(N.x,",").concat(N.y,"\n 
A").concat(i,",").concat(i,",0,0,").concat(+(d<0),",").concat(M.x,",").concat(M.y,"\n A").concat(r,",").concat(r,",0,").concat(+(R>180),",").concat(+(d>0),",").concat(C.x,",").concat(C.y,"\n A").concat(i,",").concat(i,",0,0,").concat(+(d<0),",").concat(O.x,",").concat(O.y,"Z")}else k+="L".concat(t,",").concat(n,"Z");return k},m={cx:0,cy:0,innerRadius:0,outerRadius:0,startAngle:0,endAngle:0,cornerRadius:0,forceCornerRadius:!1,cornerIsExternal:!1},g=function(e){var t,n=d(d({},m),e),a=n.cx,c=n.cy,u=n.innerRadius,f=n.outerRadius,g=n.cornerRadius,v=n.forceCornerRadius,y=n.cornerIsExternal,b=n.startAngle,x=n.endAngle,w=n.className;if(f
16)throw Error(u+w(e));if(!e.s)return new p(i);for(null==t?(l=!1,c=h):c=t,a=new p(.03125);e.abs().gte(.1);)e=e.times(a),d+=5;for(c+=Math.log(f(2,d))/Math.LN10*2+5|0,n=r=o=new p(i),p.precision=c;;){if(r=O(r.times(e),c),n=n.times(++s),y((a=o.plus(b(r,n,c))).d).slice(0,c)===y(o.d).slice(0,c)){for(;d--;)o=O(o.times(o),c);return p.precision=h,null==t?(l=!0,O(o,h)):o}o=a}}function w(e){for(var t=7*e.e,n=e.d[0];n>=10;n/=10)t++;return t}function S(e,t,n){if(t>e.LN10.sd())throw l=!0,n&&(e.precision=n),Error(c+"LN10 precision limit exceeded");return O(new e(e.LN10),t)}function k(e){for(var t="";e--;)t+="0";return t}function E(e,t){var n,r,o,a,s,u,d,f,p,h=1,m=e,g=m.d,v=m.constructor,x=v.precision;if(m.s<1)throw Error(c+(m.s?"NaN":"-Infinity"));if(m.eq(i))return new v(0);if(null==t?(l=!1,f=x):f=t,m.eq(10))return null==t&&(l=!0),S(v,f);if(f+=10,v.precision=f,r=(n=y(g)).charAt(0),!(15e14>Math.abs(a=w(m))))return d=S(v,f+2,x).times(a+""),m=E(new v(r+"."+n.slice(1)),f-10).plus(d),v.precision=x,null==t?(l=!0,O(m,x)):m;for(;r<7&&1!=r||1==r&&n.charAt(1)>3;)r=(n=y((m=m.times(e)).d)).charAt(0),h++;for(a=w(m),r>1?(m=new v("0."+n),a++):m=new v(r+"."+n.slice(1)),u=s=m=b(m.minus(i),m.plus(i),f),p=O(m.times(m),f),o=3;;){if(s=O(s.times(p),f),y((d=u.plus(b(s,new v(o),f))).d).slice(0,f)===y(u.d).slice(0,f))return u=u.times(2),0!==a&&(u=u.plus(S(v,f+2,x).times(a+""))),u=b(u,new v(h),f),v.precision=x,null==t?(l=!0,O(u,x)):u;u=d,o+=2}}function C(e,t){var n,r,o;for((n=t.indexOf("."))>-1&&(t=t.replace(".","")),(r=t.search(/e/i))>0?(n<0&&(n=r),n+=+t.slice(r+1),t=t.substring(0,r)):n<0&&(n=t.length),r=0;48===t.charCodeAt(r);)++r;for(o=t.length;48===t.charCodeAt(o-1);)--o;if(t=t.slice(r,o)){if(o-=r,n=n-r-1,e.e=d(n/7),e.d=[],r=(n+1)%7,n<0&&(r+=7),rh||e.e<-h))throw Error(u+n)}else e.s=0,e.e=0,e.d=[0];return e}function O(e,t,n){var r,o,i,a,c,s,p,m,g=e.d;for(a=1,i=g[0];i>=10;i/=10)a++;if((r=t-a)<0)r+=7,o=t,p=g[m=0];else{if((m=Math.ceil((r+1)/7))>=(i=g.length))return 
e;for(a=1,p=i=g[m];i>=10;i/=10)a++;r%=7,o=r-7+a}if(void 0!==n&&(c=p/(i=f(10,a-o-1))%10|0,s=t<0||void 0!==g[m+1]||p%i,s=n<4?(c||s)&&(0==n||n==(e.s<0?3:2)):c>5||5==c&&(4==n||s||6==n&&(r>0?o>0?p/f(10,a-o):0:g[m-1])%10&1||n==(e.s<0?8:7))),t<1||!g[0])return s?(i=w(e),g.length=1,t=t-i-1,g[0]=f(10,(7-t%7)%7),e.e=d(-t/7)||0):(g.length=1,g[0]=e.e=e.s=0),e;if(0==r?(g.length=m,i=1,m--):(g.length=m+1,i=f(10,7-r),g[m]=o>0?(p/f(10,a-o)%f(10,o)|0)*i:0),s)for(;;){if(0==m){1e7==(g[0]+=i)&&(g[0]=1,++e.e);break}if(g[m]+=i,1e7!=g[m])break;g[m--]=0,i=1}for(r=g.length;0===g[--r];)g.pop();if(l&&(e.e>h||e.e<-h))throw Error(u+w(e));return e}function j(e,t){var n,r,o,i,a,c,s,u,d,f,p=e.constructor,h=p.precision;if(!e.s||!t.s)return t.s?t.s=-t.s:t=new p(e),l?O(t,h):t;if(s=e.d,f=t.d,r=t.e,u=e.e,s=s.slice(),a=u-r){for((d=a<0)?(n=s,a=-a,c=f.length):(n=f,r=u,c=s.length),a>(o=Math.max(Math.ceil(h/7),c)+2)&&(a=o,n.length=1),n.reverse(),o=a;o--;)n.push(0);n.reverse()}else{for((d=(o=s.length)<(c=f.length))&&(c=o),o=0;o0;--o)s[c++]=0;for(o=f.length;o>a;){if(s[--o]0?i=i.charAt(0)+"."+i.slice(1)+k(r):a>1&&(i=i.charAt(0)+"."+i.slice(1)),i=i+(o<0?"e":"e+")+o):o<0?(i="0."+k(-o-1)+i,n&&(r=n-a)>0&&(i+=k(r))):o>=a?(i+=k(o+1-a),n&&(r=n-o-1)>0&&(i=i+"."+k(r))):((r=o+1)0&&(o+1===a&&(i+="."),i+=k(r))),e.s<0?"-"+i:i}function M(e,t){if(e.length>t)return e.length=t,!0}function N(e){if(!e||"object"!=typeof e)throw Error(c+"Object expected");var t,n,r,o=["precision",1,1e9,"rounding",0,8,"toExpNeg",-1/0,0,"toExpPos",0,1/0];for(t=0;t=o[t+1]&&r<=o[t+2])this[n]=r;else throw Error(s+n+": "+r)}if(void 0!==(r=e[n="LN10"])){if(r==Math.LN10)this[n]=new this(r);else throw Error(s+n+": "+r)}return this}(a=function e(t){var n,r,o;function i(e){if(!(this instanceof i))return new i(e);if(this.constructor=i,e instanceof i){this.s=e.s,this.e=e.e,this.d=(e=e.d)?e.slice():e;return}if("number"==typeof e){if(0*e!=0)throw Error(s+e);if(e>0)this.s=1;else 
if(e<0)e=-e,this.s=-1;else{this.s=0,this.e=0,this.d=[0];return}if(e===~~e&&e<1e7){this.e=0,this.d=[e];return}return C(this,e.toString())}if("string"!=typeof e)throw Error(s+e);if(45===e.charCodeAt(0)?(e=e.slice(1),this.s=-1):this.s=1,p.test(e))C(this,e);else throw Error(s+e)}if(i.prototype=m,i.ROUND_UP=0,i.ROUND_DOWN=1,i.ROUND_CEIL=2,i.ROUND_FLOOR=3,i.ROUND_HALF_UP=4,i.ROUND_HALF_DOWN=5,i.ROUND_HALF_EVEN=6,i.ROUND_HALF_CEIL=7,i.ROUND_HALF_FLOOR=8,i.clone=e,i.config=i.set=N,void 0===t&&(t={}),t)for(n=0,o=["precision","rounding","toExpNeg","toExpPos","LN10"];n-1}},56883:function(e){e.exports=function(e,t,n){for(var r=-1,o=null==e?0:e.length;++r0&&i(u)?n>1?e(u,n-1,i,a,l):r(l,u):a||(l[l.length]=u)}return l}},63321:function(e,t,n){var r=n(33023)();e.exports=r},98060:function(e,t,n){var r=n(63321),o=n(43228);e.exports=function(e,t){return e&&r(e,t,o)}},92167:function(e,t,n){var r=n(67906),o=n(70235);e.exports=function(e,t){t=r(t,e);for(var n=0,i=t.length;null!=e&&nt}},93012:function(e){e.exports=function(e,t){return null!=e&&t in Object(e)}},47909:function(e,t,n){var r=n(8235),o=n(31953),i=n(35281);e.exports=function(e,t,n){return t==t?i(e,t,n):r(e,o,n)}},90370:function(e,t,n){var r=n(54506),o=n(10303);e.exports=function(e){return o(e)&&"[object Arguments]"==r(e)}},56318:function(e,t,n){var r=n(6791),o=n(10303);e.exports=function e(t,n,i,a,l){return t===n||(null!=t&&null!=n&&(o(t)||o(n))?r(t,n,i,a,e,l):t!=t&&n!=n)}},6791:function(e,t,n){var r=n(85885),o=n(97638),i=n(88030),a=n(64974),l=n(81690),c=n(25614),s=n(98051),u=n(9792),d="[object Arguments]",f="[object Array]",p="[object Object]",h=Object.prototype.hasOwnProperty;e.exports=function(e,t,n,m,g,v){var y=c(e),b=c(t),x=y?f:l(e),w=b?f:l(t);x=x==d?p:x,w=w==d?p:w;var S=x==p,k=w==p,E=x==w;if(E&&s(e)){if(!s(t))return!1;y=!0,S=!1}if(E&&!S)return v||(v=new r),y||u(e)?o(e,t,n,m,g,v):i(e,t,x,n,m,g,v);if(!(1&n)){var C=S&&h.call(e,"__wrapped__"),O=k&&h.call(t,"__wrapped__");if(C||O){var j=C?e.value():e,P=O?t.value():t;return 
v||(v=new r),g(j,P,n,m,v)}}return!!E&&(v||(v=new r),a(e,t,n,m,g,v))}},62538:function(e,t,n){var r=n(85885),o=n(56318);e.exports=function(e,t,n,i){var a=n.length,l=a,c=!i;if(null==e)return!l;for(e=Object(e);a--;){var s=n[a];if(c&&s[2]?s[1]!==e[s[0]]:!(s[0]in e))return!1}for(;++ao?0:o+t),(n=n>o?o:n)<0&&(n+=o),o=t>n?0:n-t>>>0,t>>>=0;for(var i=Array(o);++r=200){var m=t?null:l(e);if(m)return c(m);f=!1,u=a,h=new r}else h=t?[]:p;e:for(;++s=o?e:r(e,t,n)}},1536:function(e,t,n){var r=n(78371);e.exports=function(e,t){if(e!==t){var n=void 0!==e,o=null===e,i=e==e,a=r(e),l=void 0!==t,c=null===t,s=t==t,u=r(t);if(!c&&!u&&!a&&e>t||a&&l&&s&&!c&&!u||o&&l&&s||!n&&s||!i)return 1;if(!o&&!a&&!u&&e=c)return s;return s*("desc"==n[o]?-1:1)}}return e.index-t.index}},92077:function(e,t,n){var r=n(74288)["__core-js_shared__"];e.exports=r},97930:function(e,t,n){var r=n(5629);e.exports=function(e,t){return function(n,o){if(null==n)return n;if(!r(n))return e(n,o);for(var i=n.length,a=t?i:-1,l=Object(n);(t?a--:++a-1?l[c?t[s]:s]:void 0}}},35464:function(e,t,n){var r=n(19608),o=n(49639),i=n(175);e.exports=function(e){return function(t,n,a){return a&&"number"!=typeof a&&o(t,n,a)&&(n=a=void 0),t=i(t),void 0===n?(n=t,t=0):n=i(n),a=void 0===a?tu))return!1;var f=c.get(e),p=c.get(t);if(f&&p)return f==t&&p==e;var h=-1,m=!0,g=2&n?new r:void 0;for(c.set(e,t),c.set(t,e);++h-1&&e%1==0&&e-1}},13368:function(e,t,n){var r=n(24457);e.exports=function(e,t){var n=this.__data__,o=r(n,e);return o<0?(++this.size,n.push([e,t])):n[o][1]=t,this}},38764:function(e,t,n){var r=n(9855),o=n(99078),i=n(88675);e.exports=function(){this.size=0,this.__data__={hash:new r,map:new(i||o),string:new r}}},78615:function(e,t,n){var r=n(1507);e.exports=function(e){var t=r(this,e).delete(e);return this.size-=t?1:0,t}},83391:function(e,t,n){var r=n(1507);e.exports=function(e){return r(this,e).get(e)}},53483:function(e,t,n){var r=n(1507);e.exports=function(e){return r(this,e).has(e)}},74724:function(e,t,n){var 
r=n(1507);e.exports=function(e,t){var n=r(this,e),o=n.size;return n.set(e,t),this.size+=n.size==o?0:1,this}},22523:function(e){e.exports=function(e){var t=-1,n=Array(e.size);return e.forEach(function(e,r){n[++t]=[r,e]}),n}},47073:function(e){e.exports=function(e,t){return function(n){return null!=n&&n[e]===t&&(void 0!==t||e in Object(n))}}},23787:function(e,t,n){var r=n(50967);e.exports=function(e){var t=r(e,function(e){return 500===n.size&&n.clear(),e}),n=t.cache;return t}},20453:function(e,t,n){var r=n(39866)(Object,"create");e.exports=r},77184:function(e,t,n){var r=n(45070)(Object.keys,Object);e.exports=r},39931:function(e,t,n){e=n.nmd(e);var r=n(17071),o=t&&!t.nodeType&&t,i=o&&e&&!e.nodeType&&e,a=i&&i.exports===o&&r.process,l=function(){try{var e=i&&i.require&&i.require("util").types;if(e)return e;return a&&a.binding&&a.binding("util")}catch(e){}}();e.exports=l},80910:function(e){var t=Object.prototype.toString;e.exports=function(e){return t.call(e)}},45070:function(e){e.exports=function(e,t){return function(n){return e(t(n))}}},49478:function(e,t,n){var r=n(60493),o=Math.max;e.exports=function(e,t,n){return t=o(void 0===t?e.length-1:t,0),function(){for(var i=arguments,a=-1,l=o(i.length-t,0),c=Array(l);++a0){if(++n>=800)return arguments[0]}else n=0;return e.apply(void 0,arguments)}}},84092:function(e,t,n){var r=n(99078);e.exports=function(){this.__data__=new r,this.size=0}},31663:function(e){e.exports=function(e){var t=this.__data__,n=t.delete(e);return this.size=t.size,n}},69135:function(e){e.exports=function(e){return this.__data__.get(e)}},39552:function(e){e.exports=function(e){return this.__data__.has(e)}},8381:function(e,t,n){var r=n(99078),o=n(88675),i=n(76219);e.exports=function(e,t){var n=this.__data__;if(n instanceof r){var a=n.__data__;if(!o||a.length<199)return a.push([e,t]),this.size=++n.size,this;n=this.__data__=new i(a)}return n.set(e,t),this.size=n.size,this}},35281:function(e){e.exports=function(e,t,n){for(var 
r=n-1,o=e.length;++r=t||n<0||g&&r>=u}function x(){var e,n,r,i=o();if(b(i))return w(i);f=setTimeout(x,(e=i-p,n=i-h,r=t-e,g?l(r,u-n):r))}function w(e){return(f=void 0,v&&c)?y(e):(c=s=void 0,d)}function S(){var e,n=o(),r=b(n);if(c=arguments,s=this,p=n,r){if(void 0===f)return h=e=p,f=setTimeout(x,t),m?y(e):d;if(g)return clearTimeout(f),f=setTimeout(x,t),y(p)}return void 0===f&&(f=setTimeout(x,t)),d}return t=i(t)||0,r(n)&&(m=!!n.leading,u=(g="maxWait"in n)?a(i(n.maxWait)||0,t):u,v="trailing"in n?!!n.trailing:v),S.cancel=function(){void 0!==f&&clearTimeout(f),h=0,c=p=s=f=void 0},S.flush=function(){return void 0===f?d:w(o())},S}},37560:function(e){e.exports=function(e,t){return e===t||e!=e&&t!=t}},32242:function(e,t,n){var r=n(78897),o=n(28935),i=n(88157),a=n(25614),l=n(49639);e.exports=function(e,t,n){var c=a(e)?r:o;return n&&l(e,t,n)&&(t=void 0),c(e,i(t,3))}},84173:function(e,t,n){var r=n(82602)(n(12152));e.exports=r},12152:function(e,t,n){var r=n(8235),o=n(88157),i=n(85759),a=Math.max;e.exports=function(e,t,n){var l=null==e?0:e.length;if(!l)return -1;var c=null==n?0:i(n);return c<0&&(c=a(l+c,0)),r(e,o(t,3),c)}},11314:function(e,t,n){var r=n(72569),o=n(89238);e.exports=function(e,t){return r(o(e,t),1)}},13735:function(e,t,n){var r=n(92167);e.exports=function(e,t,n){var o=null==e?void 0:r(e,t);return void 0===o?n:o}},17764:function(e,t,n){var r=n(93012),o=n(59592);e.exports=function(e,t){return null!=e&&o(e,t,r)}},79586:function(e){e.exports=function(e){return e}},56569:function(e,t,n){var r=n(90370),o=n(10303),i=Object.prototype,a=i.hasOwnProperty,l=i.propertyIsEnumerable,c=r(function(){return arguments}())?r:function(e){return o(e)&&a.call(e,"callee")&&!l.call(e,"callee")};e.exports=c},25614:function(e){var t=Array.isArray;e.exports=t},5629:function(e,t,n){var r=n(86757),o=n(13973);e.exports=function(e){return null!=e&&o(e.length)&&!r(e)}},24342:function(e,t,n){var r=n(54506),o=n(10303);e.exports=function(e){return!0===e||!1===e||o(e)&&"[object 
Boolean]"==r(e)}},98051:function(e,t,n){e=n.nmd(e);var r=n(74288),o=n(7406),i=t&&!t.nodeType&&t,a=i&&e&&!e.nodeType&&e,l=a&&a.exports===i?r.Buffer:void 0,c=l?l.isBuffer:void 0;e.exports=c||o},21652:function(e,t,n){var r=n(56318);e.exports=function(e,t){return r(e,t)}},86757:function(e,t,n){var r=n(54506),o=n(28302);e.exports=function(e){if(!o(e))return!1;var t=r(e);return"[object Function]"==t||"[object GeneratorFunction]"==t||"[object AsyncFunction]"==t||"[object Proxy]"==t}},13973:function(e){e.exports=function(e){return"number"==typeof e&&e>-1&&e%1==0&&e<=9007199254740991}},82559:function(e,t,n){var r=n(22345);e.exports=function(e){return r(e)&&e!=+e}},77571:function(e){e.exports=function(e){return null==e}},22345:function(e,t,n){var r=n(54506),o=n(10303);e.exports=function(e){return"number"==typeof e||o(e)&&"[object Number]"==r(e)}},28302:function(e){e.exports=function(e){var t=typeof e;return null!=e&&("object"==t||"function"==t)}},10303:function(e){e.exports=function(e){return null!=e&&"object"==typeof e}},90231:function(e,t,n){var r=n(54506),o=n(62602),i=n(10303),a=Object.prototype,l=Function.prototype.toString,c=a.hasOwnProperty,s=l.call(Object);e.exports=function(e){if(!i(e)||"[object Object]"!=r(e))return!1;var t=o(e);if(null===t)return!0;var n=c.call(t,"constructor")&&t.constructor;return"function"==typeof n&&n instanceof n&&l.call(n)==s}},42715:function(e,t,n){var r=n(54506),o=n(25614),i=n(10303);e.exports=function(e){return"string"==typeof e||!o(e)&&i(e)&&"[object String]"==r(e)}},78371:function(e,t,n){var r=n(54506),o=n(10303);e.exports=function(e){return"symbol"==typeof e||o(e)&&"[object Symbol]"==r(e)}},9792:function(e,t,n){var r=n(59332),o=n(23305),i=n(39931),a=i&&i.isTypedArray,l=a?o(a):r;e.exports=l},43228:function(e,t,n){var r=n(28579),o=n(4578),i=n(5629);e.exports=function(e){return i(e)?r(e):o(e)}},86185:function(e){e.exports=function(e){var t=null==e?0:e.length;return t?e[t-1]:void 0}},89238:function(e,t,n){var 
r=n(73819),o=n(88157),i=n(24240),a=n(25614);e.exports=function(e,t){return(a(e)?r:i)(e,o(t,3))}},41443:function(e,t,n){var r=n(83023),o=n(98060),i=n(88157);e.exports=function(e,t){var n={};return t=i(t,3),o(e,function(e,o,i){r(n,o,t(e,o,i))}),n}},95645:function(e,t,n){var r=n(67646),o=n(58905),i=n(79586);e.exports=function(e){return e&&e.length?r(e,i,o):void 0}},35802:function(e,t,n){var r=n(67646),o=n(58905),i=n(88157);e.exports=function(e,t){return e&&e.length?r(e,i(t,2),o):void 0}},50967:function(e,t,n){var r=n(76219);function o(e,t){if("function"!=typeof e||null!=t&&"function"!=typeof t)throw TypeError("Expected a function");var n=function(){var r=arguments,o=t?t.apply(this,r):r[0],i=n.cache;if(i.has(o))return i.get(o);var a=e.apply(this,r);return n.cache=i.set(o,a)||i,a};return n.cache=new(o.Cache||r),n}o.Cache=r,e.exports=o},99008:function(e,t,n){var r=n(67646),o=n(20121),i=n(79586);e.exports=function(e){return e&&e.length?r(e,i,o):void 0}},37891:function(e,t,n){var r=n(67646),o=n(88157),i=n(20121);e.exports=function(e,t){return e&&e.length?r(e,o(t,2),i):void 0}},93810:function(e){e.exports=function(){}},11121:function(e,t,n){var r=n(74288);e.exports=function(){return r.Date.now()}},22350:function(e,t,n){var r=n(18155),o=n(73584),i=n(67352),a=n(70235);e.exports=function(e){return i(e)?r(a(e)):o(e)}},99676:function(e,t,n){var r=n(35464)();e.exports=r},33645:function(e,t,n){var r=n(25253),o=n(88157),i=n(12327),a=n(25614),l=n(49639);e.exports=function(e,t,n){var c=a(e)?r:i;return n&&l(e,t,n)&&(t=void 0),c(e,o(t,3))}},34935:function(e,t,n){var r=n(72569),o=n(84046),i=n(44843),a=n(49639),l=i(function(e,t){if(null==e)return[];var n=t.length;return n>1&&a(e,t[0],t[1])?t=[]:n>2&&a(t[0],t[1],t[2])&&(t=[t[0]]),o(e,r(t,1),[])});e.exports=l},55716:function(e){e.exports=function(){return[]}},7406:function(e){e.exports=function(){return!1}},37065:function(e,t,n){var r=n(7310),o=n(28302);e.exports=function(e,t,n){var i=!0,a=!0;if("function"!=typeof e)throw 
TypeError("Expected a function");return o(n)&&(i="leading"in n?!!n.leading:i,a="trailing"in n?!!n.trailing:a),r(e,t,{leading:i,maxWait:t,trailing:a})}},175:function(e,t,n){var r=n(6660),o=1/0;e.exports=function(e){return e?(e=r(e))===o||e===-o?(e<0?-1:1)*17976931348623157e292:e==e?e:0:0===e?e:0}},85759:function(e,t,n){var r=n(175);e.exports=function(e){var t=r(e),n=t%1;return t==t?n?t-n:t:0}},6660:function(e,t,n){var r=n(41087),o=n(28302),i=n(78371),a=0/0,l=/^[-+]0x[0-9a-f]+$/i,c=/^0b[01]+$/i,s=/^0o[0-7]+$/i,u=parseInt;e.exports=function(e){if("number"==typeof e)return e;if(i(e))return a;if(o(e)){var t="function"==typeof e.valueOf?e.valueOf():e;e=o(t)?t+"":t}if("string"!=typeof e)return 0===e?e:+e;e=r(e);var n=c.test(e);return n||s.test(e)?u(e.slice(2),n?2:8):l.test(e)?a:+e}},3641:function(e,t,n){var r=n(65020);e.exports=function(e){return null==e?"":r(e)}},47230:function(e,t,n){var r=n(88157),o=n(13826);e.exports=function(e,t){return e&&e.length?o(e,r(t,2)):[]}},75551:function(e,t,n){var r=n(80675)("toUpperCase");e.exports=r},27648:function(e,t,n){"use strict";n.d(t,{default:function(){return o.a}});var r=n(72972),o=n.n(r)},55449:function(e,t,n){"use strict";Object.defineProperty(t,"__esModule",{value:!0}),Object.defineProperty(t,"addLocale",{enumerable:!0,get:function(){return r}}),n(33068);let r=function(e){for(var t=arguments.length,n=Array(t>1?t-1:0),r=1;ri?e.prefetch(t,o):e.prefetch(t,n,r))().catch(e=>{})}}function b(e){return"string"==typeof e?e:(0,c.formatUrl)(e)}let x=i.default.forwardRef(function(e,t){let n,r;let{href:c,as:v,children:x,prefetch:w=null,passHref:S,replace:k,shallow:E,scroll:C,locale:O,onClick:j,onMouseEnter:P,onTouchStart:M,legacyBehavior:N=!1,...I}=e;n=x,N&&("string"==typeof n||"number"==typeof n)&&(n=(0,o.jsx)("a",{children:n}));let R=i.default.useContext(d.RouterContext),T=i.default.useContext(f.AppRouterContext),A=null!=R?R:T,_=!R,D=!1!==w,Z=null===w?g.PrefetchKind.AUTO:g.PrefetchKind.FULL,{href:L,as:z}=i.default.useMemo(()=>{if(!R){let 
e=b(c);return{href:e,as:v?b(v):e}}let[e,t]=(0,a.resolveHref)(R,c,!0);return{href:e,as:v?(0,a.resolveHref)(R,v):t||e}},[R,c,v]),B=i.default.useRef(L),F=i.default.useRef(z);N&&(r=i.default.Children.only(n));let H=N?r&&"object"==typeof r&&r.ref:t,[q,W,K]=(0,p.useIntersection)({rootMargin:"200px"}),U=i.default.useCallback(e=>{(F.current!==z||B.current!==L)&&(K(),F.current=z,B.current=L),q(e),H&&("function"==typeof H?H(e):"object"==typeof H&&(H.current=e))},[z,H,L,K,q]);i.default.useEffect(()=>{A&&W&&D&&y(A,L,z,{locale:O},{kind:Z},_)},[z,L,W,O,D,null==R?void 0:R.locale,A,_,Z]);let V={ref:U,onClick(e){N||"function"!=typeof j||j(e),N&&r.props&&"function"==typeof r.props.onClick&&r.props.onClick(e),A&&!e.defaultPrevented&&function(e,t,n,r,o,a,c,s,u){let{nodeName:d}=e.currentTarget;if("A"===d.toUpperCase()&&(function(e){let t=e.currentTarget.getAttribute("target");return t&&"_self"!==t||e.metaKey||e.ctrlKey||e.shiftKey||e.altKey||e.nativeEvent&&2===e.nativeEvent.which}(e)||!u&&!(0,l.isLocalURL)(n)))return;e.preventDefault();let f=()=>{let e=null==c||c;"beforePopState"in t?t[o?"replace":"push"](n,r,{shallow:a,locale:s,scroll:e}):t[o?"replace":"push"](r||n,{scroll:e})};u?i.default.startTransition(f):f()}(e,A,L,z,k,E,C,O,_)},onMouseEnter(e){N||"function"!=typeof P||P(e),N&&r.props&&"function"==typeof r.props.onMouseEnter&&r.props.onMouseEnter(e),A&&(D||!_)&&y(A,L,z,{locale:O,priority:!0,bypassPrefetchedCheck:!0},{kind:Z},_)},onTouchStart:function(e){N||"function"!=typeof M||M(e),N&&r.props&&"function"==typeof r.props.onTouchStart&&r.props.onTouchStart(e),A&&(D||!_)&&y(A,L,z,{locale:O,priority:!0,bypassPrefetchedCheck:!0},{kind:Z},_)}};if((0,s.isAbsoluteUrl)(z))V.href=z;else if(!N||S||"a"===r.type&&!("href"in r.props)){let e=void 0!==O?O:null==R?void 0:R.locale,t=(null==R?void 0:R.isLocaleDomain)&&(0,h.getDomainLocale)(z,e,null==R?void 0:R.locales,null==R?void 0:R.domainLocales);V.href=t||(0,m.addBasePath)((0,u.addLocale)(z,e,null==R?void 0:R.defaultLocale))}return 
N?i.default.cloneElement(r,V):(0,o.jsx)("a",{...I,...V,children:n})});("function"==typeof t.default||"object"==typeof t.default&&null!==t.default)&&void 0===t.default.__esModule&&(Object.defineProperty(t.default,"__esModule",{value:!0}),Object.assign(t.default,t),e.exports=t.default)},63515:function(e,t){"use strict";Object.defineProperty(t,"__esModule",{value:!0}),function(e,t){for(var n in t)Object.defineProperty(e,n,{enumerable:!0,get:t[n]})}(t,{cancelIdleCallback:function(){return r},requestIdleCallback:function(){return n}});let n="undefined"!=typeof self&&self.requestIdleCallback&&self.requestIdleCallback.bind(window)||function(e){let t=Date.now();return self.setTimeout(function(){e({didTimeout:!1,timeRemaining:function(){return Math.max(0,50-(Date.now()-t))}})},1)},r="undefined"!=typeof self&&self.cancelIdleCallback&&self.cancelIdleCallback.bind(window)||function(e){return clearTimeout(e)};("function"==typeof t.default||"object"==typeof t.default&&null!==t.default)&&void 0===t.default.__esModule&&(Object.defineProperty(t.default,"__esModule",{value:!0}),Object.assign(t.default,t),e.exports=t.default)},25246:function(e,t,n){"use strict";Object.defineProperty(t,"__esModule",{value:!0}),Object.defineProperty(t,"resolveHref",{enumerable:!0,get:function(){return d}});let r=n(48637),o=n(57497),i=n(17053),a=n(3987),l=n(33068),c=n(53552),s=n(86279),u=n(37205);function d(e,t,n){let d;let f="string"==typeof t?t:(0,o.formatWithValidation)(t),p=f.match(/^[a-zA-Z]{1,}:\/\//),h=p?f.slice(p[0].length):f;if((h.split("?",1)[0]||"").match(/(\/\/|\\)/)){console.error("Invalid href '"+f+"' passed to next/router in page: '"+e.pathname+"'. 
Repeated forward-slashes (//) or backslashes \\ are not valid in the href.");let t=(0,a.normalizeRepeatedSlashes)(h);f=(p?p[0]:"")+t}if(!(0,c.isLocalURL)(f))return n?[f]:f;try{d=new URL(f.startsWith("#")?e.asPath:e.pathname,"http://n")}catch(e){d=new URL("/","http://n")}try{let e=new URL(f,d);e.pathname=(0,l.normalizePathTrailingSlash)(e.pathname);let t="";if((0,s.isDynamicRoute)(e.pathname)&&e.searchParams&&n){let n=(0,r.searchParamsToUrlQuery)(e.searchParams),{result:a,params:l}=(0,u.interpolateAs)(e.pathname,e.pathname,n);a&&(t=(0,o.formatWithValidation)({pathname:a,hash:e.hash,query:(0,i.omit)(n,l)}))}let a=e.origin===d.origin?e.href.slice(e.origin.length):e.href;return n?[a,t||a]:a}catch(e){return n?[f]:f}}("function"==typeof t.default||"object"==typeof t.default&&null!==t.default)&&void 0===t.default.__esModule&&(Object.defineProperty(t.default,"__esModule",{value:!0}),Object.assign(t.default,t),e.exports=t.default)},16081:function(e,t,n){"use strict";Object.defineProperty(t,"__esModule",{value:!0}),Object.defineProperty(t,"useIntersection",{enumerable:!0,get:function(){return c}});let r=n(2265),o=n(63515),i="function"==typeof IntersectionObserver,a=new Map,l=[];function c(e){let{rootRef:t,rootMargin:n,disabled:c}=e,s=c||!i,[u,d]=(0,r.useState)(!1),f=(0,r.useRef)(null),p=(0,r.useCallback)(e=>{f.current=e},[]);return(0,r.useEffect)(()=>{if(i){if(s||u)return;let e=f.current;if(e&&e.tagName)return function(e,t,n){let{id:r,observer:o,elements:i}=function(e){let t;let n={root:e.root||null,margin:e.rootMargin||""},r=l.find(e=>e.root===n.root&&e.margin===n.margin);if(r&&(t=a.get(r)))return t;let o=new Map;return t={id:n,observer:new IntersectionObserver(e=>{e.forEach(e=>{let t=o.get(e.target),n=e.isIntersecting||e.intersectionRatio>0;t&&n&&t(n)})},e),elements:o},l.push(n),a.set(n,t),t}(n);return i.set(e,t),o.observe(e),function(){if(i.delete(e),o.unobserve(e),0===i.size){o.disconnect(),a.delete(r);let 
e=l.findIndex(e=>e.root===r.root&&e.margin===r.margin);e>-1&&l.splice(e,1)}}}(e,e=>e&&d(e),{root:null==t?void 0:t.current,rootMargin:n})}else if(!u){let e=(0,o.requestIdleCallback)(()=>d(!0));return()=>(0,o.cancelIdleCallback)(e)}},[s,n,t,u,f.current]),[p,u,(0,r.useCallback)(()=>{d(!1)},[])]}("function"==typeof t.default||"object"==typeof t.default&&null!==t.default)&&void 0===t.default.__esModule&&(Object.defineProperty(t.default,"__esModule",{value:!0}),Object.assign(t.default,t),e.exports=t.default)},90042:function(e,t){"use strict";Object.defineProperty(t,"__esModule",{value:!0}),Object.defineProperty(t,"escapeStringRegexp",{enumerable:!0,get:function(){return o}});let n=/[|\\{}()[\]^$+*?.-]/,r=/[|\\{}()[\]^$+*?.-]/g;function o(e){return n.test(e)?e.replace(r,"\\$&"):e}},25523:function(e,t,n){"use strict";Object.defineProperty(t,"__esModule",{value:!0}),Object.defineProperty(t,"RouterContext",{enumerable:!0,get:function(){return r}});let r=n(47043)._(n(2265)).default.createContext(null)},57497:function(e,t,n){"use strict";Object.defineProperty(t,"__esModule",{value:!0}),function(e,t){for(var n in t)Object.defineProperty(e,n,{enumerable:!0,get:t[n]})}(t,{formatUrl:function(){return i},formatWithValidation:function(){return l},urlObjectKeys:function(){return a}});let r=n(53099)._(n(48637)),o=/https?|ftp|gopher|file/;function i(e){let{auth:t,hostname:n}=e,i=e.protocol||"",a=e.pathname||"",l=e.hash||"",c=e.query||"",s=!1;t=t?encodeURIComponent(t).replace(/%3A/i,":")+"@":"",e.host?s=t+e.host:n&&(s=t+(~n.indexOf(":")?"["+n+"]":n),e.port&&(s+=":"+e.port)),c&&"object"==typeof c&&(c=String(r.urlQueryToSearchParams(c)));let u=e.search||c&&"?"+c||"";return i&&!i.endsWith(":")&&(i+=":"),e.slashes||(!i||o.test(i))&&!1!==s?(s="//"+(s||""),a&&"/"!==a[0]&&(a="/"+a)):s||(s=""),l&&"#"!==l[0]&&(l="#"+l),u&&"?"!==u[0]&&(u="?"+u),""+i+s+(a=a.replace(/[?#]/g,encodeURIComponent))+(u=u.replace("#","%23"))+l}let 
a=["auth","hash","host","hostname","href","path","pathname","port","protocol","query","search","slashes"];function l(e){return i(e)}},86279:function(e,t,n){"use strict";Object.defineProperty(t,"__esModule",{value:!0}),function(e,t){for(var n in t)Object.defineProperty(e,n,{enumerable:!0,get:t[n]})}(t,{getSortedRoutes:function(){return r.getSortedRoutes},isDynamicRoute:function(){return o.isDynamicRoute}});let r=n(14777),o=n(38104)},37205:function(e,t,n){"use strict";Object.defineProperty(t,"__esModule",{value:!0}),Object.defineProperty(t,"interpolateAs",{enumerable:!0,get:function(){return i}});let r=n(4199),o=n(9964);function i(e,t,n){let i="",a=(0,o.getRouteRegex)(e),l=a.groups,c=(t!==e?(0,r.getRouteMatcher)(a)(t):"")||n;i=e;let s=Object.keys(l);return s.every(e=>{let t=c[e]||"",{repeat:n,optional:r}=l[e],o="["+(n?"...":"")+e+"]";return r&&(o=(t?"":"/")+"["+o+"]"),n&&!Array.isArray(t)&&(t=[t]),(r||e in c)&&(i=i.replace(o,n?t.map(e=>encodeURIComponent(e)).join("/"):encodeURIComponent(t))||"/")})||(i=""),{params:s,result:i}}},38104:function(e,t,n){"use strict";Object.defineProperty(t,"__esModule",{value:!0}),Object.defineProperty(t,"isDynamicRoute",{enumerable:!0,get:function(){return i}});let r=n(91182),o=/\/\[[^/]+?\](?=\/|$)/;function i(e){return(0,r.isInterceptionRouteAppPath)(e)&&(e=(0,r.extractInterceptionRouteInformation)(e).interceptedRoute),o.test(e)}},53552:function(e,t,n){"use strict";Object.defineProperty(t,"__esModule",{value:!0}),Object.defineProperty(t,"isLocalURL",{enumerable:!0,get:function(){return i}});let r=n(3987),o=n(11283);function i(e){if(!(0,r.isAbsoluteUrl)(e))return!0;try{let t=(0,r.getLocationOrigin)(),n=new URL(e,t);return n.origin===t&&(0,o.hasBasePath)(n.pathname)}catch(e){return!1}}},17053:function(e,t){"use strict";function n(e,t){let n={};return Object.keys(e).forEach(r=>{t.includes(r)||(n[r]=e[r])}),n}Object.defineProperty(t,"__esModule",{value:!0}),Object.defineProperty(t,"omit",{enumerable:!0,get:function(){return 
n}})},48637:function(e,t){"use strict";function n(e){let t={};return e.forEach((e,n)=>{void 0===t[n]?t[n]=e:Array.isArray(t[n])?t[n].push(e):t[n]=[t[n],e]}),t}function r(e){return"string"!=typeof e&&("number"!=typeof e||isNaN(e))&&"boolean"!=typeof e?"":String(e)}function o(e){let t=new URLSearchParams;return Object.entries(e).forEach(e=>{let[n,o]=e;Array.isArray(o)?o.forEach(e=>t.append(n,r(e))):t.set(n,r(o))}),t}function i(e){for(var t=arguments.length,n=Array(t>1?t-1:0),r=1;r{Array.from(t.keys()).forEach(t=>e.delete(t)),t.forEach((t,n)=>e.append(n,t))}),e}Object.defineProperty(t,"__esModule",{value:!0}),function(e,t){for(var n in t)Object.defineProperty(e,n,{enumerable:!0,get:t[n]})}(t,{assign:function(){return i},searchParamsToUrlQuery:function(){return n},urlQueryToSearchParams:function(){return o}})},4199:function(e,t,n){"use strict";Object.defineProperty(t,"__esModule",{value:!0}),Object.defineProperty(t,"getRouteMatcher",{enumerable:!0,get:function(){return o}});let r=n(3987);function o(e){let{re:t,groups:n}=e;return e=>{let o=t.exec(e);if(!o)return!1;let i=e=>{try{return decodeURIComponent(e)}catch(e){throw new r.DecodeError("failed to decode param")}},a={};return Object.keys(n).forEach(e=>{let t=n[e],r=o[t.pos];void 0!==r&&(a[e]=~r.indexOf("/")?r.split("/").map(e=>i(e)):t.repeat?[i(r)]:i(r))}),a}}},9964:function(e,t,n){"use strict";Object.defineProperty(t,"__esModule",{value:!0}),function(e,t){for(var n in t)Object.defineProperty(e,n,{enumerable:!0,get:t[n]})}(t,{getNamedMiddlewareRegex:function(){return f},getNamedRouteRegex:function(){return d},getRouteRegex:function(){return c},parseParameter:function(){return a}});let r=n(91182),o=n(90042),i=n(26674);function a(e){let t=e.startsWith("[")&&e.endsWith("]");t&&(e=e.slice(1,-1));let n=e.startsWith("...");return n&&(e=e.slice(3)),{key:e,repeat:n,optional:t}}function l(e){let t=(0,i.removeTrailingSlash)(e).slice(1).split("/"),n={},l=1;return{parameterizedRoute:t.map(e=>{let 
t=r.INTERCEPTION_ROUTE_MARKERS.find(t=>e.startsWith(t)),i=e.match(/\[((?:\[.*\])|.+)\]/);if(t&&i){let{key:e,optional:r,repeat:c}=a(i[1]);return n[e]={pos:l++,repeat:c,optional:r},"/"+(0,o.escapeStringRegexp)(t)+"([^/]+?)"}if(!i)return"/"+(0,o.escapeStringRegexp)(e);{let{key:e,repeat:t,optional:r}=a(i[1]);return n[e]={pos:l++,repeat:t,optional:r},t?r?"(?:/(.+?))?":"/(.+?)":"/([^/]+?)"}}).join(""),groups:n}}function c(e){let{parameterizedRoute:t,groups:n}=l(e);return{re:RegExp("^"+t+"(?:/)?$"),groups:n}}function s(e){let{interceptionMarker:t,getSafeRouteKey:n,segment:r,routeKeys:i,keyPrefix:l}=e,{key:c,optional:s,repeat:u}=a(r),d=c.replace(/\W/g,"");l&&(d=""+l+d);let f=!1;(0===d.length||d.length>30)&&(f=!0),isNaN(parseInt(d.slice(0,1)))||(f=!0),f&&(d=n()),l?i[d]=""+l+c:i[d]=c;let p=t?(0,o.escapeStringRegexp)(t):"";return u?s?"(?:/"+p+"(?<"+d+">.+?))?":"/"+p+"(?<"+d+">.+?)":"/"+p+"(?<"+d+">[^/]+?)"}function u(e,t){let n;let a=(0,i.removeTrailingSlash)(e).slice(1).split("/"),l=(n=0,()=>{let e="",t=++n;for(;t>0;)e+=String.fromCharCode(97+(t-1)%26),t=Math.floor((t-1)/26);return e}),c={};return{namedParameterizedRoute:a.map(e=>{let n=r.INTERCEPTION_ROUTE_MARKERS.some(t=>e.startsWith(t)),i=e.match(/\[((?:\[.*\])|.+)\]/);if(n&&i){let[n]=e.split(i[0]);return s({getSafeRouteKey:l,interceptionMarker:n,segment:i[1],routeKeys:c,keyPrefix:t?"nxtI":void 0})}return i?s({getSafeRouteKey:l,segment:i[1],routeKeys:c,keyPrefix:t?"nxtP":void 0}):"/"+(0,o.escapeStringRegexp)(e)}).join(""),routeKeys:c}}function d(e,t){let n=u(e,t);return{...c(e),namedRegex:"^"+n.namedParameterizedRoute+"(?:/)?$",routeKeys:n.routeKeys}}function f(e,t){let{parameterizedRoute:n}=l(e),{catchAll:r=!0}=t;if("/"===n)return{namedRegex:"^/"+(r?".*":"")+"$"};let{namedParameterizedRoute:o}=u(e,!1);return{namedRegex:"^"+o+(r?"(?:(/.*)?)":"")+"$"}}},14777:function(e,t){"use strict";Object.defineProperty(t,"__esModule",{value:!0}),Object.defineProperty(t,"getSortedRoutes",{enumerable:!0,get:function(){return r}});class 
n{insert(e){this._insert(e.split("/").filter(Boolean),[],!1)}smoosh(){return this._smoosh()}_smoosh(e){void 0===e&&(e="/");let t=[...this.children.keys()].sort();null!==this.slugName&&t.splice(t.indexOf("[]"),1),null!==this.restSlugName&&t.splice(t.indexOf("[...]"),1),null!==this.optionalRestSlugName&&t.splice(t.indexOf("[[...]]"),1);let n=t.map(t=>this.children.get(t)._smoosh(""+e+t+"/")).reduce((e,t)=>[...e,...t],[]);if(null!==this.slugName&&n.push(...this.children.get("[]")._smoosh(e+"["+this.slugName+"]/")),!this.placeholder){let t="/"===e?"/":e.slice(0,-1);if(null!=this.optionalRestSlugName)throw Error('You cannot define a route with the same specificity as a optional catch-all route ("'+t+'" and "'+t+"[[..."+this.optionalRestSlugName+']]").');n.unshift(t)}return null!==this.restSlugName&&n.push(...this.children.get("[...]")._smoosh(e+"[..."+this.restSlugName+"]/")),null!==this.optionalRestSlugName&&n.push(...this.children.get("[[...]]")._smoosh(e+"[[..."+this.optionalRestSlugName+"]]/")),n}_insert(e,t,r){if(0===e.length){this.placeholder=!1;return}if(r)throw Error("Catch-all must be the last part of the URL.");let o=e[0];if(o.startsWith("[")&&o.endsWith("]")){let n=o.slice(1,-1),a=!1;if(n.startsWith("[")&&n.endsWith("]")&&(n=n.slice(1,-1),a=!0),n.startsWith("...")&&(n=n.substring(3),r=!0),n.startsWith("[")||n.endsWith("]"))throw Error("Segment names may not start or end with extra brackets ('"+n+"').");if(n.startsWith("."))throw Error("Segment names may not start with erroneous periods ('"+n+"').");function i(e,n){if(null!==e&&e!==n)throw Error("You cannot use different slug names for the same dynamic path ('"+e+"' !== '"+n+"').");t.forEach(e=>{if(e===n)throw Error('You cannot have the same slug name "'+n+'" repeat within a single dynamic path');if(e.replace(/\W/g,"")===o.replace(/\W/g,""))throw Error('You cannot have the slug names "'+e+'" and "'+n+'" differ only by non-word symbols within a single dynamic 
path')}),t.push(n)}if(r){if(a){if(null!=this.restSlugName)throw Error('You cannot use both an required and optional catch-all route at the same level ("[...'+this.restSlugName+']" and "'+e[0]+'" ).');i(this.optionalRestSlugName,n),this.optionalRestSlugName=n,o="[[...]]"}else{if(null!=this.optionalRestSlugName)throw Error('You cannot use both an optional and required catch-all route at the same level ("[[...'+this.optionalRestSlugName+']]" and "'+e[0]+'").');i(this.restSlugName,n),this.restSlugName=n,o="[...]"}}else{if(a)throw Error('Optional route parameters are not yet supported ("'+e[0]+'").');i(this.slugName,n),this.slugName=n,o="[]"}}this.children.has(o)||this.children.set(o,new n),this.children.get(o)._insert(e.slice(1),t,r)}constructor(){this.placeholder=!0,this.children=new Map,this.slugName=null,this.restSlugName=null,this.optionalRestSlugName=null}}function r(e){let t=new n;return e.forEach(e=>t.insert(e)),t.smoosh()}},3987:function(e,t){"use strict";Object.defineProperty(t,"__esModule",{value:!0}),function(e,t){for(var n in t)Object.defineProperty(e,n,{enumerable:!0,get:t[n]})}(t,{DecodeError:function(){return h},MiddlewareNotFoundError:function(){return y},MissingStaticPage:function(){return v},NormalizeError:function(){return m},PageNotFoundError:function(){return g},SP:function(){return f},ST:function(){return p},WEB_VITALS:function(){return n},execOnce:function(){return r},getDisplayName:function(){return c},getLocationOrigin:function(){return a},getURL:function(){return l},isAbsoluteUrl:function(){return i},isResSent:function(){return s},loadGetInitialProps:function(){return d},normalizeRepeatedSlashes:function(){return u},stringifyError:function(){return b}});let n=["CLS","FCP","FID","INP","LCP","TTFB"];function r(e){let t,n=!1;return function(){for(var r=arguments.length,o=Array(r),i=0;io.test(e);function a(){let{protocol:e,hostname:t,port:n}=window.location;return e+"//"+t+(n?":"+n:"")}function l(){let{href:e}=window.location,t=a();return 
e.substring(t.length)}function c(e){return"string"==typeof e?e:e.displayName||e.name||"Unknown"}function s(e){return e.finished||e.headersSent}function u(e){let t=e.split("?");return t[0].replace(/\\/g,"/").replace(/\/\/+/g,"/")+(t[1]?"?"+t.slice(1).join("?"):"")}async function d(e,t){let n=t.res||t.ctx&&t.ctx.res;if(!e.getInitialProps)return t.ctx&&t.Component?{pageProps:await d(t.Component,t.ctx)}:{};let r=await e.getInitialProps(t);if(n&&s(n))return r;if(!r)throw Error('"'+c(e)+'.getInitialProps()" should resolve to an object. But found "'+r+'" instead.');return r}let f="undefined"!=typeof performance,p=f&&["mark","measure","getEntriesByName"].every(e=>"function"==typeof performance[e]);class h extends Error{}class m extends Error{}class g extends Error{constructor(e){super(),this.code="ENOENT",this.name="PageNotFoundError",this.message="Cannot find module for page: "+e}}class v extends Error{constructor(e,t){super(),this.message="Failed to load static file for page: "+e+" "+t}}class y extends Error{constructor(){super(),this.code="ENOENT",this.message="Cannot find the middleware module"}}function b(e){return JSON.stringify({message:e.message,stack:e.stack})}},15452:function(e,t){var n,r,o;r=[],void 0!==(o="function"==typeof(n=function e(){var t,n="undefined"!=typeof self?self:"undefined"!=typeof window?window:void 0!==n?n:{},r=!n.document&&!!n.postMessage,o=n.IS_PAPA_WORKER||!1,i={},a=0,l={};function c(e){this._handle=null,this._finished=!1,this._completed=!1,this._halted=!1,this._input=null,this._baseIndex=0,this._partialLine="",this._rowCount=0,this._start=0,this._nextChunk=null,this.isFirstChunk=!0,this._completeResults={data:[],errors:[],meta:{}},(function(e){var t=b(e);t.chunkSize=parseInt(t.chunkSize),e.step||e.chunk||(t.chunkSize=null),this._handle=new p(t),(this._handle.streamer=this)._config=t}).call(this,e),this.parseChunk=function(e,t){var 
r=parseInt(this._config.skipFirstNLines)||0;if(this.isFirstChunk&&0=this._config.preview,o)n.postMessage({results:i,workerId:l.WORKER_ID,finished:r});else if(w(this._config.chunk)&&!t){if(this._config.chunk(i,this._handle),this._handle.paused()||this._handle.aborted())return void(this._halted=!0);this._completeResults=i=void 0}return this._config.step||this._config.chunk||(this._completeResults.data=this._completeResults.data.concat(i.data),this._completeResults.errors=this._completeResults.errors.concat(i.errors),this._completeResults.meta=i.meta),this._completed||!r||!w(this._config.complete)||i&&i.meta.aborted||(this._config.complete(this._completeResults,this._input),this._completed=!0),r||i&&i.meta.paused||this._nextChunk(),i}this._halted=!0},this._sendError=function(e){w(this._config.error)?this._config.error(e):o&&this._config.error&&n.postMessage({workerId:l.WORKER_ID,error:e,finished:!1})}}function s(e){var t;(e=e||{}).chunkSize||(e.chunkSize=l.RemoteChunkSize),c.call(this,e),this._nextChunk=r?function(){this._readChunk(),this._chunkLoaded()}:function(){this._readChunk()},this.stream=function(e){this._input=e,this._nextChunk()},this._readChunk=function(){if(this._finished)this._chunkLoaded();else{if(t=new XMLHttpRequest,this._config.withCredentials&&(t.withCredentials=this._config.withCredentials),r||(t.onload=x(this._chunkLoaded,this),t.onerror=x(this._chunkError,this)),t.open(this._config.downloadRequestBody?"POST":"GET",this._input,!r),this._config.downloadRequestHeaders){var e,n,o=this._config.downloadRequestHeaders;for(n in o)t.setRequestHeader(n,o[n])}this._config.chunkSize&&(e=this._start+this._config.chunkSize-1,t.setRequestHeader("Range","bytes="+this._start+"-"+e));try{t.send(this._config.downloadRequestBody)}catch(e){this._chunkError(e.message)}r&&0===t.status&&this._chunkError()}},this._chunkLoaded=function(){let 
e;4===t.readyState&&(t.status<200||400<=t.status?this._chunkError():(this._start+=this._config.chunkSize||t.responseText.length,this._finished=!this._config.chunkSize||this._start>=(null!==(e=(e=t).getResponseHeader("Content-Range"))?parseInt(e.substring(e.lastIndexOf("/")+1)):-1),this.parseChunk(t.responseText)))},this._chunkError=function(e){e=t.statusText||e,this._sendError(Error(e))}}function u(e){(e=e||{}).chunkSize||(e.chunkSize=l.LocalChunkSize),c.call(this,e);var t,n,r="undefined"!=typeof FileReader;this.stream=function(e){this._input=e,n=e.slice||e.webkitSlice||e.mozSlice,r?((t=new FileReader).onload=x(this._chunkLoaded,this),t.onerror=x(this._chunkError,this)):t=new FileReaderSync,this._nextChunk()},this._nextChunk=function(){this._finished||this._config.preview&&!(this._rowCount=this._input.size,this.parseChunk(e.target.result)},this._chunkError=function(){this._sendError(t.error)}}function d(e){var t;c.call(this,e=e||{}),this.stream=function(e){return t=e,this._nextChunk()},this._nextChunk=function(){var e,n;if(!this._finished)return t=(e=this._config.chunkSize)?(n=t.substring(0,e),t.substring(e)):(n=t,""),this._finished=!t,this.parseChunk(n)}}function f(e){c.call(this,e=e||{});var t=[],n=!0,r=!1;this.pause=function(){c.prototype.pause.apply(this,arguments),this._input.pause()},this.resume=function(){c.prototype.resume.apply(this,arguments),this._input.resume()},this.stream=function(e){this._input=e,this._input.on("data",this._streamData),this._input.on("end",this._streamEnd),this._input.on("error",this._streamError)},this._checkIsFinished=function(){r&&1===t.length&&(this._finished=!0)},this._nextChunk=function(){this._checkIsFinished(),t.length?this.parseChunk(t.shift()):n=!0},this._streamData=x(function(e){try{t.push("string"==typeof 
e?e:e.toString(this._config.encoding)),n&&(n=!1,this._checkIsFinished(),this.parseChunk(t.shift()))}catch(e){this._streamError(e)}},this),this._streamError=x(function(e){this._streamCleanUp(),this._sendError(e)},this),this._streamEnd=x(function(){this._streamCleanUp(),r=!0,this._streamData("")},this),this._streamCleanUp=x(function(){this._input.removeListener("data",this._streamData),this._input.removeListener("end",this._streamEnd),this._input.removeListener("error",this._streamError)},this)}function p(e){var t,n,r,o,i=/^\s*-?(\d+\.?|\.\d+|\d+\.\d+)([eE][-+]?\d+)?\s*$/,a=/^((\d{4}-[01]\d-[0-3]\dT[0-2]\d:[0-5]\d:[0-5]\d\.\d+([+-][0-2]\d:[0-5]\d|Z))|(\d{4}-[01]\d-[0-3]\dT[0-2]\d:[0-5]\d:[0-5]\d([+-][0-2]\d:[0-5]\d|Z))|(\d{4}-[01]\d-[0-3]\dT[0-2]\d:[0-5]\d([+-][0-2]\d:[0-5]\d|Z)))$/,c=this,s=0,u=0,d=!1,f=!1,p=[],g={data:[],errors:[],meta:{}};function v(t){return"greedy"===e.skipEmptyLines?""===t.join("").trim():1===t.length&&0===t[0].length}function y(){if(g&&r&&(S("Delimiter","UndetectableDelimiter","Unable to auto-detect delimiting character; defaulted to '"+l.DefaultDelimiter+"'"),r=!1),e.skipEmptyLines&&(g.data=g.data.filter(function(e){return!v(e)})),x()){if(g){if(Array.isArray(g.data[0])){for(var t,n=0;x()&&n=p.length?"__parsed_extra":p[o]:l,s=c=e.transform?e.transform(c,l):c,(e.dynamicTypingFunction&&void 0===e.dynamicTyping[n]&&(e.dynamicTyping[n]=e.dynamicTypingFunction(n)),!0===(e.dynamicTyping[n]||e.dynamicTyping))?"true"===s||"TRUE"===s||"false"!==s&&"FALSE"!==s&&((e=>{if(i.test(e)&&-9007199254740992<(e=parseFloat(e))&&e<9007199254740992)return 1})(s)?parseFloat(s):a.test(s)?new Date(s):""===s?null:s):s);"__parsed_extra"===l?(r[l]=r[l]||[],r[l].push(c)):r[l]=c}return e.header&&(o>p.length?S("FieldMismatch","TooManyFields","Too many fields: expected "+p.length+" fields but parsed "+o,u+n):oe.preview?n.abort():(g.data=g.data[0],o(g,c))))}),this.parse=function(o,i,a){var 
c=e.quoteChar||'"',c=(e.newline||(e.newline=this.guessLineEndings(o,c)),r=!1,e.delimiter?w(e.delimiter)&&(e.delimiter=e.delimiter(o),g.meta.delimiter=e.delimiter):((c=((t,n,r,o,i)=>{var a,c,s,u;i=i||[","," ","|",";",l.RECORD_SEP,l.UNIT_SEP];for(var d=0;d=n.length/2?"\r\n":"\r"}}function h(e){return e.replace(/[.*+?^${}()|[\]\\]/g,"\\$&")}function m(e){var t=(e=e||{}).delimiter,n=e.newline,r=e.comments,o=e.step,i=e.preview,a=e.fastMode,c=null,s=!1,u=null==e.quoteChar?'"':e.quoteChar,d=u;if(void 0!==e.escapeChar&&(d=e.escapeChar),("string"!=typeof t||-1=i)return L(!0);break}E.push({type:"Quotes",code:"InvalidQuotes",message:"Trailing quote on quoted field is malformed",row:k.length,index:f}),R++}}else if(r&&0===C.length&&l.substring(f,f+x)===r){if(-1===N)return L();f=N+b,N=l.indexOf(n,f),M=l.indexOf(t,f)}else if(-1!==M&&(M=i)return L(!0)}return D();function A(e){k.push(e),O=f}function _(e){return -1!==e&&(e=l.substring(R+1,e))&&""===e.trim()?e.length:0}function D(e){return g||(void 0===e&&(e=l.substring(f)),C.push(e),f=v,A(C),S&&z()),L()}function Z(e){f=e,A(C),C=[],N=l.indexOf(n,f)}function L(r){if(e.header&&!m&&k.length&&!s){var o=k[0],i={},a=new Set(o);let t=!1;for(let n=0;n{if("object"==typeof t){if("string"!=typeof t.delimiter||l.BAD_DELIMITERS.filter(function(e){return -1!==t.delimiter.indexOf(e)}).length||(o=t.delimiter),("boolean"==typeof t.quotes||"function"==typeof t.quotes||Array.isArray(t.quotes))&&(n=t.quotes),"boolean"!=typeof t.skipEmptyLines&&"string"!=typeof t.skipEmptyLines||(s=t.skipEmptyLines),"string"==typeof t.newline&&(i=t.newline),"string"==typeof t.quoteChar&&(a=t.quoteChar),"boolean"==typeof t.header&&(r=t.header),Array.isArray(t.columns)){if(0===t.columns.length)throw Error("Option columns is empty");u=t.columns}void 0!==t.escapeChar&&(c=t.escapeChar+a),t.escapeFormulae instanceof RegExp?d=t.escapeFormulae:"boolean"==typeof t.escapeFormulae&&t.escapeFormulae&&(d=/^[=+\-@\t\r].*$/)}})(),RegExp(h(a),"g"));if("string"==typeof 
e&&(e=JSON.parse(e)),Array.isArray(e)){if(!e.length||Array.isArray(e[0]))return p(null,e,s);if("object"==typeof e[0])return p(u||Object.keys(e[0]),e,s)}else if("object"==typeof e)return"string"==typeof e.data&&(e.data=JSON.parse(e.data)),Array.isArray(e.data)&&(e.fields||(e.fields=e.meta&&e.meta.fields||u),e.fields||(e.fields=Array.isArray(e.data[0])?e.fields:"object"==typeof e.data[0]?Object.keys(e.data[0]):[]),Array.isArray(e.data[0])||"object"==typeof e.data[0]||(e.data=[e.data])),p(e.fields||[],e.data||[],s);throw Error("Unable to serialize unrecognized input");function p(e,t,n){var a="",l=("string"==typeof e&&(e=JSON.parse(e)),"string"==typeof t&&(t=JSON.parse(t)),Array.isArray(e)&&0{for(var n=0;nen;(0,s.useImperativeHandle)(t,function(){return{focus:K,blur:function(){var e;null===(e=W.current)||void 0===e||e.blur()},setSelectionRange:function(e,t,n){var r;null===(r=W.current)||void 0===r||r.setSelectionRange(e,t,n)},select:function(){var e;null===(e=W.current)||void 0===e||e.select()},input:W.current}}),(0,s.useEffect)(function(){H(function(e){return(!e||!C)&&e})},[C]);var ei=function(e,t,n){var r,o,i=t;if(!q.current&&et.exceedFormatter&&et.max&&et.strategy(t)>et.max)i=et.exceedFormatter(t,{max:et.max}),t!==i&&ee([(null===(r=W.current)||void 0===r?void 0:r.selectionStart)||0,(null===(o=W.current)||void 0===o?void 0:o.selectionEnd)||0]);else if("compositionEnd"===n.source)return;X(i),W.current&&(0,u.rJ)(W.current,e,l,i)};(0,s.useEffect)(function(){if(J){var e;null===(e=W.current)||void 0===e||e.setSelectionRange.apply(e,(0,f.Z)(J))}},[J]);var ea=eo&&"".concat(E,"-out-of-range");return s.createElement(d,(0,o.Z)({},L,{prefixCls:E,className:c()(j,ea),handleReset:function(e){X(""),K(),W.current&&(0,u.rJ)(W.current,e,l)},value:$,focused:F,triggerFocus:K,suffix:function(){var e=Number(en)>0;if(M||et.show){var t=et.showFormatter?et.showFormatter({value:$,count:er,maxLength:en}):"".concat(er).concat(e?" 
/ ".concat(en):"");return s.createElement(s.Fragment,null,et.show&&s.createElement("span",{className:c()("".concat(E,"-show-count-suffix"),(0,i.Z)({},"".concat(E,"-show-count-has-suffix"),!!M),null==A?void 0:A.count),style:(0,r.Z)({},null==_?void 0:_.count)},t),M)}return null}(),disabled:C,classes:T,classNames:A,styles:_}),(n=(0,g.Z)(e,["prefixCls","onPressEnter","addonBefore","addonAfter","prefix","suffix","allowClear","defaultValue","showCount","count","classes","htmlSize","styles","classNames"]),s.createElement("input",(0,o.Z)({autoComplete:a},n,{onChange:function(e){ei(e,e.target.value,{source:"change"})},onFocus:function(e){H(!0),null==b||b(e)},onBlur:function(e){H(!1),null==x||x(e)},onKeyDown:function(e){w&&"Enter"===e.key&&w(e),null==S||S(e)},className:c()(E,(0,i.Z)({},"".concat(E,"-disabled"),C),null==A?void 0:A.input),style:null==_?void 0:_.input,ref:W,size:O,type:void 0===R?"text":R,onCompositionStart:function(e){q.current=!0,null==D||D(e)},onCompositionEnd:function(e){q.current=!1,ei(e,e.currentTarget.value,{source:"compositionEnd"}),null==Z||Z(e)}}))))})},55041:function(e,t,n){"use strict";function r(e){return!!(e.addonBefore||e.addonAfter)}function o(e){return!!(e.prefix||e.suffix||e.allowClear)}function i(e,t,n,r){if(n){var o=t;if("click"===t.type){var i=e.cloneNode(!0);o=Object.create(t,{target:{value:i},currentTarget:{value:i}}),i.value="",n(o);return}if("file"!==e.type&&void 0!==r){var a=e.cloneNode(!0);o=Object.create(t,{target:{value:a},currentTarget:{value:a}}),a.value=r,n(o);return}n(o)}}function a(e,t){if(e){e.focus(t);var n=(t||{}).cursor;if(n){var r=e.value.length;switch(n){case"start":e.setSelectionRange(0,0);break;case"end":e.setSelectionRange(r,r);break;default:e.setSelectionRange(0,r)}}}}n.d(t,{He:function(){return r},X3:function(){return o},nH:function(){return a},rJ:function(){return i}})},33082:function(e,t,n){"use strict";n.d(t,{iz:function(){return eZ},ck:function(){return eh},BW:function(){return eD},sN:function(){return 
eh},Wd:function(){return eI},ZP:function(){return eH},Xl:function(){return j}});var r=n(1119),o=n(11993),i=n(31686),a=n(83145),l=n(26365),c=n(6989),s=n(36760),u=n.n(s),d=n(1699),f=n(50506),p=n(16671),h=n(32559),m=n(2265),g=n(54887),v=m.createContext(null);function y(e,t){return void 0===e?null:"".concat(e,"-").concat(t)}function b(e){return y(m.useContext(v),e)}var x=n(6397),w=["children","locked"],S=m.createContext(null);function k(e){var t=e.children,n=e.locked,r=(0,c.Z)(e,w),o=m.useContext(S),a=(0,x.Z)(function(){var e;return e=(0,i.Z)({},o),Object.keys(r).forEach(function(t){var n=r[t];void 0!==n&&(e[t]=n)}),e},[o,r],function(e,t){return!n&&(e[0]!==t[0]||!(0,p.Z)(e[1],t[1],!0))});return m.createElement(S.Provider,{value:a},t)}var E=m.createContext(null);function C(){return m.useContext(E)}var O=m.createContext([]);function j(e){var t=m.useContext(O);return m.useMemo(function(){return void 0!==e?[].concat((0,a.Z)(t),[e]):t},[t,e])}var P=m.createContext(null),M=m.createContext({}),N=n(2857);function I(e){var t=arguments.length>1&&void 0!==arguments[1]&&arguments[1];if((0,N.Z)(e)){var n=e.nodeName.toLowerCase(),r=["input","select","textarea","button"].includes(n)||e.isContentEditable||"a"===n&&!!e.getAttribute("href"),o=e.getAttribute("tabindex"),i=Number(o),a=null;return o&&!Number.isNaN(i)?a=i:r&&null===a&&(a=0),r&&e.disabled&&(a=null),null!==a&&(a>=0||t&&a<0)}return!1}var R=n(95814),T=n(53346),A=R.Z.LEFT,_=R.Z.RIGHT,D=R.Z.UP,Z=R.Z.DOWN,L=R.Z.ENTER,z=R.Z.ESC,B=R.Z.HOME,F=R.Z.END,H=[D,Z,A,_];function q(e,t){return(function(e){var t=arguments.length>1&&void 0!==arguments[1]&&arguments[1],n=(0,a.Z)(e.querySelectorAll("*")).filter(function(e){return I(e,t)});return I(e,t)&&n.unshift(e),n})(e,!0).filter(function(e){return t.has(e)})}function W(e,t,n){var r=arguments.length>3&&void 0!==arguments[3]?arguments[3]:1;if(!e)return null;var o=q(e,t),i=o.length,a=o.findIndex(function(e){return n===e});return r<0?-1===a?a=i-1:a-=1:r>0&&(a+=1),o[a=(a+i)%i]}var 
K=function(e,t){var n=new Set,r=new Map,o=new Map;return e.forEach(function(e){var i=document.querySelector("[data-menu-id='".concat(y(t,e),"']"));i&&(n.add(i),o.set(i,e),r.set(e,i))}),{elements:n,key2element:r,element2key:o}},U="__RC_UTIL_PATH_SPLIT__",V=function(e){return e.join(U)},G="rc-menu-more";function X(e){var t=m.useRef(e);t.current=e;var n=m.useCallback(function(){for(var e,n=arguments.length,r=Array(n),o=0;o1&&(E.motionAppear=!1);var C=E.onVisibleChanged;return(E.onVisibleChanged=function(e){return g.current||e||x(!0),null==C?void 0:C(e)},b)?null:m.createElement(k,{mode:c,locked:!g.current},m.createElement(eO.ZP,(0,r.Z)({visible:w},E,{forceRender:d,removeOnLeave:!1,leavedClassName:"".concat(u,"-hidden")}),function(e){var n=e.className,r=e.style;return m.createElement(eg,{id:t,className:n,style:r},a)}))}var eP=["style","className","title","eventKey","warnKey","disabled","internalPopupClose","children","itemIcon","expandIcon","popupClassName","popupOffset","popupStyle","onClick","onMouseEnter","onMouseLeave","onTitleClick","onTitleMouseEnter","onTitleMouseLeave"],eM=["active"],eN=function(e){var t,n=e.style,a=e.className,s=e.title,f=e.eventKey,p=(e.warnKey,e.disabled),h=e.internalPopupClose,g=e.children,v=e.itemIcon,y=e.expandIcon,x=e.popupClassName,w=e.popupOffset,E=e.popupStyle,C=e.onClick,O=e.onMouseEnter,N=e.onMouseLeave,I=e.onTitleClick,R=e.onTitleMouseEnter,T=e.onTitleMouseLeave,A=(0,c.Z)(e,eP),_=b(f),D=m.useContext(S),Z=D.prefixCls,L=D.mode,z=D.openKeys,B=D.disabled,F=D.overflowDisabled,H=D.activeKey,q=D.selectedKeys,W=D.itemIcon,K=D.expandIcon,U=D.onItemClick,V=D.onOpenChange,G=D.onActive,$=m.useContext(M)._internalRenderSubMenuItem,Y=m.useContext(P).isSubPathKey,Q=j(),J="".concat(Z,"-submenu"),ee=B||p,et=m.useRef(),en=m.useRef(),er=null!=y?y:K,el=z.includes(f),es=!F&&el,eu=Y(q,f),ed=eo(f,ee,R,T),ef=ed.active,ep=(0,c.Z)(ed,eM),eh=m.useState(!1),em=(0,l.Z)(eh,2),ev=em[0],ey=em[1],eb=function(e){ee||ey(e)},ex=m.useMemo(function(){return 
ef||"inline"!==L&&(ev||Y([H],f))},[L,ef,H,ev,f,Y]),ew=ei(Q.length),eS=X(function(e){null==C||C(ec(e)),U(e)}),ek=_&&"".concat(_,"-popup"),eE=m.createElement("div",(0,r.Z)({role:"menuitem",style:ew,className:"".concat(J,"-title"),tabIndex:ee?null:-1,ref:et,title:"string"==typeof s?s:null,"data-menu-id":F&&_?null:_,"aria-expanded":es,"aria-haspopup":!0,"aria-controls":ek,"aria-disabled":ee,onClick:function(e){ee||(null==I||I({key:f,domEvent:e}),"inline"===L&&V(f,!el))},onFocus:function(){G(f)}},ep),s,m.createElement(ea,{icon:"horizontal"!==L?er:void 0,props:(0,i.Z)((0,i.Z)({},e),{},{isOpen:es,isSubMenu:!0})},m.createElement("i",{className:"".concat(J,"-arrow")}))),eO=m.useRef(L);if("inline"!==L&&Q.length>1?eO.current="vertical":eO.current=L,!F){var eN=eO.current;eE=m.createElement(eC,{mode:eN,prefixCls:J,visible:!h&&es&&"inline"!==L,popupClassName:x,popupOffset:w,popupStyle:E,popup:m.createElement(k,{mode:"horizontal"===eN?"vertical":eN},m.createElement(eg,{id:ek,ref:en},g)),disabled:ee,onVisibleChange:function(e){"inline"!==L&&V(f,e)}},eE)}var eI=m.createElement(d.Z.Item,(0,r.Z)({role:"none"},A,{component:"li",style:n,className:u()(J,"".concat(J,"-").concat(L),a,(t={},(0,o.Z)(t,"".concat(J,"-open"),es),(0,o.Z)(t,"".concat(J,"-active"),ex),(0,o.Z)(t,"".concat(J,"-selected"),eu),(0,o.Z)(t,"".concat(J,"-disabled"),ee),t)),onMouseEnter:function(e){eb(!0),null==O||O({key:f,domEvent:e})},onMouseLeave:function(e){eb(!1),null==N||N({key:f,domEvent:e})}}),eE,!F&&m.createElement(ej,{id:ek,open:es,keyPath:Q},g));return $&&(eI=$(eI,e,{selected:eu,active:ex,open:es,disabled:ee})),m.createElement(k,{onItemClick:eS,mode:"horizontal"===L?"vertical":L,itemIcon:null!=v?v:W,expandIcon:er},eI)};function eI(e){var t,n=e.eventKey,r=e.children,o=j(n),i=ey(r,o),a=C();return m.useEffect(function(){if(a)return a.registerPath(n,o),function(){a.unregisterPath(n,o)}},[o]),t=a?i:m.createElement(eN,e,i),m.createElement(O.Provider,{value:o},t)}var 
eR=n(41154),eT=["className","title","eventKey","children"],eA=["children"],e_=function(e){var t=e.className,n=e.title,o=(e.eventKey,e.children),i=(0,c.Z)(e,eT),a=m.useContext(S).prefixCls,l="".concat(a,"-item-group");return m.createElement("li",(0,r.Z)({role:"presentation"},i,{onClick:function(e){return e.stopPropagation()},className:u()(l,t)}),m.createElement("div",{role:"presentation",className:"".concat(l,"-title"),title:"string"==typeof n?n:void 0},n),m.createElement("ul",{role:"group",className:"".concat(l,"-list")},o))};function eD(e){var t=e.children,n=(0,c.Z)(e,eA),r=ey(t,j(n.eventKey));return C()?r:m.createElement(e_,(0,en.Z)(n,["warnKey"]),r)}function eZ(e){var t=e.className,n=e.style,r=m.useContext(S).prefixCls;return C()?null:m.createElement("li",{role:"separator",className:u()("".concat(r,"-item-divider"),t),style:n})}var eL=["label","children","key","type"],ez=["prefixCls","rootClassName","style","className","tabIndex","items","children","direction","id","mode","inlineCollapsed","disabled","disabledOverflow","subMenuOpenDelay","subMenuCloseDelay","forceSubMenuRender","defaultOpenKeys","openKeys","activeKey","defaultActiveFirst","selectable","multiple","defaultSelectedKeys","selectedKeys","onSelect","onDeselect","inlineIndent","motion","defaultMotions","triggerSubMenuAction","builtinPlacements","itemIcon","expandIcon","overflowedIndicator","overflowedIndicatorPopupClassName","getPopupContainer","onClick","onOpenChange","onKeyDown","openAnimation","openTransitionName","_internalRenderMenuItem","_internalRenderSubMenuItem"],eB=[],eF=m.forwardRef(function(e,t){var n,s,h,y,b,x,w,S,C,O,j,N,I,R,Q,J,ee,et,en,er,eo,ei,ea,el,es,eu,ed,ef=e.prefixCls,ep=void 0===ef?"rc-menu":ef,em=e.rootClassName,eg=e.style,ev=e.className,eb=e.tabIndex,ex=e.items,ew=e.children,eS=e.direction,ek=e.id,eE=e.mode,eC=void 
0===eE?"vertical":eE,eO=e.inlineCollapsed,ej=e.disabled,eP=e.disabledOverflow,eM=e.subMenuOpenDelay,eN=e.subMenuCloseDelay,eT=e.forceSubMenuRender,eA=e.defaultOpenKeys,e_=e.openKeys,eF=e.activeKey,eH=e.defaultActiveFirst,eq=e.selectable,eW=void 0===eq||eq,eK=e.multiple,eU=void 0!==eK&&eK,eV=e.defaultSelectedKeys,eG=e.selectedKeys,eX=e.onSelect,e$=e.onDeselect,eY=e.inlineIndent,eQ=e.motion,eJ=e.defaultMotions,e0=e.triggerSubMenuAction,e1=e.builtinPlacements,e2=e.itemIcon,e6=e.expandIcon,e3=e.overflowedIndicator,e4=void 0===e3?"...":e3,e5=e.overflowedIndicatorPopupClassName,e8=e.getPopupContainer,e7=e.onClick,e9=e.onOpenChange,te=e.onKeyDown,tt=(e.openAnimation,e.openTransitionName,e._internalRenderMenuItem),tn=e._internalRenderSubMenuItem,tr=(0,c.Z)(e,ez),to=m.useMemo(function(){var e;return e=ew,ex&&(e=function e(t){return(t||[]).map(function(t,n){if(t&&"object"===(0,eR.Z)(t)){var o=t.label,i=t.children,a=t.key,l=t.type,s=(0,c.Z)(t,eL),u=null!=a?a:"tmp-".concat(n);return i||"group"===l?"group"===l?m.createElement(eD,(0,r.Z)({key:u},s,{title:o}),e(i)):m.createElement(eI,(0,r.Z)({key:u},s,{title:o}),e(i)):"divider"===l?m.createElement(eZ,(0,r.Z)({key:u},s)):m.createElement(eh,(0,r.Z)({key:u},s),o)}return null}).filter(function(e){return e})}(ex)),ey(e,eB)},[ew,ex]),ti=m.useState(!1),ta=(0,l.Z)(ti,2),tl=ta[0],tc=ta[1],ts=m.useRef(),tu=(n=(0,f.Z)(ek,{value:ek}),h=(s=(0,l.Z)(n,2))[0],y=s[1],m.useEffect(function(){Y+=1;var e="".concat($,"-").concat(Y);y("rc-menu-uuid-".concat(e))},[]),h),td="rtl"===eS,tf=(0,f.Z)(eA,{value:e_,postState:function(e){return e||eB}}),tp=(0,l.Z)(tf,2),th=tp[0],tm=tp[1],tg=function(e){var t=arguments.length>1&&void 0!==arguments[1]&&arguments[1];function 
n(){tm(e),null==e9||e9(e)}t?(0,g.flushSync)(n):n()},tv=m.useState(th),ty=(0,l.Z)(tv,2),tb=ty[0],tx=ty[1],tw=m.useRef(!1),tS=m.useMemo(function(){return("inline"===eC||"vertical"===eC)&&eO?["vertical",eO]:[eC,!1]},[eC,eO]),tk=(0,l.Z)(tS,2),tE=tk[0],tC=tk[1],tO="inline"===tE,tj=m.useState(tE),tP=(0,l.Z)(tj,2),tM=tP[0],tN=tP[1],tI=m.useState(tC),tR=(0,l.Z)(tI,2),tT=tR[0],tA=tR[1];m.useEffect(function(){tN(tE),tA(tC),tw.current&&(tO?tm(tb):tg(eB))},[tE,tC]);var t_=m.useState(0),tD=(0,l.Z)(t_,2),tZ=tD[0],tL=tD[1],tz=tZ>=to.length-1||"horizontal"!==tM||eP;m.useEffect(function(){tO&&tx(th)},[th]),m.useEffect(function(){return tw.current=!0,function(){tw.current=!1}},[]);var tB=(b=m.useState({}),x=(0,l.Z)(b,2)[1],w=(0,m.useRef)(new Map),S=(0,m.useRef)(new Map),C=m.useState([]),j=(O=(0,l.Z)(C,2))[0],N=O[1],I=(0,m.useRef)(0),R=(0,m.useRef)(!1),Q=function(){R.current||x({})},J=(0,m.useCallback)(function(e,t){var n,r=V(t);S.current.set(r,e),w.current.set(e,r),I.current+=1;var o=I.current;n=function(){o===I.current&&Q()},Promise.resolve().then(n)},[]),ee=(0,m.useCallback)(function(e,t){var n=V(t);S.current.delete(n),w.current.delete(e)},[]),et=(0,m.useCallback)(function(e){N(e)},[]),en=(0,m.useCallback)(function(e,t){var n=(w.current.get(e)||"").split(U);return t&&j.includes(n[0])&&n.unshift(G),n},[j]),er=(0,m.useCallback)(function(e,t){return e.some(function(e){return en(e,!0).includes(t)})},[en]),eo=(0,m.useCallback)(function(e){var t="".concat(w.current.get(e)).concat(U),n=new Set;return(0,a.Z)(S.current.keys()).forEach(function(e){e.startsWith(t)&&n.add(S.current.get(e))}),n},[]),m.useEffect(function(){return function(){R.current=!0}},[]),{registerPath:J,unregisterPath:ee,refreshOverflowKeys:et,isSubPathKey:er,getKeyPath:en,getKeys:function(){var e=(0,a.Z)(w.current.keys());return 
j.length&&e.push(G),e},getSubPathKeys:eo}),tF=tB.registerPath,tH=tB.unregisterPath,tq=tB.refreshOverflowKeys,tW=tB.isSubPathKey,tK=tB.getKeyPath,tU=tB.getKeys,tV=tB.getSubPathKeys,tG=m.useMemo(function(){return{registerPath:tF,unregisterPath:tH}},[tF,tH]),tX=m.useMemo(function(){return{isSubPathKey:tW}},[tW]);m.useEffect(function(){tq(tz?eB:to.slice(tZ+1).map(function(e){return e.key}))},[tZ,tz]);var t$=(0,f.Z)(eF||eH&&(null===(eu=to[0])||void 0===eu?void 0:eu.key),{value:eF}),tY=(0,l.Z)(t$,2),tQ=tY[0],tJ=tY[1],t0=X(function(e){tJ(e)}),t1=X(function(){tJ(void 0)});(0,m.useImperativeHandle)(t,function(){return{list:ts.current,focus:function(e){var t,n,r=K(tU(),tu),o=r.elements,i=r.key2element,a=r.element2key,l=q(ts.current,o),c=null!=tQ?tQ:l[0]?a.get(l[0]):null===(t=to.find(function(e){return!e.props.disabled}))||void 0===t?void 0:t.key,s=i.get(c);c&&s&&(null==s||null===(n=s.focus)||void 0===n||n.call(s,e))}}});var t2=(0,f.Z)(eV||[],{value:eG,postState:function(e){return Array.isArray(e)?e:null==e?eB:[e]}}),t6=(0,l.Z)(t2,2),t3=t6[0],t4=t6[1],t5=function(e){if(eW){var t,n=e.key,r=t3.includes(n);t4(t=eU?r?t3.filter(function(e){return e!==n}):[].concat((0,a.Z)(t3),[n]):[n]);var o=(0,i.Z)((0,i.Z)({},e),{},{selectedKeys:t});r?null==e$||e$(o):null==eX||eX(o)}!eU&&th.length&&"inline"!==tM&&tg(eB)},t8=X(function(e){null==e7||e7(ec(e)),t5(e)}),t7=X(function(e,t){var n=th.filter(function(t){return t!==e});if(t)n.push(e);else if("inline"!==tM){var r=tV(e);n=n.filter(function(e){return!r.has(e)})}(0,p.Z)(th,n,!0)||tg(n,!0)}),t9=(ei=function(e,t){var n=null!=t?t:!th.includes(e);t7(e,n)},ea=m.useRef(),(el=m.useRef()).current=tQ,es=function(){T.Z.cancel(ea.current)},m.useEffect(function(){return function(){es()}},[]),function(e){var t=e.which;if([].concat(H,[L,z,B,F]).includes(t)){var n=tU(),r=K(n,tu),i=r,a=i.elements,l=i.key2element,c=i.element2key,s=function(e,t){for(var n=e||document.activeElement;n;){if(t.has(n))return n;n=n.parentElement}return 
null}(l.get(tQ),a),u=c.get(s),d=function(e,t,n,r){var i,a,l,c,s="prev",u="next",d="children",f="parent";if("inline"===e&&r===L)return{inlineTrigger:!0};var p=(i={},(0,o.Z)(i,D,s),(0,o.Z)(i,Z,u),i),h=(a={},(0,o.Z)(a,A,n?u:s),(0,o.Z)(a,_,n?s:u),(0,o.Z)(a,Z,d),(0,o.Z)(a,L,d),a),m=(l={},(0,o.Z)(l,D,s),(0,o.Z)(l,Z,u),(0,o.Z)(l,L,d),(0,o.Z)(l,z,f),(0,o.Z)(l,A,n?d:f),(0,o.Z)(l,_,n?f:d),l);switch(null===(c=({inline:p,horizontal:h,vertical:m,inlineSub:p,horizontalSub:m,verticalSub:m})["".concat(e).concat(t?"":"Sub")])||void 0===c?void 0:c[r]){case s:return{offset:-1,sibling:!0};case u:return{offset:1,sibling:!0};case f:return{offset:-1,sibling:!1};case d:return{offset:1,sibling:!1};default:return null}}(tM,1===tK(u,!0).length,td,t);if(!d&&t!==B&&t!==F)return;(H.includes(t)||[B,F].includes(t))&&e.preventDefault();var f=function(e){if(e){var t=e,n=e.querySelector("a");null!=n&&n.getAttribute("href")&&(t=n);var r=c.get(e);tJ(r),es(),ea.current=(0,T.Z)(function(){el.current===r&&t.focus()})}};if([B,F].includes(t)||d.sibling||!s){var p,h=q(p=s&&"inline"!==tM?function(e){for(var t=e;t;){if(t.getAttribute("data-menu-list"))return t;t=t.parentElement}return null}(s):ts.current,a);f(t===B?h[0]:t===F?h[h.length-1]:W(p,a,s,d.offset))}else if(d.inlineTrigger)ei(u);else if(d.offset>0)ei(u,!0),es(),ea.current=(0,T.Z)(function(){r=K(n,tu);var e=s.getAttribute("aria-controls");f(W(document.getElementById(e),r.elements))},5);else if(d.offset<0){var m=tK(u,!0),g=m[m.length-2],v=l.get(g);ei(g,!1),f(v)}}null==te||te(e)});m.useEffect(function(){tc(!0)},[]);var ne=m.useMemo(function(){return{_internalRenderMenuItem:tt,_internalRenderSubMenuItem:tn}},[tt,tn]),nt="horizontal"!==tM||eP?to:to.map(function(e,t){return 
m.createElement(k,{key:e.key,overflowDisabled:t>tZ},e)}),nn=m.createElement(d.Z,(0,r.Z)({id:ek,ref:ts,prefixCls:"".concat(ep,"-overflow"),component:"ul",itemComponent:eh,className:u()(ep,"".concat(ep,"-root"),"".concat(ep,"-").concat(tM),ev,(ed={},(0,o.Z)(ed,"".concat(ep,"-inline-collapsed"),tT),(0,o.Z)(ed,"".concat(ep,"-rtl"),td),ed),em),dir:eS,style:eg,role:"menu",tabIndex:void 0===eb?0:eb,data:nt,renderRawItem:function(e){return e},renderRawRest:function(e){var t=e.length,n=t?to.slice(-t):null;return m.createElement(eI,{eventKey:G,title:e4,disabled:tz,internalPopupClose:0===t,popupClassName:e5},n)},maxCount:"horizontal"!==tM||eP?d.Z.INVALIDATE:d.Z.RESPONSIVE,ssr:"full","data-menu-list":!0,onVisibleChange:function(e){tL(e)},onKeyDown:t9},tr));return m.createElement(M.Provider,{value:ne},m.createElement(v.Provider,{value:tu},m.createElement(k,{prefixCls:ep,rootClassName:em,mode:tM,openKeys:th,rtl:td,disabled:ej,motion:tl?eQ:null,defaultMotions:tl?eJ:null,activeKey:tQ,onActive:t0,onInactive:t1,selectedKeys:t3,inlineIndent:void 0===eY?24:eY,subMenuOpenDelay:void 0===eM?.1:eM,subMenuCloseDelay:void 0===eN?.1:eN,forceSubMenuRender:eT,builtinPlacements:e1,triggerSubMenuAction:void 0===e0?"hover":e0,getPopupContainer:e8,itemIcon:e2,expandIcon:e6,onItemClick:t8,onOpenChange:t7},m.createElement(P.Provider,{value:tX},nn),m.createElement("div",{style:{display:"none"},"aria-hidden":!0},m.createElement(E.Provider,{value:tG},to)))))});eF.Item=eh,eF.SubMenu=eI,eF.ItemGroup=eD,eF.Divider=eZ;var eH=eF},1699:function(e,t,n){"use strict";n.d(t,{Z:function(){return M}});var r=n(1119),o=n(31686),i=n(26365),a=n(6989),l=n(2265),c=n(36760),s=n.n(c),u=n(31474),d=n(27380),f=["prefixCls","invalidate","item","renderItem","responsive","responsiveDisabled","registerSize","itemKey","className","style","children","display","order","component"],p=void 0,h=l.forwardRef(function(e,t){var 
n,i=e.prefixCls,c=e.invalidate,d=e.item,h=e.renderItem,m=e.responsive,g=e.responsiveDisabled,v=e.registerSize,y=e.itemKey,b=e.className,x=e.style,w=e.children,S=e.display,k=e.order,E=e.component,C=(0,a.Z)(e,f),O=m&&!S;l.useEffect(function(){return function(){v(y,null)}},[]);var j=h&&d!==p?h(d):w;c||(n={opacity:O?0:1,height:O?0:p,overflowY:O?"hidden":p,order:m?k:p,pointerEvents:O?"none":p,position:O?"absolute":p});var P={};O&&(P["aria-hidden"]=!0);var M=l.createElement(void 0===E?"div":E,(0,r.Z)({className:s()(!c&&i,b),style:(0,o.Z)((0,o.Z)({},n),x)},P,C,{ref:t}),j);return m&&(M=l.createElement(u.Z,{onResize:function(e){v(y,e.offsetWidth)},disabled:g},M)),M});h.displayName="Item";var m=n(58525),g=n(54887),v=n(53346);function y(e,t){var n=l.useState(t),r=(0,i.Z)(n,2),o=r[0],a=r[1];return[o,(0,m.Z)(function(t){e(function(){a(t)})})]}var b=l.createContext(null),x=["component"],w=["className"],S=["className"],k=l.forwardRef(function(e,t){var n=l.useContext(b);if(!n){var o=e.component,i=(0,a.Z)(e,x);return l.createElement(void 0===o?"div":o,(0,r.Z)({},i,{ref:t}))}var c=n.className,u=(0,a.Z)(n,w),d=e.className,f=(0,a.Z)(e,S);return l.createElement(b.Provider,{value:null},l.createElement(h,(0,r.Z)({ref:t,className:s()(c,d)},u,f)))});k.displayName="RawItem";var E=["prefixCls","data","renderItem","renderRawItem","itemKey","itemWidth","ssr","style","className","maxCount","renderRest","renderRawRest","suffix","component","itemComponent","onVisibleChange"],C="responsive",O="invalidate";function j(e){return"+ ".concat(e.length," ...")}var P=l.forwardRef(function(e,t){var n,c,f=e.prefixCls,p=void 0===f?"rc-overflow":f,m=e.data,x=void 0===m?[]:m,w=e.renderItem,S=e.renderRawItem,k=e.itemKey,P=e.itemWidth,M=void 0===P?10:P,N=e.ssr,I=e.style,R=e.className,T=e.maxCount,A=e.renderRest,_=e.renderRawRest,D=e.suffix,Z=e.component,L=e.itemComponent,z=e.onVisibleChange,B=(0,a.Z)(e,E),F="full"===N,H=(n=l.useRef(null),function(e){n.current||(n.current=[],function(e){if("undefined"==typeof 
MessageChannel)(0,v.Z)(e);else{var t=new MessageChannel;t.port1.onmessage=function(){return e()},t.port2.postMessage(void 0)}}(function(){(0,g.unstable_batchedUpdates)(function(){n.current.forEach(function(e){e()}),n.current=null})})),n.current.push(e)}),q=y(H,null),W=(0,i.Z)(q,2),K=W[0],U=W[1],V=K||0,G=y(H,new Map),X=(0,i.Z)(G,2),$=X[0],Y=X[1],Q=y(H,0),J=(0,i.Z)(Q,2),ee=J[0],et=J[1],en=y(H,0),er=(0,i.Z)(en,2),eo=er[0],ei=er[1],ea=y(H,0),el=(0,i.Z)(ea,2),ec=el[0],es=el[1],eu=(0,l.useState)(null),ed=(0,i.Z)(eu,2),ef=ed[0],ep=ed[1],eh=(0,l.useState)(null),em=(0,i.Z)(eh,2),eg=em[0],ev=em[1],ey=l.useMemo(function(){return null===eg&&F?Number.MAX_SAFE_INTEGER:eg||0},[eg,K]),eb=(0,l.useState)(!1),ex=(0,i.Z)(eb,2),ew=ex[0],eS=ex[1],ek="".concat(p,"-item"),eE=Math.max(ee,eo),eC=T===C,eO=x.length&&eC,ej=T===O,eP=eO||"number"==typeof T&&x.length>T,eM=(0,l.useMemo)(function(){var e=x;return eO?e=null===K&&F?x:x.slice(0,Math.min(x.length,V/M)):"number"==typeof T&&(e=x.slice(0,T)),e},[x,M,K,T,eO]),eN=(0,l.useMemo)(function(){return eO?x.slice(ey+1):x.slice(eM.length)},[x,eM,eO,ey]),eI=(0,l.useCallback)(function(e,t){var n;return"function"==typeof k?k(e):null!==(n=k&&(null==e?void 0:e[k]))&&void 0!==n?n:t},[k]),eR=(0,l.useCallback)(w||function(e){return e},[w]);function eT(e,t,n){(eg!==e||void 0!==t&&t!==ef)&&(ev(e),n||(eS(eV){eT(r-1,e-o-ec+eo);break}}D&&e_(0)+ec>V&&ep(null)}},[V,$,eo,ec,eI,eM]);var eD=ew&&!!eN.length,eZ={};null!==ef&&eO&&(eZ={position:"absolute",left:ef,top:0});var eL={prefixCls:ek,responsive:eO,component:L,invalidate:ej},ez=S?function(e,t){var n=eI(e,t);return l.createElement(b.Provider,{key:n,value:(0,o.Z)((0,o.Z)({},eL),{},{order:t,item:e,itemKey:n,registerSize:eA,display:t<=ey})},S(e,t))}:function(e,t){var n=eI(e,t);return 
l.createElement(h,(0,r.Z)({},eL,{order:t,key:n,item:e,renderItem:eR,itemKey:n,registerSize:eA,display:t<=ey}))},eB={order:eD?ey:Number.MAX_SAFE_INTEGER,className:"".concat(ek,"-rest"),registerSize:function(e,t){ei(t),et(eo)},display:eD};if(_)_&&(c=l.createElement(b.Provider,{value:(0,o.Z)((0,o.Z)({},eL),eB)},_(eN)));else{var eF=A||j;c=l.createElement(h,(0,r.Z)({},eL,eB),"function"==typeof eF?eF(eN):eF)}var eH=l.createElement(void 0===Z?"div":Z,(0,r.Z)({className:s()(!ej&&p,R),style:I,ref:t},B),eM.map(ez),eP?c:null,D&&l.createElement(h,(0,r.Z)({},eL,{responsive:eC,responsiveDisabled:!eO,order:ey,className:"".concat(ek,"-suffix"),registerSize:function(e,t){es(t)},display:!0,style:eZ}),D));return eC&&(eH=l.createElement(u.Z,{onResize:function(e,t){U(t.clientWidth)},disabled:!eO},eH)),eH});P.displayName="Overflow",P.Item=k,P.RESPONSIVE=C,P.INVALIDATE=O;var M=P},10281:function(e,t,n){"use strict";n.d(t,{G:function(){return a}});var r=n(94981),o=function(e){if((0,r.Z)()&&window.document.documentElement){var t=Array.isArray(e)?e:[e],n=window.document.documentElement;return t.some(function(e){return e in n.style})}return!1},i=function(e,t){if(!o(e))return!1;var n=document.createElement("div"),r=n.style[e];return n.style[e]=t,n.style[e]!==r};function a(e,t){return Array.isArray(e)||void 0===t?o(e):i(e,t)}},48625:function(e,t,n){"use strict";n.d(t,{Z:function(){return T}});var r=n(1119),o=n(31686),i=n(41154),a=n(26365),l=n(11993),c=n(6989),s=n(2265),u=n(54887),d=n(36760),f=n.n(d),p=n(31474),h=s.forwardRef(function(e,t){var n,i=e.height,a=e.offsetY,c=e.offsetX,u=e.children,d=e.prefixCls,h=e.onInnerResize,m=e.innerProps,g=e.rtl,v=e.extra,y={},b={display:"flex",flexDirection:"column"};return void 
0!==a&&(y={height:i,position:"relative",overflow:"hidden"},b=(0,o.Z)((0,o.Z)({},b),{},(n={transform:"translateY(".concat(a,"px)")},(0,l.Z)(n,g?"marginRight":"marginLeft",-c),(0,l.Z)(n,"position","absolute"),(0,l.Z)(n,"left",0),(0,l.Z)(n,"right",0),(0,l.Z)(n,"top",0),n))),s.createElement("div",{style:y},s.createElement(p.Z,{onResize:function(e){e.offsetHeight&&h&&h()}},s.createElement("div",(0,r.Z)({style:b,className:f()((0,l.Z)({},"".concat(d,"-holder-inner"),d)),ref:t},m),u,v)))});h.displayName="Filler";var m=n(53346);function g(e,t){return("touches"in e?e.touches[0]:e)[t?"pageX":"pageY"]}var v=s.forwardRef(function(e,t){var n,r=e.prefixCls,i=e.rtl,c=e.scrollOffset,u=e.scrollRange,d=e.onStartMove,p=e.onStopMove,h=e.onScroll,v=e.horizontal,y=e.spinSize,b=e.containerSize,x=e.style,w=e.thumbStyle,S=s.useState(!1),k=(0,a.Z)(S,2),E=k[0],C=k[1],O=s.useState(null),j=(0,a.Z)(O,2),P=j[0],M=j[1],N=s.useState(null),I=(0,a.Z)(N,2),R=I[0],T=I[1],A=!i,_=s.useRef(),D=s.useRef(),Z=s.useState(!1),L=(0,a.Z)(Z,2),z=L[0],B=L[1],F=s.useRef(),H=function(){clearTimeout(F.current),B(!0),F.current=setTimeout(function(){B(!1)},3e3)},q=u-b||0,W=b-y||0,K=s.useMemo(function(){return 0===c||0===q?0:c/q*W},[c,q,W]),U=s.useRef({top:K,dragging:E,pageY:P,startTop:R});U.current={top:K,dragging:E,pageY:P,startTop:R};var V=function(e){C(!0),M(g(e,v)),T(U.current.top),d(),e.stopPropagation(),e.preventDefault()};s.useEffect(function(){var e=function(e){e.preventDefault()},t=_.current,n=D.current;return t.addEventListener("touchstart",e),n.addEventListener("touchstart",V),function(){t.removeEventListener("touchstart",e),n.removeEventListener("touchstart",V)}},[]);var G=s.useRef();G.current=q;var X=s.useRef();X.current=W,s.useEffect(function(){if(E){var e,t=function(t){var n=U.current,r=n.dragging,o=n.pageY,i=n.startTop;if(m.Z.cancel(e),r){var a=g(t,v)-o,l=i;!A&&v?l-=a:l+=a;var 
c=G.current,s=X.current,u=Math.ceil((s?l/s:0)*c);u=Math.min(u=Math.max(u,0),c),e=(0,m.Z)(function(){h(u,v)})}},n=function(){C(!1),p()};return window.addEventListener("mousemove",t),window.addEventListener("touchmove",t),window.addEventListener("mouseup",n),window.addEventListener("touchend",n),function(){window.removeEventListener("mousemove",t),window.removeEventListener("touchmove",t),window.removeEventListener("mouseup",n),window.removeEventListener("touchend",n),m.Z.cancel(e)}}},[E]),s.useEffect(function(){H()},[c]),s.useImperativeHandle(t,function(){return{delayHidden:H}});var $="".concat(r,"-scrollbar"),Y={position:"absolute",visibility:z&&q>0?null:"hidden"},Q={position:"absolute",background:"rgba(0, 0, 0, 0.5)",borderRadius:99,cursor:"pointer",userSelect:"none"};return v?(Y.height=8,Y.left=0,Y.right=0,Y.bottom=0,Q.height="100%",Q.width=y,A?Q.left=K:Q.right=K):(Y.width=8,Y.top=0,Y.bottom=0,A?Y.right=0:Y.left=0,Q.width="100%",Q.height=y,Q.top=K),s.createElement("div",{ref:_,className:f()($,(n={},(0,l.Z)(n,"".concat($,"-horizontal"),v),(0,l.Z)(n,"".concat($,"-vertical"),!v),(0,l.Z)(n,"".concat($,"-visible"),z),n)),style:(0,o.Z)((0,o.Z)({},Y),x),onMouseDown:function(e){e.stopPropagation(),e.preventDefault()},onMouseMove:H},s.createElement("div",{ref:D,className:f()("".concat($,"-thumb"),(0,l.Z)({},"".concat($,"-thumb-moving"),E)),style:(0,o.Z)((0,o.Z)({},Q),w),onMouseDown:V}))});function y(e){var t=e.children,n=e.setRef,r=s.useCallback(function(e){n(e)},[]);return s.cloneElement(t,{ref:r})}var b=n(2868),x=n(76405),w=n(25049),S=function(){function e(){(0,x.Z)(this,e),this.maps=void 0,this.id=0,this.maps=Object.create(null)}return(0,w.Z)(e,[{key:"set",value:function(e,t){this.maps[e]=t,this.id+=1}},{key:"get",value:function(e){return this.maps[e]}}]),e}(),k=n(27380),E=n(74126),C=("undefined"==typeof navigator?"undefined":(0,i.Z)(navigator))==="object"&&/Firefox/i.test(navigator.userAgent),O=function(e,t){var 
n=(0,s.useRef)(!1),r=(0,s.useRef)(null),o=(0,s.useRef)({top:e,bottom:t});return o.current.top=e,o.current.bottom=t,function(e){var t=arguments.length>1&&void 0!==arguments[1]&&arguments[1],i=e<0&&o.current.top||e>0&&o.current.bottom;return t&&i?(clearTimeout(r.current),n.current=!1):(!i||n.current)&&(clearTimeout(r.current),n.current=!0,r.current=setTimeout(function(){n.current=!1},50)),!n.current&&i}},j=14/15;function P(){var e=arguments.length>0&&void 0!==arguments[0]?arguments[0]:0,t=arguments.length>1&&void 0!==arguments[1]?arguments[1]:0,n=e/t*100;return isNaN(n)&&(n=0),Math.floor(n=Math.min(n=Math.max(n,20),e/2))}var M=["prefixCls","className","height","itemHeight","fullHeight","style","data","children","itemKey","virtual","direction","scrollWidth","component","onScroll","onVirtualScroll","onVisibleChange","innerProps","extraRender","styles"],N=[],I={overflowY:"auto",overflowAnchor:"none"},R=s.forwardRef(function(e,t){var n,d,g,x,w,R,T,A,_,D,Z,L,z,B,F,H,q,W,K,U,V,G,X,$,Y,Q,J,ee,et,en,er,eo,ei,ea,el,ec=e.prefixCls,es=void 0===ec?"rc-virtual-list":ec,eu=e.className,ed=e.height,ef=e.itemHeight,ep=e.fullHeight,eh=e.style,em=e.data,eg=e.children,ev=e.itemKey,ey=e.virtual,eb=e.direction,ex=e.scrollWidth,ew=e.component,eS=e.onScroll,ek=e.onVirtualScroll,eE=e.onVisibleChange,eC=e.innerProps,eO=e.extraRender,ej=e.styles,eP=(0,c.Z)(e,M),eM=!!(!1!==ey&&ed&&ef),eN=eM&&em&&(ef*em.length>ed||!!ex),eI="rtl"===eb,eR=f()(es,(0,l.Z)({},"".concat(es,"-rtl"),eI),eu),eT=em||N,eA=(0,s.useRef)(),e_=(0,s.useRef)(),eD=(0,s.useState)(0),eZ=(0,a.Z)(eD,2),eL=eZ[0],ez=eZ[1],eB=(0,s.useState)(0),eF=(0,a.Z)(eB,2),eH=eF[0],eq=eF[1],eW=(0,s.useState)(!1),eK=(0,a.Z)(eW,2),eU=eK[0],eV=eK[1],eG=function(){eV(!0)},eX=function(){eV(!1)},e$=s.useCallback(function(e){return"function"==typeof ev?ev(e):null==e?void 0:e[ev]},[ev]);function eY(e){ez(function(t){var n,r=(n="function"==typeof e?e(t):e,Number.isNaN(tf.current)||(n=Math.min(n,tf.current)),n=Math.max(n,0));return 
eA.current.scrollTop=r,r})}var eQ=(0,s.useRef)({start:0,end:eT.length}),eJ=(0,s.useRef)(),e0=(n=s.useState(eT),g=(d=(0,a.Z)(n,2))[0],x=d[1],w=s.useState(null),T=(R=(0,a.Z)(w,2))[0],A=R[1],s.useEffect(function(){var e=function(e,t,n){var r,o,i=e.length,a=t.length;if(0===i&&0===a)return null;i0&&void 0!==arguments[0]&&arguments[0];f();var t=function(){c.current.forEach(function(e,t){if(e&&e.offsetParent){var n=(0,b.Z)(e),r=n.offsetHeight;u.current.get(t)!==r&&u.current.set(t,n.offsetHeight)}}),l(function(e){return e+1})};e?t():d.current=(0,m.Z)(t)}return(0,s.useEffect)(function(){return f},[]),[function(r,o){var i=e(r),a=c.current.get(i);o?(c.current.set(i,o),p()):c.current.delete(i),!a!=!o&&(o?null==t||t(r):null==n||n(r))},p,u.current,i]}(e$,null,null),e6=(0,a.Z)(e2,4),e3=e6[0],e4=e6[1],e5=e6[2],e8=e6[3],e7=s.useMemo(function(){if(!eM)return{scrollHeight:void 0,start:0,end:eT.length-1,offset:void 0};if(!eN)return{scrollHeight:(null===(e=e_.current)||void 0===e?void 0:e.offsetHeight)||0,start:0,end:eT.length-1,offset:void 0};for(var e,t,n,r,o=0,i=eT.length,a=0;a=eL&&void 0===t&&(t=a,n=o),s>eL+ed&&void 0===r&&(r=a),o=s}return void 0===t&&(t=0,n=0,r=Math.ceil(ed/ef)),void 0===r&&(r=eT.length-1),{scrollHeight:o,start:t,end:r=Math.min(r+1,eT.length-1),offset:n}},[eN,eM,eL,eT,e8,ed]),e9=e7.scrollHeight,te=e7.start,tt=e7.end,tn=e7.offset;eQ.current.start=te,eQ.current.end=tt;var tr=s.useState({width:0,height:ed}),to=(0,a.Z)(tr,2),ti=to[0],ta=to[1],tl=(0,s.useRef)(),tc=(0,s.useRef)(),ts=s.useMemo(function(){return P(ti.width,ex)},[ti.width,ex]),tu=s.useMemo(function(){return P(ti.height,e9)},[ti.height,e9]),td=e9-ed,tf=(0,s.useRef)(td);tf.current=td;var tp=eL<=0,th=eL>=td,tm=O(tp,th),tg=function(){return{x:eI?-eH:eH,y:eL}},tv=(0,s.useRef)(tg()),ty=(0,E.zX)(function(){if(ek){var e=tg();(tv.current.x!==e.x||tv.current.y!==e.y)&&(ek(e),tv.current=e)}});function tb(e,t){t?((0,u.flushSync)(function(){eq(e)}),ty()):eY(e)}var tx=function(e){var t=e,n=ex-ti.width;return 
Math.min(t=Math.max(t,0),n)},tw=(0,E.zX)(function(e,t){t?((0,u.flushSync)(function(){eq(function(t){return tx(t+(eI?-e:e))})}),ty()):eY(function(t){return t+e})}),tS=(_=!!ex,D=(0,s.useRef)(0),Z=(0,s.useRef)(null),L=(0,s.useRef)(null),z=(0,s.useRef)(!1),B=O(tp,th),F=(0,s.useRef)(null),H=(0,s.useRef)(null),[function(e){if(eM){m.Z.cancel(H.current),H.current=(0,m.Z)(function(){F.current=null},2);var t,n=e.deltaX,r=e.deltaY,o=e.shiftKey,i=n,a=r;("sx"===F.current||!F.current&&o&&r&&!n)&&(i=r,a=0,F.current="sx");var l=Math.abs(i),c=Math.abs(a);(null===F.current&&(F.current=_&&l>c?"x":"y"),"y"===F.current)?(t=a,m.Z.cancel(Z.current),D.current+=t,L.current=t,B(t)||(C||e.preventDefault(),Z.current=(0,m.Z)(function(){var e=z.current?10:1;tw(D.current*e),D.current=0}))):(tw(i,!0),C||e.preventDefault())}},function(e){eM&&(z.current=e.detail===L.current)}]),tk=(0,a.Z)(tS,2),tE=tk[0],tC=tk[1];q=function(e,t){return!tm(e,t)&&(tE({preventDefault:function(){},deltaY:e}),!0)},K=(0,s.useRef)(!1),U=(0,s.useRef)(0),V=(0,s.useRef)(null),G=(0,s.useRef)(null),X=function(e){if(K.current){var t=Math.ceil(e.touches[0].pageY),n=U.current-t;U.current=t,q(n)&&e.preventDefault(),clearInterval(G.current),G.current=setInterval(function(){(!q(n*=j,!0)||.1>=Math.abs(n))&&clearInterval(G.current)},16)}},$=function(){K.current=!1,W()},Y=function(e){W(),1!==e.touches.length||K.current||(K.current=!0,U.current=Math.ceil(e.touches[0].pageY),V.current=e.target,V.current.addEventListener("touchmove",X),V.current.addEventListener("touchend",$))},W=function(){V.current&&(V.current.removeEventListener("touchmove",X),V.current.removeEventListener("touchend",$))},(0,k.Z)(function(){return eM&&eA.current.addEventListener("touchstart",Y),function(){var e;null===(e=eA.current)||void 0===e||e.removeEventListener("touchstart",Y),W(),clearInterval(G.current)}},[eM]),(0,k.Z)(function(){function e(e){eM&&e.preventDefault()}var t=eA.current;return 
t.addEventListener("wheel",tE),t.addEventListener("DOMMouseScroll",tC),t.addEventListener("MozMousePixelScroll",e),function(){t.removeEventListener("wheel",tE),t.removeEventListener("DOMMouseScroll",tC),t.removeEventListener("MozMousePixelScroll",e)}},[eM]),(0,k.Z)(function(){ex&&eq(function(e){return tx(e)})},[ti.width,ex]);var tO=function(){var e,t;null===(e=tl.current)||void 0===e||e.delayHidden(),null===(t=tc.current)||void 0===t||t.delayHidden()},tj=(Q=function(){return e4(!0)},J=s.useRef(),ee=s.useState(null),en=(et=(0,a.Z)(ee,2))[0],er=et[1],(0,k.Z)(function(){if(en&&en.times<10){if(!eA.current){er(function(e){return(0,o.Z)({},e)});return}Q();var e=en.targetAlign,t=en.originAlign,n=en.index,r=en.offset,i=eA.current.clientHeight,a=!1,l=e,c=null;if(i){for(var s=e||t,u=0,d=0,f=0,p=Math.min(eT.length-1,n),h=0;h<=p;h+=1){var m=e$(eT[h]);d=u;var g=e5.get(m);u=f=d+(void 0===g?ef:g)}for(var v="top"===s?r:i-r,y=p;y>=0;y-=1){var b=e$(eT[y]),x=e5.get(b);if(void 0===x){a=!0;break}if((v-=x)<=0)break}switch(s){case"top":c=d-r;break;case"bottom":c=f-i+r;break;default:var w=eA.current.scrollTop;dw+i&&(l="bottom")}null!==c&&eY(c),c!==en.lastTop&&(a=!0)}a&&er((0,o.Z)((0,o.Z)({},en),{},{times:en.times+1,targetAlign:l,lastTop:c}))}},[en,eA.current]),function(e){if(null==e){tO();return}if(m.Z.cancel(J.current),"number"==typeof e)eY(e);else if(e&&"object"===(0,i.Z)(e)){var t,n=e.align;t="index"in e?e.index:eT.findIndex(function(t){return e$(t)===e.key});var r=e.offset;er({times:0,index:t,offset:void 0===r?0:r,originAlign:n})}});s.useImperativeHandle(t,function(){return{getScrollInfo:tg,scrollTo:function(e){e&&"object"===(0,i.Z)(e)&&("left"in e||"top"in e)?(void 0!==e.left&&eq(tx(e.left)),tj(e.top)):tj(e)}}}),(0,k.Z)(function(){eE&&eE(eT.slice(te,tt+1),eT)},[te,tt,eT]);var tP=(eo=s.useMemo(function(){return[new Map,[]]},[eT,e5.id,ef]),ea=(ei=(0,a.Z)(eo,2))[0],el=ei[1],function(e){var t=arguments.length>1&&void 0!==arguments[1]?arguments[1]:e,n=ea.get(e),r=ea.get(t);if(void 
0===n||void 0===r)for(var o=eT.length,i=el.length;ied&&s.createElement(v,{ref:tl,prefixCls:es,scrollOffset:eL,scrollRange:e9,rtl:eI,onScroll:tb,onStartMove:eG,onStopMove:eX,spinSize:tu,containerSize:ti.height,style:null==ej?void 0:ej.verticalScrollBar,thumbStyle:null==ej?void 0:ej.verticalScrollBarThumb}),eN&&ex&&s.createElement(v,{ref:tc,prefixCls:es,scrollOffset:eH,scrollRange:ex,rtl:eI,onScroll:tb,onStartMove:eG,onStopMove:eX,spinSize:ts,containerSize:ti.width,horizontal:!0,style:null==ej?void 0:ej.horizontalScrollBar,thumbStyle:null==ej?void 0:ej.horizontalScrollBarThumb}))});R.displayName="List";var T=R},6337:function(e,t,n){"use strict";function r(e){return(r="function"==typeof Symbol&&"symbol"==typeof Symbol.iterator?function(e){return typeof e}:function(e){return e&&"function"==typeof Symbol&&e.constructor===Symbol&&e!==Symbol.prototype?"symbol":typeof e})(e)}Object.defineProperty(t,"__esModule",{value:!0}),t.CopyToClipboard=void 0;var o=l(n(2265)),i=l(n(49211)),a=["text","onCopy","options","children"];function l(e){return e&&e.__esModule?e:{default:e}}function c(e,t){var n=Object.keys(e);if(Object.getOwnPropertySymbols){var r=Object.getOwnPropertySymbols(e);t&&(r=r.filter(function(t){return Object.getOwnPropertyDescriptor(e,t).enumerable})),n.push.apply(n,r)}return n}function s(e){for(var t=1;t=0||(o[n]=e[n]);return o}(e,t);if(Object.getOwnPropertySymbols){var i=Object.getOwnPropertySymbols(e);for(r=0;r=0)&&Object.prototype.propertyIsEnumerable.call(e,n)&&(o[n]=e[n])}return o}(e,a),r=o.default.Children.only(t);return o.default.cloneElement(r,s(s({},n),{},{onClick:this.onClick}))}}],function(e,t){for(var n=0;n0;)if(!n.equals(e[r],t[r],r,r,e,t,n))return!1;return!0}function y(e,t){return p(e.getTime(),t.getTime())}function b(e,t,n){if(e.size!==t.size)return!1;for(var r,o,i={},a=e.entries(),l=0;(r=a.next())&&!r.done;){for(var c=t.entries(),s=!1,u=0;(o=c.next())&&!o.done;){var 
d=r.value,f=d[0],p=d[1],h=o.value,m=h[0],g=h[1];!s&&!i[u]&&(s=n.equals(f,m,l,u,e,t,n)&&n.equals(p,g,f,m,e,t,n))&&(i[u]=!0),u++}if(!s)return!1;l++}return!0}function x(e,t,n){var r,o=g(e),i=o.length;if(g(t).length!==i)return!1;for(;i-- >0;)if((r=o[i])===h&&(e.$$typeof||t.$$typeof)&&e.$$typeof!==t.$$typeof||!f(t,r)||!n.equals(e[r],t[r],r,r,e,t,n))return!1;return!0}function w(e,t,n){var r,o,i,a=d(e),l=a.length;if(d(t).length!==l)return!1;for(;l-- >0;)if((r=a[l])===h&&(e.$$typeof||t.$$typeof)&&e.$$typeof!==t.$$typeof||!f(t,r)||!n.equals(e[r],t[r],r,r,e,t,n)||(o=m(e,r),i=m(t,r),(o||i)&&(!o||!i||o.configurable!==i.configurable||o.enumerable!==i.enumerable||o.writable!==i.writable)))return!1;return!0}function S(e,t){return p(e.valueOf(),t.valueOf())}function k(e,t){return e.source===t.source&&e.flags===t.flags}function E(e,t,n){if(e.size!==t.size)return!1;for(var r,o,i={},a=e.values();(r=a.next())&&!r.done;){for(var l=t.values(),c=!1,s=0;(o=l.next())&&!o.done;)!c&&!i[s]&&(c=n.equals(r.value,o.value,r.value,o.value,e,t,n))&&(i[s]=!0),s++;if(!c)return!1}return!0}function C(e,t){var n=e.length;if(t.length!==n)return!1;for(;n-- >0;)if(e[n]!==t[n])return!1;return!0}var O=Array.isArray,j="function"==typeof ArrayBuffer&&ArrayBuffer.isView?ArrayBuffer.isView:null,P=Object.assign,M=Object.prototype.toString.call.bind(Object.prototype.toString),N=I();function I(e){void 0===e&&(e={});var t,n,r,o,i,a,l,c,d,f=e.circular,p=e.createInternalComparator,h=e.createState,m=e.strict,g=(n=(t=function(e){var t=e.circular,n=e.createCustomConfig,r=e.strict,o={areArraysEqual:r?w:v,areDatesEqual:y,areMapsEqual:r?s(b,w):b,areObjectsEqual:r?w:x,arePrimitiveWrappersEqual:S,areRegExpsEqual:k,areSetsEqual:r?s(E,w):E,areTypedArraysEqual:r?w:C};if(n&&(o=P({},o,n(o))),t){var i=u(o.areArraysEqual),a=u(o.areMapsEqual),l=u(o.areObjectsEqual),c=u(o.areSetsEqual);o=P({},o,{areArraysEqual:i,areMapsEqual:a,areObjectsEqual:l,areSetsEqual:c})}return 
o}(e)).areArraysEqual,r=t.areDatesEqual,o=t.areMapsEqual,i=t.areObjectsEqual,a=t.arePrimitiveWrappersEqual,l=t.areRegExpsEqual,c=t.areSetsEqual,d=t.areTypedArraysEqual,function(e,t,s){if(e===t)return!0;if(null==e||null==t||"object"!=typeof e||"object"!=typeof t)return e!=e&&t!=t;var u=e.constructor;if(u!==t.constructor)return!1;if(u===Object)return i(e,t,s);if(O(e))return n(e,t,s);if(null!=j&&j(e))return d(e,t,s);if(u===Date)return r(e,t,s);if(u===RegExp)return l(e,t,s);if(u===Map)return o(e,t,s);if(u===Set)return c(e,t,s);var f=M(e);return"[object Date]"===f?r(e,t,s):"[object RegExp]"===f?l(e,t,s):"[object Map]"===f?o(e,t,s):"[object Set]"===f?c(e,t,s):"[object Object]"===f?"function"!=typeof e.then&&"function"!=typeof t.then&&i(e,t,s):"[object Arguments]"===f?i(e,t,s):("[object Boolean]"===f||"[object Number]"===f||"[object String]"===f)&&a(e,t,s)}),N=p?p(g):function(e,t,n,r,o,i,a){return g(e,t,a)};return function(e){var t=e.circular,n=e.comparator,r=e.createState,o=e.equals,i=e.strict;if(r)return function(e,a){var l=r(),c=l.cache;return n(e,a,{cache:void 0===c?t?new WeakMap:void 0:c,equals:o,meta:l.meta,strict:i})};if(t)return function(e,t){return n(e,t,{cache:new WeakMap,equals:o,meta:void 0,strict:i})};var a={cache:void 0,equals:o,meta:void 0,strict:i};return function(e,t){return n(e,t,a)}}({circular:void 0!==f&&f,comparator:g,createState:h,equals:N,strict:void 0!==m&&m})}function R(e){var t=arguments.length>1&&void 0!==arguments[1]?arguments[1]:0,n=-1;requestAnimationFrame(function r(o){if(n<0&&(n=o),o-n>t)e(o),n=-1;else{var i;i=r,"undefined"!=typeof requestAnimationFrame&&requestAnimationFrame(i)}})}function T(e){return(T="function"==typeof Symbol&&"symbol"==typeof Symbol.iterator?function(e){return typeof e}:function(e){return e&&"function"==typeof Symbol&&e.constructor===Symbol&&e!==Symbol.prototype?"symbol":typeof e})(e)}function A(e,t){(null==t||t>e.length)&&(t=e.length);for(var n=0,r=Array(t);ne.length)&&(t=e.length);for(var 
n=0,r=Array(t);n=0&&e<=1}),"[configBezier]: arguments should be x1, y1, x2, y2 of [0, 1] instead received %s",r);var f=Q(i,l),p=Q(a,c),h=(e=i,t=l,function(n){var r;return Y([].concat(function(e){if(Array.isArray(e))return X(e)}(r=$(e,t).map(function(e,t){return e*t}).slice(1))||function(e){if("undefined"!=typeof Symbol&&null!=e[Symbol.iterator]||null!=e["@@iterator"])return Array.from(e)}(r)||G(r)||function(){throw TypeError("Invalid attempt to spread non-iterable instance.\nIn order to be iterable, non-array objects must have a [Symbol.iterator]() method.")}(),[0]),n)}),m=function(e){for(var t=e>1?1:e,n=t,r=0;r<8;++r){var o,i=f(n)-t,a=h(n);if(1e-4>Math.abs(i-t)||a<1e-4)break;n=(o=n-i/a)>1?1:o<0?0:o}return p(n)};return m.isStepper=!1,m},ee=function(){var e=arguments.length>0&&void 0!==arguments[0]?arguments[0]:{},t=e.stiff,n=void 0===t?100:t,r=e.damping,o=void 0===r?8:r,i=e.dt,a=void 0===i?17:i,l=function(e,t,r){var i=r+(-(e-t)*n-r*o)*a/1e3,l=r*a/1e3+e;return 1e-4>Math.abs(l-t)&&1e-4>Math.abs(i)?[t,0]:[l,i]};return l.isStepper=!0,l.dt=a,l},et=function(){for(var e=arguments.length,t=Array(e),n=0;ne.length)&&(t=e.length);for(var n=0,r=Array(t);ne.length)&&(t=e.length);for(var n=0,r=Array(t);n0?n[o-1]:r,f=s||Object.keys(c);if("function"==typeof l||"spring"===l)return[].concat(em(e),[t.runJSAnimation.bind(t,{from:d.style,to:c,duration:i,easing:l}),i]);var p=U(f,i,l),h=ey(ey(ey({},d.style),c),{},{transition:p});return[].concat(em(e),[h,i,u]).filter(H)},[a,Math.max(void 0===l?0:l,r)])),[e.onAnimationEnd]))}},{key:"runAnimation",value:function(e){if(!this.manager){var t,n,r;this.manager=(t=function(){return null},n=!1,r=function e(r){if(!n){if(Array.isArray(r)){if(!r.length)return;var o=function(e){if(Array.isArray(e))return e}(r)||function(e){if("undefined"!=typeof Symbol&&null!=e[Symbol.iterator]||null!=e["@@iterator"])return Array.from(e)}(r)||function(e,t){if(e){if("string"==typeof e)return A(e,void 0);var 
n=Object.prototype.toString.call(e).slice(8,-1);if("Object"===n&&e.constructor&&(n=e.constructor.name),"Map"===n||"Set"===n)return Array.from(e);if("Arguments"===n||/^(?:Ui|I)nt(?:8|16|32)(?:Clamped)?Array$/.test(n))return A(e,void 0)}}(r)||function(){throw TypeError("Invalid attempt to destructure non-iterable instance.\nIn order to be iterable, non-array objects must have a [Symbol.iterator]() method.")}(),i=o[0],a=o.slice(1);if("number"==typeof i){R(e.bind(null,a),i);return}e(i),R(e.bind(null,a));return}"object"===T(r)&&t(r),"function"==typeof r&&r()}},{stop:function(){n=!0},start:function(e){n=!1,r(e)},subscribe:function(e){return t=e,function(){t=function(){return null}}}})}var o=e.begin,i=e.duration,a=e.attributeName,l=e.to,c=e.easing,s=e.onAnimationStart,u=e.onAnimationEnd,d=e.steps,f=e.children,p=this.manager;if(this.unSubscribe=p.subscribe(this.handleStyleChange),"function"==typeof c||"function"==typeof f||"spring"===c){this.runJSAnimation(e);return}if(d.length>1){this.runStepAnimation(e);return}var h=a?eb({},a,l):l,m=U(Object.keys(h),i,c);p.start([s,o,ey(ey({},h),{},{transition:m}),i,u])}},{key:"render",value:function(){var e=this.props,t=e.children,n=(e.begin,e.duration),o=(e.attributeName,e.easing,e.isActive),i=(e.steps,e.from,e.to,e.canBegin,e.onAnimationEnd,e.shouldReAnimate,e.onAnimationReStart,function(e,t){if(null==e)return{};var n,r,o=function(e,t){if(null==e)return{};var n,r,o={},i=Object.keys(e);for(r=0;r=0||(o[n]=e[n]);return o}(e,t);if(Object.getOwnPropertySymbols){var i=Object.getOwnPropertySymbols(e);for(r=0;r=0)&&Object.prototype.propertyIsEnumerable.call(e,n)&&(o[n]=e[n])}return o}(e,eh)),a=r.Children.count(t),l=K(this.state.style);if("function"==typeof t)return t(l);if(!o||0===a||n<=0)return t;var c=function(e){var t=e.props,n=t.style,o=t.className;return(0,r.cloneElement)(e,ey(ey({},i),{},{style:ey(ey({},void 0===n?{}:n),l),className:o}))};return 1===a?c(r.Children.only(t)):r.createElement("div",null,r.Children.map(t,function(e){return 
c(e)}))}}],function(e,t){for(var n=0;n0&&void 0!==arguments[0]?arguments[0]:{},t=e.steps,n=e.duration;return t&&t.length?t.reduce(function(e,t){return e+(Number.isFinite(t.duration)&&t.duration>0?t.duration:0)},0):Number.isFinite(n)?n:0},eL=function(e){!function(e,t){if("function"!=typeof t&&null!==t)throw TypeError("Super expression must either be null or a function");e.prototype=Object.create(t&&t.prototype,{constructor:{value:e,writable:!0,configurable:!0}}),Object.defineProperty(e,"prototype",{writable:!1}),t&&eR(e,t)}(i,e);var t,n,o=(t=function(){if("undefined"==typeof Reflect||!Reflect.construct||Reflect.construct.sham)return!1;if("function"==typeof Proxy)return!0;try{return Boolean.prototype.valueOf.call(Reflect.construct(Boolean,[],function(){})),!0}catch(e){return!1}}(),function(){var e,n=eA(i);return e=t?Reflect.construct(n,arguments,eA(this).constructor):n.apply(this,arguments),function(e,t){if(t&&("object"===eP(t)||"function"==typeof t))return t;if(void 0!==t)throw TypeError("Derived constructors may only return object or undefined");return eT(e)}(this,e)});function i(){var e;return!function(e,t){if(!(e instanceof t))throw TypeError("Cannot call a class as a function")}(this,i),e_(eT(e=o.call(this)),"handleEnter",function(t,n){var r=e.props,o=r.appearOptions,i=r.enterOptions;e.handleStyleActive(n?o:i)}),e_(eT(e),"handleExit",function(){var t=e.props.leaveOptions;e.handleStyleActive(t)}),e.state={isActive:!1},e}return n=[{key:"handleStyleActive",value:function(e){if(e){var t=e.onAnimationEnd?function(){e.onAnimationEnd()}:null;this.setState(eI(eI({},e),{},{onAnimationEnd:t,isActive:!0}))}}},{key:"parseTimeout",value:function(){var e=this.props,t=e.appearOptions,n=e.enterOptions,r=e.leaveOptions;return eZ(t)+eZ(n)+eZ(r)}},{key:"render",value:function(){var e=this,t=this.props,n=t.children,o=(t.appearOptions,t.enterOptions,t.leaveOptions,function(e,t){if(null==e)return{};var n,r,o=function(e,t){if(null==e)return{};var 
n,r,o={},i=Object.keys(e);for(r=0;r=0||(o[n]=e[n]);return o}(e,t);if(Object.getOwnPropertySymbols){var i=Object.getOwnPropertySymbols(e);for(r=0;r=0)&&Object.prototype.propertyIsEnumerable.call(e,n)&&(o[n]=e[n])}return o}(t,ej));return r.createElement(eO.Transition,eM({},o,{onEnter:this.handleEnter,onExit:this.handleExit,timeout:this.parseTimeout()}),function(){return r.createElement(eC,e.state,r.Children.only(n))})}}],function(e,t){for(var n=0;n code':{position:"relative",zIndex:"1",borderLeft:"10px solid #358ccb",boxShadow:"-1px 0px 0px 0px #358ccb, 0px 0px 0px 1px #dfdfdf",backgroundColor:"#fdfdfd",backgroundImage:"linear-gradient(transparent 50%, rgba(69, 142, 209, 0.04) 50%)",backgroundSize:"3em 3em",backgroundOrigin:"content-box",backgroundAttachment:"local"},':not(pre) > code[class*="language-"]':{backgroundColor:"#fdfdfd",WebkitBoxSizing:"border-box",MozBoxSizing:"border-box",boxSizing:"border-box",marginBottom:"1em",position:"relative",padding:".2em",borderRadius:"0.3em",color:"#c92c2c",border:"1px solid rgba(0, 0, 0, 0.1)",display:"inline",whiteSpace:"normal"},'pre[class*="language-"]:before':{content:"''",display:"block",position:"absolute",bottom:"0.75em",left:"0.18em",width:"40%",height:"20%",maxHeight:"13em",boxShadow:"0px 13px 8px #979797",WebkitTransform:"rotate(-2deg)",MozTransform:"rotate(-2deg)",msTransform:"rotate(-2deg)",OTransform:"rotate(-2deg)",transform:"rotate(-2deg)"},'pre[class*="language-"]:after':{content:"''",display:"block",position:"absolute",bottom:"0.75em",left:"auto",width:"40%",height:"20%",maxHeight:"13em",boxShadow:"0px 13px 8px 
#979797",WebkitTransform:"rotate(2deg)",MozTransform:"rotate(2deg)",msTransform:"rotate(2deg)",OTransform:"rotate(2deg)",transform:"rotate(2deg)",right:"0.75em"},comment:{color:"#7D8B99"},"block-comment":{color:"#7D8B99"},prolog:{color:"#7D8B99"},doctype:{color:"#7D8B99"},cdata:{color:"#7D8B99"},punctuation:{color:"#5F6364"},property:{color:"#c92c2c"},tag:{color:"#c92c2c"},boolean:{color:"#c92c2c"},number:{color:"#c92c2c"},"function-name":{color:"#c92c2c"},constant:{color:"#c92c2c"},symbol:{color:"#c92c2c"},deleted:{color:"#c92c2c"},selector:{color:"#2f9c0a"},"attr-name":{color:"#2f9c0a"},string:{color:"#2f9c0a"},char:{color:"#2f9c0a"},function:{color:"#2f9c0a"},builtin:{color:"#2f9c0a"},inserted:{color:"#2f9c0a"},operator:{color:"#a67f59",background:"rgba(255, 255, 255, 0.5)"},entity:{color:"#a67f59",background:"rgba(255, 255, 255, 0.5)",cursor:"help"},url:{color:"#a67f59",background:"rgba(255, 255, 255, 0.5)"},variable:{color:"#a67f59",background:"rgba(255, 255, 255, 0.5)"},atrule:{color:"#1990b8"},"attr-value":{color:"#1990b8"},keyword:{color:"#1990b8"},"class-name":{color:"#1990b8"},regex:{color:"#e90"},important:{color:"#e90",fontWeight:"normal"},".language-css .token.string":{color:"#a67f59",background:"rgba(255, 255, 255, 0.5)"},".style .token.string":{color:"#a67f59",background:"rgba(255, 255, 255, 0.5)"},bold:{fontWeight:"bold"},italic:{fontStyle:"italic"},namespace:{Opacity:".7"},'pre[class*="language-"].line-numbers.line-numbers':{paddingLeft:"0"},'pre[class*="language-"].line-numbers.line-numbers code':{paddingLeft:"3.8em"},'pre[class*="language-"].line-numbers.line-numbers .line-numbers-rows':{left:"0"},'pre[class*="language-"][data-line]':{paddingTop:"0",paddingBottom:"0",paddingLeft:"0"},"pre[data-line] code":{position:"relative",paddingLeft:"4em"},"pre .line-highlight":{marginTop:"0"}}},33664:function(e,t,n){"use strict";t.__esModule=!0,t.default=void 0,function(e){if(!e||!e.__esModule){var t={};if(null!=e){for(var n in 
e)if(Object.prototype.hasOwnProperty.call(e,n)){var r=Object.defineProperty&&Object.getOwnPropertyDescriptor?Object.getOwnPropertyDescriptor(e,n):{};r.get||r.set?Object.defineProperty(t,n,r):t[n]=e[n]}}t.default=e}}(n(40718));var r=l(n(84617)),o=l(n(67973)),i=l(n(2265)),a=l(n(20536));function l(e){return e&&e.__esModule?e:{default:e}}function c(){return(c=Object.assign||function(e){for(var t=1;t=0||(o[n]=e[n]);return o}(e,["children","in"]),a=r.default.Children.toArray(t),l=a[0],c=a[1];return delete o.onEnter,delete o.onEntering,delete o.onEntered,delete o.onExit,delete o.onExiting,delete o.onExited,r.default.createElement(i.default,o,n?r.default.cloneElement(l,{key:"first",onEnter:this.handleEnter,onEntering:this.handleEntering,onEntered:this.handleEntered}):r.default.cloneElement(c,{key:"second",onEnter:this.handleExit,onEntering:this.handleExiting,onEntered:this.handleExited}))},t}(r.default.Component);l.propTypes={},t.default=l,e.exports=t.default},20536:function(e,t,n){"use strict";t.__esModule=!0,t.default=t.EXITING=t.ENTERED=t.ENTERING=t.EXITED=t.UNMOUNTED=void 0;var r=function(e){if(e&&e.__esModule)return e;var t={};if(null!=e){for(var n in e)if(Object.prototype.hasOwnProperty.call(e,n)){var r=Object.defineProperty&&Object.getOwnPropertyDescriptor?Object.getOwnPropertyDescriptor(e,n):{};r.get||r.set?Object.defineProperty(t,n,r):t[n]=e[n]}}return t.default=e,t}(n(40718)),o=l(n(2265)),i=l(n(54887)),a=n(52181);function l(e){return e&&e.__esModule?e:{default:e}}n(32601);var c="unmounted";t.UNMOUNTED=c;var s="exited";t.EXITED=s;var u="entering";t.ENTERING=u;var d="entered";t.ENTERED=d;var f="exiting";t.EXITING=f;var p=function(e){function t(t,n){r=e.call(this,t,n)||this;var r,o,i=n.transitionGroup,a=i&&!i.isMounting?t.enter:t.appear;return r.appearStatus=null,t.in?a?(o=s,r.appearStatus=u):o=d:o=t.unmountOnExit||t.mountOnEnter?c:s,r.state={status:o},r.nextCallback=null,r}t.prototype=Object.create(e.prototype),t.prototype.constructor=t,t.__proto__=e;var 
n=t.prototype;return n.getChildContext=function(){return{transitionGroup:null}},t.getDerivedStateFromProps=function(e,t){return e.in&&t.status===c?{status:s}:null},n.componentDidMount=function(){this.updateStatus(!0,this.appearStatus)},n.componentDidUpdate=function(e){var t=null;if(e!==this.props){var n=this.state.status;this.props.in?n!==u&&n!==d&&(t=u):(n===u||n===d)&&(t=f)}this.updateStatus(!1,t)},n.componentWillUnmount=function(){this.cancelNextCallback()},n.getTimeouts=function(){var e,t,n,r=this.props.timeout;return e=t=n=r,null!=r&&"number"!=typeof r&&(e=r.exit,t=r.enter,n=void 0!==r.appear?r.appear:t),{exit:e,enter:t,appear:n}},n.updateStatus=function(e,t){if(void 0===e&&(e=!1),null!==t){this.cancelNextCallback();var n=i.default.findDOMNode(this);t===u?this.performEnter(n,e):this.performExit(n)}else this.props.unmountOnExit&&this.state.status===s&&this.setState({status:c})},n.performEnter=function(e,t){var n=this,r=this.props.enter,o=this.context.transitionGroup?this.context.transitionGroup.isMounting:t,i=this.getTimeouts(),a=o?i.appear:i.enter;if(!t&&!r){this.safeSetState({status:d},function(){n.props.onEntered(e)});return}this.props.onEnter(e,o),this.safeSetState({status:u},function(){n.props.onEntering(e,o),n.onTransitionEnd(e,a,function(){n.safeSetState({status:d},function(){n.props.onEntered(e,o)})})})},n.performExit=function(e){var t=this,n=this.props.exit,r=this.getTimeouts();if(!n){this.safeSetState({status:s},function(){t.props.onExited(e)});return}this.props.onExit(e),this.safeSetState({status:f},function(){t.props.onExiting(e),t.onTransitionEnd(e,r.exit,function(){t.safeSetState({status:s},function(){t.props.onExited(e)})})})},n.cancelNextCallback=function(){null!==this.nextCallback&&(this.nextCallback.cancel(),this.nextCallback=null)},n.safeSetState=function(e,t){t=this.setNextCallback(t),this.setState(e,t)},n.setNextCallback=function(e){var t=this,n=!0;return 
this.nextCallback=function(r){n&&(n=!1,t.nextCallback=null,e(r))},this.nextCallback.cancel=function(){n=!1},this.nextCallback},n.onTransitionEnd=function(e,t,n){this.setNextCallback(n);var r=null==t&&!this.props.addEndListener;if(!e||r){setTimeout(this.nextCallback,0);return}this.props.addEndListener&&this.props.addEndListener(e,this.nextCallback),null!=t&&setTimeout(this.nextCallback,t)},n.render=function(){var e=this.state.status;if(e===c)return null;var t=this.props,n=t.children,r=function(e,t){if(null==e)return{};var n,r,o={},i=Object.keys(e);for(r=0;r=0||(o[n]=e[n]);return o}(t,["children"]);if(delete r.in,delete r.mountOnEnter,delete r.unmountOnExit,delete r.appear,delete r.enter,delete r.exit,delete r.timeout,delete r.addEndListener,delete r.onEnter,delete r.onEntering,delete r.onEntered,delete r.onExit,delete r.onExiting,delete r.onExited,"function"==typeof n)return n(e,r);var i=o.default.Children.only(n);return o.default.cloneElement(i,r)},t}(o.default.Component);function h(){}p.contextTypes={transitionGroup:r.object},p.childContextTypes={transitionGroup:function(){}},p.propTypes={},p.defaultProps={in:!1,mountOnEnter:!1,unmountOnExit:!1,appear:!1,enter:!0,exit:!0,onEnter:h,onEntering:h,onEntered:h,onExit:h,onExiting:h,onExited:h},p.UNMOUNTED=0,p.EXITED=1,p.ENTERING=2,p.ENTERED=3,p.EXITING=4;var m=(0,a.polyfill)(p);t.default=m},38244:function(e,t,n){"use strict";t.__esModule=!0,t.default=void 0;var r=l(n(40718)),o=l(n(2265)),i=n(52181),a=n(28710);function l(e){return e&&e.__esModule?e:{default:e}}function c(){return(c=Object.assign||function(e){for(var t=1;t=0||(o[n]=e[n]);return o}(e,["component","childFactory"]),i=u(this.state.children).map(n);return(delete r.appear,delete r.enter,delete r.exit,null===t)?i:o.default.createElement(t,r,i)},t}(o.default.Component);d.childContextTypes={transitionGroup:r.default.object.isRequired},d.propTypes={},d.defaultProps={component:"div",childFactory:function(e){return e}};var 
f=(0,i.polyfill)(d);t.default=f,e.exports=t.default},30719:function(e,t,n){"use strict";var r=l(n(33664)),o=l(n(31601)),i=l(n(38244)),a=l(n(20536));function l(e){return e&&e.__esModule?e:{default:e}}e.exports={Transition:a.default,TransitionGroup:i.default,ReplaceTransition:o.default,CSSTransition:r.default}},28710:function(e,t,n){"use strict";t.__esModule=!0,t.getChildMapping=o,t.mergeChildMappings=i,t.getInitialChildMapping=function(e,t){return o(e.children,function(n){return(0,r.cloneElement)(n,{onExited:t.bind(null,n),in:!0,appear:a(n,"appear",e),enter:a(n,"enter",e),exit:a(n,"exit",e)})})},t.getNextChildMapping=function(e,t,n){var l=o(e.children),c=i(t,l);return Object.keys(c).forEach(function(o){var i=c[o];if((0,r.isValidElement)(i)){var s=o in t,u=o in l,d=t[o],f=(0,r.isValidElement)(d)&&!d.props.in;u&&(!s||f)?c[o]=(0,r.cloneElement)(i,{onExited:n.bind(null,i),in:!0,exit:a(i,"exit",e),enter:a(i,"enter",e)}):u||!s||f?u&&s&&(0,r.isValidElement)(d)&&(c[o]=(0,r.cloneElement)(i,{onExited:n.bind(null,i),in:d.props.in,exit:a(i,"exit",e),enter:a(i,"enter",e)})):c[o]=(0,r.cloneElement)(i,{in:!1})}}),c};var r=n(2265);function o(e,t){var n=Object.create(null);return e&&r.Children.map(e,function(e){return e}).forEach(function(e){n[e.key]=t&&(0,r.isValidElement)(e)?t(e):e}),n}function i(e,t){function n(n){return n in t?t[n]:e[n]}e=e||{},t=t||{};var r,o=Object.create(null),i=[];for(var a in e)a in t?i.length&&(o[a]=i,i=[]):i.push(a);var l={};for(var c in t){if(o[c])for(r=0;r=0||(o[n]=e[n]);return o}(e,t);if(Object.getOwnPropertySymbols){var i=Object.getOwnPropertySymbols(e);for(r=0;r=0)&&Object.prototype.propertyIsEnumerable.call(e,n)&&(o[n]=e[n])}return o}(e,w),i=parseInt("".concat(n),10),a=parseInt("".concat(r),10),l=parseInt("".concat(t.height||o.height),10),c=parseInt("".concat(t.width||o.width),10);return E(E(E(E(E({},t),o),i?{x:i}:{}),a?{y:a}:{}),{},{height:l,width:c,name:t.name,radius:t.radius})}function O(e){return 
r.createElement(b.bn,S({shapeType:"rectangle",propTransformer:C,activeClassName:"recharts-active-bar"},e))}var j=["value","background"];function P(e){return(P="function"==typeof Symbol&&"symbol"==typeof Symbol.iterator?function(e){return typeof e}:function(e){return e&&"function"==typeof Symbol&&e.constructor===Symbol&&e!==Symbol.prototype?"symbol":typeof e})(e)}function M(){return(M=Object.assign?Object.assign.bind():function(e){for(var t=1;t=0||(o[n]=e[n]);return o}(e,t);if(Object.getOwnPropertySymbols){var i=Object.getOwnPropertySymbols(e);for(r=0;r=0)&&Object.prototype.propertyIsEnumerable.call(e,n)&&(o[n]=e[n])}return o}(t,j);if(!l)return null;var s=I(I(I(I(I({},c),{},{fill:"#eee"},l),a),(0,y.bw)(e.props,t,n)),{},{onAnimationStart:e.handleAnimationStart,onAnimationEnd:e.handleAnimationEnd,dataKey:o,index:n,key:"background-bar-".concat(n),className:"recharts-bar-background-rectangle"});return r.createElement(O,M({option:e.props.background,isActive:n===i},s))})}},{key:"renderErrorBar",value:function(e,t){if(this.props.isAnimationActive&&!this.state.isAnimationFinished)return null;var n=this.props,o=n.data,i=n.xAxis,a=n.yAxis,l=n.layout,c=n.children,s=(0,m.NN)(c,d.W);if(!s)return null;var f="vertical"===l?o[0].height/2:o[0].width/2,p=function(e,t){var n=Array.isArray(e.value)?e.value[1]:e.value;return{x:e.x,y:e.y,value:n,errorVal:(0,v.F$)(e,t)}};return r.createElement(u.m,{clipPath:e?"url(#clipPath-".concat(t,")"):null},s.map(function(e){return r.cloneElement(e,{key:"error-bar-".concat(t,"-").concat(e.props.dataKey),data:o,xAxis:i,yAxis:a,layout:l,offset:f,dataPointFormatter:p})}))}},{key:"render",value:function(){var e=this.props,t=e.hide,n=e.data,i=e.className,a=e.xAxis,l=e.yAxis,c=e.left,d=e.top,f=e.width,h=e.height,m=e.isAnimationActive,g=e.background,v=e.id;if(t||!n||!n.length)return null;var y=this.state.isAnimationFinished,b=(0,o.Z)("recharts-bar",i),x=a&&a.allowDataOverflow,w=l&&l.allowDataOverflow,S=x||w,k=s()(v)?this.id:v;return 
r.createElement(u.m,{className:b},x||w?r.createElement("defs",null,r.createElement("clipPath",{id:"clipPath-".concat(k)},r.createElement("rect",{x:x?c:c-f/2,y:w?d:d-h/2,width:x?f:2*f,height:w?h:2*h}))):null,r.createElement(u.m,{className:"recharts-bar-rectangles",clipPath:S?"url(#clipPath-".concat(k,")"):null},g?this.renderBackground():null,this.renderRectangles()),this.renderErrorBar(S,k),(!m||y)&&p.e.renderCallByParent(this.props,n))}}],a=[{key:"getDerivedStateFromProps",value:function(e,t){return e.animationId!==t.prevAnimationId?{prevAnimationId:e.animationId,curData:e.data,prevData:t.curData}:e.data!==t.curData?{curData:e.data}:null}}],n&&R(f.prototype,n),a&&R(f,a),Object.defineProperty(f,"prototype",{writable:!1}),f}(r.PureComponent);D(L,"displayName","Bar"),D(L,"defaultProps",{xAxisId:0,yAxisId:0,legendType:"rect",minPointSize:0,hide:!1,data:[],layout:"vertical",activeBar:!0,isAnimationActive:!g.x.isSsr,animationBegin:0,animationDuration:400,animationEasing:"ease"}),D(L,"getComposedData",function(e){var t=e.props,n=e.item,r=e.barPosition,o=e.bandSize,i=e.xAxis,a=e.yAxis,l=e.xAxisTicks,c=e.yAxisTicks,s=e.stackedData,u=e.dataStartIndex,d=e.displayedData,p=e.offset,g=(0,v.Bu)(r,n);if(!g)return null;var y=t.layout,b=n.props,x=b.dataKey,w=b.children,S=b.minPointSize,k="horizontal"===y?a:i,E=s?k.scale.domain():null,C=(0,v.Yj)({numericAxis:k}),O=(0,m.NN)(w,f.b),j=d.map(function(e,t){var r,d,f,p,m,b;if(s?r=(0,v.Vv)(s[u+t],E):Array.isArray(r=(0,v.F$)(e,x))||(r=[C,r]),"horizontal"===y){var w,k=[a.scale(r[0]),a.scale(r[1])],j=k[0],P=k[1];d=(0,v.Fy)({axis:i,ticks:l,bandSize:o,offset:g.offset,entry:e,index:t}),f=null!==(w=null!=P?P:j)&&void 0!==w?w:void 0,p=g.size;var M=j-P;if(m=Number.isNaN(M)?0:M,b={x:d,y:a.y,width:p,height:a.height},Math.abs(S)>0&&Math.abs(m)0&&Math.abs(p)=0||(o[n]=e[n]);return o}(e,t);if(Object.getOwnPropertySymbols){var i=Object.getOwnPropertySymbols(e);for(r=0;r=0)&&Object.prototype.propertyIsEnumerable.call(e,n)&&(o[n]=e[n])}return o}function 
C(e,t){for(var n=0;n0?this.props:h)),o<=0||a<=0||!m||!m.length)?null:r.createElement(u.m,{className:(0,c.Z)("recharts-cartesian-axis",s),ref:function(t){e.layerReference=t}},n&&this.renderAxisLine(),this.renderTicks(m,this.state.fontSize,this.state.letterSpacing),f._.renderCallByParent(this.props))}}],o=[{key:"renderTickItem",value:function(e,t,n){return r.isValidElement(e)?r.cloneElement(e,t):i()(e)?e(t):r.createElement(d.x,w({},t,{className:"recharts-cartesian-axis-tick-value"}),n)}}],n&&C(S.prototype,n),o&&C(S,o),Object.defineProperty(S,"prototype",{writable:!1}),S}(r.Component);P(N,"displayName","CartesianAxis"),P(N,"defaultProps",{x:0,y:0,width:0,height:0,viewBox:{x:0,y:0,width:0,height:0},orientation:"bottom",ticks:[],stroke:"#666",tickLine:!0,axisLine:!0,tick:!0,mirror:!1,minTickGap:5,tickSize:6,tickMargin:2,interval:"preserveEnd"})},56940:function(e,t,n){"use strict";n.d(t,{q:function(){return M}});var r=n(2265),o=n(86757),i=n.n(o),a=n(1175),l=n(16630),c=n(82944),s=n(85355),u=n(78242),d=n(80285),f=n(25739),p=["x1","y1","x2","y2","key"],h=["offset"];function m(e){return(m="function"==typeof Symbol&&"symbol"==typeof Symbol.iterator?function(e){return typeof e}:function(e){return e&&"function"==typeof Symbol&&e.constructor===Symbol&&e!==Symbol.prototype?"symbol":typeof e})(e)}function g(e,t){var n=Object.keys(e);if(Object.getOwnPropertySymbols){var r=Object.getOwnPropertySymbols(e);t&&(r=r.filter(function(t){return Object.getOwnPropertyDescriptor(e,t).enumerable})),n.push.apply(n,r)}return n}function v(e){for(var t=1;t=0||(o[n]=e[n]);return o}(e,t);if(Object.getOwnPropertySymbols){var i=Object.getOwnPropertySymbols(e);for(r=0;r=0)&&Object.prototype.propertyIsEnumerable.call(e,n)&&(o[n]=e[n])}return o}var x=function(e){var t=e.fill;if(!t||"none"===t)return null;var n=e.fillOpacity,o=e.x,i=e.y,a=e.width,l=e.height;return r.createElement("rect",{x:o,y:i,width:a,height:l,stroke:"none",fill:t,fillOpacity:n,className:"recharts-cartesian-grid-bg"})};function 
w(e,t){var n;if(r.isValidElement(e))n=r.cloneElement(e,t);else if(i()(e))n=e(t);else{var o=t.x1,a=t.y1,l=t.x2,s=t.y2,u=t.key,d=b(t,p),f=(0,c.L6)(d,!1),m=(f.offset,b(f,h));n=r.createElement("line",y({},m,{x1:o,y1:a,x2:l,y2:s,fill:"none",key:u}))}return n}function S(e){var t=e.x,n=e.width,o=e.horizontal,i=void 0===o||o,a=e.horizontalPoints;if(!i||!a||!a.length)return null;var l=a.map(function(r,o){return w(i,v(v({},e),{},{x1:t,y1:r,x2:t+n,y2:r,key:"line-".concat(o),index:o}))});return r.createElement("g",{className:"recharts-cartesian-grid-horizontal"},l)}function k(e){var t=e.y,n=e.height,o=e.vertical,i=void 0===o||o,a=e.verticalPoints;if(!i||!a||!a.length)return null;var l=a.map(function(r,o){return w(i,v(v({},e),{},{x1:r,y1:t,x2:r,y2:t+n,key:"line-".concat(o),index:o}))});return r.createElement("g",{className:"recharts-cartesian-grid-vertical"},l)}function E(e){var t=e.horizontalFill,n=e.fillOpacity,o=e.x,i=e.y,a=e.width,l=e.height,c=e.horizontalPoints,s=e.horizontal;if(!(void 0===s||s)||!t||!t.length)return null;var u=c.map(function(e){return Math.round(e+i-i)}).sort(function(e,t){return e-t});i!==u[0]&&u.unshift(0);var d=u.map(function(e,c){var s=u[c+1]?u[c+1]-e:i+l-e;if(s<=0)return null;var d=c%t.length;return r.createElement("rect",{key:"react-".concat(c),y:e,x:o,height:s,width:a,stroke:"none",fill:t[d],fillOpacity:n,className:"recharts-cartesian-grid-bg"})});return r.createElement("g",{className:"recharts-cartesian-gridstripes-horizontal"},d)}function C(e){var t=e.vertical,n=e.verticalFill,o=e.fillOpacity,i=e.x,a=e.y,l=e.width,c=e.height,s=e.verticalPoints;if(!(void 0===t||t)||!n||!n.length)return null;var u=s.map(function(e){return Math.round(e+i-i)}).sort(function(e,t){return e-t});i!==u[0]&&u.unshift(0);var d=u.map(function(e,t){var s=u[t+1]?u[t+1]-e:i+l-e;if(s<=0)return null;var d=t%n.length;return r.createElement("rect",{key:"react-".concat(t),x:e,y:a,width:s,height:c,stroke:"none",fill:n[d],fillOpacity:o,className:"recharts-cartesian-grid-bg"})});return 
r.createElement("g",{className:"recharts-cartesian-gridstripes-vertical"},d)}var O=function(e,t){var n=e.xAxis,r=e.width,o=e.height,i=e.offset;return(0,s.Rf)((0,u.f)(v(v(v({},d.O.defaultProps),n),{},{ticks:(0,s.uY)(n,!0),viewBox:{x:0,y:0,width:r,height:o}})),i.left,i.left+i.width,t)},j=function(e,t){var n=e.yAxis,r=e.width,o=e.height,i=e.offset;return(0,s.Rf)((0,u.f)(v(v(v({},d.O.defaultProps),n),{},{ticks:(0,s.uY)(n,!0),viewBox:{x:0,y:0,width:r,height:o}})),i.top,i.top+i.height,t)},P={horizontal:!0,vertical:!0,stroke:"#ccc",fill:"none",verticalFill:[],horizontalFill:[]};function M(e){var t,n,o,c,s,u,d=(0,f.zn)(),p=(0,f.Mw)(),h=(0,f.qD)(),g=v(v({},e),{},{stroke:null!==(t=e.stroke)&&void 0!==t?t:P.stroke,fill:null!==(n=e.fill)&&void 0!==n?n:P.fill,horizontal:null!==(o=e.horizontal)&&void 0!==o?o:P.horizontal,horizontalFill:null!==(c=e.horizontalFill)&&void 0!==c?c:P.horizontalFill,vertical:null!==(s=e.vertical)&&void 0!==s?s:P.vertical,verticalFill:null!==(u=e.verticalFill)&&void 0!==u?u:P.verticalFill}),b=g.x,w=g.y,M=g.width,N=g.height,I=g.xAxis,R=g.yAxis,T=g.syncWithTicks,A=g.horizontalValues,_=g.verticalValues;if(!(0,l.hj)(M)||M<=0||!(0,l.hj)(N)||N<=0||!(0,l.hj)(b)||b!==+b||!(0,l.hj)(w)||w!==+w)return null;var D=g.verticalCoordinatesGenerator||O,Z=g.horizontalCoordinatesGenerator||j,L=g.horizontalPoints,z=g.verticalPoints;if((!L||!L.length)&&i()(Z)){var B=A&&A.length,F=Z({yAxis:R?v(v({},R),{},{ticks:B?A:R.ticks}):void 0,width:d,height:p,offset:h},!!B||T);(0,a.Z)(Array.isArray(F),"horizontalCoordinatesGenerator should return Array but instead it returned [".concat(m(F),"]")),Array.isArray(F)&&(L=F)}if((!z||!z.length)&&i()(D)){var H=_&&_.length,q=D({xAxis:I?v(v({},I),{},{ticks:H?_:I.ticks}):void 0,width:d,height:p,offset:h},!!H||T);(0,a.Z)(Array.isArray(q),"verticalCoordinatesGenerator should return Array but instead it returned [".concat(m(q),"]")),Array.isArray(q)&&(z=q)}return 
r.createElement("g",{className:"recharts-cartesian-grid"},r.createElement(x,{fill:g.fill,fillOpacity:g.fillOpacity,x:g.x,y:g.y,width:g.width,height:g.height}),r.createElement(S,y({},g,{offset:h,horizontalPoints:L})),r.createElement(k,y({},g,{offset:h,verticalPoints:z})),r.createElement(E,y({},g,{horizontalPoints:L})),r.createElement(C,y({},g,{verticalPoints:z})))}M.displayName="CartesianGrid"},13137:function(e,t,n){"use strict";n.d(t,{W:function(){return u}});var r=n(2265),o=n(69398),i=n(9841),a=n(82944),l=["offset","layout","width","dataKey","data","dataPointFormatter","xAxis","yAxis"];function c(){return(c=Object.assign?Object.assign.bind():function(e){for(var t=1;te.length)&&(t=e.length);for(var n=0,r=Array(t);n=0||(o[n]=e[n]);return o}(e,t);if(Object.getOwnPropertySymbols){var i=Object.getOwnPropertySymbols(e);for(r=0;r=0)&&Object.prototype.propertyIsEnumerable.call(e,n)&&(o[n]=e[n])}return o}(e,l),v=(0,a.L6)(g,!1);"x"===e.direction&&"number"!==h.type&&(0,o.Z)(!1);var y=f.map(function(e){var o,a,l=p(e,d),f=l.x,g=l.y,y=l.value,b=l.errorVal;if(!b)return null;var x=[];if(Array.isArray(b)){var w=function(e){if(Array.isArray(e))return e}(b)||function(e,t){var n=null==e?null:"undefined"!=typeof Symbol&&e[Symbol.iterator]||e["@@iterator"];if(null!=n){var r,o,i,a,l=[],c=!0,s=!1;try{for(i=(n=n.call(e)).next;!(c=(r=i.call(n)).done)&&(l.push(r.value),2!==l.length);c=!0);}catch(e){s=!0,o=e}finally{try{if(!c&&null!=n.return&&(a=n.return(),Object(a)!==a))return}finally{if(s)throw o}}return l}}(b,2)||function(e,t){if(e){if("string"==typeof e)return s(e,2);var n=Object.prototype.toString.call(e).slice(8,-1);if("Object"===n&&e.constructor&&(n=e.constructor.name),"Map"===n||"Set"===n)return Array.from(e);if("Arguments"===n||/^(?:Ui|I)nt(?:8|16|32)(?:Clamped)?Array$/.test(n))return s(e,2)}}(b,2)||function(){throw TypeError("Invalid attempt to destructure non-iterable instance.\nIn order to be iterable, non-array objects must have a [Symbol.iterator]() 
method.")}();o=w[0],a=w[1]}else o=a=b;if("vertical"===n){var S=h.scale,k=g+t,E=k+u,C=k-u,O=S(y-o),j=S(y+a);x.push({x1:j,y1:E,x2:j,y2:C}),x.push({x1:O,y1:k,x2:j,y2:k}),x.push({x1:O,y1:E,x2:O,y2:C})}else if("horizontal"===n){var P=m.scale,M=f+t,N=M-u,I=M+u,R=P(y-o),T=P(y+a);x.push({x1:N,y1:T,x2:I,y2:T}),x.push({x1:M,y1:R,x2:M,y2:T}),x.push({x1:N,y1:R,x2:I,y2:R})}return r.createElement(i.m,c({className:"recharts-errorBar",key:"bar-".concat(x.map(function(e){return"".concat(e.x1,"-").concat(e.x2,"-").concat(e.y1,"-").concat(e.y2)}))},v),x.map(function(e){return r.createElement("line",c({},e,{key:"line-".concat(e.x1,"-").concat(e.x2,"-").concat(e.y1,"-").concat(e.y2)}))}))});return r.createElement(i.m,{className:"recharts-errorBars"},y)}u.defaultProps={stroke:"black",strokeWidth:1.5,width:5,offset:0,layout:"horizontal"},u.displayName="ErrorBar"},97059:function(e,t,n){"use strict";n.d(t,{K:function(){return s}});var r=n(2265),o=n(61994),i=n(25739),a=n(80285),l=n(85355);function c(){return(c=Object.assign?Object.assign.bind():function(e){for(var t=1;te*o)return!1;var i=n();return e*(t-e*i/2-r)>=0&&e*(t+e*i/2-o)<=0}function d(e){return(d="function"==typeof Symbol&&"symbol"==typeof Symbol.iterator?function(e){return typeof e}:function(e){return e&&"function"==typeof Symbol&&e.constructor===Symbol&&e!==Symbol.prototype?"symbol":typeof e})(e)}function f(e,t){var n=Object.keys(e);if(Object.getOwnPropertySymbols){var r=Object.getOwnPropertySymbols(e);t&&(r=r.filter(function(t){return Object.getOwnPropertyDescriptor(e,t).enumerable})),n.push.apply(n,r)}return n}function p(e){for(var t=1;t=2?(0,i.uY)(v[1].coordinate-v[0].coordinate):1,M=(r="width"===C,d=y.x,f=y.y,h=y.width,m=y.height,1===P?{start:r?d:f,end:r?d+h:f+m}:{start:r?d+h:f+m,end:r?d:f});return"equidistantPreserveStart"===w?function(e,t,n,r,o){for(var i,a=(r||[]).slice(),l=t.start,c=t.end,d=0,f=1,p=l;f<=a.length;)if(i=function(){var t,i=null==r?void 0:r[d];if(void 0===i)return{v:s(r,f)};var a=d,h=function(){return void 
0===t&&(t=n(i,a)),t},m=i.coordinate,g=0===d||u(e,m,h,p,c);g||(d=0,p=l,f+=1),g&&(p=m+e*(h()/2+o),d+=f)}())return i.v;return[]}(P,M,j,v,b):("preserveStart"===w||"preserveStartEnd"===w?function(e,t,n,r,o,i){var a=(r||[]).slice(),l=a.length,c=t.start,s=t.end;if(i){var d=r[l-1],f=n(d,l-1),h=e*(d.coordinate+e*f/2-s);a[l-1]=d=p(p({},d),{},{tickCoord:h>0?d.coordinate-h*e:d.coordinate}),u(e,d.tickCoord,function(){return f},c,s)&&(s=d.tickCoord-e*(f/2+o),a[l-1]=p(p({},d),{},{isShow:!0}))}for(var m=i?l-1:l,g=function(t){var r,i=a[t],l=function(){return void 0===r&&(r=n(i,t)),r};if(0===t){var d=e*(i.coordinate-e*l()/2-c);a[t]=i=p(p({},i),{},{tickCoord:d<0?i.coordinate-d*e:i.coordinate})}else a[t]=i=p(p({},i),{},{tickCoord:i.coordinate});u(e,i.tickCoord,l,c,s)&&(c=i.tickCoord+e*(l()/2+o),a[t]=p(p({},i),{},{isShow:!0}))},v=0;v0?s.coordinate-f*e:s.coordinate})}else i[t]=s=p(p({},s),{},{tickCoord:s.coordinate});u(e,s.tickCoord,d,l,c)&&(c=s.tickCoord-e*(d()/2+o),i[t]=p(p({},s),{},{isShow:!0}))},d=a-1;d>=0;d--)s(d);return i}(P,M,j,v,b)).filter(function(e){return e.isShow})}},93765:function(e,t,n){"use strict";n.d(t,{z:function(){return tx}});var r=n(2265),o=n(77571),i=n.n(o),a=n(86757),l=n.n(a),c=n(99676),s=n.n(c),u=n(13735),d=n.n(u),f=n(34935),p=n.n(f),h=n(37065),m=n.n(h),g=n(84173),v=n.n(g),y=n(32242),b=n.n(y),x=n(61994),w=n(69398),S=n(48777),k=n(9841),E=n(8147),C=n(22190),O=n(81889),j=n(73649),P=n(82944),M=n(55284),N=n(58811),I=n(85355),R=n(16630);function T(e){return(T="function"==typeof Symbol&&"symbol"==typeof Symbol.iterator?function(e){return typeof e}:function(e){return e&&"function"==typeof Symbol&&e.constructor===Symbol&&e!==Symbol.prototype?"symbol":typeof e})(e)}function A(e,t){var n=Object.keys(e);if(Object.getOwnPropertySymbols){var r=Object.getOwnPropertySymbols(e);t&&(r=r.filter(function(t){return Object.getOwnPropertyDescriptor(e,t).enumerable})),n.push.apply(n,r)}return n}function _(e){for(var 
t=1;t0&&t.handleDrag(e.changedTouches[0])}),V(K(t),"handleDragEnd",function(){t.setState({isTravellerMoving:!1,isSlideMoving:!1},function(){var e=t.props,n=e.endIndex,r=e.onDragEnd,o=e.startIndex;null==r||r({endIndex:n,startIndex:o})}),t.detachDragEndListener()}),V(K(t),"handleLeaveWrapper",function(){(t.state.isTravellerMoving||t.state.isSlideMoving)&&(t.leaveTimer=window.setTimeout(t.handleDragEnd,t.props.leaveTimeOut))}),V(K(t),"handleEnterSlideOrTraveller",function(){t.setState({isTextActive:!0})}),V(K(t),"handleLeaveSlideOrTraveller",function(){t.setState({isTextActive:!1})}),V(K(t),"handleSlideDragStart",function(e){var n=$(e)?e.changedTouches[0]:e;t.setState({isTravellerMoving:!1,isSlideMoving:!0,slideMoveStartX:n.pageX}),t.attachDragEndListener()}),t.travellerDragStartHandlers={startX:t.handleTravellerDragStart.bind(K(t),"startX"),endX:t.handleTravellerDragStart.bind(K(t),"endX")},t.state={},t}return n=[{key:"componentWillUnmount",value:function(){this.leaveTimer&&(clearTimeout(this.leaveTimer),this.leaveTimer=null),this.detachDragEndListener()}},{key:"getIndex",value:function(e){var t=e.startX,n=e.endX,r=this.state.scaleValues,o=this.props,i=o.gap,l=o.data.length-1,c=a.getIndexInRange(r,Math.min(t,n)),s=a.getIndexInRange(r,Math.max(t,n));return{startIndex:c-c%i,endIndex:s===l?l:s-s%i}}},{key:"getTextOfTick",value:function(e){var t=this.props,n=t.data,r=t.tickFormatter,o=t.dataKey,i=(0,I.F$)(n[e],o,e);return l()(r)?r(i,e):i}},{key:"attachDragEndListener",value:function(){window.addEventListener("mouseup",this.handleDragEnd,!0),window.addEventListener("touchend",this.handleDragEnd,!0),window.addEventListener("mousemove",this.handleDrag,!0)}},{key:"detachDragEndListener",value:function(){window.removeEventListener("mouseup",this.handleDragEnd,!0),window.removeEventListener("touchend",this.handleDragEnd,!0),window.removeEventListener("mousemove",this.handleDrag,!0)}},{key:"handleSlideDrag",value:function(e){var 
t=this.state,n=t.slideMoveStartX,r=t.startX,o=t.endX,i=this.props,a=i.x,l=i.width,c=i.travellerWidth,s=i.startIndex,u=i.endIndex,d=i.onChange,f=e.pageX-n;f>0?f=Math.min(f,a+l-c-o,a+l-c-r):f<0&&(f=Math.max(f,a-r,a-o));var p=this.getIndex({startX:r+f,endX:o+f});(p.startIndex!==s||p.endIndex!==u)&&d&&d(p),this.setState({startX:r+f,endX:o+f,slideMoveStartX:e.pageX})}},{key:"handleTravellerDragStart",value:function(e,t){var n=$(t)?t.changedTouches[0]:t;this.setState({isSlideMoving:!1,isTravellerMoving:!0,movingTravellerId:e,brushMoveStartX:n.pageX}),this.attachDragEndListener()}},{key:"handleTravellerMove",value:function(e){var t,n=this.state,r=n.brushMoveStartX,o=n.movingTravellerId,i=n.endX,a=n.startX,l=this.state[o],c=this.props,s=c.x,u=c.width,d=c.travellerWidth,f=c.onChange,p=c.gap,h=c.data,m={startX:this.state.startX,endX:this.state.endX},g=e.pageX-r;g>0?g=Math.min(g,s+u-d-l):g<0&&(g=Math.max(g,s-l)),m[o]=l+g;var v=this.getIndex(m),y=v.startIndex,b=v.endIndex,x=function(){var e=h.length-1;return"startX"===o&&(i>a?y%p==0:b%p==0)||ia?b%p==0:y%p==0)||i>a&&b===e};this.setState((V(t={},o,l+g),V(t,"brushMoveStartX",e.pageX),t),function(){f&&x()&&f(v)})}},{key:"handleTravellerMoveKeyboard",value:function(e,t){var n=this,r=this.state,o=r.scaleValues,i=r.startX,a=r.endX,l=this.state[t],c=o.indexOf(l);if(-1!==c){var s=c+e;if(-1!==s&&!(s>=o.length)){var u=o[s];"startX"===t&&u>=a||"endX"===t&&u<=i||this.setState(V({},t,u),function(){n.props.onChange(n.getIndex({startX:n.state.startX,endX:n.state.endX}))})}}}},{key:"renderBackground",value:function(){var e=this.props,t=e.x,n=e.y,o=e.width,i=e.height,a=e.fill,l=e.stroke;return r.createElement("rect",{stroke:l,fill:a,x:t,y:n,width:o,height:i})}},{key:"renderPanorama",value:function(){var e=this.props,t=e.x,n=e.y,o=e.width,i=e.height,a=e.data,l=e.children,c=e.padding,s=r.Children.only(l);return s?r.cloneElement(s,{x:t,y:n,width:o,height:i,margin:c,compact:!0,data:a}):null}},{key:"renderTravellerLayer",value:function(e,t){var 
n=this,o=this.props,i=o.y,l=o.travellerWidth,c=o.height,s=o.traveller,u=o.ariaLabel,d=o.data,f=o.startIndex,p=o.endIndex,h=Math.max(e,this.props.x),m=H(H({},(0,P.L6)(this.props,!1)),{},{x:h,y:i,width:l,height:c}),g=u||"Min value: ".concat(d[f].name,", Max value: ").concat(d[p].name);return r.createElement(k.m,{tabIndex:0,role:"slider","aria-label":g,"aria-valuenow":e,className:"recharts-brush-traveller",onMouseEnter:this.handleEnterSlideOrTraveller,onMouseLeave:this.handleLeaveSlideOrTraveller,onMouseDown:this.travellerDragStartHandlers[t],onTouchStart:this.travellerDragStartHandlers[t],onKeyDown:function(e){["ArrowLeft","ArrowRight"].includes(e.key)&&(e.preventDefault(),e.stopPropagation(),n.handleTravellerMoveKeyboard("ArrowRight"===e.key?1:-1,t))},onFocus:function(){n.setState({isTravellerFocused:!0})},onBlur:function(){n.setState({isTravellerFocused:!1})},style:{cursor:"col-resize"}},a.renderTraveller(s,m))}},{key:"renderSlide",value:function(e,t){var n=this.props,o=n.y,i=n.height,a=n.stroke,l=n.travellerWidth;return r.createElement("rect",{className:"recharts-brush-slide",onMouseEnter:this.handleEnterSlideOrTraveller,onMouseLeave:this.handleLeaveSlideOrTraveller,onMouseDown:this.handleSlideDragStart,onTouchStart:this.handleSlideDragStart,style:{cursor:"move"},stroke:"none",fill:a,fillOpacity:.2,x:Math.min(e,t)+l,y:o,width:Math.max(Math.abs(t-e)-l,0),height:i})}},{key:"renderText",value:function(){var e=this.props,t=e.startIndex,n=e.endIndex,o=e.y,i=e.height,a=e.travellerWidth,l=e.stroke,c=this.state,s=c.startX,u=c.endX,d={pointerEvents:"none",fill:l};return r.createElement(k.m,{className:"recharts-brush-texts"},r.createElement(N.x,B({textAnchor:"end",verticalAnchor:"middle",x:Math.min(s,u)-5,y:o+i/2},d),this.getTextOfTick(t)),r.createElement(N.x,B({textAnchor:"start",verticalAnchor:"middle",x:Math.max(s,u)+a+5,y:o+i/2},d),this.getTextOfTick(n)))}},{key:"render",value:function(){var 
e=this.props,t=e.data,n=e.className,o=e.children,i=e.x,a=e.y,l=e.width,c=e.height,s=e.alwaysShowText,u=this.state,d=u.startX,f=u.endX,p=u.isTextActive,h=u.isSlideMoving,m=u.isTravellerMoving,g=u.isTravellerFocused;if(!t||!t.length||!(0,R.hj)(i)||!(0,R.hj)(a)||!(0,R.hj)(l)||!(0,R.hj)(c)||l<=0||c<=0)return null;var v=(0,x.Z)("recharts-brush",n),y=1===r.Children.count(o),b=L("userSelect","none");return r.createElement(k.m,{className:v,onMouseLeave:this.handleLeaveWrapper,onTouchMove:this.handleTouchMove,style:b},this.renderBackground(),y&&this.renderPanorama(),this.renderSlide(d,f),this.renderTravellerLayer(d,"startX"),this.renderTravellerLayer(f,"endX"),(p||h||m||g||s)&&this.renderText())}}],o=[{key:"renderDefaultTraveller",value:function(e){var t=e.x,n=e.y,o=e.width,i=e.height,a=e.stroke,l=Math.floor(n+i/2)-1;return r.createElement(r.Fragment,null,r.createElement("rect",{x:t,y:n,width:o,height:i,fill:a,stroke:"none"}),r.createElement("line",{x1:t+1,y1:l,x2:t+o-1,y2:l,fill:"none",stroke:"#fff"}),r.createElement("line",{x1:t+1,y1:l+2,x2:t+o-1,y2:l+2,fill:"none",stroke:"#fff"}))}},{key:"renderTraveller",value:function(e,t){return r.isValidElement(e)?r.cloneElement(e,t):l()(e)?e(t):a.renderDefaultTraveller(t)}},{key:"getDerivedStateFromProps",value:function(e,t){var n=e.data,r=e.width,o=e.x,i=e.travellerWidth,a=e.updateId,l=e.startIndex,c=e.endIndex;if(n!==t.prevData||a!==t.prevUpdateId)return H({prevData:n,prevTravellerWidth:i,prevUpdateId:a,prevX:o,prevWidth:r},n&&n.length?X({data:n,width:r,x:o,travellerWidth:i,startIndex:l,endIndex:c}):{scale:null,scaleValues:null});if(t.scale&&(r!==t.prevWidth||o!==t.prevX||i!==t.prevTravellerWidth)){t.scale.range([o,o+r-i]);var s=t.scale.domain().map(function(e){return t.scale(e)});return{prevData:n,prevTravellerWidth:i,prevUpdateId:a,prevX:o,prevWidth:r,startX:t.scale(e.startIndex),endX:t.scale(e.endIndex),scaleValues:s}}return null}},{key:"getIndexInRange",value:function(e,t){for(var n=e.length,r=0,o=n-1;o-r>1;){var 
i=Math.floor((r+o)/2);e[i]>t?o=i:r=i}return t>=e[o]?o:r}}],n&&q(a.prototype,n),o&&q(a,o),Object.defineProperty(a,"prototype",{writable:!1}),a}(r.PureComponent);V(Y,"displayName","Brush"),V(Y,"defaultProps",{height:40,travellerWidth:5,gap:1,fill:"#fff",stroke:"#666",padding:{top:1,right:1,bottom:1,left:1},leaveTimeOut:1e3,alwaysShowText:!1});var Q=n(4094),J=n(38569),ee=n(26680),et=function(e,t){var n=e.alwaysShow,r=e.ifOverflow;return n&&(r="extendDomain"),r===t},en=n(25311),er=n(1175);function eo(e){return(eo="function"==typeof Symbol&&"symbol"==typeof Symbol.iterator?function(e){return typeof e}:function(e){return e&&"function"==typeof Symbol&&e.constructor===Symbol&&e!==Symbol.prototype?"symbol":typeof e})(e)}function ei(){return(ei=Object.assign?Object.assign.bind():function(e){for(var t=1;te.length)&&(t=e.length);for(var n=0,r=Array(t);ne.length)&&(t=e.length);for(var n=0,r=Array(t);n=0||(o[n]=e[n]);return o}(e,t);if(Object.getOwnPropertySymbols){var i=Object.getOwnPropertySymbols(e);for(r=0;r=0)&&Object.prototype.propertyIsEnumerable.call(e,n)&&(o[n]=e[n])}return o}(e,eH));return(0,R.hj)(n)&&(0,R.hj)(i)&&(0,R.hj)(d)&&(0,R.hj)(p)&&(0,R.hj)(l)&&(0,R.hj)(s)?r.createElement("path",eq({},(0,P.L6)(m,!0),{className:(0,x.Z)("recharts-cross",h),d:"M".concat(n,",").concat(l,"v").concat(p,"M").concat(s,",").concat(i,"h").concat(d)})):null};function eU(e){var t=e.cx,n=e.cy,r=e.radius,o=e.startAngle,i=e.endAngle;return{points:[(0,eM.op)(t,n,r,o),(0,eM.op)(t,n,r,i)],cx:t,cy:n,radius:r,startAngle:o,endAngle:i}}var eV=n(60474);function eG(e){return(eG="function"==typeof Symbol&&"symbol"==typeof Symbol.iterator?function(e){return typeof e}:function(e){return e&&"function"==typeof Symbol&&e.constructor===Symbol&&e!==Symbol.prototype?"symbol":typeof e})(e)}function eX(e,t){var n=Object.keys(e);if(Object.getOwnPropertySymbols){var r=Object.getOwnPropertySymbols(e);t&&(r=r.filter(function(t){return Object.getOwnPropertyDescriptor(e,t).enumerable})),n.push.apply(n,r)}return 
n}function e$(e){for(var t=1;t=0||(o[n]=e[n]);return o}(e,t);if(Object.getOwnPropertySymbols){var i=Object.getOwnPropertySymbols(e);for(r=0;r=0)&&Object.prototype.propertyIsEnumerable.call(e,n)&&(o[n]=e[n])}return o}function e3(e,t){return(e3=Object.setPrototypeOf?Object.setPrototypeOf.bind():function(e,t){return e.__proto__=t,e})(e,t)}function e4(e){if(void 0===e)throw ReferenceError("this hasn't been initialised - super() hasn't been called");return e}function e5(e){return(e5=Object.setPrototypeOf?Object.getPrototypeOf.bind():function(e){return e.__proto__||Object.getPrototypeOf(e)})(e)}function e8(e){return function(e){if(Array.isArray(e))return e9(e)}(e)||function(e){if("undefined"!=typeof Symbol&&null!=e[Symbol.iterator]||null!=e["@@iterator"])return Array.from(e)}(e)||e7(e)||function(){throw TypeError("Invalid attempt to spread non-iterable instance.\nIn order to be iterable, non-array objects must have a [Symbol.iterator]() method.")}()}function e7(e,t){if(e){if("string"==typeof e)return e9(e,t);var n=Object.prototype.toString.call(e).slice(8,-1);if("Object"===n&&e.constructor&&(n=e.constructor.name),"Map"===n||"Set"===n)return Array.from(e);if("Arguments"===n||/^(?:Ui|I)nt(?:8|16|32)(?:Clamped)?Array$/.test(n))return e9(e,t)}}function e9(e,t){(null==t||t>e.length)&&(t=e.length);for(var n=0,r=Array(t);n0?i:e&&e.length&&(0,R.hj)(r)&&(0,R.hj)(o)?e.slice(r,o+1):[]};function tu(e){return"number"===e?[0,"auto"]:void 0}var td=function(e,t,n,r){var o=e.graphicalItems,i=e.tooltipAxis,a=ts(t,e);return n<0||!o||!o.length||n>=a.length?null:o.reduce(function(o,l){var c,s,u=null!==(c=l.props.data)&&void 0!==c?c:t;if(u&&e.dataStartIndex+e.dataEndIndex!==0&&(u=u.slice(e.dataStartIndex,e.dataEndIndex+1)),i.dataKey&&!i.allowDuplicatedCategory){var d=void 0===u?a:u;s=(0,R.Ap)(d,i.dataKey,r)}else s=u&&u[n]||a[n];return s?[].concat(e8(o),[(0,I.Qo)(l,s)]):o},[])},tf=function(e,t,n,r){var 
o=r||{x:e.chartX,y:e.chartY},i="horizontal"===n?o.x:"vertical"===n?o.y:"centric"===n?o.angle:o.radius,a=e.orderedTooltipTicks,l=e.tooltipAxis,c=e.tooltipTicks,s=(0,I.VO)(i,a,c,l);if(s>=0&&c){var u=c[s]&&c[s].value,d=td(e,t,s,u),f=tc(n,a,s,o);return{activeTooltipIndex:s,activeLabel:u,activePayload:d,activeCoordinate:f}}return null},tp=function(e,t){var n=t.axes,r=t.graphicalItems,o=t.axisType,a=t.axisIdKey,l=t.stackGroups,c=t.dataStartIndex,u=t.dataEndIndex,d=e.layout,f=e.children,p=e.stackOffset,h=(0,I.NA)(d,o);return n.reduce(function(t,n){var m=n.props,g=m.type,v=m.dataKey,y=m.allowDataOverflow,b=m.allowDuplicatedCategory,x=m.scale,w=m.ticks,S=m.includeHidden,k=n.props[a];if(t[k])return t;var E=ts(e.data,{graphicalItems:r.filter(function(e){return e.props[a]===k}),dataStartIndex:c,dataEndIndex:u}),C=E.length;(function(e,t,n){if("number"===n&&!0===t&&Array.isArray(e)){var r=null==e?void 0:e[0],o=null==e?void 0:e[1];if(r&&o&&(0,R.hj)(r)&&(0,R.hj)(o))return!0}return!1})(n.props.domain,y,g)&&(P=(0,I.LG)(n.props.domain,null,y),h&&("number"===g||"auto"!==x)&&(N=(0,I.gF)(E,v,"category")));var O=tu(g);if(!P||0===P.length){var j,P,M,N,T,A=null!==(T=n.props.domain)&&void 0!==T?T:O;if(v){if(P=(0,I.gF)(E,v,g),"category"===g&&h){var _=(0,R.bv)(P);b&&_?(M=P,P=s()(0,C)):b||(P=(0,I.ko)(A,P,n).reduce(function(e,t){return e.indexOf(t)>=0?e:[].concat(e8(e),[t])},[]))}else if("category"===g)P=b?P.filter(function(e){return""!==e&&!i()(e)}):(0,I.ko)(A,P,n).reduce(function(e,t){return e.indexOf(t)>=0||""===t||i()(t)?e:[].concat(e8(e),[t])},[]);else if("number"===g){var D=(0,I.ZI)(E,r.filter(function(e){return e.props[a]===k&&(S||!e.props.hide)}),v,o,d);D&&(P=D)}h&&("number"===g||"auto"!==x)&&(N=(0,I.gF)(E,v,"category"))}else P=h?s()(0,C):l&&l[k]&&l[k].hasStack&&"number"===g?"expand"===p?[0,1]:(0,I.EB)(l[k].stackGroups,c,u):(0,I.s6)(E,r.filter(function(e){return 
e.props[a]===k&&(S||!e.props.hide)}),g,d,!0);"number"===g?(P=eP(f,P,k,o,w),A&&(P=(0,I.LG)(A,P,y))):"category"===g&&A&&P.every(function(e){return A.indexOf(e)>=0})&&(P=A)}return tt(tt({},t),{},tn({},k,tt(tt({},n.props),{},{axisType:o,domain:P,categoricalDomain:N,duplicateDomain:M,originalDomain:null!==(j=n.props.domain)&&void 0!==j?j:O,isCategorical:h,layout:d})))},{})},th=function(e,t){var n=t.graphicalItems,r=t.Axis,o=t.axisType,i=t.axisIdKey,a=t.stackGroups,l=t.dataStartIndex,c=t.dataEndIndex,u=e.layout,f=e.children,p=ts(e.data,{graphicalItems:n,dataStartIndex:l,dataEndIndex:c}),h=p.length,m=(0,I.NA)(u,o),g=-1;return n.reduce(function(e,t){var v,y=t.props[i],b=tu("number");return e[y]?e:(g++,v=m?s()(0,h):a&&a[y]&&a[y].hasStack?eP(f,v=(0,I.EB)(a[y].stackGroups,l,c),y,o):eP(f,v=(0,I.LG)(b,(0,I.s6)(p,n.filter(function(e){return e.props[i]===y&&!e.props.hide}),"number",u),r.defaultProps.allowDataOverflow),y,o),tt(tt({},e),{},tn({},y,tt(tt({axisType:o},r.defaultProps),{},{hide:!0,orientation:d()(to,"".concat(o,".").concat(g%2),null),domain:v,originalDomain:b,isCategorical:m,layout:u}))))},{})},tm=function(e,t){var n=t.axisType,r=void 0===n?"xAxis":n,o=t.AxisComp,i=t.graphicalItems,a=t.stackGroups,l=t.dataStartIndex,c=t.dataEndIndex,s=e.children,u="".concat(r,"Id"),d=(0,P.NN)(s,o),f={};return d&&d.length?f=tp(e,{axes:d,graphicalItems:i,axisType:r,axisIdKey:u,stackGroups:a,dataStartIndex:l,dataEndIndex:c}):i&&i.length&&(f=th(e,{Axis:o,graphicalItems:i,axisType:r,axisIdKey:u,stackGroups:a,dataStartIndex:l,dataEndIndex:c})),f},tg=function(e){var t=(0,R.Kt)(e),n=(0,I.uY)(t,!1,!0);return{tooltipTicks:n,orderedTooltipTicks:p()(n,function(e){return e.coordinate}),tooltipAxis:t,tooltipAxisBandSize:(0,I.zT)(t,n)}},tv=function(e){var t=e.children,n=e.defaultShowTooltip,r=(0,P.sP)(t,Y),o=0,i=0;return 
e.data&&0!==e.data.length&&(i=e.data.length-1),r&&r.props&&(r.props.startIndex>=0&&(o=r.props.startIndex),r.props.endIndex>=0&&(i=r.props.endIndex)),{chartX:0,chartY:0,dataStartIndex:o,dataEndIndex:i,activeTooltipIndex:-1,isTooltipActive:!!n}},ty=function(e){return"horizontal"===e?{numericAxisName:"yAxis",cateAxisName:"xAxis"}:"vertical"===e?{numericAxisName:"xAxis",cateAxisName:"yAxis"}:"centric"===e?{numericAxisName:"radiusAxis",cateAxisName:"angleAxis"}:{numericAxisName:"angleAxis",cateAxisName:"radiusAxis"}},tb=function(e,t){var n=e.props,r=e.graphicalItems,o=e.xAxisMap,i=void 0===o?{}:o,a=e.yAxisMap,l=void 0===a?{}:a,c=n.width,s=n.height,u=n.children,f=n.margin||{},p=(0,P.sP)(u,Y),h=(0,P.sP)(u,C.D),m=Object.keys(l).reduce(function(e,t){var n=l[t],r=n.orientation;return n.mirror||n.hide?e:tt(tt({},e),{},tn({},r,e[r]+n.width))},{left:f.left||0,right:f.right||0}),g=Object.keys(i).reduce(function(e,t){var n=i[t],r=n.orientation;return n.mirror||n.hide?e:tt(tt({},e),{},tn({},r,d()(e,"".concat(r))+n.height))},{top:f.top||0,bottom:f.bottom||0}),v=tt(tt({},g),m),y=v.bottom;p&&(v.bottom+=p.props.height||Y.defaultProps.height),h&&t&&(v=(0,I.By)(v,r,n,t));var b=c-v.left-v.right,x=s-v.top-v.bottom;return tt(tt({brushBottom:y},v),{},{width:Math.max(b,0),height:Math.max(x,0)})},tx=function(e){var t,n=e.chartName,o=e.GraphicalChild,a=e.defaultTooltipEventType,c=void 0===a?"axis":a,s=e.validateTooltipEventTypes,u=void 0===s?["axis"]:s,f=e.axisComponents,p=e.legendContent,h=e.formatAxisMap,g=e.defaultProps,y=function(e,t){var n=t.graphicalItems,r=t.stackGroups,o=t.offset,a=t.updateId,l=t.dataStartIndex,c=t.dataEndIndex,s=e.barSize,u=e.layout,d=e.barGap,p=e.barCategoryGap,h=e.maxBarSize,m=ty(u),g=m.numericAxisName,v=m.cateAxisName,y=!!n&&!!n.length&&n.some(function(e){var t=(0,P.Gf)(e&&e.type);return t&&t.indexOf("Bar")>=0})&&(0,I.pt)({barSize:s,stackGroups:r}),b=[];return n.forEach(function(n,s){var 
m,x=ts(e.data,{graphicalItems:[n],dataStartIndex:l,dataEndIndex:c}),S=n.props,k=S.dataKey,E=S.maxBarSize,C=n.props["".concat(g,"Id")],O=n.props["".concat(v,"Id")],j=f.reduce(function(e,r){var o,i=t["".concat(r.axisType,"Map")],a=n.props["".concat(r.axisType,"Id")];i&&i[a]||"zAxis"===r.axisType||(0,w.Z)(!1);var l=i[a];return tt(tt({},e),{},(tn(o={},r.axisType,l),tn(o,"".concat(r.axisType,"Ticks"),(0,I.uY)(l)),o))},{}),M=j[v],N=j["".concat(v,"Ticks")],R=r&&r[C]&&r[C].hasStack&&(0,I.O3)(n,r[C].stackGroups),T=(0,P.Gf)(n.type).indexOf("Bar")>=0,A=(0,I.zT)(M,N),_=[];if(T){var D,Z,L=i()(E)?h:E,z=null!==(D=null!==(Z=(0,I.zT)(M,N,!0))&&void 0!==Z?Z:L)&&void 0!==D?D:0;_=(0,I.qz)({barGap:d,barCategoryGap:p,bandSize:z!==A?z:A,sizeList:y[O],maxBarSize:L}),z!==A&&(_=_.map(function(e){return tt(tt({},e),{},{position:tt(tt({},e.position),{},{offset:e.position.offset-z/2})})}))}var B=n&&n.type&&n.type.getComposedData;B&&b.push({props:tt(tt({},B(tt(tt({},j),{},{displayedData:x,props:e,dataKey:k,item:n,bandSize:A,barPosition:_,offset:o,stackedData:R,layout:u,dataStartIndex:l,dataEndIndex:c}))),{},(tn(m={key:n.key||"item-".concat(s)},g,j[g]),tn(m,v,j[v]),tn(m,"animationId",a),m)),childIndex:(0,P.$R)(n,e.children),item:n})}),b},C=function(e,t){var r=e.props,i=e.dataStartIndex,a=e.dataEndIndex,l=e.updateId;if(!(0,P.TT)({props:r}))return null;var c=r.children,s=r.layout,u=r.stackOffset,d=r.data,p=r.reverseStackOrder,m=ty(s),g=m.numericAxisName,v=m.cateAxisName,b=(0,P.NN)(c,o),x=(0,I.wh)(d,b,"".concat(g,"Id"),"".concat(v,"Id"),u,p),w=f.reduce(function(e,t){var n="".concat(t.axisType,"Map");return tt(tt({},e),{},tn({},n,tm(r,tt(tt({},t),{},{graphicalItems:b,stackGroups:t.axisType===g&&x,dataStartIndex:i,dataEndIndex:a}))))},{}),S=tb(tt(tt({},w),{},{props:r,graphicalItems:b}),null==t?void 0:t.legendBBox);Object.keys(w).forEach(function(e){w[e]=h(r,w[e],S,e.replace("Map",""),n)});var 
k=tg(w["".concat(v,"Map")]),E=y(r,tt(tt({},w),{},{dataStartIndex:i,dataEndIndex:a,updateId:l,graphicalItems:b,stackGroups:x,offset:S}));return tt(tt({formattedGraphicalItems:E,graphicalItems:b,offset:S,stackGroups:x},k),w)};return t=function(e){(function(e,t){if("function"!=typeof t&&null!==t)throw TypeError("Super expression must either be null or a function");e.prototype=Object.create(t&&t.prototype,{constructor:{value:e,writable:!0,configurable:!0}}),Object.defineProperty(e,"prototype",{writable:!1}),t&&e3(e,t)})(s,e);var t,o,a=(t=function(){if("undefined"==typeof Reflect||!Reflect.construct||Reflect.construct.sham)return!1;if("function"==typeof Proxy)return!0;try{return Boolean.prototype.valueOf.call(Reflect.construct(Boolean,[],function(){})),!0}catch(e){return!1}}(),function(){var e,n=e5(s);return e=t?Reflect.construct(n,arguments,e5(this).constructor):n.apply(this,arguments),function(e,t){if(t&&("object"===e0(t)||"function"==typeof t))return t;if(void 0!==t)throw TypeError("Derived constructors may only return object or undefined");return e4(e)}(this,e)});function s(e){var t,o,c;return function(e,t){if(!(e instanceof t))throw TypeError("Cannot call a class as a function")}(this,s),tn(e4(c=a.call(this,e)),"eventEmitterSymbol",Symbol("rechartsEventEmitter")),tn(e4(c),"accessibilityManager",new eL),tn(e4(c),"handleLegendBBoxUpdate",function(e){if(e){var t=c.state,n=t.dataStartIndex,r=t.dataEndIndex,o=t.updateId;c.setState(tt({legendBBox:e},C({props:c.props,dataStartIndex:n,dataEndIndex:r,updateId:o},tt(tt({},c.state),{},{legendBBox:e}))))}}),tn(e4(c),"handleReceiveSyncEvent",function(e,t,n){c.props.syncId===e&&(n!==c.eventEmitterSymbol||"function"==typeof c.props.syncMethod)&&c.applySyncEvent(t)}),tn(e4(c),"handleBrushChange",function(e){var t=e.startIndex,n=e.endIndex;if(t!==c.state.dataStartIndex||n!==c.state.dataEndIndex){var r=c.state.updateId;c.setState(function(){return 
tt({dataStartIndex:t,dataEndIndex:n},C({props:c.props,dataStartIndex:t,dataEndIndex:n,updateId:r},c.state))}),c.triggerSyncEvent({dataStartIndex:t,dataEndIndex:n})}}),tn(e4(c),"handleMouseEnter",function(e){var t=c.getMouseInfo(e);if(t){var n=tt(tt({},t),{},{isTooltipActive:!0});c.setState(n),c.triggerSyncEvent(n);var r=c.props.onMouseEnter;l()(r)&&r(n,e)}}),tn(e4(c),"triggeredAfterMouseMove",function(e){var t=c.getMouseInfo(e),n=t?tt(tt({},t),{},{isTooltipActive:!0}):{isTooltipActive:!1};c.setState(n),c.triggerSyncEvent(n);var r=c.props.onMouseMove;l()(r)&&r(n,e)}),tn(e4(c),"handleItemMouseEnter",function(e){c.setState(function(){return{isTooltipActive:!0,activeItem:e,activePayload:e.tooltipPayload,activeCoordinate:e.tooltipPosition||{x:e.cx,y:e.cy}}})}),tn(e4(c),"handleItemMouseLeave",function(){c.setState(function(){return{isTooltipActive:!1}})}),tn(e4(c),"handleMouseMove",function(e){e.persist(),c.throttleTriggeredAfterMouseMove(e)}),tn(e4(c),"handleMouseLeave",function(e){var t={isTooltipActive:!1};c.setState(t),c.triggerSyncEvent(t);var n=c.props.onMouseLeave;l()(n)&&n(t,e)}),tn(e4(c),"handleOuterEvent",function(e){var t,n=(0,P.Bh)(e),r=d()(c.props,"".concat(n));n&&l()(r)&&r(null!==(t=/.*touch.*/i.test(n)?c.getMouseInfo(e.changedTouches[0]):c.getMouseInfo(e))&&void 0!==t?t:{},e)}),tn(e4(c),"handleClick",function(e){var t=c.getMouseInfo(e);if(t){var n=tt(tt({},t),{},{isTooltipActive:!0});c.setState(n),c.triggerSyncEvent(n);var r=c.props.onClick;l()(r)&&r(n,e)}}),tn(e4(c),"handleMouseDown",function(e){var t=c.props.onMouseDown;l()(t)&&t(c.getMouseInfo(e),e)}),tn(e4(c),"handleMouseUp",function(e){var 
t=c.props.onMouseUp;l()(t)&&t(c.getMouseInfo(e),e)}),tn(e4(c),"handleTouchMove",function(e){null!=e.changedTouches&&e.changedTouches.length>0&&c.throttleTriggeredAfterMouseMove(e.changedTouches[0])}),tn(e4(c),"handleTouchStart",function(e){null!=e.changedTouches&&e.changedTouches.length>0&&c.handleMouseDown(e.changedTouches[0])}),tn(e4(c),"handleTouchEnd",function(e){null!=e.changedTouches&&e.changedTouches.length>0&&c.handleMouseUp(e.changedTouches[0])}),tn(e4(c),"triggerSyncEvent",function(e){void 0!==c.props.syncId&&eR.emit(eT,c.props.syncId,e,c.eventEmitterSymbol)}),tn(e4(c),"applySyncEvent",function(e){var t=c.props,n=t.layout,r=t.syncMethod,o=c.state.updateId,i=e.dataStartIndex,a=e.dataEndIndex;if(void 0!==e.dataStartIndex||void 0!==e.dataEndIndex)c.setState(tt({dataStartIndex:i,dataEndIndex:a},C({props:c.props,dataStartIndex:i,dataEndIndex:a,updateId:o},c.state)));else if(void 0!==e.activeTooltipIndex){var l=e.chartX,s=e.chartY,u=e.activeTooltipIndex,d=c.state,f=d.offset,p=d.tooltipTicks;if(!f)return;if("function"==typeof r)u=r(p,e);else if("value"===r){u=-1;for(var h=0;h=0){if(u.dataKey&&!u.allowDuplicatedCategory){var j="function"==typeof u.dataKey?function(e){return"function"==typeof u.dataKey?u.dataKey(e.payload):null}:"payload.".concat(u.dataKey.toString());N=(0,R.Ap)(g,j,f),T=v&&y&&(0,R.Ap)(y,j,f)}else N=null==g?void 0:g[d],T=v&&y&&y[d];if(k||S){var M=void 0!==e.props.activeIndex?e.props.activeIndex:d;return[(0,r.cloneElement)(e,tt(tt(tt({},o.props),C),{},{activeIndex:M})),null,null]}if(!i()(N))return[O].concat(e8(c.renderActivePoints({item:o,activePoint:N,basePoint:T,childIndex:d,isRange:v})))}else{var N,T,A,_=(null!==(A=c.getItemByXY(c.state.activeCoordinate))&&void 0!==A?A:{graphicalItem:O}).graphicalItem,D=_.item,Z=void 0===D?e:D,L=_.childIndex,z=tt(tt(tt({},o.props),C),{},{activeIndex:L});return[(0,r.cloneElement)(Z,z),null,null]}}return 
v?[O,null,null]:[O,null]}),tn(e4(c),"renderCustomized",function(e,t,n){return(0,r.cloneElement)(e,tt(tt({key:"recharts-customized-".concat(n)},c.props),c.state))}),tn(e4(c),"renderMap",{CartesianGrid:{handler:c.renderGrid,once:!0},ReferenceArea:{handler:c.renderReferenceElement},ReferenceLine:{handler:tl},ReferenceDot:{handler:c.renderReferenceElement},XAxis:{handler:tl},YAxis:{handler:tl},Brush:{handler:c.renderBrush,once:!0},Bar:{handler:c.renderGraphicChild},Line:{handler:c.renderGraphicChild},Area:{handler:c.renderGraphicChild},Radar:{handler:c.renderGraphicChild},RadialBar:{handler:c.renderGraphicChild},Scatter:{handler:c.renderGraphicChild},Pie:{handler:c.renderGraphicChild},Funnel:{handler:c.renderGraphicChild},Tooltip:{handler:c.renderCursor,once:!0},PolarGrid:{handler:c.renderPolarGrid,once:!0},PolarAngleAxis:{handler:c.renderPolarAxis},PolarRadiusAxis:{handler:c.renderPolarAxis},Customized:{handler:c.renderCustomized}}),c.clipPathId="".concat(null!==(t=e.id)&&void 0!==t?t:(0,R.EL)("recharts"),"-clip"),c.throttleTriggeredAfterMouseMove=m()(c.triggeredAfterMouseMove,null!==(o=e.throttleDelay)&&void 0!==o?o:1e3/60),c.state={},c}return o=[{key:"componentDidMount",value:function(){var e,t;this.addListener(),this.accessibilityManager.setDetails({container:this.container,offset:{left:null!==(e=this.props.margin.left)&&void 0!==e?e:0,top:null!==(t=this.props.margin.top)&&void 0!==t?t:0},coordinateList:this.state.tooltipTicks,mouseHandlerCallback:this.triggeredAfterMouseMove,layout:this.props.layout}),this.displayDefaultTooltip()}},{key:"displayDefaultTooltip",value:function(){var e=this.props,t=e.children,n=e.data,r=e.height,o=e.layout,i=(0,P.sP)(t,E.u);if(i){var a=i.props.defaultIndex;if("number"==typeof a&&!(a<0)&&!(a>this.state.tooltipTicks.length)){var 
l=this.state.tooltipTicks[a]&&this.state.tooltipTicks[a].value,c=td(this.state,n,a,l),s=this.state.tooltipTicks[a].coordinate,u=(this.state.offset.top+r)/2,d="horizontal"===o?{x:s,y:u}:{y:s,x:u},f=this.state.formattedGraphicalItems.find(function(e){return"Scatter"===e.item.type.name});f&&(d=tt(tt({},d),f.props.points[a].tooltipPosition),c=f.props.points[a].tooltipPayload);var p={activeTooltipIndex:a,isTooltipActive:!0,activeLabel:l,activePayload:c,activeCoordinate:d};this.setState(p),this.renderCursor(i),this.accessibilityManager.setIndex(a)}}}},{key:"getSnapshotBeforeUpdate",value:function(e,t){if(!this.props.accessibilityLayer)return null;if(this.state.tooltipTicks!==t.tooltipTicks&&this.accessibilityManager.setDetails({coordinateList:this.state.tooltipTicks}),this.props.layout!==e.layout&&this.accessibilityManager.setDetails({layout:this.props.layout}),this.props.margin!==e.margin){var n,r;this.accessibilityManager.setDetails({offset:{left:null!==(n=this.props.margin.left)&&void 0!==n?n:0,top:null!==(r=this.props.margin.top)&&void 0!==r?r:0}})}return null}},{key:"componentDidUpdate",value:function(e){(0,P.rL)([(0,P.sP)(e.children,E.u)],[(0,P.sP)(this.props.children,E.u)])||this.displayDefaultTooltip()}},{key:"componentWillUnmount",value:function(){this.removeListener(),this.throttleTriggeredAfterMouseMove.cancel()}},{key:"getTooltipEventType",value:function(){var e=(0,P.sP)(this.props.children,E.u);if(e&&"boolean"==typeof e.props.shared){var t=e.props.shared?"axis":"item";return u.indexOf(t)>=0?t:c}return c}},{key:"getMouseInfo",value:function(e){if(!this.container)return null;var t=this.container,n=t.getBoundingClientRect(),r=(0,Q.os)(n),o={chartX:Math.round(e.pageX-r.left),chartY:Math.round(e.pageY-r.top)},i=n.width/t.offsetWidth||1,a=this.inRange(o.chartX,o.chartY,i);if(!a)return null;var l=this.state,c=l.xAxisMap,s=l.yAxisMap;if("axis"!==this.getTooltipEventType()&&c&&s){var 
u=(0,R.Kt)(c).scale,d=(0,R.Kt)(s).scale,f=u&&u.invert?u.invert(o.chartX):null,p=d&&d.invert?d.invert(o.chartY):null;return tt(tt({},o),{},{xValue:f,yValue:p})}var h=tf(this.state,this.props.data,this.props.layout,a);return h?tt(tt({},o),h):null}},{key:"inRange",value:function(e,t){var n=arguments.length>2&&void 0!==arguments[2]?arguments[2]:1,r=this.props.layout,o=e/n,i=t/n;if("horizontal"===r||"vertical"===r){var a=this.state.offset;return o>=a.left&&o<=a.left+a.width&&i>=a.top&&i<=a.top+a.height?{x:o,y:i}:null}var l=this.state,c=l.angleAxisMap,s=l.radiusAxisMap;if(c&&s){var u=(0,R.Kt)(c);return(0,eM.z3)({x:o,y:i},u)}return null}},{key:"parseEventsOfWrapper",value:function(){var e=this.props.children,t=this.getTooltipEventType(),n=(0,P.sP)(e,E.u),r={};return n&&"axis"===t&&(r="click"===n.props.trigger?{onClick:this.handleClick}:{onMouseEnter:this.handleMouseEnter,onMouseMove:this.handleMouseMove,onMouseLeave:this.handleMouseLeave,onTouchMove:this.handleTouchMove,onTouchStart:this.handleTouchStart,onTouchEnd:this.handleTouchEnd}),tt(tt({},(0,eA.Ym)(this.props,this.handleOuterEvent)),r)}},{key:"addListener",value:function(){eR.on(eT,this.handleReceiveSyncEvent)}},{key:"removeListener",value:function(){eR.removeListener(eT,this.handleReceiveSyncEvent)}},{key:"filterFormatItem",value:function(e,t,n){for(var r=this.state.formattedGraphicalItems,o=0,i=r.length;oe.length)&&(t=e.length);for(var n=0,r=Array(t);n=0?1:-1;"insideStart"===l?(o=y+E*s,a=w):"insideEnd"===l?(o=b-E*s,a=!w):"end"===l&&(o=b+E*s,a=w),a=k<=0?a:!a;var C=(0,h.op)(f,m,S,o),O=(0,h.op)(f,m,S,o+(a?1:-1)*359),j="M".concat(C.x,",").concat(C.y,"\n A").concat(S,",").concat(S,",0,1,").concat(a?0:1,",\n ").concat(O.x,",").concat(O.y),P=i()(e.id)?(0,p.EL)("recharts-radial-line-"):e.id;return 
r.createElement("text",x({},n,{dominantBaseline:"central",className:(0,u.Z)("recharts-radial-bar-label",d)}),r.createElement("defs",null,r.createElement("path",{id:P,d:j})),r.createElement("textPath",{xlinkHref:"#".concat(P)},t))},k=function(e){var t=e.viewBox,n=e.offset,r=e.position,o=t.cx,i=t.cy,a=t.innerRadius,l=t.outerRadius,c=(t.startAngle+t.endAngle)/2;if("outside"===r){var s=(0,h.op)(o,i,l+n,c),u=s.x;return{x:u,y:s.y,textAnchor:u>=o?"start":"end",verticalAnchor:"middle"}}if("center"===r)return{x:o,y:i,textAnchor:"middle",verticalAnchor:"middle"};if("centerTop"===r)return{x:o,y:i,textAnchor:"middle",verticalAnchor:"start"};if("centerBottom"===r)return{x:o,y:i,textAnchor:"middle",verticalAnchor:"end"};var d=(0,h.op)(o,i,(a+l)/2,c);return{x:d.x,y:d.y,textAnchor:"middle",verticalAnchor:"middle"}},E=function(e){var t=e.viewBox,n=e.parentViewBox,r=e.offset,o=e.position,i=t.x,a=t.y,l=t.width,c=t.height,u=c>=0?1:-1,d=u*r,f=u>0?"end":"start",h=u>0?"start":"end",m=l>=0?1:-1,g=m*r,v=m>0?"end":"start",y=m>0?"start":"end";if("top"===o)return b(b({},{x:i+l/2,y:a-u*r,textAnchor:"middle",verticalAnchor:f}),n?{height:Math.max(a-n.y,0),width:l}:{});if("bottom"===o)return b(b({},{x:i+l/2,y:a+c+d,textAnchor:"middle",verticalAnchor:h}),n?{height:Math.max(n.y+n.height-(a+c),0),width:l}:{});if("left"===o){var x={x:i-g,y:a+c/2,textAnchor:v,verticalAnchor:"middle"};return b(b({},x),n?{width:Math.max(x.x-n.x,0),height:c}:{})}if("right"===o){var w={x:i+l+g,y:a+c/2,textAnchor:y,verticalAnchor:"middle"};return b(b({},w),n?{width:Math.max(n.x+n.width-w.x,0),height:c}:{})}var 
S=n?{width:l,height:c}:{};return"insideLeft"===o?b({x:i+g,y:a+c/2,textAnchor:y,verticalAnchor:"middle"},S):"insideRight"===o?b({x:i+l-g,y:a+c/2,textAnchor:v,verticalAnchor:"middle"},S):"insideTop"===o?b({x:i+l/2,y:a+d,textAnchor:"middle",verticalAnchor:h},S):"insideBottom"===o?b({x:i+l/2,y:a+c-d,textAnchor:"middle",verticalAnchor:f},S):"insideTopLeft"===o?b({x:i+g,y:a+d,textAnchor:y,verticalAnchor:h},S):"insideTopRight"===o?b({x:i+l-g,y:a+d,textAnchor:v,verticalAnchor:h},S):"insideBottomLeft"===o?b({x:i+g,y:a+c-d,textAnchor:y,verticalAnchor:f},S):"insideBottomRight"===o?b({x:i+l-g,y:a+c-d,textAnchor:v,verticalAnchor:f},S):s()(o)&&((0,p.hj)(o.x)||(0,p.hU)(o.x))&&((0,p.hj)(o.y)||(0,p.hU)(o.y))?b({x:i+(0,p.h1)(o.x,l),y:a+(0,p.h1)(o.y,c),textAnchor:"end",verticalAnchor:"end"},S):b({x:i+l/2,y:a+c/2,textAnchor:"middle",verticalAnchor:"middle"},S)};function C(e){var t,n=e.offset,o=b({offset:void 0===n?5:n},function(e,t){if(null==e)return{};var n,r,o=function(e,t){if(null==e)return{};var n,r,o={},i=Object.keys(e);for(r=0;r=0||(o[n]=e[n]);return o}(e,t);if(Object.getOwnPropertySymbols){var i=Object.getOwnPropertySymbols(e);for(r=0;r=0)&&Object.prototype.propertyIsEnumerable.call(e,n)&&(o[n]=e[n])}return o}(e,g)),a=o.viewBox,c=o.position,s=o.value,h=o.children,m=o.content,v=o.className,y=o.textBreakAll;if(!a||i()(s)&&i()(h)&&!(0,r.isValidElement)(m)&&!l()(m))return null;if((0,r.isValidElement)(m))return(0,r.cloneElement)(m,o);if(l()(m)){if(t=(0,r.createElement)(m,o),(0,r.isValidElement)(t))return t}else t=w(o);var C="cx"in a&&(0,p.hj)(a.cx),O=(0,f.L6)(o,!0);if(C&&("insideStart"===c||"insideEnd"===c||"end"===c))return S(o,t,O);var j=C?k(o):E(o);return r.createElement(d.x,x({className:(0,u.Z)("recharts-label",void 0===v?"":v)},O,j,{breakAll:y}),t)}C.displayName="Label";var O=function(e){var 
t=e.cx,n=e.cy,r=e.angle,o=e.startAngle,i=e.endAngle,a=e.r,l=e.radius,c=e.innerRadius,s=e.outerRadius,u=e.x,d=e.y,f=e.top,h=e.left,m=e.width,g=e.height,v=e.clockWise,y=e.labelViewBox;if(y)return y;if((0,p.hj)(m)&&(0,p.hj)(g)){if((0,p.hj)(u)&&(0,p.hj)(d))return{x:u,y:d,width:m,height:g};if((0,p.hj)(f)&&(0,p.hj)(h))return{x:f,y:h,width:m,height:g}}return(0,p.hj)(u)&&(0,p.hj)(d)?{x:u,y:d,width:0,height:0}:(0,p.hj)(t)&&(0,p.hj)(n)?{cx:t,cy:n,startAngle:o||r||0,endAngle:i||r||0,innerRadius:c||0,outerRadius:s||l||a||0,clockWise:v}:e.viewBox?e.viewBox:{}};C.parseViewBox=O,C.renderCallByParent=function(e,t){var n,o,i=!(arguments.length>2)||void 0===arguments[2]||arguments[2];if(!e||!e.children&&i&&!e.label)return null;var a=e.children,c=O(e),u=(0,f.NN)(a,C).map(function(e,n){return(0,r.cloneElement)(e,{viewBox:t||c,key:"label-".concat(n)})});return i?[(n=e.label,o=t||c,n?!0===n?r.createElement(C,{key:"label-implicit",viewBox:o}):(0,p.P2)(n)?r.createElement(C,{key:"label-implicit",viewBox:o,value:n}):(0,r.isValidElement)(n)?n.type===C?(0,r.cloneElement)(n,{key:"label-implicit",viewBox:o}):r.createElement(C,{key:"label-implicit",content:n,viewBox:o}):l()(n)?r.createElement(C,{key:"label-implicit",content:n,viewBox:o}):s()(n)?r.createElement(C,x({viewBox:o},n,{key:"label-implicit"})):null:null)].concat(function(e){if(Array.isArray(e))return v(e)}(u)||function(e){if("undefined"!=typeof Symbol&&null!=e[Symbol.iterator]||null!=e["@@iterator"])return Array.from(e)}(u)||function(e,t){if(e){if("string"==typeof e)return v(e,void 0);var n=Object.prototype.toString.call(e).slice(8,-1);if("Object"===n&&e.constructor&&(n=e.constructor.name),"Map"===n||"Set"===n)return Array.from(e);if("Arguments"===n||/^(?:Ui|I)nt(?:8|16|32)(?:Clamped)?Array$/.test(n))return v(e,void 0)}}(u)||function(){throw TypeError("Invalid attempt to spread non-iterable instance.\nIn order to be iterable, non-array objects must have a [Symbol.iterator]() method.")}()):u}},58772:function(e,t,n){"use 
strict";n.d(t,{e:function(){return C}});var r=n(2265),o=n(77571),i=n.n(o),a=n(28302),l=n.n(a),c=n(86757),s=n.n(c),u=n(86185),d=n.n(u),f=n(26680),p=n(9841),h=n(82944),m=n(85355);function g(e){return(g="function"==typeof Symbol&&"symbol"==typeof Symbol.iterator?function(e){return typeof e}:function(e){return e&&"function"==typeof Symbol&&e.constructor===Symbol&&e!==Symbol.prototype?"symbol":typeof e})(e)}var v=["valueAccessor"],y=["data","dataKey","clockWise","id","textBreakAll"];function b(e,t){(null==t||t>e.length)&&(t=e.length);for(var n=0,r=Array(t);n=0||(o[n]=e[n]);return o}(e,t);if(Object.getOwnPropertySymbols){var i=Object.getOwnPropertySymbols(e);for(r=0;r=0)&&Object.prototype.propertyIsEnumerable.call(e,n)&&(o[n]=e[n])}return o}var E=function(e){return Array.isArray(e.value)?d()(e.value):e.value};function C(e){var t=e.valueAccessor,n=void 0===t?E:t,o=k(e,v),a=o.data,l=o.dataKey,c=o.clockWise,s=o.id,u=o.textBreakAll,d=k(o,y);return a&&a.length?r.createElement(p.m,{className:"recharts-label-list"},a.map(function(e,t){var o=i()(l)?n(e,t):(0,m.F$)(e&&e.payload,l),a=i()(s)?{}:{id:"".concat(s,"-").concat(t)};return r.createElement(f._,x({},(0,h.L6)(e,!0),d,a,{parentViewBox:e.parentViewBox,value:o,textBreakAll:u,viewBox:f._.parseViewBox(i()(c)?e:S(S({},e),{},{clockWise:c})),key:"label-".concat(t),index:t}))})):null}C.displayName="LabelList",C.renderCallByParent=function(e,t){var n,o=!(arguments.length>2)||void 0===arguments[2]||arguments[2];if(!e||!e.children&&o&&!e.label)return null;var i=e.children,a=(0,h.NN)(i,C).map(function(e,n){return(0,r.cloneElement)(e,{data:t,key:"labelList-".concat(n)})});return o?[(n=e.label)?!0===n?r.createElement(C,{key:"labelList-implicit",data:t}):r.isValidElement(n)||s()(n)?r.createElement(C,{key:"labelList-implicit",data:t,content:n}):l()(n)?r.createElement(C,x({data:t},n,{key:"labelList-implicit"})):null:null].concat(function(e){if(Array.isArray(e))return b(e)}(a)||function(e){if("undefined"!=typeof 
Symbol&&null!=e[Symbol.iterator]||null!=e["@@iterator"])return Array.from(e)}(a)||function(e,t){if(e){if("string"==typeof e)return b(e,void 0);var n=Object.prototype.toString.call(e).slice(8,-1);if("Object"===n&&e.constructor&&(n=e.constructor.name),"Map"===n||"Set"===n)return Array.from(e);if("Arguments"===n||/^(?:Ui|I)nt(?:8|16|32)(?:Clamped)?Array$/.test(n))return b(e,void 0)}}(a)||function(){throw TypeError("Invalid attempt to spread non-iterable instance.\nIn order to be iterable, non-array objects must have a [Symbol.iterator]() method.")}()):a}},22190:function(e,t,n){"use strict";n.d(t,{D:function(){return R}});var r=n(2265),o=n(86757),i=n.n(o),a=n(61994),l=n(1175),c=n(48777),s=n(14870),u=n(41637);function d(e){return(d="function"==typeof Symbol&&"symbol"==typeof Symbol.iterator?function(e){return typeof e}:function(e){return e&&"function"==typeof Symbol&&e.constructor===Symbol&&e!==Symbol.prototype?"symbol":typeof e})(e)}function f(){return(f=Object.assign?Object.assign.bind():function(e){for(var t=1;t');var w=t.inactive?p:t.color;return r.createElement("li",f({className:b,style:m,key:"legend-item-".concat(n)},(0,u.bw)(e.props,t,n)),r.createElement(c.T,{width:o,height:o,viewBox:h,style:v},e.renderIcon(t)),r.createElement("span",{className:"recharts-legend-item-text",style:{color:w}},y?y(x,t,n):x))})}},{key:"render",value:function(){var e=this.props,t=e.payload,n=e.layout,o=e.align;return t&&t.length?r.createElement("ul",{className:"recharts-default-legend",style:{padding:0,margin:0,textAlign:"horizontal"===n?o:"left"}},this.renderItems()):null}}],function(e,t){for(var n=0;n1||Math.abs(t.height-this.lastBoundingBox.height)>1)&&(this.lastBoundingBox.width=t.width,this.lastBoundingBox.height=t.height,e&&e(t))}else(-1!==this.lastBoundingBox.width||-1!==this.lastBoundingBox.height)&&(this.lastBoundingBox.width=-1,this.lastBoundingBox.height=-1,e&&e(null))}},{key:"getBBoxSnapshot",value:function(){return 
this.lastBoundingBox.width>=0&&this.lastBoundingBox.height>=0?E({},this.lastBoundingBox):{width:0,height:0}}},{key:"getDefaultPosition",value:function(e){var t,n,r=this.props,o=r.layout,i=r.align,a=r.verticalAlign,l=r.margin,c=r.chartWidth,s=r.chartHeight;return e&&(void 0!==e.left&&null!==e.left||void 0!==e.right&&null!==e.right)||(t="center"===i&&"vertical"===o?{left:((c||0)-this.getBBoxSnapshot().width)/2}:"right"===i?{right:l&&l.right||0}:{left:l&&l.left||0}),e&&(void 0!==e.top&&null!==e.top||void 0!==e.bottom&&null!==e.bottom)||(n="middle"===a?{top:((s||0)-this.getBBoxSnapshot().height)/2}:"bottom"===a?{bottom:l&&l.bottom||0}:{top:l&&l.top||0}),E(E({},t),n)}},{key:"render",value:function(){var e=this,t=this.props,n=t.content,o=t.width,i=t.height,a=t.wrapperStyle,l=t.payloadUniqBy,c=t.payload,s=E(E({position:"absolute",width:o||"auto",height:i||"auto"},this.getDefaultPosition(a)),a);return r.createElement("div",{className:"recharts-legend-wrapper",style:s,ref:function(t){e.wrapperNode=t}},function(e,t){if(r.isValidElement(e))return r.cloneElement(e,t);if("function"==typeof e)return r.createElement(e,t);t.ref;var n=function(e,t){if(null==e)return{};var n,r,o=function(e,t){if(null==e)return{};var n,r,o={},i=Object.keys(e);for(r=0;r=0||(o[n]=e[n]);return o}(e,t);if(Object.getOwnPropertySymbols){var i=Object.getOwnPropertySymbols(e);for(r=0;r=0)&&Object.prototype.propertyIsEnumerable.call(e,n)&&(o[n]=e[n])}return o}(t,S);return r.createElement(y,n)}(n,E(E({},this.props),{},{payload:(0,x.z)(c,l,I)})))}}],o=[{key:"getWithHeight",value:function(e,t){var n=e.props.layout;return"vertical"===n&&(0,b.hj)(e.props.height)?{height:e.props.height}:"horizontal"===n?{width:e.props.width||t}:null}}],n&&C(a.prototype,n),o&&C(a,o),Object.defineProperty(a,"prototype",{writable:!1}),a}(r.PureComponent);M(R,"displayName","Legend"),M(R,"defaultProps",{iconSize:14,layout:"horizontal",align:"center",verticalAlign:"bottom"})},47625:function(e,t,n){"use strict";n.d(t,{h:function(){return 
m}});var r=n(61994),o=n(2265),i=n(37065),a=n.n(i),l=n(82558),c=n(16630),s=n(1175),u=n(82944);function d(e){return(d="function"==typeof Symbol&&"symbol"==typeof Symbol.iterator?function(e){return typeof e}:function(e){return e&&"function"==typeof Symbol&&e.constructor===Symbol&&e!==Symbol.prototype?"symbol":typeof e})(e)}function f(e,t){var n=Object.keys(e);if(Object.getOwnPropertySymbols){var r=Object.getOwnPropertySymbols(e);t&&(r=r.filter(function(t){return Object.getOwnPropertyDescriptor(e,t).enumerable})),n.push.apply(n,r)}return n}function p(e){for(var t=1;te.length)&&(t=e.length);for(var n=0,r=Array(t);n0&&(e=a()(e,C,{trailing:!0,leading:!1}));var t=new ResizeObserver(e),n=N.current.getBoundingClientRect();return _(n.width,n.height),t.observe(N.current),function(){t.disconnect()}},[_,C]);var D=(0,o.useMemo)(function(){var e=T.containerWidth,t=T.containerHeight;if(e<0||t<0)return null;(0,s.Z)((0,c.hU)(g)||(0,c.hU)(y),"The width(%s) and height(%s) are both fixed numbers,\n maybe you don't need to use a ResponsiveContainer.",g,y),(0,s.Z)(!i||i>0,"The aspect(%s) must be greater than zero.",i);var n=(0,c.hU)(g)?e:g,r=(0,c.hU)(y)?t:y;i&&i>0&&(n?r=n/i:r&&(n=r*i),S&&r>S&&(r=S)),(0,s.Z)(n>0||r>0,"The width(%s) and height(%s) of chart should be greater than 0,\n please check the style of container, or the props width(%s) and height(%s),\n or add a minWidth(%s) or minHeight(%s) or use aspect(%s) to control the\n height and width.",n,r,g,y,x,w,i);var a=!Array.isArray(k)&&(0,l.isElement)(k)&&(0,u.Gf)(k.type).endsWith("Chart");return o.Children.map(k,function(e){return(0,l.isElement)(e)?(0,o.cloneElement)(e,p({width:n,height:r},a?{style:p({height:"100%",width:"100%",maxHeight:r,maxWidth:n},e.props.style)}:{})):e})},[i,k,y,S,w,x,T,g]);return o.createElement("div",{id:O?"".concat(O):void 0,className:(0,r.Z)("recharts-responsive-container",j),style:p(p({},void 0===M?{}:M),{},{width:g,height:y,minWidth:x,minHeight:w,maxHeight:S}),ref:N},D)})},58811:function(e,t,n){"use 
strict";n.d(t,{x:function(){return Z}});var r=n(2265),o=n(77571),i=n.n(o),a=n(61994),l=n(16630),c=n(34067),s=n(82944),u=n(4094);function d(e){return(d="function"==typeof Symbol&&"symbol"==typeof Symbol.iterator?function(e){return typeof e}:function(e){return e&&"function"==typeof Symbol&&e.constructor===Symbol&&e!==Symbol.prototype?"symbol":typeof e})(e)}function f(e,t){return function(e){if(Array.isArray(e))return e}(e)||function(e,t){var n=null==e?null:"undefined"!=typeof Symbol&&e[Symbol.iterator]||e["@@iterator"];if(null!=n){var r,o,i,a,l=[],c=!0,s=!1;try{if(i=(n=n.call(e)).next,0===t){if(Object(n)!==n)return;c=!1}else for(;!(c=(r=i.call(n)).done)&&(l.push(r.value),l.length!==t);c=!0);}catch(e){s=!0,o=e}finally{try{if(!c&&null!=n.return&&(a=n.return(),Object(a)!==a))return}finally{if(s)throw o}}return l}}(e,t)||function(e,t){if(e){if("string"==typeof e)return p(e,t);var n=Object.prototype.toString.call(e).slice(8,-1);if("Object"===n&&e.constructor&&(n=e.constructor.name),"Map"===n||"Set"===n)return Array.from(e);if("Arguments"===n||/^(?:Ui|I)nt(?:8|16|32)(?:Clamped)?Array$/.test(n))return p(e,t)}}(e,t)||function(){throw TypeError("Invalid attempt to destructure non-iterable instance.\nIn order to be iterable, non-array objects must have a [Symbol.iterator]() method.")}()}function p(e,t){(null==t||t>e.length)&&(t=e.length);for(var n=0,r=Array(t);n=0||(o[n]=e[n]);return o}(e,t);if(Object.getOwnPropertySymbols){var i=Object.getOwnPropertySymbols(e);for(r=0;r=0)&&Object.prototype.propertyIsEnumerable.call(e,n)&&(o[n]=e[n])}return o}function M(e,t){return function(e){if(Array.isArray(e))return e}(e)||function(e,t){var n=null==e?null:"undefined"!=typeof Symbol&&e[Symbol.iterator]||e["@@iterator"];if(null!=n){var r,o,i,a,l=[],c=!0,s=!1;try{if(i=(n=n.call(e)).next,0===t){if(Object(n)!==n)return;c=!1}else 
for(;!(c=(r=i.call(n)).done)&&(l.push(r.value),l.length!==t);c=!0);}catch(e){s=!0,o=e}finally{try{if(!c&&null!=n.return&&(a=n.return(),Object(a)!==a))return}finally{if(s)throw o}}return l}}(e,t)||function(e,t){if(e){if("string"==typeof e)return N(e,t);var n=Object.prototype.toString.call(e).slice(8,-1);if("Object"===n&&e.constructor&&(n=e.constructor.name),"Map"===n||"Set"===n)return Array.from(e);if("Arguments"===n||/^(?:Ui|I)nt(?:8|16|32)(?:Clamped)?Array$/.test(n))return N(e,t)}}(e,t)||function(){throw TypeError("Invalid attempt to destructure non-iterable instance.\nIn order to be iterable, non-array objects must have a [Symbol.iterator]() method.")}()}function N(e,t){(null==t||t>e.length)&&(t=e.length);for(var n=0,r=Array(t);n0&&void 0!==arguments[0]?arguments[0]:[];return e.reduce(function(e,t){var i=t.word,a=t.width,l=e[e.length-1];return l&&(null==r||o||l.width+a+na||t.reduce(function(e,t){return e.width>t.width?e:t}).width>Number(r),t]},m=0,g=c.length-1,v=0;m<=g&&v<=c.length-1;){var y=Math.floor((m+g)/2),b=M(h(y-1),2),x=b[0],w=b[1],S=M(h(y),1)[0];if(x||S||(m=y+1),x&&S&&(g=y-1),!x&&S){i=w;break}v++}return i||p},A=function(e){return[{words:i()(e)?[]:e.toString().split(I)}]},_=function(e){var t=e.width,n=e.scaleToFit,r=e.children,o=e.style,i=e.breakAll,a=e.maxLines;if((t||n)&&!c.x.isSsr){var l=R({breakAll:i,children:r,style:o});return l?T({breakAll:i,children:r,maxLines:a,style:o},l.wordsWithComputedWidth,l.spaceWidth,t,n):A(r)}return A(r)},D="#808080",Z=function(e){var t,n=e.x,o=void 0===n?0:n,i=e.y,c=void 0===i?0:i,u=e.lineHeight,d=void 0===u?"1em":u,f=e.capHeight,p=void 0===f?"0.71em":f,h=e.scaleToFit,m=void 0!==h&&h,g=e.textAnchor,v=e.verticalAnchor,y=e.fill,b=void 0===y?D:y,x=P(e,C),w=(0,r.useMemo)(function(){return 
_({breakAll:x.breakAll,children:x.children,maxLines:x.maxLines,scaleToFit:m,style:x.style,width:x.width})},[x.breakAll,x.children,x.maxLines,m,x.style,x.width]),S=x.dx,k=x.dy,M=x.angle,N=x.className,I=x.breakAll,R=P(x,O);if(!(0,l.P2)(o)||!(0,l.P2)(c))return null;var T=o+((0,l.hj)(S)?S:0),A=c+((0,l.hj)(k)?k:0);switch(void 0===v?"end":v){case"start":t=E("calc(".concat(p,")"));break;case"middle":t=E("calc(".concat((w.length-1)/2," * -").concat(d," + (").concat(p," / 2))"));break;default:t=E("calc(".concat(w.length-1," * -").concat(d,")"))}var Z=[];if(m){var L=w[0].width,z=x.width;Z.push("scale(".concat(((0,l.hj)(z)?z/L:1)/L,")"))}return M&&Z.push("rotate(".concat(M,", ").concat(T,", ").concat(A,")")),Z.length&&(R.transform=Z.join(" ")),r.createElement("text",j({},(0,s.L6)(R,!0),{x:T,y:A,className:(0,a.Z)("recharts-text",N),textAnchor:void 0===g?"start":g,fill:b.includes("url")?D:b}),w.map(function(e,n){var o=e.words.join(I?"":" ");return r.createElement("tspan",{x:T,dy:0===n?t:d,key:o},o)}))}},8147:function(e,t,n){"use strict";n.d(t,{u:function(){return F}});var r=n(2265),o=n(34935),i=n.n(o),a=n(77571),l=n.n(a),c=n(61994),s=n(16630);function u(e){return(u="function"==typeof Symbol&&"symbol"==typeof Symbol.iterator?function(e){return typeof e}:function(e){return e&&"function"==typeof Symbol&&e.constructor===Symbol&&e!==Symbol.prototype?"symbol":typeof e})(e)}function d(e,t){(null==t||t>e.length)&&(t=e.length);for(var n=0,r=Array(t);nc[r]+u?Math.max(d,c[r]):Math.max(f,c[r])}function S(e){return(S="function"==typeof Symbol&&"symbol"==typeof Symbol.iterator?function(e){return typeof e}:function(e){return e&&"function"==typeof Symbol&&e.constructor===Symbol&&e!==Symbol.prototype?"symbol":typeof e})(e)}function k(e,t){var n=Object.keys(e);if(Object.getOwnPropertySymbols){var r=Object.getOwnPropertySymbols(e);t&&(r=r.filter(function(t){return Object.getOwnPropertyDescriptor(e,t).enumerable})),n.push.apply(n,r)}return n}function E(e){for(var 
t=1;t1||Math.abs(e.height-this.lastBoundingBox.height)>1)&&(this.lastBoundingBox.width=e.width,this.lastBoundingBox.height=e.height)}else(-1!==this.lastBoundingBox.width||-1!==this.lastBoundingBox.height)&&(this.lastBoundingBox.width=-1,this.lastBoundingBox.height=-1)}},{key:"componentDidMount",value:function(){document.addEventListener("keydown",this.handleKeyDown),this.updateBBox()}},{key:"componentWillUnmount",value:function(){document.removeEventListener("keydown",this.handleKeyDown)}},{key:"componentDidUpdate",value:function(){var e,t;this.props.active&&this.updateBBox(),this.state.dismissed&&((null===(e=this.props.coordinate)||void 0===e?void 0:e.x)!==this.state.dismissedAtCoordinate.x||(null===(t=this.props.coordinate)||void 0===t?void 0:t.y)!==this.state.dismissedAtCoordinate.y)&&(this.state.dismissed=!1)}},{key:"render",value:function(){var e,t,n,o,i,a,l,u,d,f,p,h,m,v,S,k,C,O,j,P,M,N=this,I=this.props,R=I.active,T=I.allowEscapeViewBox,A=I.animationDuration,_=I.animationEasing,D=I.children,Z=I.coordinate,L=I.hasPayload,z=I.isAnimationActive,B=I.offset,F=I.position,H=I.reverseDirection,q=I.useTranslate3d,W=I.viewBox,K=I.wrapperStyle,U=(v=(e={allowEscapeViewBox:T,coordinate:Z,offsetTopLeft:B,position:F,reverseDirection:H,tooltipBox:{height:this.lastBoundingBox.height,width:this.lastBoundingBox.width},useTranslate3d:q,viewBox:W}).allowEscapeViewBox,S=e.coordinate,k=e.offsetTopLeft,C=e.position,O=e.reverseDirection,j=e.tooltipBox,P=e.useTranslate3d,M=e.viewBox,j.height>0&&j.width>0&&S?(n=(t={translateX:h=w({allowEscapeViewBox:v,coordinate:S,key:"x",offsetTopLeft:k,position:C,reverseDirection:O,tooltipDimension:j.width,viewBox:M,viewBoxDimension:M.width}),translateY:m=w({allowEscapeViewBox:v,coordinate:S,key:"y",offsetTopLeft:k,position:C,reverseDirection:O,tooltipDimension:j.height,viewBox:M,viewBoxDimension:M.height}),useTranslate3d:P}).translateX,o=t.translateY,i=t.useTranslate3d,p=(0,g.bO)({transform:i?"translate3d(".concat(n,"px, ").concat(o,"px, 
0)"):"translate(".concat(n,"px, ").concat(o,"px)")})):p=x,{cssProperties:p,cssClasses:(u=(a={translateX:h,translateY:m,coordinate:S}).coordinate,d=a.translateX,f=a.translateY,(0,c.Z)(b,(y(l={},"".concat(b,"-right"),(0,s.hj)(d)&&u&&(0,s.hj)(u.x)&&d>=u.x),y(l,"".concat(b,"-left"),(0,s.hj)(d)&&u&&(0,s.hj)(u.x)&&d=u.y),y(l,"".concat(b,"-top"),(0,s.hj)(f)&&u&&(0,s.hj)(u.y)&&f0;return r.createElement(N,{allowEscapeViewBox:i,animationDuration:a,animationEasing:l,isAnimationActive:d,active:o,coordinate:s,hasPayload:S,offset:f,position:g,reverseDirection:v,useTranslate3d:y,viewBox:b,wrapperStyle:x},(e=_(_({},this.props),{},{payload:w}),r.isValidElement(c)?r.cloneElement(c,e):"function"==typeof c?r.createElement(c,e):r.createElement(m,e)))}}],function(e,t){for(var n=0;n=0||(o[n]=e[n]);return o}(e,t);if(Object.getOwnPropertySymbols){var i=Object.getOwnPropertySymbols(e);for(r=0;r=0)&&Object.prototype.propertyIsEnumerable.call(e,n)&&(o[n]=e[n])}return o}(e,a),u=(0,o.Z)("recharts-layer",c);return r.createElement("g",l({className:u},(0,i.L6)(s,!0),{ref:t}),n)})},48777:function(e,t,n){"use strict";n.d(t,{T:function(){return c}});var r=n(2265),o=n(61994),i=n(82944),a=["children","width","height","viewBox","className","style","title","desc"];function l(){return(l=Object.assign?Object.assign.bind():function(e){for(var t=1;t=0||(o[n]=e[n]);return o}(e,t);if(Object.getOwnPropertySymbols){var i=Object.getOwnPropertySymbols(e);for(r=0;r=0)&&Object.prototype.propertyIsEnumerable.call(e,n)&&(o[n]=e[n])}return o}(e,a),m=s||{width:n,height:c,x:0,y:0},g=(0,o.Z)("recharts-surface",u);return r.createElement("svg",l({},(0,i.L6)(h,!0,"svg"),{className:g,width:n,height:c,style:d,viewBox:"".concat(m.x," ").concat(m.y," ").concat(m.width," ").concat(m.height)}),r.createElement("title",null,f),r.createElement("desc",null,p),t)}},25739:function(e,t,n){"use strict";n.d(t,{br:function(){return h},Mw:function(){return w},zn:function(){return x},sp:function(){return m},qD:function(){return 
b},d2:function(){return y},bH:function(){return g},Ud:function(){return v}});var r=n(2265),o=n(69398),i=n(50967),a=n.n(i)()(function(e){return{x:e.left,y:e.top,width:e.width,height:e.height}},function(e){return["l",e.left,"t",e.top,"w",e.width,"h",e.height].join("")}),l=(0,r.createContext)(void 0),c=(0,r.createContext)(void 0),s=(0,r.createContext)(void 0),u=(0,r.createContext)({}),d=(0,r.createContext)(void 0),f=(0,r.createContext)(0),p=(0,r.createContext)(0),h=function(e){var t=e.state,n=t.xAxisMap,o=t.yAxisMap,i=t.offset,h=e.clipPathId,m=e.children,g=e.width,v=e.height,y=a(i);return r.createElement(l.Provider,{value:n},r.createElement(c.Provider,{value:o},r.createElement(u.Provider,{value:i},r.createElement(s.Provider,{value:y},r.createElement(d.Provider,{value:h},r.createElement(f.Provider,{value:v},r.createElement(p.Provider,{value:g},m)))))))},m=function(){return(0,r.useContext)(d)},g=function(e){var t=(0,r.useContext)(l);null!=t||(0,o.Z)(!1);var n=t[e];return null!=n||(0,o.Z)(!1),n},v=function(e){var t=(0,r.useContext)(c);null!=t||(0,o.Z)(!1);var n=t[e];return null!=n||(0,o.Z)(!1),n},y=function(){return(0,r.useContext)(s)},b=function(){return(0,r.useContext)(u)},x=function(){return(0,r.useContext)(p)},w=function(){return(0,r.useContext)(f)}},57165:function(e,t,n){"use strict";n.d(t,{H:function(){return V}});var r=n(2265);function o(){}function i(e,t,n){e._context.bezierCurveTo((2*e._x0+e._x1)/3,(2*e._y0+e._y1)/3,(e._x0+2*e._x1)/3,(e._y0+2*e._y1)/3,(e._x0+4*e._x1+t)/6,(e._y0+4*e._y1+n)/6)}function a(e){this._context=e}function l(e){this._context=e}function c(e){this._context=e}a.prototype={areaStart:function(){this._line=0},areaEnd:function(){this._line=NaN},lineStart:function(){this._x0=this._x1=this._y0=this._y1=NaN,this._point=0},lineEnd:function(){switch(this._point){case 3:i(this,this._x1,this._y1);case 
2:this._context.lineTo(this._x1,this._y1)}(this._line||0!==this._line&&1===this._point)&&this._context.closePath(),this._line=1-this._line},point:function(e,t){switch(e=+e,t=+t,this._point){case 0:this._point=1,this._line?this._context.lineTo(e,t):this._context.moveTo(e,t);break;case 1:this._point=2;break;case 2:this._point=3,this._context.lineTo((5*this._x0+this._x1)/6,(5*this._y0+this._y1)/6);default:i(this,e,t)}this._x0=this._x1,this._x1=e,this._y0=this._y1,this._y1=t}},l.prototype={areaStart:o,areaEnd:o,lineStart:function(){this._x0=this._x1=this._x2=this._x3=this._x4=this._y0=this._y1=this._y2=this._y3=this._y4=NaN,this._point=0},lineEnd:function(){switch(this._point){case 1:this._context.moveTo(this._x2,this._y2),this._context.closePath();break;case 2:this._context.moveTo((this._x2+2*this._x3)/3,(this._y2+2*this._y3)/3),this._context.lineTo((this._x3+2*this._x2)/3,(this._y3+2*this._y2)/3),this._context.closePath();break;case 3:this.point(this._x2,this._y2),this.point(this._x3,this._y3),this.point(this._x4,this._y4)}},point:function(e,t){switch(e=+e,t=+t,this._point){case 0:this._point=1,this._x2=e,this._y2=t;break;case 1:this._point=2,this._x3=e,this._y3=t;break;case 2:this._point=3,this._x4=e,this._y4=t,this._context.moveTo((this._x0+4*this._x1+e)/6,(this._y0+4*this._y1+t)/6);break;default:i(this,e,t)}this._x0=this._x1,this._x1=e,this._y0=this._y1,this._y1=t}},c.prototype={areaStart:function(){this._line=0},areaEnd:function(){this._line=NaN},lineStart:function(){this._x0=this._x1=this._y0=this._y1=NaN,this._point=0},lineEnd:function(){(this._line||0!==this._line&&3===this._point)&&this._context.closePath(),this._line=1-this._line},point:function(e,t){switch(e=+e,t=+t,this._point){case 0:this._point=1;break;case 1:this._point=2;break;case 2:this._point=3;var n=(this._x0+4*this._x1+e)/6,r=(this._y0+4*this._y1+t)/6;this._line?this._context.lineTo(n,r):this._context.moveTo(n,r);break;case 
3:this._point=4;default:i(this,e,t)}this._x0=this._x1,this._x1=e,this._y0=this._y1,this._y1=t}};class s{constructor(e,t){this._context=e,this._x=t}areaStart(){this._line=0}areaEnd(){this._line=NaN}lineStart(){this._point=0}lineEnd(){(this._line||0!==this._line&&1===this._point)&&this._context.closePath(),this._line=1-this._line}point(e,t){switch(e=+e,t=+t,this._point){case 0:this._point=1,this._line?this._context.lineTo(e,t):this._context.moveTo(e,t);break;case 1:this._point=2;default:this._x?this._context.bezierCurveTo(this._x0=(this._x0+e)/2,this._y0,this._x0,t,e,t):this._context.bezierCurveTo(this._x0,this._y0=(this._y0+t)/2,e,this._y0,e,t)}this._x0=e,this._y0=t}}function u(e){this._context=e}function d(e){this._context=e}function f(e){return new d(e)}function p(e,t,n){var r=e._x1-e._x0,o=t-e._x1,i=(e._y1-e._y0)/(r||o<0&&-0),a=(n-e._y1)/(o||r<0&&-0);return((i<0?-1:1)+(a<0?-1:1))*Math.min(Math.abs(i),Math.abs(a),.5*Math.abs((i*o+a*r)/(r+o)))||0}function h(e,t){var n=e._x1-e._x0;return n?(3*(e._y1-e._y0)/n-t)/2:t}function m(e,t,n){var r=e._x0,o=e._y0,i=e._x1,a=e._y1,l=(i-r)/3;e._context.bezierCurveTo(r+l,o+l*t,i-l,a-l*n,i,a)}function g(e){this._context=e}function v(e){this._context=new y(e)}function y(e){this._context=e}function b(e){this._context=e}function x(e){var t,n,r=e.length-1,o=Array(r),i=Array(r),a=Array(r);for(o[0]=0,i[0]=2,a[0]=e[0]+2*e[1],t=1;t=0;--t)o[t]=(a[t]-o[t+1])/i[t];for(t=0,i[r-1]=(e[r]+o[r-1])/2;t=0&&(this._t=1-this._t,this._line=1-this._line)},point:function(e,t){switch(e=+e,t=+t,this._point){case 0:this._point=1,this._line?this._context.lineTo(e,t):this._context.moveTo(e,t);break;case 1:this._point=2;default:if(this._t<=0)this._context.lineTo(this._x,t),this._context.lineTo(e,t);else{var n=this._x*(1-this._t)+e*this._t;this._context.lineTo(n,this._y),this._context.lineTo(n,t)}}this._x=e,this._y=t}};var S=n(22516),k=n(76115),E=n(67790);function C(e){return e[0]}function O(e){return e[1]}function j(e,t){var 
n=(0,k.Z)(!0),r=null,o=f,i=null,a=(0,E.d)(l);function l(l){var c,s,u,d=(l=(0,S.Z)(l)).length,f=!1;for(null==r&&(i=o(u=a())),c=0;c<=d;++c)!(c=d;--f)l.point(v[f],y[f]);l.lineEnd(),l.areaEnd()}}g&&(v[u]=+e(p,u,s),y[u]=+t(p,u,s),l.point(r?+r(p,u,s):v[u],n?+n(p,u,s):y[u]))}if(h)return l=null,h+""||null}function u(){return j().defined(o).curve(a).context(i)}return e="function"==typeof e?e:void 0===e?C:(0,k.Z)(+e),t="function"==typeof t?t:void 0===t?(0,k.Z)(0):(0,k.Z)(+t),n="function"==typeof n?n:void 0===n?O:(0,k.Z)(+n),s.x=function(t){return arguments.length?(e="function"==typeof t?t:(0,k.Z)(+t),r=null,s):e},s.x0=function(t){return arguments.length?(e="function"==typeof t?t:(0,k.Z)(+t),s):e},s.x1=function(e){return arguments.length?(r=null==e?null:"function"==typeof e?e:(0,k.Z)(+e),s):r},s.y=function(e){return arguments.length?(t="function"==typeof e?e:(0,k.Z)(+e),n=null,s):t},s.y0=function(e){return arguments.length?(t="function"==typeof e?e:(0,k.Z)(+e),s):t},s.y1=function(e){return arguments.length?(n=null==e?null:"function"==typeof e?e:(0,k.Z)(+e),s):n},s.lineX0=s.lineY0=function(){return u().x(e).y(t)},s.lineY1=function(){return u().x(e).y(n)},s.lineX1=function(){return u().x(r).y(t)},s.defined=function(e){return arguments.length?(o="function"==typeof e?e:(0,k.Z)(!!e),s):o},s.curve=function(e){return arguments.length?(a=e,null!=i&&(l=a(i)),s):a},s.context=function(e){return arguments.length?(null==e?i=l=null:l=a(i=e),s):i},s}var M=n(75551),N=n.n(M),I=n(86757),R=n.n(I),T=n(61994),A=n(41637),_=n(82944),D=n(16630);function Z(e){return(Z="function"==typeof Symbol&&"symbol"==typeof Symbol.iterator?function(e){return typeof e}:function(e){return e&&"function"==typeof Symbol&&e.constructor===Symbol&&e!==Symbol.prototype?"symbol":typeof e})(e)}function L(){return(L=Object.assign?Object.assign.bind():function(e){for(var t=1;te.length)&&(t=e.length);for(var n=0,r=Array(t);n=0?1:-1,c=n>=0?1:-1,s=r>=0&&n>=0||r<0&&n<0?1:0;if(a>0&&o instanceof Array){for(var 
u=[0,0,0,0],d=0;d<4;d++)u[d]=o[d]>a?a:o[d];i="M".concat(e,",").concat(t+l*u[0]),u[0]>0&&(i+="A ".concat(u[0],",").concat(u[0],",0,0,").concat(s,",").concat(e+c*u[0],",").concat(t)),i+="L ".concat(e+n-c*u[1],",").concat(t),u[1]>0&&(i+="A ".concat(u[1],",").concat(u[1],",0,0,").concat(s,",\n ").concat(e+n,",").concat(t+l*u[1])),i+="L ".concat(e+n,",").concat(t+r-l*u[2]),u[2]>0&&(i+="A ".concat(u[2],",").concat(u[2],",0,0,").concat(s,",\n ").concat(e+n-c*u[2],",").concat(t+r)),i+="L ".concat(e+c*u[3],",").concat(t+r),u[3]>0&&(i+="A ".concat(u[3],",").concat(u[3],",0,0,").concat(s,",\n ").concat(e,",").concat(t+r-l*u[3])),i+="Z"}else if(a>0&&o===+o&&o>0){var f=Math.min(a,o);i="M ".concat(e,",").concat(t+l*f,"\n A ").concat(f,",").concat(f,",0,0,").concat(s,",").concat(e+c*f,",").concat(t,"\n L ").concat(e+n-c*f,",").concat(t,"\n A ").concat(f,",").concat(f,",0,0,").concat(s,",").concat(e+n,",").concat(t+l*f,"\n L ").concat(e+n,",").concat(t+r-l*f,"\n A ").concat(f,",").concat(f,",0,0,").concat(s,",").concat(e+n-c*f,",").concat(t+r,"\n L ").concat(e+c*f,",").concat(t+r,"\n A ").concat(f,",").concat(f,",0,0,").concat(s,",").concat(e,",").concat(t+r-l*f," Z")}else i="M ".concat(e,",").concat(t," h ").concat(n," v ").concat(r," h ").concat(-n," Z");return i},p=function(e,t){if(!e||!t)return!1;var n=e.x,r=e.y,o=t.x,i=t.y,a=t.width,l=t.height;return!!(Math.abs(a)>0&&Math.abs(l)>0)&&n>=Math.min(o,o+a)&&n<=Math.max(o,o+a)&&r>=Math.min(i,i+l)&&r<=Math.max(i,i+l)},h={x:0,y:0,width:0,height:0,radius:0,isAnimationActive:!1,isUpdateAnimationActive:!1,animationBegin:0,animationDuration:1500,animationEasing:"ease"},m=function(e){var t,n=d(d({},h),e),l=(0,r.useRef)(),u=function(e){if(Array.isArray(e))return e}(t=(0,r.useState)(-1))||function(e,t){var n=null==e?null:"undefined"!=typeof Symbol&&e[Symbol.iterator]||e["@@iterator"];if(null!=n){var 
r,o,i,a,l=[],c=!0,s=!1;try{for(i=(n=n.call(e)).next;!(c=(r=i.call(n)).done)&&(l.push(r.value),2!==l.length);c=!0);}catch(e){s=!0,o=e}finally{try{if(!c&&null!=n.return&&(a=n.return(),Object(a)!==a))return}finally{if(s)throw o}}return l}}(t,2)||function(e,t){if(e){if("string"==typeof e)return s(e,2);var n=Object.prototype.toString.call(e).slice(8,-1);if("Object"===n&&e.constructor&&(n=e.constructor.name),"Map"===n||"Set"===n)return Array.from(e);if("Arguments"===n||/^(?:Ui|I)nt(?:8|16|32)(?:Clamped)?Array$/.test(n))return s(e,2)}}(t,2)||function(){throw TypeError("Invalid attempt to destructure non-iterable instance.\nIn order to be iterable, non-array objects must have a [Symbol.iterator]() method.")}(),p=u[0],m=u[1];(0,r.useEffect)(function(){if(l.current&&l.current.getTotalLength)try{var e=l.current.getTotalLength();e&&m(e)}catch(e){}},[]);var g=n.x,v=n.y,y=n.width,b=n.height,x=n.radius,w=n.className,S=n.animationEasing,k=n.animationDuration,E=n.animationBegin,C=n.isAnimationActive,O=n.isUpdateAnimationActive;if(g!==+g||v!==+v||y!==+y||b!==+b||0===y||0===b)return null;var j=(0,o.Z)("recharts-rectangle",w);return O?r.createElement(i.ZP,{canBegin:p>0,from:{width:y,height:b,x:g,y:v},to:{width:y,height:b,x:g,y:v},duration:k,animationEasing:S,isActive:O},function(e){var t=e.width,o=e.height,s=e.x,u=e.y;return r.createElement(i.ZP,{canBegin:p>0,from:"0px ".concat(-1===p?1:p,"px"),to:"".concat(p,"px 0px"),attributeName:"strokeDasharray",begin:E,duration:k,isActive:C,easing:S},r.createElement("path",c({},(0,a.L6)(n,!0),{className:j,d:f(s,u,t,o,x),ref:l})))}):r.createElement("path",c({},(0,a.L6)(n,!0),{className:j,d:f(g,v,y,b,x)}))}},60474:function(e,t,n){"use strict";n.d(t,{L:function(){return g}});var r=n(2265),o=n(61994),i=n(82944),a=n(39206),l=n(16630);function c(e){return(c="function"==typeof Symbol&&"symbol"==typeof Symbol.iterator?function(e){return typeof e}:function(e){return e&&"function"==typeof 
Symbol&&e.constructor===Symbol&&e!==Symbol.prototype?"symbol":typeof e})(e)}function s(){return(s=Object.assign?Object.assign.bind():function(e){for(var t=1;t180),",").concat(+(c>u),",\n ").concat(f.x,",").concat(f.y,"\n ");if(o>0){var h=(0,a.op)(n,r,o,c),m=(0,a.op)(n,r,o,u);p+="L ".concat(m.x,",").concat(m.y,"\n A ").concat(o,",").concat(o,",0,\n ").concat(+(Math.abs(s)>180),",").concat(+(c<=u),",\n ").concat(h.x,",").concat(h.y," Z")}else p+="L ".concat(n,",").concat(r," Z");return p},h=function(e){var t=e.cx,n=e.cy,r=e.innerRadius,o=e.outerRadius,i=e.cornerRadius,a=e.forceCornerRadius,c=e.cornerIsExternal,s=e.startAngle,u=e.endAngle,d=(0,l.uY)(u-s),h=f({cx:t,cy:n,radius:o,angle:s,sign:d,cornerRadius:i,cornerIsExternal:c}),m=h.circleTangency,g=h.lineTangency,v=h.theta,y=f({cx:t,cy:n,radius:o,angle:u,sign:-d,cornerRadius:i,cornerIsExternal:c}),b=y.circleTangency,x=y.lineTangency,w=y.theta,S=c?Math.abs(s-u):Math.abs(s-u)-v-w;if(S<0)return a?"M ".concat(g.x,",").concat(g.y,"\n a").concat(i,",").concat(i,",0,0,1,").concat(2*i,",0\n a").concat(i,",").concat(i,",0,0,1,").concat(-(2*i),",0\n "):p({cx:t,cy:n,innerRadius:r,outerRadius:o,startAngle:s,endAngle:u});var k="M ".concat(g.x,",").concat(g.y,"\n A").concat(i,",").concat(i,",0,0,").concat(+(d<0),",").concat(m.x,",").concat(m.y,"\n A").concat(o,",").concat(o,",0,").concat(+(S>180),",").concat(+(d<0),",").concat(b.x,",").concat(b.y,"\n A").concat(i,",").concat(i,",0,0,").concat(+(d<0),",").concat(x.x,",").concat(x.y,"\n ");if(r>0){var E=f({cx:t,cy:n,radius:r,angle:s,sign:d,isExternal:!0,cornerRadius:i,cornerIsExternal:c}),C=E.circleTangency,O=E.lineTangency,j=E.theta,P=f({cx:t,cy:n,radius:r,angle:u,sign:-d,isExternal:!0,cornerRadius:i,cornerIsExternal:c}),M=P.circleTangency,N=P.lineTangency,I=P.theta,R=c?Math.abs(s-u):Math.abs(s-u)-j-I;if(R<0&&0===i)return"".concat(k,"L").concat(t,",").concat(n,"Z");k+="L".concat(N.x,",").concat(N.y,"\n 
A").concat(i,",").concat(i,",0,0,").concat(+(d<0),",").concat(M.x,",").concat(M.y,"\n A").concat(r,",").concat(r,",0,").concat(+(R>180),",").concat(+(d>0),",").concat(C.x,",").concat(C.y,"\n A").concat(i,",").concat(i,",0,0,").concat(+(d<0),",").concat(O.x,",").concat(O.y,"Z")}else k+="L".concat(t,",").concat(n,"Z");return k},m={cx:0,cy:0,innerRadius:0,outerRadius:0,startAngle:0,endAngle:0,cornerRadius:0,forceCornerRadius:!1,cornerIsExternal:!1},g=function(e){var t,n=d(d({},m),e),a=n.cx,c=n.cy,u=n.innerRadius,f=n.outerRadius,g=n.cornerRadius,v=n.forceCornerRadius,y=n.cornerIsExternal,b=n.startAngle,x=n.endAngle,w=n.className;if(f