diff --git a/.circleci/config.yml b/.circleci/config.yml index 18bfeedb52..5f4628d26d 100644 --- a/.circleci/config.yml +++ b/.circleci/config.yml @@ -344,4 +344,4 @@ workflows: filters: branches: only: - - main + - main \ No newline at end of file diff --git a/README.md b/README.md index 8868dc8ccd..fe7d56b6c4 100644 --- a/README.md +++ b/README.md @@ -225,37 +225,37 @@ curl 'http://0.0.0.0:4000/key/generate' \ ## Supported Providers ([Docs](https://docs.litellm.ai/docs/providers)) | Provider | [Completion](https://docs.litellm.ai/docs/#basic-usage) | [Streaming](https://docs.litellm.ai/docs/completion/stream#streaming-responses) | [Async Completion](https://docs.litellm.ai/docs/completion/stream#async-completion) | [Async Streaming](https://docs.litellm.ai/docs/completion/stream#async-streaming) | [Async Embedding](https://docs.litellm.ai/docs/embedding/supported_embedding) | [Async Image Generation](https://docs.litellm.ai/docs/image_generation) | -| ----------------------------------------------------------------------------------- | ------------------------------------------------------- | ------------------------------------------------------------------------------- | ----------------------------------------------------------------------------------- | --------------------------------------------------------------------------------- | ----------------------------------------------------------------------------- | ----------------------------------------------------------------------- | -| [openai](https://docs.litellm.ai/docs/providers/openai) | ✅ | ✅ | ✅ | ✅ | ✅ | ✅ | -| [azure](https://docs.litellm.ai/docs/providers/azure) | ✅ | ✅ | ✅ | ✅ | ✅ | ✅ | -| [aws - sagemaker](https://docs.litellm.ai/docs/providers/aws_sagemaker) | ✅ | ✅ | ✅ | ✅ | ✅ | -| [aws - bedrock](https://docs.litellm.ai/docs/providers/bedrock) | ✅ | ✅ | ✅ | ✅ | ✅ | -| [google - vertex_ai](https://docs.litellm.ai/docs/providers/vertex) | ✅ | ✅ | ✅ | ✅ | ✅ | ✅ -| [google - 
palm](https://docs.litellm.ai/docs/providers/palm) | ✅ | ✅ | ✅ | ✅ | -| [google AI Studio - gemini](https://docs.litellm.ai/docs/providers/gemini) | ✅ | ✅ | ✅ | ✅ | | -| [mistral ai api](https://docs.litellm.ai/docs/providers/mistral) | ✅ | ✅ | ✅ | ✅ | ✅ | -| [cloudflare AI Workers](https://docs.litellm.ai/docs/providers/cloudflare_workers) | ✅ | ✅ | ✅ | ✅ | -| [cohere](https://docs.litellm.ai/docs/providers/cohere) | ✅ | ✅ | ✅ | ✅ | ✅ | -| [anthropic](https://docs.litellm.ai/docs/providers/anthropic) | ✅ | ✅ | ✅ | ✅ | -| [huggingface](https://docs.litellm.ai/docs/providers/huggingface) | ✅ | ✅ | ✅ | ✅ | ✅ | -| [replicate](https://docs.litellm.ai/docs/providers/replicate) | ✅ | ✅ | ✅ | ✅ | -| [together_ai](https://docs.litellm.ai/docs/providers/togetherai) | ✅ | ✅ | ✅ | ✅ | -| [openrouter](https://docs.litellm.ai/docs/providers/openrouter) | ✅ | ✅ | ✅ | ✅ | -| [ai21](https://docs.litellm.ai/docs/providers/ai21) | ✅ | ✅ | ✅ | ✅ | -| [baseten](https://docs.litellm.ai/docs/providers/baseten) | ✅ | ✅ | ✅ | ✅ | -| [vllm](https://docs.litellm.ai/docs/providers/vllm) | ✅ | ✅ | ✅ | ✅ | -| [nlp_cloud](https://docs.litellm.ai/docs/providers/nlp_cloud) | ✅ | ✅ | ✅ | ✅ | -| [aleph alpha](https://docs.litellm.ai/docs/providers/aleph_alpha) | ✅ | ✅ | ✅ | ✅ | -| [petals](https://docs.litellm.ai/docs/providers/petals) | ✅ | ✅ | ✅ | ✅ | -| [ollama](https://docs.litellm.ai/docs/providers/ollama) | ✅ | ✅ | ✅ | ✅ | ✅ | -| [deepinfra](https://docs.litellm.ai/docs/providers/deepinfra) | ✅ | ✅ | ✅ | ✅ | -| [perplexity-ai](https://docs.litellm.ai/docs/providers/perplexity) | ✅ | ✅ | ✅ | ✅ | -| [Groq AI](https://docs.litellm.ai/docs/providers/groq) | ✅ | ✅ | ✅ | ✅ | -| [Deepseek](https://docs.litellm.ai/docs/providers/deepseek) | ✅ | ✅ | ✅ | ✅ | -| [anyscale](https://docs.litellm.ai/docs/providers/anyscale) | ✅ | ✅ | ✅ | ✅ | -| [IBM - watsonx.ai](https://docs.litellm.ai/docs/providers/watsonx) | ✅ | ✅ | ✅ | ✅ | ✅ -| [voyage ai](https://docs.litellm.ai/docs/providers/voyage) | | | | | ✅ | 
-| [xinference [Xorbits Inference]](https://docs.litellm.ai/docs/providers/xinference) | | | | | ✅ | +|-------------------------------------------------------------------------------------|---------------------------------------------------------|---------------------------------------------------------------------------------|-------------------------------------------------------------------------------------|-----------------------------------------------------------------------------------|-------------------------------------------------------------------------------|-------------------------------------------------------------------------| +| [openai](https://docs.litellm.ai/docs/providers/openai) | ✅ | ✅ | ✅ | ✅ | ✅ | ✅ | +| [azure](https://docs.litellm.ai/docs/providers/azure) | ✅ | ✅ | ✅ | ✅ | ✅ | ✅ | +| [aws - sagemaker](https://docs.litellm.ai/docs/providers/aws_sagemaker) | ✅ | ✅ | ✅ | ✅ | ✅ | | +| [aws - bedrock](https://docs.litellm.ai/docs/providers/bedrock) | ✅ | ✅ | ✅ | ✅ | ✅ | | +| [google - vertex_ai](https://docs.litellm.ai/docs/providers/vertex) | ✅ | ✅ | ✅ | ✅ | ✅ | ✅ | +| [google - palm](https://docs.litellm.ai/docs/providers/palm) | ✅ | ✅ | ✅ | ✅ | | | +| [google AI Studio - gemini](https://docs.litellm.ai/docs/providers/gemini) | ✅ | ✅ | ✅ | ✅ | | | +| [mistral ai api](https://docs.litellm.ai/docs/providers/mistral) | ✅ | ✅ | ✅ | ✅ | ✅ | | +| [cloudflare AI Workers](https://docs.litellm.ai/docs/providers/cloudflare_workers) | ✅ | ✅ | ✅ | ✅ | | | +| [cohere](https://docs.litellm.ai/docs/providers/cohere) | ✅ | ✅ | ✅ | ✅ | ✅ | | +| [anthropic](https://docs.litellm.ai/docs/providers/anthropic) | ✅ | ✅ | ✅ | ✅ | | | +| [huggingface](https://docs.litellm.ai/docs/providers/huggingface) | ✅ | ✅ | ✅ | ✅ | ✅ | | +| [replicate](https://docs.litellm.ai/docs/providers/replicate) | ✅ | ✅ | ✅ | ✅ | | | +| [together_ai](https://docs.litellm.ai/docs/providers/togetherai) | ✅ | ✅ | ✅ | ✅ | | | +| 
[openrouter](https://docs.litellm.ai/docs/providers/openrouter) | ✅ | ✅ | ✅ | ✅ | | | +| [ai21](https://docs.litellm.ai/docs/providers/ai21) | ✅ | ✅ | ✅ | ✅ | | | +| [baseten](https://docs.litellm.ai/docs/providers/baseten) | ✅ | ✅ | ✅ | ✅ | | | +| [vllm](https://docs.litellm.ai/docs/providers/vllm) | ✅ | ✅ | ✅ | ✅ | | | +| [nlp_cloud](https://docs.litellm.ai/docs/providers/nlp_cloud) | ✅ | ✅ | ✅ | ✅ | | | +| [aleph alpha](https://docs.litellm.ai/docs/providers/aleph_alpha) | ✅ | ✅ | ✅ | ✅ | | | +| [petals](https://docs.litellm.ai/docs/providers/petals) | ✅ | ✅ | ✅ | ✅ | | | +| [ollama](https://docs.litellm.ai/docs/providers/ollama) | ✅ | ✅ | ✅ | ✅ | ✅ | | +| [deepinfra](https://docs.litellm.ai/docs/providers/deepinfra) | ✅ | ✅ | ✅ | ✅ | | | +| [perplexity-ai](https://docs.litellm.ai/docs/providers/perplexity) | ✅ | ✅ | ✅ | ✅ | | | +| [Groq AI](https://docs.litellm.ai/docs/providers/groq) | ✅ | ✅ | ✅ | ✅ | | | +| [Deepseek](https://docs.litellm.ai/docs/providers/deepseek) | ✅ | ✅ | ✅ | ✅ | | | +| [anyscale](https://docs.litellm.ai/docs/providers/anyscale) | ✅ | ✅ | ✅ | ✅ | | | +| [IBM - watsonx.ai](https://docs.litellm.ai/docs/providers/watsonx) | ✅ | ✅ | ✅ | ✅ | ✅ | | +| [voyage ai](https://docs.litellm.ai/docs/providers/voyage) | | | | | ✅ | | +| [xinference [Xorbits Inference]](https://docs.litellm.ai/docs/providers/xinference) | | | | | ✅ | | [**Read the Docs**](https://docs.litellm.ai/docs/) diff --git a/docs/my-website/docs/enterprise.md b/docs/my-website/docs/enterprise.md index 0d57b4c25d..0edf937ed3 100644 --- a/docs/my-website/docs/enterprise.md +++ b/docs/my-website/docs/enterprise.md @@ -10,6 +10,7 @@ For companies that need SSO, user management and professional support for LiteLL This covers: - ✅ **Features under the [LiteLLM Commercial License (Content Mod, Custom Tags, etc.)](https://docs.litellm.ai/docs/proxy/enterprise)** - ✅ [**Secure UI access with Single Sign-On**](../docs/proxy/ui.md#setup-ssoauth-for-ui) +- ✅ [**Audit Logs with retention 
policy**](../docs/proxy/enterprise.md#audit-logs) - ✅ [**JWT-Auth**](../docs/proxy/token_auth.md) - ✅ [**Prompt Injection Detection**](#prompt-injection-detection-lakeraai) - ✅ [**Invite Team Members to access `/spend` Routes**](../docs/proxy/cost_tracking#allowing-non-proxy-admins-to-access-spend-endpoints) diff --git a/docs/my-website/docs/observability/custom_callback.md b/docs/my-website/docs/observability/custom_callback.md index 3168222273..373b4a96c0 100644 --- a/docs/my-website/docs/observability/custom_callback.md +++ b/docs/my-website/docs/observability/custom_callback.md @@ -38,7 +38,7 @@ class MyCustomHandler(CustomLogger): print(f"On Async Success") async def async_log_failure_event(self, kwargs, response_obj, start_time, end_time): - print(f"On Async Success") + print(f"On Async Failure") customHandler = MyCustomHandler() diff --git a/docs/my-website/docs/observability/langfuse_integration.md b/docs/my-website/docs/observability/langfuse_integration.md index 6dd5377ea7..07970f599d 100644 --- a/docs/my-website/docs/observability/langfuse_integration.md +++ b/docs/my-website/docs/observability/langfuse_integration.md @@ -144,6 +144,26 @@ print(response) ``` +You can also pass `metadata` as part of the request header with a `langfuse_*` prefix: + +```shell +curl --location 'http://0.0.0.0:4000/chat/completions' \ + --header 'Content-Type: application/json' \ + --header 'langfuse_trace_id: trace-id22' \ + --header 'langfuse_trace_user_id: user-id2' \ + --header 'langfuse_trace_metadata: {"key":"value"}' \ + --data '{ + "model": "gpt-3.5-turbo", + "messages": [ + { + "role": "user", + "content": "what llm are you" + } + ] +}' +``` + + ### Trace & Generation Parameters #### Trace Specific Parameters diff --git a/docs/my-website/docs/projects/llmcord.py (Discord LLM Chatbot).md b/docs/my-website/docs/projects/llmcord.py (Discord LLM Chatbot).md new file mode 100644 index 0000000000..f8acb9383c --- /dev/null +++ b/docs/my-website/docs/projects/llmcord.py 
(Discord LLM Chatbot).md @@ -0,0 +1,3 @@ +llmcord.py lets you and your friends chat with LLMs directly in your Discord server. It works with practically any LLM, remote or locally hosted. + +Github: https://github.com/jakobdylanc/discord-llm-chatbot diff --git a/docs/my-website/docs/providers/groq.md b/docs/my-website/docs/providers/groq.md index da453c3ceb..857aae5bd2 100644 --- a/docs/my-website/docs/providers/groq.md +++ b/docs/my-website/docs/providers/groq.md @@ -46,13 +46,13 @@ for chunk in response: ## Supported Models - ALL Groq Models Supported! We support ALL Groq models, just set `groq/` as a prefix when sending completion requests -| Model Name | Function Call | -|--------------------------|------------------------------------------------------------------------------------------------------------------------------------------------------------------| -| llama3-8b-8192 | `completion(model="groq/llama3-8b-8192", messages)` | -| llama3-70b-8192 | `completion(model="groq/llama3-70b-8192", messages)` | -| llama2-70b-4096 | `completion(model="groq/llama2-70b-4096", messages)` | +| Model Name | Function Call | +|--------------------|---------------------------------------------------------| +| llama3-8b-8192 | `completion(model="groq/llama3-8b-8192", messages)` | +| llama3-70b-8192 | `completion(model="groq/llama3-70b-8192", messages)` | +| llama2-70b-4096 | `completion(model="groq/llama2-70b-4096", messages)` | | mixtral-8x7b-32768 | `completion(model="groq/mixtral-8x7b-32768", messages)` | -| gemma-7b-it | `completion(model="groq/gemma-7b-it", messages)` | +| gemma-7b-it | `completion(model="groq/gemma-7b-it", messages)` | ## Groq - Tool / Function Calling Example diff --git a/docs/my-website/docs/providers/togetherai.md b/docs/my-website/docs/providers/togetherai.md index d718619f04..1021f5ba84 100644 --- a/docs/my-website/docs/providers/togetherai.md +++ b/docs/my-website/docs/providers/togetherai.md @@ -26,52 +26,52 @@ Example TogetherAI Usage - Note: 
liteLLM supports all models deployed on Togethe ### Llama LLMs - Chat -| Model Name | Function Call | Required OS Variables | -|-----------------------------------|------------------------------------------------------------------------|---------------------------------| -| togethercomputer/llama-2-70b-chat | `completion('together_ai/togethercomputer/llama-2-70b-chat', messages)` | `os.environ['TOGETHERAI_API_KEY']` | +| Model Name | Function Call | Required OS Variables | +|-----------------------------------|-------------------------------------------------------------------------|------------------------------------| +| togethercomputer/llama-2-70b-chat | `completion('together_ai/togethercomputer/llama-2-70b-chat', messages)` | `os.environ['TOGETHERAI_API_KEY']` | ### Llama LLMs - Language / Instruct -| Model Name | Function Call | Required OS Variables | -|-----------------------------------|------------------------------------------------------------------------|---------------------------------| -| togethercomputer/llama-2-70b | `completion('together_ai/togethercomputer/llama-2-70b', messages)` | `os.environ['TOGETHERAI_API_KEY']` | -| togethercomputer/LLaMA-2-7B-32K | `completion('together_ai/togethercomputer/LLaMA-2-7B-32K', messages)` | `os.environ['TOGETHERAI_API_KEY']` | -| togethercomputer/Llama-2-7B-32K-Instruct | `completion('together_ai/togethercomputer/Llama-2-7B-32K-Instruct', messages)` | `os.environ['TOGETHERAI_API_KEY']` | -| togethercomputer/llama-2-7b | `completion('together_ai/togethercomputer/llama-2-7b', messages)` | `os.environ['TOGETHERAI_API_KEY']` | +| Model Name | Function Call | Required OS Variables | +|------------------------------------------|--------------------------------------------------------------------------------|------------------------------------| +| togethercomputer/llama-2-70b | `completion('together_ai/togethercomputer/llama-2-70b', messages)` | `os.environ['TOGETHERAI_API_KEY']` | +| togethercomputer/LLaMA-2-7B-32K 
| `completion('together_ai/togethercomputer/LLaMA-2-7B-32K', messages)` | `os.environ['TOGETHERAI_API_KEY']` | +| togethercomputer/Llama-2-7B-32K-Instruct | `completion('together_ai/togethercomputer/Llama-2-7B-32K-Instruct', messages)` | `os.environ['TOGETHERAI_API_KEY']` | +| togethercomputer/llama-2-7b | `completion('together_ai/togethercomputer/llama-2-7b', messages)` | `os.environ['TOGETHERAI_API_KEY']` | ### Falcon LLMs -| Model Name | Function Call | Required OS Variables | -|-----------------------------------|------------------------------------------------------------------------|---------------------------------| -| togethercomputer/falcon-40b-instruct | `completion('together_ai/togethercomputer/falcon-40b-instruct', messages)` | `os.environ['TOGETHERAI_API_KEY']` | -| togethercomputer/falcon-7b-instruct | `completion('together_ai/togethercomputer/falcon-7b-instruct', messages)` | `os.environ['TOGETHERAI_API_KEY']` | +| Model Name | Function Call | Required OS Variables | +|--------------------------------------|----------------------------------------------------------------------------|------------------------------------| +| togethercomputer/falcon-40b-instruct | `completion('together_ai/togethercomputer/falcon-40b-instruct', messages)` | `os.environ['TOGETHERAI_API_KEY']` | +| togethercomputer/falcon-7b-instruct | `completion('together_ai/togethercomputer/falcon-7b-instruct', messages)` | `os.environ['TOGETHERAI_API_KEY']` | ### Alpaca LLMs -| Model Name | Function Call | Required OS Variables | -|-----------------------------------|------------------------------------------------------------------------|---------------------------------| -| togethercomputer/alpaca-7b | `completion('together_ai/togethercomputer/alpaca-7b', messages)` | `os.environ['TOGETHERAI_API_KEY']` | +| Model Name | Function Call | Required OS Variables | 
+|----------------------------|------------------------------------------------------------------|------------------------------------| +| togethercomputer/alpaca-7b | `completion('together_ai/togethercomputer/alpaca-7b', messages)` | `os.environ['TOGETHERAI_API_KEY']` | ### Other Chat LLMs -| Model Name | Function Call | Required OS Variables | -|-----------------------------------|------------------------------------------------------------------------|---------------------------------| -| HuggingFaceH4/starchat-alpha | `completion('together_ai/HuggingFaceH4/starchat-alpha', messages)` | `os.environ['TOGETHERAI_API_KEY']` | +| Model Name | Function Call | Required OS Variables | +|------------------------------|--------------------------------------------------------------------|------------------------------------| +| HuggingFaceH4/starchat-alpha | `completion('together_ai/HuggingFaceH4/starchat-alpha', messages)` | `os.environ['TOGETHERAI_API_KEY']` | ### Code LLMs -| Model Name | Function Call | Required OS Variables | -|-----------------------------------|------------------------------------------------------------------------|---------------------------------| -| togethercomputer/CodeLlama-34b | `completion('together_ai/togethercomputer/CodeLlama-34b', messages)` | `os.environ['TOGETHERAI_API_KEY']` | -| togethercomputer/CodeLlama-34b-Instruct | `completion('together_ai/togethercomputer/CodeLlama-34b-Instruct', messages)` | `os.environ['TOGETHERAI_API_KEY']` | -| togethercomputer/CodeLlama-34b-Python | `completion('together_ai/togethercomputer/CodeLlama-34b-Python', messages)` | `os.environ['TOGETHERAI_API_KEY']` | -| defog/sqlcoder | `completion('together_ai/defog/sqlcoder', messages)` | `os.environ['TOGETHERAI_API_KEY']` | -| NumbersStation/nsql-llama-2-7B | `completion('together_ai/NumbersStation/nsql-llama-2-7B', messages)` | `os.environ['TOGETHERAI_API_KEY']` | -| WizardLM/WizardCoder-15B-V1.0 | `completion('together_ai/WizardLM/WizardCoder-15B-V1.0', 
messages)` | `os.environ['TOGETHERAI_API_KEY']` | -| WizardLM/WizardCoder-Python-34B-V1.0 | `completion('together_ai/WizardLM/WizardCoder-Python-34B-V1.0', messages)` | `os.environ['TOGETHERAI_API_KEY']` | +| Model Name | Function Call | Required OS Variables | +|-----------------------------------------|-------------------------------------------------------------------------------|------------------------------------| +| togethercomputer/CodeLlama-34b | `completion('together_ai/togethercomputer/CodeLlama-34b', messages)` | `os.environ['TOGETHERAI_API_KEY']` | +| togethercomputer/CodeLlama-34b-Instruct | `completion('together_ai/togethercomputer/CodeLlama-34b-Instruct', messages)` | `os.environ['TOGETHERAI_API_KEY']` | +| togethercomputer/CodeLlama-34b-Python | `completion('together_ai/togethercomputer/CodeLlama-34b-Python', messages)` | `os.environ['TOGETHERAI_API_KEY']` | +| defog/sqlcoder | `completion('together_ai/defog/sqlcoder', messages)` | `os.environ['TOGETHERAI_API_KEY']` | +| NumbersStation/nsql-llama-2-7B | `completion('together_ai/NumbersStation/nsql-llama-2-7B', messages)` | `os.environ['TOGETHERAI_API_KEY']` | +| WizardLM/WizardCoder-15B-V1.0 | `completion('together_ai/WizardLM/WizardCoder-15B-V1.0', messages)` | `os.environ['TOGETHERAI_API_KEY']` | +| WizardLM/WizardCoder-Python-34B-V1.0 | `completion('together_ai/WizardLM/WizardCoder-Python-34B-V1.0', messages)` | `os.environ['TOGETHERAI_API_KEY']` | ### Language LLMs -| Model Name | Function Call | Required OS Variables | -|-----------------------------------|------------------------------------------------------------------------|---------------------------------| -| NousResearch/Nous-Hermes-Llama2-13b | `completion('together_ai/NousResearch/Nous-Hermes-Llama2-13b', messages)` | `os.environ['TOGETHERAI_API_KEY']` | -| Austism/chronos-hermes-13b | `completion('together_ai/Austism/chronos-hermes-13b', messages)` | `os.environ['TOGETHERAI_API_KEY']` | -| upstage/SOLAR-0-70b-16bit | 
`completion('together_ai/upstage/SOLAR-0-70b-16bit', messages)` | `os.environ['TOGETHERAI_API_KEY']` | -| WizardLM/WizardLM-70B-V1.0 | `completion('together_ai/WizardLM/WizardLM-70B-V1.0', messages)` | `os.environ['TOGETHERAI_API_KEY']` | +| Model Name | Function Call | Required OS Variables | +|-------------------------------------|---------------------------------------------------------------------------|------------------------------------| +| NousResearch/Nous-Hermes-Llama2-13b | `completion('together_ai/NousResearch/Nous-Hermes-Llama2-13b', messages)` | `os.environ['TOGETHERAI_API_KEY']` | +| Austism/chronos-hermes-13b | `completion('together_ai/Austism/chronos-hermes-13b', messages)` | `os.environ['TOGETHERAI_API_KEY']` | +| upstage/SOLAR-0-70b-16bit | `completion('together_ai/upstage/SOLAR-0-70b-16bit', messages)` | `os.environ['TOGETHERAI_API_KEY']` | +| WizardLM/WizardLM-70B-V1.0 | `completion('together_ai/WizardLM/WizardLM-70B-V1.0', messages)` | `os.environ['TOGETHERAI_API_KEY']` | ## Prompt Templates diff --git a/docs/my-website/docs/providers/vllm.md b/docs/my-website/docs/providers/vllm.md index c22cd4fc2d..61dd1fffdc 100644 --- a/docs/my-website/docs/providers/vllm.md +++ b/docs/my-website/docs/providers/vllm.md @@ -155,14 +155,14 @@ def default_pt(messages): #### Models we already have Prompt Templates for -| Model Name | Works for Models | Function Call | -| -------- | -------- | -------- | -| meta-llama/Llama-2-7b-chat | All meta-llama llama2 chat models| `completion(model='vllm/meta-llama/Llama-2-7b', messages=messages, api_base="your_api_endpoint")` | -| tiiuae/falcon-7b-instruct | All falcon instruct models | `completion(model='vllm/tiiuae/falcon-7b-instruct', messages=messages, api_base="your_api_endpoint")` | -| mosaicml/mpt-7b-chat | All mpt chat models | `completion(model='vllm/mosaicml/mpt-7b-chat', messages=messages, api_base="your_api_endpoint")` | -| codellama/CodeLlama-34b-Instruct-hf | All codellama instruct models | 
`completion(model='vllm/codellama/CodeLlama-34b-Instruct-hf', messages=messages, api_base="your_api_endpoint")` | -| WizardLM/WizardCoder-Python-34B-V1.0 | All wizardcoder models | `completion(model='vllm/WizardLM/WizardCoder-Python-34B-V1.0', messages=messages, api_base="your_api_endpoint")` | -| Phind/Phind-CodeLlama-34B-v2 | All phind-codellama models | `completion(model='vllm/Phind/Phind-CodeLlama-34B-v2', messages=messages, api_base="your_api_endpoint")` | +| Model Name | Works for Models | Function Call | +|--------------------------------------|-----------------------------------|------------------------------------------------------------------------------------------------------------------| +| meta-llama/Llama-2-7b-chat | All meta-llama llama2 chat models | `completion(model='vllm/meta-llama/Llama-2-7b', messages=messages, api_base="your_api_endpoint")` | +| tiiuae/falcon-7b-instruct | All falcon instruct models | `completion(model='vllm/tiiuae/falcon-7b-instruct', messages=messages, api_base="your_api_endpoint")` | +| mosaicml/mpt-7b-chat | All mpt chat models | `completion(model='vllm/mosaicml/mpt-7b-chat', messages=messages, api_base="your_api_endpoint")` | +| codellama/CodeLlama-34b-Instruct-hf | All codellama instruct models | `completion(model='vllm/codellama/CodeLlama-34b-Instruct-hf', messages=messages, api_base="your_api_endpoint")` | +| WizardLM/WizardCoder-Python-34B-V1.0 | All wizardcoder models | `completion(model='vllm/WizardLM/WizardCoder-Python-34B-V1.0', messages=messages, api_base="your_api_endpoint")` | +| Phind/Phind-CodeLlama-34B-v2 | All phind-codellama models | `completion(model='vllm/Phind/Phind-CodeLlama-34B-v2', messages=messages, api_base="your_api_endpoint")` | #### Custom prompt templates diff --git a/docs/my-website/docs/providers/watsonx.md b/docs/my-website/docs/providers/watsonx.md index d8c5740a86..7a42a54edd 100644 --- a/docs/my-website/docs/providers/watsonx.md +++ b/docs/my-website/docs/providers/watsonx.md @@ -251,23 
+251,23 @@ response = completion( Here are some examples of models available in IBM watsonx.ai that you can use with LiteLLM: -| Mode Name | Command | -| ---------- | --------- | -| Flan T5 XXL | `completion(model=watsonx/google/flan-t5-xxl, messages=messages)` | -| Flan Ul2 | `completion(model=watsonx/google/flan-ul2, messages=messages)` | -| Mt0 XXL | `completion(model=watsonx/bigscience/mt0-xxl, messages=messages)` | -| Gpt Neox | `completion(model=watsonx/eleutherai/gpt-neox-20b, messages=messages)` | -| Mpt 7B Instruct2 | `completion(model=watsonx/ibm/mpt-7b-instruct2, messages=messages)` | -| Starcoder | `completion(model=watsonx/bigcode/starcoder, messages=messages)` | -| Llama 2 70B Chat | `completion(model=watsonx/meta-llama/llama-2-70b-chat, messages=messages)` | -| Llama 2 13B Chat | `completion(model=watsonx/meta-llama/llama-2-13b-chat, messages=messages)` | -| Granite 13B Instruct | `completion(model=watsonx/ibm/granite-13b-instruct-v1, messages=messages)` | -| Granite 13B Chat | `completion(model=watsonx/ibm/granite-13b-chat-v1, messages=messages)` | -| Flan T5 XL | `completion(model=watsonx/google/flan-t5-xl, messages=messages)` | -| Granite 13B Chat V2 | `completion(model=watsonx/ibm/granite-13b-chat-v2, messages=messages)` | -| Granite 13B Instruct V2 | `completion(model=watsonx/ibm/granite-13b-instruct-v2, messages=messages)` | -| Elyza Japanese Llama 2 7B Instruct | `completion(model=watsonx/elyza/elyza-japanese-llama-2-7b-instruct, messages=messages)` | -| Mixtral 8X7B Instruct V01 Q | `completion(model=watsonx/ibm-mistralai/mixtral-8x7b-instruct-v01-q, messages=messages)` | +| Model Name | Command | +|------------------------------------|------------------------------------------------------------------------------------------| +| Flan T5 XXL | `completion(model=watsonx/google/flan-t5-xxl, messages=messages)` | +| Flan Ul2 | `completion(model=watsonx/google/flan-ul2, messages=messages)` | +| Mt0 XXL | 
`completion(model=watsonx/bigscience/mt0-xxl, messages=messages)` | +| Gpt Neox | `completion(model=watsonx/eleutherai/gpt-neox-20b, messages=messages)` | +| Mpt 7B Instruct2 | `completion(model=watsonx/ibm/mpt-7b-instruct2, messages=messages)` | +| Starcoder | `completion(model=watsonx/bigcode/starcoder, messages=messages)` | +| Llama 2 70B Chat | `completion(model=watsonx/meta-llama/llama-2-70b-chat, messages=messages)` | +| Llama 2 13B Chat | `completion(model=watsonx/meta-llama/llama-2-13b-chat, messages=messages)` | +| Granite 13B Instruct | `completion(model=watsonx/ibm/granite-13b-instruct-v1, messages=messages)` | +| Granite 13B Chat | `completion(model=watsonx/ibm/granite-13b-chat-v1, messages=messages)` | +| Flan T5 XL | `completion(model=watsonx/google/flan-t5-xl, messages=messages)` | +| Granite 13B Chat V2 | `completion(model=watsonx/ibm/granite-13b-chat-v2, messages=messages)` | +| Granite 13B Instruct V2 | `completion(model=watsonx/ibm/granite-13b-instruct-v2, messages=messages)` | +| Elyza Japanese Llama 2 7B Instruct | `completion(model=watsonx/elyza/elyza-japanese-llama-2-7b-instruct, messages=messages)` | +| Mixtral 8X7B Instruct V01 Q | `completion(model=watsonx/ibm-mistralai/mixtral-8x7b-instruct-v01-q, messages=messages)` | For a list of all available models in watsonx.ai, see [here](https://dataplatform.cloud.ibm.com/docs/content/wsj/analyze-data/fm-models.html?context=wx&locale=en&audience=wdp). 
@@ -275,10 +275,10 @@ For a list of all available models in watsonx.ai, see [here](https://dataplatfor ## Supported IBM watsonx.ai Embedding Models -| Model Name | Function Call | -|----------------------|---------------------------------------------| -| Slate 30m | `embedding(model="watsonx/ibm/slate-30m-english-rtrvr", input=input)` | -| Slate 125m | `embedding(model="watsonx/ibm/slate-125m-english-rtrvr", input=input)` | +| Model Name | Function Call | +|------------|------------------------------------------------------------------------| +| Slate 30m | `embedding(model="watsonx/ibm/slate-30m-english-rtrvr", input=input)` | +| Slate 125m | `embedding(model="watsonx/ibm/slate-125m-english-rtrvr", input=input)` | For a list of all available embedding models in watsonx.ai, see [here](https://dataplatform.cloud.ibm.com/docs/content/wsj/analyze-data/fm-models-embed.html?context=wx). \ No newline at end of file diff --git a/docs/my-website/docs/providers/xinference.md b/docs/my-website/docs/providers/xinference.md index 3c927dcb43..3686c02098 100644 --- a/docs/my-website/docs/providers/xinference.md +++ b/docs/my-website/docs/providers/xinference.md @@ -37,26 +37,26 @@ print(response) ## Supported Models All models listed here https://inference.readthedocs.io/en/latest/models/builtin/embedding/index.html are supported -| Model Name | Function Call | -|------------------------------|--------------------------------------------------------| -| bge-base-en | `embedding(model="xinference/bge-base-en", input)` | -| bge-base-en-v1.5 | `embedding(model="xinference/bge-base-en-v1.5", input)` | -| bge-base-zh | `embedding(model="xinference/bge-base-zh", input)` | -| bge-base-zh-v1.5 | `embedding(model="xinference/bge-base-zh-v1.5", input)` | -| bge-large-en | `embedding(model="xinference/bge-large-en", input)` | -| bge-large-en-v1.5 | `embedding(model="xinference/bge-large-en-v1.5", input)` | -| bge-large-zh | `embedding(model="xinference/bge-large-zh", input)` | -| 
bge-large-zh-noinstruct | `embedding(model="xinference/bge-large-zh-noinstruct", input)` | -| bge-large-zh-v1.5 | `embedding(model="xinference/bge-large-zh-v1.5", input)` | -| bge-small-en-v1.5 | `embedding(model="xinference/bge-small-en-v1.5", input)` | -| bge-small-zh | `embedding(model="xinference/bge-small-zh", input)` | -| bge-small-zh-v1.5 | `embedding(model="xinference/bge-small-zh-v1.5", input)` | -| e5-large-v2 | `embedding(model="xinference/e5-large-v2", input)` | -| gte-base | `embedding(model="xinference/gte-base", input)` | -| gte-large | `embedding(model="xinference/gte-large", input)` | -| jina-embeddings-v2-base-en | `embedding(model="xinference/jina-embeddings-v2-base-en", input)` | -| jina-embeddings-v2-small-en | `embedding(model="xinference/jina-embeddings-v2-small-en", input)` | -| multilingual-e5-large | `embedding(model="xinference/multilingual-e5-large", input)` | +| Model Name | Function Call | +|-----------------------------|--------------------------------------------------------------------| +| bge-base-en | `embedding(model="xinference/bge-base-en", input)` | +| bge-base-en-v1.5 | `embedding(model="xinference/bge-base-en-v1.5", input)` | +| bge-base-zh | `embedding(model="xinference/bge-base-zh", input)` | +| bge-base-zh-v1.5 | `embedding(model="xinference/bge-base-zh-v1.5", input)` | +| bge-large-en | `embedding(model="xinference/bge-large-en", input)` | +| bge-large-en-v1.5 | `embedding(model="xinference/bge-large-en-v1.5", input)` | +| bge-large-zh | `embedding(model="xinference/bge-large-zh", input)` | +| bge-large-zh-noinstruct | `embedding(model="xinference/bge-large-zh-noinstruct", input)` | +| bge-large-zh-v1.5 | `embedding(model="xinference/bge-large-zh-v1.5", input)` | +| bge-small-en-v1.5 | `embedding(model="xinference/bge-small-en-v1.5", input)` | +| bge-small-zh | `embedding(model="xinference/bge-small-zh", input)` | +| bge-small-zh-v1.5 | `embedding(model="xinference/bge-small-zh-v1.5", input)` | +| e5-large-v2 | 
`embedding(model="xinference/e5-large-v2", input)` | +| gte-base | `embedding(model="xinference/gte-base", input)` | +| gte-large | `embedding(model="xinference/gte-large", input)` | +| jina-embeddings-v2-base-en | `embedding(model="xinference/jina-embeddings-v2-base-en", input)` | +| jina-embeddings-v2-small-en | `embedding(model="xinference/jina-embeddings-v2-small-en", input)` | +| multilingual-e5-large | `embedding(model="xinference/multilingual-e5-large", input)` | diff --git a/docs/my-website/docs/proxy/deploy.md b/docs/my-website/docs/proxy/deploy.md index 6fb8c5bfe3..b756f56e2e 100644 --- a/docs/my-website/docs/proxy/deploy.md +++ b/docs/my-website/docs/proxy/deploy.md @@ -260,7 +260,7 @@ Requirements: -We maintain a [seperate Dockerfile](https://github.com/BerriAI/litellm/pkgs/container/litellm-database) for reducing build time when running LiteLLM proxy with a connected Postgres Database +We maintain a [separate Dockerfile](https://github.com/BerriAI/litellm/pkgs/container/litellm-database) for reducing build time when running LiteLLM proxy with a connected Postgres Database ```shell docker pull ghcr.io/berriai/litellm-database:main-latest diff --git a/docs/my-website/docs/proxy/enterprise.md b/docs/my-website/docs/proxy/enterprise.md index 8e2b79a5fb..2b984b3e78 100644 --- a/docs/my-website/docs/proxy/enterprise.md +++ b/docs/my-website/docs/proxy/enterprise.md @@ -2,30 +2,213 @@ import Image from '@theme/IdealImage'; import Tabs from '@theme/Tabs'; import TabItem from '@theme/TabItem'; -# ✨ Enterprise Features - Content Mod, SSO, Custom Swagger +# ✨ Enterprise Features - SSO, Audit Logs, Guardrails -Features here are behind a commercial license in our `/enterprise` folder. 
[**See Code**](https://github.com/BerriAI/litellm/tree/main/enterprise) +:::tip -:::info - -[Get Started with Enterprise here](https://github.com/BerriAI/litellm/tree/main/enterprise) +Get in touch with us [here](https://calendly.com/d/4mp-gd3-k5k/litellm-1-1-onboarding-chat) ::: Features: - ✅ [SSO for Admin UI](./ui.md#✨-enterprise-features) -- ✅ Content Moderation with LLM Guard, LlamaGuard, Google Text Moderations -- ✅ [Prompt Injection Detection (with LakeraAI API)](#prompt-injection-detection-lakeraai) +- ✅ [Audit Logs](#audit-logs) +- ✅ [Tracking Spend for Custom Tags](#tracking-spend-for-custom-tags) +- ✅ [Content Moderation with LLM Guard, LlamaGuard, Google Text Moderations](#content-moderation) +- ✅ [Prompt Injection Detection (with LakeraAI API)](#prompt-injection-detection---lakeraai) +- ✅ [Custom Branding + Routes on Swagger Docs](#swagger-docs---custom-routes--branding) - ✅ Reject calls from Blocked User list - ✅ Reject calls (incoming / outgoing) with Banned Keywords (e.g. competitors) -- ✅ Don't log/store specific requests to Langfuse, Sentry, etc. 
(eg confidential LLM requests) -- ✅ Tracking Spend for Custom Tags -- ✅ Custom Branding + Routes on Swagger Docs -- ✅ Audit Logs for `Created At, Created By` when Models Added + +## Audit Logs + +Store Audit logs for **Create, Update Delete Operations** done on `Teams` and `Virtual Keys` + +**Step 1** Switch on audit Logs +```shell +litellm_settings: + store_audit_logs: true +``` + +Start the litellm proxy with this config + +**Step 2** Test it - Create a Team + +```shell +curl --location 'http://0.0.0.0:4000/team/new' \ + --header 'Authorization: Bearer sk-1234' \ + --header 'Content-Type: application/json' \ + --data '{ + "max_budget": 2 + }' +``` + +**Step 3** Expected Log + +```json +{ + "id": "e1760e10-4264-4499-82cd-c08c86c8d05b", + "updated_at": "2024-06-06T02:10:40.836420+00:00", + "changed_by": "109010464461339474872", + "action": "created", + "table_name": "LiteLLM_TeamTable", + "object_id": "82e725b5-053f-459d-9a52-867191635446", + "before_value": null, + "updated_values": { + "team_id": "82e725b5-053f-459d-9a52-867191635446", + "admins": [], + "members": [], + "members_with_roles": [ + { + "role": "admin", + "user_id": "109010464461339474872" + } + ], + "max_budget": 2.0, + "models": [], + "blocked": false + } +} +``` + + +## Tracking Spend for Custom Tags + +Requirements: + +- Virtual Keys & a database should be set up, see [virtual keys](https://docs.litellm.ai/docs/proxy/virtual_keys) + +#### Usage - /chat/completions requests with request tags + + + + + + + +Set `extra_body={"metadata": { }}` to `metadata` you want to pass + +```python +import openai +client = openai.OpenAI( + api_key="anything", + base_url="http://0.0.0.0:4000" +) + +# request sent to model set on litellm proxy, `litellm --model` +response = client.chat.completions.create( + model="gpt-3.5-turbo", + messages = [ + { + "role": "user", + "content": "this is a test request, write a short poem" + } + ], + extra_body={ + "metadata": { + "tags": ["model-anthropic-claude-v2.1", 
"app-ishaan-prod"] + } + } +) + +print(response) +``` + + + + +Pass `metadata` as part of the request body + +```shell +curl --location 'http://0.0.0.0:4000/chat/completions' \ + --header 'Content-Type: application/json' \ + --data '{ + "model": "gpt-3.5-turbo", + "messages": [ + { + "role": "user", + "content": "what llm are you" + } + ], + "metadata": {"tags": ["model-anthropic-claude-v2.1", "app-ishaan-prod"]} +}' +``` + + + +```python +from langchain.chat_models import ChatOpenAI +from langchain.prompts.chat import ( + ChatPromptTemplate, + HumanMessagePromptTemplate, + SystemMessagePromptTemplate, +) +from langchain.schema import HumanMessage, SystemMessage + +chat = ChatOpenAI( + openai_api_base="http://0.0.0.0:4000", + model = "gpt-3.5-turbo", + temperature=0.1, + extra_body={ + "metadata": { + "tags": ["model-anthropic-claude-v2.1", "app-ishaan-prod"] + } + } +) + +messages = [ + SystemMessage( + content="You are a helpful assistant that im using to make a test request to." + ), + HumanMessage( + content="test from litellm. 
tell me why it's amazing in 1 sentence" + ), +] +response = chat(messages) + +print(response) +``` + + + + + +#### Viewing Spend per tag + +#### `/spend/tags` Request Format +```shell +curl -X GET "http://0.0.0.0:4000/spend/tags" \ +-H "Authorization: Bearer sk-1234" +``` + +#### `/spend/tags`Response Format +```shell +[ + { + "individual_request_tag": "model-anthropic-claude-v2.1", + "log_count": 6, + "total_spend": 0.000672 + }, + { + "individual_request_tag": "app-ishaan-local", + "log_count": 4, + "total_spend": 0.000448 + }, + { + "individual_request_tag": "app-ishaan-prod", + "log_count": 2, + "total_spend": 0.000224 + } +] + +``` + + + ## Content Moderation -### Content Moderation with LLM Guard +#### Content Moderation with LLM Guard Set the LLM Guard API Base in your environment @@ -160,7 +343,7 @@ curl --location 'http://0.0.0.0:4000/v1/chat/completions' \ -### Content Moderation with LlamaGuard +#### Content Moderation with LlamaGuard Currently works with Sagemaker's LlamaGuard endpoint. @@ -194,7 +377,7 @@ callbacks: ["llamaguard_moderations"] -### Content Moderation with Google Text Moderation +#### Content Moderation with Google Text Moderation Requires your GOOGLE_APPLICATION_CREDENTIALS to be set in your .env (same as VertexAI). @@ -250,7 +433,7 @@ Here are the category specific values: -### Content Moderation with OpenAI Moderations +#### Content Moderation with OpenAI Moderations Use this if you want to reject /chat, /completions, /embeddings calls that fail OpenAI Moderations checks @@ -276,7 +459,7 @@ Step 1 Set a `LAKERA_API_KEY` in your env LAKERA_API_KEY="7a91a1a6059da*******" ``` -Step 2. Add `lakera_prompt_injection` to your calbacks +Step 2. Add `lakera_prompt_injection` to your callbacks ```yaml litellm_settings: @@ -302,6 +485,42 @@ curl --location 'http://localhost:4000/chat/completions' \ }' ``` +## Swagger Docs - Custom Routes + Branding + +:::info + +Requires a LiteLLM Enterprise key to use. 
Get a free 2-week license [here](https://forms.gle/sTDVprBs18M4V8Le8) + +::: + +Set LiteLLM Key in your environment + +```bash +LITELLM_LICENSE="" +``` + +#### Customize Title + Description + +In your environment, set: + +```bash +DOCS_TITLE="TotalGPT" +DOCS_DESCRIPTION="Sample Company Description" +``` + +#### Customize Routes + +Hide admin routes from users. + +In your environment, set: + +```bash +DOCS_FILTERED="True" # only shows openai routes to user +``` + + + + ## Enable Blocked User Lists If any call is made to proxy with this user id, it'll be rejected - use this if you want to let users opt-out of ai features @@ -417,176 +636,6 @@ curl --location 'http://0.0.0.0:4000/chat/completions' \ } ' ``` -## Tracking Spend for Custom Tags - -Requirements: - -- Virtual Keys & a database should be set up, see [virtual keys](https://docs.litellm.ai/docs/proxy/virtual_keys) - -### Usage - /chat/completions requests with request tags - - - - - - - -Set `extra_body={"metadata": { }}` to `metadata` you want to pass - -```python -import openai -client = openai.OpenAI( - api_key="anything", - base_url="http://0.0.0.0:4000" -) - -# request sent to model set on litellm proxy, `litellm --model` -response = client.chat.completions.create( - model="gpt-3.5-turbo", - messages = [ - { - "role": "user", - "content": "this is a test request, write a short poem" - } - ], - extra_body={ - "metadata": { - "tags": ["model-anthropic-claude-v2.1", "app-ishaan-prod"] - } - } -) - -print(response) -``` - - - - -Pass `metadata` as part of the request body - -```shell -curl --location 'http://0.0.0.0:4000/chat/completions' \ - --header 'Content-Type: application/json' \ - --data '{ - "model": "gpt-3.5-turbo", - "messages": [ - { - "role": "user", - "content": "what llm are you" - } - ], - "metadata": {"tags": ["model-anthropic-claude-v2.1", "app-ishaan-prod"]} -}' -``` - - - -```python -from langchain.chat_models import ChatOpenAI -from langchain.prompts.chat import ( - ChatPromptTemplate, - 
HumanMessagePromptTemplate, - SystemMessagePromptTemplate, -) -from langchain.schema import HumanMessage, SystemMessage - -chat = ChatOpenAI( - openai_api_base="http://0.0.0.0:4000", - model = "gpt-3.5-turbo", - temperature=0.1, - extra_body={ - "metadata": { - "tags": ["model-anthropic-claude-v2.1", "app-ishaan-prod"] - } - } -) - -messages = [ - SystemMessage( - content="You are a helpful assistant that im using to make a test request to." - ), - HumanMessage( - content="test from litellm. tell me why it's amazing in 1 sentence" - ), -] -response = chat(messages) - -print(response) -``` - - - - - -### Viewing Spend per tag - -#### `/spend/tags` Request Format -```shell -curl -X GET "http://0.0.0.0:4000/spend/tags" \ --H "Authorization: Bearer sk-1234" -``` - -#### `/spend/tags`Response Format -```shell -[ - { - "individual_request_tag": "model-anthropic-claude-v2.1", - "log_count": 6, - "total_spend": 0.000672 - }, - { - "individual_request_tag": "app-ishaan-local", - "log_count": 4, - "total_spend": 0.000448 - }, - { - "individual_request_tag": "app-ishaan-prod", - "log_count": 2, - "total_spend": 0.000224 - } -] - -``` - - - - -## Swagger Docs - Custom Routes + Branding - -:::info - -Requires a LiteLLM Enterprise key to use. Get a free 2-week license [here](https://forms.gle/sTDVprBs18M4V8Le8) - -::: - -Set LiteLLM Key in your environment - -```bash -LITELLM_LICENSE="" -``` - -### Customize Title + Description - -In your environment, set: - -```bash -DOCS_TITLE="TotalGPT" -DOCS_DESCRIPTION="Sample Company Description" -``` - -### Customize Routes - -Hide admin routes from users. 
- -In your environment, set: - -```bash -DOCS_FILTERED="True" # only shows openai routes to user -``` - - ## Public Model Hub diff --git a/docs/my-website/docs/proxy/logging.md b/docs/my-website/docs/proxy/logging.md index 692d69d29f..fd57545ca9 100644 --- a/docs/my-website/docs/proxy/logging.md +++ b/docs/my-website/docs/proxy/logging.md @@ -41,7 +41,9 @@ litellm_settings: **Step 3**: Set required env variables for logging to langfuse ```shell export LANGFUSE_PUBLIC_KEY="pk_kk" -export LANGFUSE_SECRET_KEY="sk_ss +export LANGFUSE_SECRET_KEY="sk_ss" +# Optional, defaults to https://cloud.langfuse.com +export LANGFUSE_HOST="https://xxx.langfuse.com" ``` **Step 4**: Start the proxy, make a test request diff --git a/docs/my-website/docs/scheduler.md b/docs/my-website/docs/scheduler.md index 486549a08c..e7943c4591 100644 --- a/docs/my-website/docs/scheduler.md +++ b/docs/my-website/docs/scheduler.md @@ -100,4 +100,76 @@ print(response) ``` - \ No newline at end of file + + +## Advanced - Redis Caching + +Use redis caching to do request prioritization across multiple instances of LiteLLM. + +### SDK +```python +from litellm import Router + +router = Router( + model_list=[ + { + "model_name": "gpt-3.5-turbo", + "litellm_params": { + "model": "gpt-3.5-turbo", + "mock_response": "Hello world this is Macintosh!", # fakes the LLM API call + "rpm": 1, + }, + }, + ], + ### REDIS PARAMS ### + redis_host=os.environ["REDIS_HOST"], + redis_password=os.environ["REDIS_PASSWORD"], + redis_port=os.environ["REDIS_PORT"], +) + +try: + _response = await router.schedule_acompletion( # 👈 ADDS TO QUEUE + POLLS + MAKES CALL + model="gpt-3.5-turbo", + messages=[{"role": "user", "content": "Hey!"}], + priority=0, # 👈 LOWER IS BETTER + ) +except Exception as e: + print("didn't make request") +``` + +### PROXY + +```yaml +model_list: + - model_name: gpt-3.5-turbo-fake-model + litellm_params: + model: gpt-3.5-turbo + mock_response: "hello world!" 
+ api_key: my-good-key + +router_settings: + redis_host: os.environ/REDIS_HOST + redis_password: os.environ/REDIS_PASSWORD + redis_port: os.environ/REDIS_PORT +``` + +```bash +$ litellm --config /path/to/config.yaml + +# RUNNING on http://0.0.0.0:4000 +``` + +```bash +curl -X POST 'http://localhost:4000/queue/chat/completions' \ +-H 'Content-Type: application/json' \ +-H 'Authorization: Bearer sk-1234' \ +-d '{ + "model": "gpt-3.5-turbo-fake-model", + "messages": [ + { + "role": "user", + "content": "what is the meaning of the universe? 1234" + }], + "priority": 0 👈 SET VALUE HERE +}' +``` \ No newline at end of file diff --git a/docs/my-website/docs/secret.md b/docs/my-website/docs/secret.md index 2b945837ad..08c2e89d1f 100644 --- a/docs/my-website/docs/secret.md +++ b/docs/my-website/docs/secret.md @@ -1,11 +1,31 @@ # Secret Manager LiteLLM supports reading secrets from Azure Key Vault and Infisical +- AWS Key Management Service +- AWS Secret Manager - [Azure Key Vault](#azure-key-vault) - Google Key Management Service - [Infisical Secret Manager](#infisical-secret-manager) - [.env Files](#env-files) +## AWS Key Management Service + +Use AWS KMS to store a hashed copy of your Proxy Master Key in the environment. + +```bash +export LITELLM_MASTER_KEY="djZ9xjVaZ..." # 👈 ENCRYPTED KEY +export AWS_REGION_NAME="us-west-2" +``` + +```yaml +general_settings: + key_management_system: "aws_kms" + key_management_settings: + hosted_keys: ["LITELLM_MASTER_KEY"] # 👈 WHICH KEYS ARE STORED ON KMS +``` + +[**See Decryption Code**](https://github.com/BerriAI/litellm/blob/a2da2a8f168d45648b61279d4795d647d94f90c9/litellm/utils.py#L10182) + ## AWS Secret Manager Store your proxy keys in AWS Secret Manager. 
diff --git a/docs/my-website/docs/tutorials/finetuned_chat_gpt.md b/docs/my-website/docs/tutorials/finetuned_chat_gpt.md index 641c45b5f8..5dde3b3ff9 100644 --- a/docs/my-website/docs/tutorials/finetuned_chat_gpt.md +++ b/docs/my-website/docs/tutorials/finetuned_chat_gpt.md @@ -1,8 +1,8 @@ # Using Fine-Tuned gpt-3.5-turbo LiteLLM allows you to call `completion` with your fine-tuned gpt-3.5-turbo models -If you're trying to create your custom finetuned gpt-3.5-turbo model following along on this tutorial: https://platform.openai.com/docs/guides/fine-tuning/preparing-your-dataset +If you're trying to create your custom fine-tuned gpt-3.5-turbo model following along on this tutorial: https://platform.openai.com/docs/guides/fine-tuning/preparing-your-dataset -Once you've created your fine tuned model, you can call it with `litellm.completion()` +Once you've created your fine-tuned model, you can call it with `litellm.completion()` ## Usage ```python diff --git a/docs/my-website/package-lock.json b/docs/my-website/package-lock.json index 0738c4d09f..48b5419862 100644 --- a/docs/my-website/package-lock.json +++ b/docs/my-website/package-lock.json @@ -5975,9 +5975,9 @@ } }, "node_modules/caniuse-lite": { - "version": "1.0.30001519", - "resolved": "https://registry.npmjs.org/caniuse-lite/-/caniuse-lite-1.0.30001519.tgz", - "integrity": "sha512-0QHgqR+Jv4bxHMp8kZ1Kn8CH55OikjKJ6JmKkZYP1F3D7w+lnFXF70nG5eNfsZS89jadi5Ywy5UCSKLAglIRkg==", + "version": "1.0.30001629", + "resolved": "https://registry.npmjs.org/caniuse-lite/-/caniuse-lite-1.0.30001629.tgz", + "integrity": "sha512-c3dl911slnQhmxUIT4HhYzT7wnBK/XYpGnYLOj4nJBaRiw52Ibe7YxlDaAeRECvA786zCuExhxIUJ2K7nHMrBw==", "funding": [ { "type": "opencollective", diff --git a/docs/my-website/sidebars.js b/docs/my-website/sidebars.js index f5bdc945ba..88ac5e6cdb 100644 --- a/docs/my-website/sidebars.js +++ b/docs/my-website/sidebars.js @@ -36,6 +36,7 @@ const sidebars = { label: "📖 All Endpoints (Swagger)", href: 
"https://litellm-api.up.railway.app/", }, + "proxy/enterprise", "proxy/demo", "proxy/configs", "proxy/reliability", @@ -45,7 +46,6 @@ const sidebars = { "proxy/customers", "proxy/billing", "proxy/user_keys", - "proxy/enterprise", "proxy/virtual_keys", "proxy/alerting", { diff --git a/docs/my-website/yarn.lock b/docs/my-website/yarn.lock index 3acc481539..e96d90e26f 100644 --- a/docs/my-website/yarn.lock +++ b/docs/my-website/yarn.lock @@ -84,7 +84,7 @@ "@algolia/requester-common" "4.19.1" "@algolia/transporter" "4.19.1" -"@algolia/client-search@4.19.1": +"@algolia/client-search@>= 4.9.1 < 6", "@algolia/client-search@4.19.1": version "4.19.1" resolved "https://registry.npmjs.org/@algolia/client-search/-/client-search-4.19.1.tgz" integrity sha512-mBecfMFS4N+yK/p0ZbK53vrZbL6OtWMk8YmnOv1i0LXx4pelY8TFhqKoTit3NPVPwoSNN0vdSN9dTu1xr1XOVw== @@ -146,13 +146,6 @@ "@jridgewell/gen-mapping" "^0.3.0" "@jridgewell/trace-mapping" "^0.3.9" -"@babel/code-frame@7.10.4", "@babel/code-frame@^7.5.5": - version "7.10.4" - resolved "https://registry.npmjs.org/@babel/code-frame/-/code-frame-7.10.4.tgz" - integrity sha512-vG6SvB6oYEhvgisZNFRmRCUkLz11c7rp+tbNTynGqc6mS1d5ATd/sGyV6W0KZZnXRKMTzZDRgQT3Ou9jhpAfUg== - dependencies: - "@babel/highlight" "^7.10.4" - "@babel/code-frame@^7.0.0", "@babel/code-frame@^7.10.4", "@babel/code-frame@^7.16.0", "@babel/code-frame@^7.22.10", "@babel/code-frame@^7.22.13", "@babel/code-frame@^7.8.3": version "7.22.13" resolved "https://registry.npmjs.org/@babel/code-frame/-/code-frame-7.22.13.tgz" @@ -161,11 +154,39 @@ "@babel/highlight" "^7.22.13" chalk "^2.4.2" +"@babel/code-frame@^7.5.5", "@babel/code-frame@7.10.4": + version "7.10.4" + resolved "https://registry.npmjs.org/@babel/code-frame/-/code-frame-7.10.4.tgz" + integrity sha512-vG6SvB6oYEhvgisZNFRmRCUkLz11c7rp+tbNTynGqc6mS1d5ATd/sGyV6W0KZZnXRKMTzZDRgQT3Ou9jhpAfUg== + dependencies: + "@babel/highlight" "^7.10.4" + "@babel/compat-data@^7.22.5", "@babel/compat-data@^7.22.6", "@babel/compat-data@^7.22.9": 
version "7.22.9" resolved "https://registry.npmjs.org/@babel/compat-data/-/compat-data-7.22.9.tgz" integrity sha512-5UamI7xkUcJ3i9qVDS+KFDEK8/7oJ55/sJMB1Ge7IEapr7KfdfV/HErR+koZwOfd+SgtFKOKRhRakdg++DcJpQ== +"@babel/core@^7.0.0", "@babel/core@^7.0.0-0", "@babel/core@^7.0.0-0 || ^8.0.0-0 <8.0.0", "@babel/core@^7.11.6", "@babel/core@^7.12.0", "@babel/core@^7.12.3", "@babel/core@^7.13.0", "@babel/core@^7.18.6", "@babel/core@^7.19.6", "@babel/core@^7.4.0 || ^8.0.0-0 <8.0.0": + version "7.22.10" + resolved "https://registry.npmjs.org/@babel/core/-/core-7.22.10.tgz" + integrity sha512-fTmqbbUBAwCcre6zPzNngvsI0aNrPZe77AeqvDxWM9Nm+04RrJ3CAmGHA9f7lJQY6ZMhRztNemy4uslDxTX4Qw== + dependencies: + "@ampproject/remapping" "^2.2.0" + "@babel/code-frame" "^7.22.10" + "@babel/generator" "^7.22.10" + "@babel/helper-compilation-targets" "^7.22.10" + "@babel/helper-module-transforms" "^7.22.9" + "@babel/helpers" "^7.22.10" + "@babel/parser" "^7.22.10" + "@babel/template" "^7.22.5" + "@babel/traverse" "^7.22.10" + "@babel/types" "^7.22.10" + convert-source-map "^1.7.0" + debug "^4.1.0" + gensync "^1.0.0-beta.2" + json5 "^2.2.2" + semver "^6.3.1" + "@babel/core@7.12.9": version "7.12.9" resolved "https://registry.npmjs.org/@babel/core/-/core-7.12.9.tgz" @@ -188,27 +209,6 @@ semver "^5.4.1" source-map "^0.5.0" -"@babel/core@^7.12.3", "@babel/core@^7.18.6", "@babel/core@^7.19.6": - version "7.22.10" - resolved "https://registry.npmjs.org/@babel/core/-/core-7.22.10.tgz" - integrity sha512-fTmqbbUBAwCcre6zPzNngvsI0aNrPZe77AeqvDxWM9Nm+04RrJ3CAmGHA9f7lJQY6ZMhRztNemy4uslDxTX4Qw== - dependencies: - "@ampproject/remapping" "^2.2.0" - "@babel/code-frame" "^7.22.10" - "@babel/generator" "^7.22.10" - "@babel/helper-compilation-targets" "^7.22.10" - "@babel/helper-module-transforms" "^7.22.9" - "@babel/helpers" "^7.22.10" - "@babel/parser" "^7.22.10" - "@babel/template" "^7.22.5" - "@babel/traverse" "^7.22.10" - "@babel/types" "^7.22.10" - convert-source-map "^1.7.0" - debug "^4.1.0" - gensync 
"^1.0.0-beta.2" - json5 "^2.2.2" - semver "^6.3.1" - "@babel/generator@^7.12.5", "@babel/generator@^7.18.7", "@babel/generator@^7.22.10", "@babel/generator@^7.23.3": version "7.23.3" resolved "https://registry.npmjs.org/@babel/generator/-/generator-7.23.3.tgz" @@ -331,16 +331,16 @@ dependencies: "@babel/types" "^7.22.5" -"@babel/helper-plugin-utils@7.10.4": - version "7.10.4" - resolved "https://registry.npmjs.org/@babel/helper-plugin-utils/-/helper-plugin-utils-7.10.4.tgz" - integrity sha512-O4KCvQA6lLiMU9l2eawBPMf1xPP8xPfB3iEQw150hOVTqj/rfXz0ThTb4HEzqQfs2Bmo5Ay8BzxfzVtBrr9dVg== - "@babel/helper-plugin-utils@^7.0.0", "@babel/helper-plugin-utils@^7.10.4", "@babel/helper-plugin-utils@^7.12.13", "@babel/helper-plugin-utils@^7.14.5", "@babel/helper-plugin-utils@^7.18.6", "@babel/helper-plugin-utils@^7.22.5", "@babel/helper-plugin-utils@^7.8.0", "@babel/helper-plugin-utils@^7.8.3": version "7.22.5" resolved "https://registry.npmjs.org/@babel/helper-plugin-utils/-/helper-plugin-utils-7.22.5.tgz" integrity sha512-uLls06UVKgFG9QD4OeFYLEGteMIAa5kpTPcFL28yuCIIzsf6ZyKZMllKVOCZFhiZ5ptnwX4mtKdWCBE/uT4amg== +"@babel/helper-plugin-utils@7.10.4": + version "7.10.4" + resolved "https://registry.npmjs.org/@babel/helper-plugin-utils/-/helper-plugin-utils-7.10.4.tgz" + integrity sha512-O4KCvQA6lLiMU9l2eawBPMf1xPP8xPfB3iEQw150hOVTqj/rfXz0ThTb4HEzqQfs2Bmo5Ay8BzxfzVtBrr9dVg== + "@babel/helper-remap-async-to-generator@^7.22.5", "@babel/helper-remap-async-to-generator@^7.22.9": version "7.22.9" resolved "https://registry.npmjs.org/@babel/helper-remap-async-to-generator/-/helper-remap-async-to-generator-7.22.9.tgz" @@ -451,7 +451,7 @@ "@babel/helper-create-class-features-plugin" "^7.18.6" "@babel/helper-plugin-utils" "^7.18.6" -"@babel/plugin-proposal-object-rest-spread@7.12.1", "@babel/plugin-proposal-object-rest-spread@^7.12.1": +"@babel/plugin-proposal-object-rest-spread@^7.12.1", "@babel/plugin-proposal-object-rest-spread@7.12.1": version "7.12.1" resolved 
"https://registry.npmjs.org/@babel/plugin-proposal-object-rest-spread/-/plugin-proposal-object-rest-spread-7.12.1.tgz" integrity sha512-s6SowJIjzlhx8o7lsFx5zmY4At6CTtDvgNQDdPzkBQucle58A6b/TTeEBYtyDgmcXjUTM+vE8YOGHZzzbc/ioA== @@ -528,13 +528,6 @@ dependencies: "@babel/helper-plugin-utils" "^7.8.0" -"@babel/plugin-syntax-jsx@7.12.1": - version "7.12.1" - resolved "https://registry.npmjs.org/@babel/plugin-syntax-jsx/-/plugin-syntax-jsx-7.12.1.tgz" - integrity sha512-1yRi7yAtB0ETgxdY9ti/p2TivUxJkTdhu/ZbF9MshVGqOx1TdB3b7xCXs49Fupgg50N45KcAsRP/ZqWjs9SRjg== - dependencies: - "@babel/helper-plugin-utils" "^7.10.4" - "@babel/plugin-syntax-jsx@^7.22.5": version "7.22.5" resolved "https://registry.npmjs.org/@babel/plugin-syntax-jsx/-/plugin-syntax-jsx-7.22.5.tgz" @@ -542,6 +535,13 @@ dependencies: "@babel/helper-plugin-utils" "^7.22.5" +"@babel/plugin-syntax-jsx@7.12.1": + version "7.12.1" + resolved "https://registry.npmjs.org/@babel/plugin-syntax-jsx/-/plugin-syntax-jsx-7.12.1.tgz" + integrity sha512-1yRi7yAtB0ETgxdY9ti/p2TivUxJkTdhu/ZbF9MshVGqOx1TdB3b7xCXs49Fupgg50N45KcAsRP/ZqWjs9SRjg== + dependencies: + "@babel/helper-plugin-utils" "^7.10.4" + "@babel/plugin-syntax-logical-assignment-operators@^7.10.4": version "7.10.4" resolved "https://registry.npmjs.org/@babel/plugin-syntax-logical-assignment-operators/-/plugin-syntax-logical-assignment-operators-7.10.4.tgz" @@ -563,7 +563,7 @@ dependencies: "@babel/helper-plugin-utils" "^7.10.4" -"@babel/plugin-syntax-object-rest-spread@7.8.3", "@babel/plugin-syntax-object-rest-spread@^7.8.0", "@babel/plugin-syntax-object-rest-spread@^7.8.3": +"@babel/plugin-syntax-object-rest-spread@^7.8.0", "@babel/plugin-syntax-object-rest-spread@^7.8.3", "@babel/plugin-syntax-object-rest-spread@7.8.3": version "7.8.3" resolved "https://registry.npmjs.org/@babel/plugin-syntax-object-rest-spread/-/plugin-syntax-object-rest-spread-7.8.3.tgz" integrity sha512-XoqMijGZb9y3y2XskN+P1wUGiVwWZ5JmoDRwx5+3GmEplNyVM2s2Dg8ILFQm8rWM48orGy5YpI5Bl8U1y7ydlA== @@ 
-1279,7 +1279,7 @@ "@docsearch/css" "3.5.1" algoliasearch "^4.0.0" -"@docusaurus/core@2.4.1": +"@docusaurus/core@^2.0.0-alpha.60 || ^2.0.0", "@docusaurus/core@2.4.1": version "2.4.1" resolved "https://registry.npmjs.org/@docusaurus/core/-/core-2.4.1.tgz" integrity sha512-SNsY7PshK3Ri7vtsLXVeAJGS50nJN3RgF836zkyUfAD01Fq+sAk5EwWgLw+nnm5KVNGDu7PRR2kRGDsWvqpo0g== @@ -1502,7 +1502,7 @@ "@docusaurus/utils-validation" "2.4.1" tslib "^2.4.0" -"@docusaurus/plugin-google-gtag@2.4.1", "@docusaurus/plugin-google-gtag@^2.4.1": +"@docusaurus/plugin-google-gtag@^2.4.1", "@docusaurus/plugin-google-gtag@2.4.1": version "2.4.1" resolved "https://registry.npmjs.org/@docusaurus/plugin-google-gtag/-/plugin-google-gtag-2.4.1.tgz" integrity sha512-mKIefK+2kGTQBYvloNEKtDmnRD7bxHLsBcxgnbt4oZwzi2nxCGjPX6+9SQO2KCN5HZbNrYmGo5GJfMgoRvy6uA== @@ -1573,7 +1573,7 @@ "@docusaurus/theme-search-algolia" "2.4.1" "@docusaurus/types" "2.4.1" -"@docusaurus/react-loadable@5.5.2", "react-loadable@npm:@docusaurus/react-loadable@5.5.2": +"@docusaurus/react-loadable@5.5.2": version "5.5.2" resolved "https://registry.npmjs.org/@docusaurus/react-loadable/-/react-loadable-5.5.2.tgz" integrity sha512-A3dYjdBGuy0IGT+wyLIGIKLRE+sAk1iNk0f1HjNDysO7u8lhL4N3VEm+FAubmJbAztn94F7MxBTPmnixbiyFdQ== @@ -1671,7 +1671,7 @@ fs-extra "^10.1.0" tslib "^2.4.0" -"@docusaurus/types@2.4.1": +"@docusaurus/types@*", "@docusaurus/types@2.4.1": version "2.4.1" resolved "https://registry.npmjs.org/@docusaurus/types/-/types-2.4.1.tgz" integrity sha512-0R+cbhpMkhbRXX138UOc/2XZFF8hiZa6ooZAEEJFp5scytzCw4tC1gChMFXrpa3d2tYE6AX8IrOEpSonLmfQuQ== @@ -1857,16 +1857,16 @@ "@nodelib/fs.stat" "2.0.5" run-parallel "^1.1.9" -"@nodelib/fs.stat@2.0.5", "@nodelib/fs.stat@^2.0.2": - version "2.0.5" - resolved "https://registry.npmjs.org/@nodelib/fs.stat/-/fs.stat-2.0.5.tgz" - integrity sha512-RkhPPp2zrqDAQA/2jNhnztcPAlv64XdhIp7a7454A5ovI7Bukxgt7MX7udwAu3zg1DcpPU0rz3VV1SeaqvY4+A== - "@nodelib/fs.stat@^1.1.2": version "1.1.3" resolved 
"https://registry.npmjs.org/@nodelib/fs.stat/-/fs.stat-1.1.3.tgz" integrity sha512-shAmDyaQC4H92APFoIaVDHCx5bStIocgvbwQyxPRrbUY20V1EYTbSDchWbuwlMG3V17cprZhA6+78JfB+3DTPw== +"@nodelib/fs.stat@^2.0.2", "@nodelib/fs.stat@2.0.5": + version "2.0.5" + resolved "https://registry.npmjs.org/@nodelib/fs.stat/-/fs.stat-2.0.5.tgz" + integrity sha512-RkhPPp2zrqDAQA/2jNhnztcPAlv64XdhIp7a7454A5ovI7Bukxgt7MX7udwAu3zg1DcpPU0rz3VV1SeaqvY4+A== + "@nodelib/fs.walk@^1.2.3": version "1.2.8" resolved "https://registry.npmjs.org/@nodelib/fs.walk/-/fs.walk-1.2.8.tgz" @@ -1975,7 +1975,7 @@ "@svgr/babel-plugin-transform-react-native-svg" "^6.5.1" "@svgr/babel-plugin-transform-svg-component" "^6.5.1" -"@svgr/core@^6.5.1": +"@svgr/core@*", "@svgr/core@^6.0.0", "@svgr/core@^6.5.1": version "6.5.1" resolved "https://registry.npmjs.org/@svgr/core/-/core-6.5.1.tgz" integrity sha512-/xdLSWxK5QkqG524ONSjvg3V/FkNyCv538OIBdQqPNaAta3AsXj/Bd2FbvR87yMbXO2hFSWiAe/Q6IkVPDw+mw== @@ -2248,7 +2248,7 @@ "@types/history" "^4.7.11" "@types/react" "*" -"@types/react@*": +"@types/react@*", "@types/react@>= 16.8.0 < 19.0.0": version "18.2.20" resolved "https://registry.npmjs.org/@types/react/-/react-18.2.20.tgz" integrity sha512-WKNtmsLWJM/3D5mG4U84cysVY31ivmyw85dE84fOCk5Hx78wezB/XEjVPWl2JTZ5FkEeaTJf+VgUAUn3PE7Isw== @@ -2329,7 +2329,7 @@ dependencies: "@types/yargs-parser" "*" -"@webassemblyjs/ast@1.11.6", "@webassemblyjs/ast@^1.11.5": +"@webassemblyjs/ast@^1.11.5", "@webassemblyjs/ast@1.11.6": version "1.11.6" resolved "https://registry.npmjs.org/@webassemblyjs/ast/-/ast-1.11.6.tgz" integrity sha512-IN1xI7PwOvLPgjcf180gC1bqn3q/QaOCwYUahIOhbYUu8KA/3tw2RT/T0Gidi1l7Hhj5D/INhJxiICObqpMu4Q== @@ -2430,7 +2430,7 @@ "@webassemblyjs/wasm-gen" "1.11.6" "@webassemblyjs/wasm-parser" "1.11.6" -"@webassemblyjs/wasm-parser@1.11.6", "@webassemblyjs/wasm-parser@^1.11.5": +"@webassemblyjs/wasm-parser@^1.11.5", "@webassemblyjs/wasm-parser@1.11.6": version "1.11.6" resolved 
"https://registry.npmjs.org/@webassemblyjs/wasm-parser/-/wasm-parser-1.11.6.tgz" integrity sha512-6ZwPeGzMJM3Dqp3hCsLgESxBGtT/OeCvCZ4TA1JUPYgmhAx38tTPR9JaKy0S5H3evQpO/h2uWs2j6Yc/fjkpTQ== @@ -2483,21 +2483,21 @@ acorn-walk@^8.0.0: resolved "https://registry.npmjs.org/acorn-walk/-/acorn-walk-8.2.0.tgz" integrity sha512-k+iyHEuPgSw6SbuDpGQM+06HQUa04DZ3o+F6CSzXMvvI5KMvnaEqXe+YVe555R9nn6GPt404fos4wcgpw12SDA== -acorn@^8.0.4, acorn@^8.7.1, acorn@^8.8.2: +acorn@^8, acorn@^8.0.4, acorn@^8.7.1, acorn@^8.8.2: version "8.10.0" resolved "https://registry.npmjs.org/acorn/-/acorn-8.10.0.tgz" integrity sha512-F0SAmZ8iUtS//m8DmCTA0jlh6TDKkHQyK6xc6V4KDTyZKA9dnvX9/3sRTVQrWm79glUAZbnmmNcdYwUIHWVybw== -address@1.1.2: - version "1.1.2" - resolved "https://registry.npmjs.org/address/-/address-1.1.2.tgz" - integrity sha512-aT6camzM4xEA54YVJYSqxz1kv4IHnQZRtThJJHhUMRExaU5spC7jX5ugSwTaTgJliIgs4VhZOk7htClvQ/LmRA== - address@^1.0.1, address@^1.1.2: version "1.2.2" resolved "https://registry.npmjs.org/address/-/address-1.2.2.tgz" integrity sha512-4B/qKCfeE/ODUaAUpSwfzazo5x29WD4r3vXiWsB7I2mSDAihwEqKO+g8GELZUQSSAo5e1XTYh3ZVfLyxBc12nA== +address@1.1.2: + version "1.1.2" + resolved "https://registry.npmjs.org/address/-/address-1.1.2.tgz" + integrity sha512-aT6camzM4xEA54YVJYSqxz1kv4IHnQZRtThJJHhUMRExaU5spC7jX5ugSwTaTgJliIgs4VhZOk7htClvQ/LmRA== + aggregate-error@^3.0.0: version "3.1.0" resolved "https://registry.npmjs.org/aggregate-error/-/aggregate-error-3.1.0.tgz" @@ -2540,7 +2540,7 @@ ajv-keywords@^5.1.0: dependencies: fast-deep-equal "^3.1.3" -ajv@^6.12.2, ajv@^6.12.3, ajv@^6.12.4, ajv@^6.12.5: +ajv@^6.12.2, ajv@^6.12.3, ajv@^6.12.4, ajv@^6.12.5, ajv@^6.9.1: version "6.12.6" resolved "https://registry.npmjs.org/ajv/-/ajv-6.12.6.tgz" integrity sha512-j3fVLgvTo527anyYyJOGTYJbG+vnnQYvE0m5mmkc1TK+nxAppkCLMIL0aZ4dblVCNoGShhm+kzE4ZUykBoMg4g== @@ -2550,7 +2550,17 @@ ajv@^6.12.2, ajv@^6.12.3, ajv@^6.12.4, ajv@^6.12.5: json-schema-traverse "^0.4.1" uri-js "^4.2.2" -ajv@^8.0.0, ajv@^8.9.0: +ajv@^8.0.0: + 
version "8.12.0" + resolved "https://registry.npmjs.org/ajv/-/ajv-8.12.0.tgz" + integrity sha512-sRu1kpcO9yLtYxBKvqfTeh9KzZEwO3STyX1HT+4CaDzC6HpTGYhIhPIzj9XuKU7KYDwnaeh5hcOwjy1QuJzBPA== + dependencies: + fast-deep-equal "^3.1.1" + json-schema-traverse "^1.0.0" + require-from-string "^2.0.2" + uri-js "^4.2.2" + +ajv@^8.8.2, ajv@^8.9.0: version "8.12.0" resolved "https://registry.npmjs.org/ajv/-/ajv-8.12.0.tgz" integrity sha512-sRu1kpcO9yLtYxBKvqfTeh9KzZEwO3STyX1HT+4CaDzC6HpTGYhIhPIzj9XuKU7KYDwnaeh5hcOwjy1QuJzBPA== @@ -2567,7 +2577,7 @@ algoliasearch-helper@^3.10.0: dependencies: "@algolia/events" "^4.0.1" -algoliasearch@^4.0.0, algoliasearch@^4.13.1: +algoliasearch@^4.0.0, algoliasearch@^4.13.1, "algoliasearch@>= 3.1 < 6", "algoliasearch@>= 4.9.1 < 6": version "4.19.1" resolved "https://registry.npmjs.org/algoliasearch/-/algoliasearch-4.19.1.tgz" integrity sha512-IJF5b93b2MgAzcE/tuzW0yOPnuUyRgGAtaPv5UUywXM8kzqfdwZTO4sPJBzoGz1eOy6H9uEchsJsBFTELZSu+g== @@ -2725,16 +2735,16 @@ array-find-index@^1.0.1: resolved "https://registry.npmjs.org/array-find-index/-/array-find-index-1.0.2.tgz" integrity sha512-M1HQyIXcBGtVywBt8WVdim+lrNaK7VHp99Qt5pSNziXznKHViIBbXWtfRTpEFpF/c4FdfxNAsCCwPp5phBYJtw== -array-flatten@1.1.1: - version "1.1.1" - resolved "https://registry.npmjs.org/array-flatten/-/array-flatten-1.1.1.tgz" - integrity sha512-PCVAQswWemu6UdxsDFFX/+gVeYqKAod3D3UVm91jHwynguOwAvYPhx8nNlM++NqRcK6CxxpUafjmhIdKiHibqg== - array-flatten@^2.1.2: version "2.1.2" resolved "https://registry.npmjs.org/array-flatten/-/array-flatten-2.1.2.tgz" integrity sha512-hNfzcOV8W4NdualtqBFPyVO+54DSJuZGY9qT4pRroB6S9e3iiido2ISIC5h9R2sPJ8H3FHCIiEnsv1lPXO3KtQ== +array-flatten@1.1.1: + version "1.1.1" + resolved "https://registry.npmjs.org/array-flatten/-/array-flatten-1.1.1.tgz" + integrity sha512-PCVAQswWemu6UdxsDFFX/+gVeYqKAod3D3UVm91jHwynguOwAvYPhx8nNlM++NqRcK6CxxpUafjmhIdKiHibqg== + array-union@^1.0.1: version "1.0.2" resolved "https://registry.npmjs.org/array-union/-/array-union-1.0.2.tgz" @@ 
-2829,7 +2839,7 @@ asn1@~0.2.3: dependencies: safer-buffer "~2.1.0" -assert-plus@1.0.0, assert-plus@^1.0.0: +assert-plus@^1.0.0, assert-plus@1.0.0: version "1.0.0" resolved "https://registry.npmjs.org/assert-plus/-/assert-plus-1.0.0.tgz" integrity sha512-NfJ4UzBCcQGLDlQq7nHxH+tv3kyZ0hHQqF5BO6J7tNJeP5do1llPr8dZ8zHonfhAu0PHAdMkSo+8o0wxg9lZWw== @@ -3005,16 +3015,6 @@ balanced-match@^1.0.0: resolved "https://registry.npmjs.org/balanced-match/-/balanced-match-1.0.2.tgz" integrity sha512-3oSeUO0TMV67hN1AmbXsK4yaqU7tjiHlbxRDZOpH0KW9+CeX4bRAaX0Anxt0tx2MrpRpWwQaPwIlISEJhYU5Pw== -base16@^1.0.0: - version "1.0.0" - resolved "https://registry.npmjs.org/base16/-/base16-1.0.0.tgz" - integrity sha512-pNdYkNPiJUnEhnfXV56+sQy8+AaPcG3POZAUnwr4EeqCUZFz4u2PePbo3e5Gj4ziYPCWGUZT9RHisvJKnwFuBQ== - -base64-js@^1.3.1: - version "1.5.1" - resolved "https://registry.npmjs.org/base64-js/-/base64-js-1.5.1.tgz" - integrity sha512-AKpaYlHn8t4SVbOHCy+b5+KKgvR4vrsD8vbvrbiQJps7fKDTkjkDry6ji0rUJjC0kzbNePLwzxq8iypo41qeWA== - base@^0.11.1: version "0.11.2" resolved "https://registry.npmjs.org/base/-/base-0.11.2.tgz" @@ -3028,6 +3028,16 @@ base@^0.11.1: mixin-deep "^1.2.0" pascalcase "^0.1.1" +base16@^1.0.0: + version "1.0.0" + resolved "https://registry.npmjs.org/base16/-/base16-1.0.0.tgz" + integrity sha512-pNdYkNPiJUnEhnfXV56+sQy8+AaPcG3POZAUnwr4EeqCUZFz4u2PePbo3e5Gj4ziYPCWGUZT9RHisvJKnwFuBQ== + +base64-js@^1.3.1: + version "1.5.1" + resolved "https://registry.npmjs.org/base64-js/-/base64-js-1.5.1.tgz" + integrity sha512-AKpaYlHn8t4SVbOHCy+b5+KKgvR4vrsD8vbvrbiQJps7fKDTkjkDry6ji0rUJjC0kzbNePLwzxq8iypo41qeWA== + batch@0.6.1: version "0.6.1" resolved "https://registry.npmjs.org/batch/-/batch-0.6.1.tgz" @@ -3140,7 +3150,7 @@ bluebird@~3.4.1: body-parser@1.20.2: version "1.20.2" - resolved "https://registry.yarnpkg.com/body-parser/-/body-parser-1.20.2.tgz#6feb0e21c4724d06de7ff38da36dad4f57a747fd" + resolved "https://registry.npmjs.org/body-parser/-/body-parser-1.20.2.tgz" integrity 
sha512-ml9pReCu3M61kGlqoTm2umSXTlRTuGTx0bfYj+uIUKKYycG5NtSbeetV3faSU6R7ajOPw0g/J1PvK4qNy7s5bA== dependencies: bytes "3.1.2" @@ -3240,17 +3250,7 @@ braces@^3.0.2, braces@~3.0.2: dependencies: fill-range "^7.0.1" -browserslist@4.14.2, browserslist@^4.12.0: - version "4.14.2" - resolved "https://registry.npmjs.org/browserslist/-/browserslist-4.14.2.tgz" - integrity sha512-HI4lPveGKUR0x2StIz+2FXfDk9SfVMrxn6PLh1JeGUwcuoDkdKZebWiyLRJ68iIPDpMI4JLVDf7S7XzslgWOhw== - dependencies: - caniuse-lite "^1.0.30001125" - electron-to-chromium "^1.3.564" - escalade "^3.0.2" - node-releases "^1.1.61" - -browserslist@^4.0.0, browserslist@^4.14.5, browserslist@^4.18.1, browserslist@^4.21.4, browserslist@^4.21.5, browserslist@^4.21.9: +browserslist@^4.0.0, browserslist@^4.14.5, browserslist@^4.18.1, browserslist@^4.21.4, browserslist@^4.21.5, browserslist@^4.21.9, "browserslist@>= 4.21.0": version "4.21.10" resolved "https://registry.npmjs.org/browserslist/-/browserslist-4.21.10.tgz" integrity sha512-bipEBdZfVH5/pwrvqc+Ub0kUPVfGUhlKxbvfD+z1BDnPEO/X98ruXGA1WP5ASpAFKan7Qr6j736IacbZQuAlKQ== @@ -3260,6 +3260,16 @@ browserslist@^4.0.0, browserslist@^4.14.5, browserslist@^4.18.1, browserslist@^4 node-releases "^2.0.13" update-browserslist-db "^1.0.11" +browserslist@^4.12.0, browserslist@4.14.2: + version "4.14.2" + resolved "https://registry.npmjs.org/browserslist/-/browserslist-4.14.2.tgz" + integrity sha512-HI4lPveGKUR0x2StIz+2FXfDk9SfVMrxn6PLh1JeGUwcuoDkdKZebWiyLRJ68iIPDpMI4JLVDf7S7XzslgWOhw== + dependencies: + caniuse-lite "^1.0.30001125" + electron-to-chromium "^1.3.564" + escalade "^3.0.2" + node-releases "^1.1.61" + buffer-alloc-unsafe@^1.1.0: version "1.1.0" resolved "https://registry.npmjs.org/buffer-alloc-unsafe/-/buffer-alloc-unsafe-1.1.0.tgz" @@ -3442,9 +3452,9 @@ caniuse-api@^3.0.0: lodash.uniq "^4.5.0" caniuse-lite@^1.0.0, caniuse-lite@^1.0.30001109, caniuse-lite@^1.0.30001125, caniuse-lite@^1.0.30001464, caniuse-lite@^1.0.30001517: - version "1.0.30001519" - resolved 
"https://registry.npmjs.org/caniuse-lite/-/caniuse-lite-1.0.30001519.tgz" - integrity sha512-0QHgqR+Jv4bxHMp8kZ1Kn8CH55OikjKJ6JmKkZYP1F3D7w+lnFXF70nG5eNfsZS89jadi5Ywy5UCSKLAglIRkg== + version "1.0.30001629" + resolved "https://registry.npmjs.org/caniuse-lite/-/caniuse-lite-1.0.30001629.tgz" + integrity sha512-c3dl911slnQhmxUIT4HhYzT7wnBK/XYpGnYLOj4nJBaRiw52Ibe7YxlDaAeRECvA786zCuExhxIUJ2K7nHMrBw== caseless@~0.12.0: version "0.12.0" @@ -3473,15 +3483,6 @@ chainsaw@~0.1.0: dependencies: traverse ">=0.3.0 <0.4" -chalk@2.4.2, chalk@^2.4.1, chalk@^2.4.2: - version "2.4.2" - resolved "https://registry.npmjs.org/chalk/-/chalk-2.4.2.tgz" - integrity sha512-Mti+f9lpJNcwF4tWV8/OrTTtF1gZi+f8FqlyAdouralcFWFQWF2+NgCHShjkCb+IFBLq9buZwE1xckQU4peSuQ== - dependencies: - ansi-styles "^3.2.1" - escape-string-regexp "^1.0.5" - supports-color "^5.3.0" - chalk@^1.0.0: version "1.1.3" resolved "https://registry.npmjs.org/chalk/-/chalk-1.1.3.tgz" @@ -3493,6 +3494,24 @@ chalk@^1.0.0: strip-ansi "^3.0.0" supports-color "^2.0.0" +chalk@^2.4.1: + version "2.4.2" + resolved "https://registry.npmjs.org/chalk/-/chalk-2.4.2.tgz" + integrity sha512-Mti+f9lpJNcwF4tWV8/OrTTtF1gZi+f8FqlyAdouralcFWFQWF2+NgCHShjkCb+IFBLq9buZwE1xckQU4peSuQ== + dependencies: + ansi-styles "^3.2.1" + escape-string-regexp "^1.0.5" + supports-color "^5.3.0" + +chalk@^2.4.2: + version "2.4.2" + resolved "https://registry.npmjs.org/chalk/-/chalk-2.4.2.tgz" + integrity sha512-Mti+f9lpJNcwF4tWV8/OrTTtF1gZi+f8FqlyAdouralcFWFQWF2+NgCHShjkCb+IFBLq9buZwE1xckQU4peSuQ== + dependencies: + ansi-styles "^3.2.1" + escape-string-regexp "^1.0.5" + supports-color "^5.3.0" + chalk@^3.0.0: version "3.0.0" resolved "https://registry.npmjs.org/chalk/-/chalk-3.0.0.tgz" @@ -3509,6 +3528,15 @@ chalk@^4.0.0, chalk@^4.1.0, chalk@^4.1.2: ansi-styles "^4.1.0" supports-color "^7.1.0" +chalk@2.4.2: + version "2.4.2" + resolved "https://registry.npmjs.org/chalk/-/chalk-2.4.2.tgz" + integrity 
sha512-Mti+f9lpJNcwF4tWV8/OrTTtF1gZi+f8FqlyAdouralcFWFQWF2+NgCHShjkCb+IFBLq9buZwE1xckQU4peSuQ== + dependencies: + ansi-styles "^3.2.1" + escape-string-regexp "^1.0.5" + supports-color "^5.3.0" + character-entities-legacy@^1.0.0: version "1.1.4" resolved "https://registry.npmjs.org/character-entities-legacy/-/character-entities-legacy-1.1.4.tgz" @@ -3536,6 +3564,19 @@ cheerio-select@^2.1.0: domhandler "^5.0.3" domutils "^3.0.1" +cheerio@^1.0.0-rc.12, cheerio@^1.0.0-rc.3: + version "1.0.0-rc.12" + resolved "https://registry.npmjs.org/cheerio/-/cheerio-1.0.0-rc.12.tgz" + integrity sha512-VqR8m68vM46BNnuZ5NtnGBKIE/DfN0cRIzg9n40EIq9NOv90ayxLBXA8fXC5gquFRGJSTRqBq25Jt2ECLR431Q== + dependencies: + cheerio-select "^2.1.0" + dom-serializer "^2.0.0" + domhandler "^5.0.3" + domutils "^3.0.1" + htmlparser2 "^8.0.1" + parse5 "^7.0.0" + parse5-htmlparser2-tree-adapter "^7.0.0" + cheerio@0.22.0: version "0.22.0" resolved "https://registry.npmjs.org/cheerio/-/cheerio-0.22.0.tgz" @@ -3558,19 +3599,6 @@ cheerio@0.22.0: lodash.reject "^4.4.0" lodash.some "^4.4.0" -cheerio@^1.0.0-rc.12, cheerio@^1.0.0-rc.3: - version "1.0.0-rc.12" - resolved "https://registry.npmjs.org/cheerio/-/cheerio-1.0.0-rc.12.tgz" - integrity sha512-VqR8m68vM46BNnuZ5NtnGBKIE/DfN0cRIzg9n40EIq9NOv90ayxLBXA8fXC5gquFRGJSTRqBq25Jt2ECLR431Q== - dependencies: - cheerio-select "^2.1.0" - dom-serializer "^2.0.0" - domhandler "^5.0.3" - domutils "^3.0.1" - htmlparser2 "^8.0.1" - parse5 "^7.0.0" - parse5-htmlparser2-tree-adapter "^7.0.0" - chokidar@^3.4.2, chokidar@^3.5.3: version "3.5.3" resolved "https://registry.npmjs.org/chokidar/-/chokidar-3.5.3.tgz" @@ -3661,13 +3689,6 @@ clone-deep@^4.0.1: kind-of "^6.0.2" shallow-clone "^3.0.0" -clone-response@1.0.2: - version "1.0.2" - resolved "https://registry.npmjs.org/clone-response/-/clone-response-1.0.2.tgz" - integrity sha512-yjLXh88P599UOyPTFX0POsd7WxnbsVsGohcwzHOLspIhhpalPw1BcqED8NblyZLKcGrL8dTgMlcaZxV2jAD41Q== - dependencies: - mimic-response "^1.0.0" - 
clone-response@^1.0.2: version "1.0.3" resolved "https://registry.npmjs.org/clone-response/-/clone-response-1.0.3.tgz" @@ -3675,6 +3696,13 @@ clone-response@^1.0.2: dependencies: mimic-response "^1.0.0" +clone-response@1.0.2: + version "1.0.2" + resolved "https://registry.npmjs.org/clone-response/-/clone-response-1.0.2.tgz" + integrity sha512-yjLXh88P599UOyPTFX0POsd7WxnbsVsGohcwzHOLspIhhpalPw1BcqED8NblyZLKcGrL8dTgMlcaZxV2jAD41Q== + dependencies: + mimic-response "^1.0.0" + clsx@^1.2.1: version "1.2.1" resolved "https://registry.npmjs.org/clsx/-/clsx-1.2.1.tgz" @@ -3721,16 +3749,16 @@ color-convert@^2.0.1: dependencies: color-name "~1.1.4" -color-name@1.1.3: - version "1.1.3" - resolved "https://registry.npmjs.org/color-name/-/color-name-1.1.3.tgz" - integrity sha512-72fSenhMw2HZMTVHeCA9KCmpEIbzWiQsjN+BHcBbS9vr1mtt+vJjPdksIBNUmKAW8TFUDPJK5SUU3QhE9NEXDw== - color-name@^1.0.0, color-name@~1.1.4: version "1.1.4" resolved "https://registry.npmjs.org/color-name/-/color-name-1.1.4.tgz" integrity sha512-dOy+3AuW3a2wNbZHIuMZpTcgjGuLU/uBL/ubcZF9OXbDo8ff4O8yVp5Bf0efS8uEoYo5q4Fx7dY9OgQGXgAsQA== +color-name@1.1.3: + version "1.1.3" + resolved "https://registry.npmjs.org/color-name/-/color-name-1.1.3.tgz" + integrity sha512-72fSenhMw2HZMTVHeCA9KCmpEIbzWiQsjN+BHcBbS9vr1mtt+vJjPdksIBNUmKAW8TFUDPJK5SUU3QhE9NEXDw== + color-string@^1.6.0, color-string@^1.9.0: version "1.9.1" resolved "https://registry.npmjs.org/color-string/-/color-string-1.9.1.tgz" @@ -3787,7 +3815,17 @@ comma-separated-tokens@^1.0.0: resolved "https://registry.npmjs.org/comma-separated-tokens/-/comma-separated-tokens-1.0.8.tgz" integrity sha512-GHuDRO12Sypu2cV70d1dkA2EUmXHgntrzbpvOB+Qy+49ypNfGgFQIC2fhhXbnyrJRynDCAARsT7Ou0M6hirpfw== -commander@^2.19.0, commander@^2.20.0, commander@^2.8.1: +commander@^2.19.0: + version "2.20.3" + resolved "https://registry.npmjs.org/commander/-/commander-2.20.3.tgz" + integrity sha512-GpVkmM8vF2vQUkj2LvZmD35JxeJOLCwJ9cUkugyk2nuhbv3+mJvpLYYt+0+USMxE+oj+ey/lJEnhZw75x/OMcQ== + 
+commander@^2.20.0: + version "2.20.3" + resolved "https://registry.npmjs.org/commander/-/commander-2.20.3.tgz" + integrity sha512-GpVkmM8vF2vQUkj2LvZmD35JxeJOLCwJ9cUkugyk2nuhbv3+mJvpLYYt+0+USMxE+oj+ey/lJEnhZw75x/OMcQ== + +commander@^2.8.1: version "2.20.3" resolved "https://registry.npmjs.org/commander/-/commander-2.20.3.tgz" integrity sha512-GpVkmM8vF2vQUkj2LvZmD35JxeJOLCwJ9cUkugyk2nuhbv3+mJvpLYYt+0+USMxE+oj+ey/lJEnhZw75x/OMcQ== @@ -3909,7 +3947,7 @@ console-stream@^0.1.1: resolved "https://registry.npmjs.org/consolidated-events/-/consolidated-events-2.0.2.tgz" integrity sha512-2/uRVMdRypf5z/TW/ncD/66l75P5hH2vM/GR8Jf8HLc2xnfJtmina6F6du8+v4Z2vTrMo7jC+W1tmEEuuELgkQ== -content-disposition@0.5.2, content-disposition@^0.5.2: +content-disposition@^0.5.2, content-disposition@0.5.2: version "0.5.2" resolved "https://registry.npmjs.org/content-disposition/-/content-disposition-0.5.2.tgz" integrity sha512-kRGRZw3bLlFISDBgwTSA1TMBFN6J6GWDeubmDE3AF+3+yXL8hTWv8r5rkLbqYXY4RjPk/EzHnClI3zQf1cFmHA== @@ -3943,7 +3981,7 @@ cookie-signature@1.0.6: cookie@0.6.0: version "0.6.0" - resolved "https://registry.yarnpkg.com/cookie/-/cookie-0.6.0.tgz#2798b04b071b0ecbff0dbb62a505a8efa4e19051" + resolved "https://registry.npmjs.org/cookie/-/cookie-0.6.0.tgz" integrity sha512-U71cyTamuh1CRNCfpGY6to28lxvNwPG4Guz/EVjgf3Jmzv0vlDp1atT9eS5dDjMYHucpHbWns6Lwf3BKz6svdw== copy-descriptor@^0.1.0: @@ -3990,16 +4028,16 @@ core-js@^3.23.3: resolved "https://registry.npmjs.org/core-js/-/core-js-3.32.0.tgz" integrity sha512-rd4rYZNlF3WuoYuRIDEmbR/ga9CeuWX9U05umAvgrrZoHY4Z++cp/xwPQMvUpBB4Ag6J8KfD80G0zwCyaSxDww== -core-util-is@1.0.2: - version "1.0.2" - resolved "https://registry.npmjs.org/core-util-is/-/core-util-is-1.0.2.tgz" - integrity sha512-3lqz5YjWTYnW6dlDa5TLaTCcShfar1e40rmcJVwCBJC6mWlFuj0eCHIElmG1g5kyuJ/GD+8Wn4FFCcz4gJPfaQ== - core-util-is@~1.0.0: version "1.0.3" resolved "https://registry.npmjs.org/core-util-is/-/core-util-is-1.0.3.tgz" integrity 
sha512-ZQBvi1DcpJ4GDqanjucZ2Hj3wEO5pZDS89BWbkcrvdxksJorwUDDZamX9ldFkp9aw2lmBDLgkObEA4DWNJ9FYQ== +core-util-is@1.0.2: + version "1.0.2" + resolved "https://registry.npmjs.org/core-util-is/-/core-util-is-1.0.2.tgz" + integrity sha512-3lqz5YjWTYnW6dlDa5TLaTCcShfar1e40rmcJVwCBJC6mWlFuj0eCHIElmG1g5kyuJ/GD+8Wn4FFCcz4gJPfaQ== + cosmiconfig@^5.0.0: version "5.2.1" resolved "https://registry.npmjs.org/cosmiconfig/-/cosmiconfig-5.2.1.tgz" @@ -4049,15 +4087,6 @@ cross-fetch@^3.1.5: dependencies: node-fetch "^2.6.12" -cross-spawn@7.0.3, cross-spawn@^7.0.3: - version "7.0.3" - resolved "https://registry.npmjs.org/cross-spawn/-/cross-spawn-7.0.3.tgz" - integrity sha512-iRDPJKUPVEND7dHPO8rkbOnPpyDygcDFtWjpeWNCgy8WP2rXcxXL8TskReQl6OrB2G7+UJrags1q15Fudc7G6w== - dependencies: - path-key "^3.1.0" - shebang-command "^2.0.0" - which "^2.0.1" - cross-spawn@^5.0.1: version "5.1.0" resolved "https://registry.npmjs.org/cross-spawn/-/cross-spawn-5.1.0.tgz" @@ -4078,6 +4107,15 @@ cross-spawn@^6.0.0: shebang-command "^1.2.0" which "^1.2.9" +cross-spawn@^7.0.3, cross-spawn@7.0.3: + version "7.0.3" + resolved "https://registry.npmjs.org/cross-spawn/-/cross-spawn-7.0.3.tgz" + integrity sha512-iRDPJKUPVEND7dHPO8rkbOnPpyDygcDFtWjpeWNCgy8WP2rXcxXL8TskReQl6OrB2G7+UJrags1q15Fudc7G6w== + dependencies: + path-key "^3.1.0" + shebang-command "^2.0.0" + which "^2.0.1" + crowdin-cli@^0.3.0: version "0.3.0" resolved "https://registry.npmjs.org/crowdin-cli/-/crowdin-cli-0.3.0.tgz" @@ -4092,7 +4130,7 @@ crypto-random-string@^2.0.0: resolved "https://registry.npmjs.org/crypto-random-string/-/crypto-random-string-2.0.0.tgz" integrity sha512-v1plID3y9r/lPhviJ1wrXpLeyUIGAZ2SHNYTEapm7/8A9nLPoyvVp3RK/EPFqn5kEznyWgYZNsRtYYIWbuG8KA== -css-color-names@0.0.4, css-color-names@^0.0.4: +css-color-names@^0.0.4, css-color-names@0.0.4: version "0.0.4" resolved "https://registry.npmjs.org/css-color-names/-/css-color-names-0.0.4.tgz" integrity 
sha512-zj5D7X1U2h2zsXOAM8EyUREBnnts6H+Jm+d1M2DbiQQcUtnqgQsMrdo8JW9R80YFUmIdBZeMu5wvYM7hcgWP/Q== @@ -4188,14 +4226,6 @@ css-selector-parser@^1.0.0: resolved "https://registry.npmjs.org/css-selector-parser/-/css-selector-parser-1.4.1.tgz" integrity sha512-HYPSb7y/Z7BNDCOrakL4raGO2zltZkbeXyAd6Tg9obzix6QhzxCotdBl6VT0Dv4vZfJGVz3WL/xaEI9Ly3ul0g== -css-tree@1.0.0-alpha.37: - version "1.0.0-alpha.37" - resolved "https://registry.npmjs.org/css-tree/-/css-tree-1.0.0-alpha.37.tgz" - integrity sha512-DMxWJg0rnz7UgxKT0Q1HU/L9BeJI0M6ksor0OgqOnF+aRCDWg/N2641HmVyU9KVIu0OVVWOb2IpC9A+BJRnejg== - dependencies: - mdn-data "2.0.4" - source-map "^0.6.1" - css-tree@^1.1.2, css-tree@^1.1.3: version "1.1.3" resolved "https://registry.npmjs.org/css-tree/-/css-tree-1.1.3.tgz" @@ -4204,10 +4234,13 @@ css-tree@^1.1.2, css-tree@^1.1.3: mdn-data "2.0.14" source-map "^0.6.1" -css-what@2.1: - version "2.1.3" - resolved "https://registry.npmjs.org/css-what/-/css-what-2.1.3.tgz" - integrity sha512-a+EPoD+uZiNfh+5fxw2nO9QwFa6nJe2Or35fGY6Ipw1R3R4AGz1d1TEZrCegvw2YTmZ0jXirGYlzxxpYSHwpEg== +css-tree@1.0.0-alpha.37: + version "1.0.0-alpha.37" + resolved "https://registry.npmjs.org/css-tree/-/css-tree-1.0.0-alpha.37.tgz" + integrity sha512-DMxWJg0rnz7UgxKT0Q1HU/L9BeJI0M6ksor0OgqOnF+aRCDWg/N2641HmVyU9KVIu0OVVWOb2IpC9A+BJRnejg== + dependencies: + mdn-data "2.0.4" + source-map "^0.6.1" css-what@^3.2.1: version "3.4.2" @@ -4219,6 +4252,11 @@ css-what@^6.0.1, css-what@^6.1.0: resolved "https://registry.npmjs.org/css-what/-/css-what-6.1.0.tgz" integrity sha512-HTUrgRJ7r4dsZKU6GjmpfRK1O76h97Z8MfS1G0FozR+oF2kG6Vfe8JE6zwrkbxigziPHinCJ+gCPjA9EaBDtRw== +css-what@2.1: + version "2.1.3" + resolved "https://registry.npmjs.org/css-what/-/css-what-2.1.3.tgz" + integrity sha512-a+EPoD+uZiNfh+5fxw2nO9QwFa6nJe2Or35fGY6Ipw1R3R4AGz1d1TEZrCegvw2YTmZ0jXirGYlzxxpYSHwpEg== + cssesc@^3.0.0: version "3.0.0" resolved "https://registry.npmjs.org/cssesc/-/cssesc-3.0.0.tgz" @@ -4379,20 +4417,55 @@ dashdash@^1.12.0: dependencies: 
assert-plus "^1.0.0" -debug@2.6.9, debug@^2.2.0, debug@^2.3.3, debug@^2.6.0: +debug@^2.2.0: version "2.6.9" resolved "https://registry.npmjs.org/debug/-/debug-2.6.9.tgz" integrity sha512-bC7ElrdJaJnPbAP+1EotYvqZsb3ecl5wi6Bfi6BJTUcNowp6cvspg0jXznRTKDjm/E7AdgFBVeAPVMNcKGsHMA== dependencies: ms "2.0.0" -debug@4, debug@^4.1.0, debug@^4.1.1: +debug@^2.3.3: + version "2.6.9" + resolved "https://registry.npmjs.org/debug/-/debug-2.6.9.tgz" + integrity sha512-bC7ElrdJaJnPbAP+1EotYvqZsb3ecl5wi6Bfi6BJTUcNowp6cvspg0jXznRTKDjm/E7AdgFBVeAPVMNcKGsHMA== + dependencies: + ms "2.0.0" + +debug@^2.6.0: + version "2.6.9" + resolved "https://registry.npmjs.org/debug/-/debug-2.6.9.tgz" + integrity sha512-bC7ElrdJaJnPbAP+1EotYvqZsb3ecl5wi6Bfi6BJTUcNowp6cvspg0jXznRTKDjm/E7AdgFBVeAPVMNcKGsHMA== + dependencies: + ms "2.0.0" + +debug@^3.1.0: + version "3.2.7" + resolved "https://registry.npmjs.org/debug/-/debug-3.2.7.tgz" + integrity sha512-CFjzYYAi4ThfiQvizrFQevTTXHtnCqWfe7x1AhgEscTz6ZbLbfoLRLPugTQyBth6f8ZERVUSyWHFD/7Wu4t1XQ== + dependencies: + ms "^2.1.1" + +debug@^3.2.7: + version "3.2.7" + resolved "https://registry.npmjs.org/debug/-/debug-3.2.7.tgz" + integrity sha512-CFjzYYAi4ThfiQvizrFQevTTXHtnCqWfe7x1AhgEscTz6ZbLbfoLRLPugTQyBth6f8ZERVUSyWHFD/7Wu4t1XQ== + dependencies: + ms "^2.1.1" + +debug@^4.1.0, debug@^4.1.1, debug@4: version "4.3.4" resolved "https://registry.npmjs.org/debug/-/debug-4.3.4.tgz" integrity sha512-PRWFHuSU3eDtQJPvnNY7Jcket1j0t5OuOsFzPPzsekD52Zl8qUfFIPEiswXqIvHWGVHOgX+7G/vCNNhehwxfkQ== dependencies: ms "2.1.2" +debug@2.6.9: + version "2.6.9" + resolved "https://registry.npmjs.org/debug/-/debug-2.6.9.tgz" + integrity sha512-bC7ElrdJaJnPbAP+1EotYvqZsb3ecl5wi6Bfi6BJTUcNowp6cvspg0jXznRTKDjm/E7AdgFBVeAPVMNcKGsHMA== + dependencies: + ms "2.0.0" + debug@4.3.1: version "4.3.1" resolved "https://registry.npmjs.org/debug/-/debug-4.3.1.tgz" @@ -4400,13 +4473,6 @@ debug@4.3.1: dependencies: ms "2.1.2" -debug@^3.1.0, debug@^3.2.7: - version "3.2.7" - resolved 
"https://registry.npmjs.org/debug/-/debug-3.2.7.tgz" - integrity sha512-CFjzYYAi4ThfiQvizrFQevTTXHtnCqWfe7x1AhgEscTz6ZbLbfoLRLPugTQyBth6f8ZERVUSyWHFD/7Wu4t1XQ== - dependencies: - ms "^2.1.1" - decamelize@^1.1.2: version "1.2.0" resolved "https://registry.npmjs.org/decamelize/-/decamelize-1.2.0.tgz" @@ -4574,16 +4640,16 @@ delayed-stream@~1.0.0: resolved "https://registry.npmjs.org/delayed-stream/-/delayed-stream-1.0.0.tgz" integrity sha512-ZySD7Nf91aLB0RxL4KGrKHBXl7Eds1DAmEdcoVawXnLD7SDhpNgtuII2aAkg7a7QS41jxPSZ17p4VdGnMHk3MQ== -depd@2.0.0: - version "2.0.0" - resolved "https://registry.npmjs.org/depd/-/depd-2.0.0.tgz" - integrity sha512-g7nH6P6dyDioJogAAGprGpCtVImJhpPk/roCzdb3fIh61/s/nPsfR6onyMwkCAR/OlC3yBC0lESvUoQEAssIrw== - depd@~1.1.2: version "1.1.2" resolved "https://registry.npmjs.org/depd/-/depd-1.1.2.tgz" integrity sha512-7emPTl6Dpo6JRXOXjLRxck+FlLRX5847cLKEn00PLAgc3g2hTZZgr+e4c2v6QpSmLeFP3n5yUo7ft6avBK/5jQ== +depd@2.0.0: + version "2.0.0" + resolved "https://registry.npmjs.org/depd/-/depd-2.0.0.tgz" + integrity sha512-g7nH6P6dyDioJogAAGprGpCtVImJhpPk/roCzdb3fIh61/s/nPsfR6onyMwkCAR/OlC3yBC0lESvUoQEAssIrw== + destroy@1.2.0: version "1.2.0" resolved "https://registry.npmjs.org/destroy/-/destroy-1.2.0.tgz" @@ -4606,7 +4672,7 @@ detect-node@^2.0.4: resolved "https://registry.npmjs.org/detect-node/-/detect-node-2.1.0.tgz" integrity sha512-T0NIuQpnTvFDATNuHN5roPwSBG83rFsuO+MXXH9/3N1eFbn4wcPjttvjMLEPWJ0RGUYgQE7cGgS3tNxbqCGM7g== -detect-port-alt@1.1.6, detect-port-alt@^1.1.6: +detect-port-alt@^1.1.6, detect-port-alt@1.1.6: version "1.1.6" resolved "https://registry.npmjs.org/detect-port-alt/-/detect-port-alt-1.1.6.tgz" integrity sha512-5tQykt+LqfJFBEYaDITx7S7cR7mJ/zQmLXZ2qt5w04ainYZw6tBf9dBunMjVeVOdYVRUzUOE4HkY5J7+uttb5Q== @@ -4627,6 +4693,13 @@ diacritics-map@^0.1.0: resolved "https://registry.npmjs.org/diacritics-map/-/diacritics-map-0.1.0.tgz" integrity sha512-3omnDTYrGigU0i4cJjvaKwD52B8aoqyX/NEIkukFFkogBemsIbhSa1O414fpTp5nuszJG6lvQ5vBvDVNCbSsaQ== 
+dir-glob@^3.0.1: + version "3.0.1" + resolved "https://registry.npmjs.org/dir-glob/-/dir-glob-3.0.1.tgz" + integrity sha512-WkrWp9GR4KXfKGYzOLmTuGVi1UWFfws377n9cc55/tb6DuqyF6pcQ5AbiHEshaDpY9v6oaSr2XCDidGmMwdzIA== + dependencies: + path-type "^4.0.0" + dir-glob@2.0.0: version "2.0.0" resolved "https://registry.npmjs.org/dir-glob/-/dir-glob-2.0.0.tgz" @@ -4635,13 +4708,6 @@ dir-glob@2.0.0: arrify "^1.0.1" path-type "^3.0.0" -dir-glob@^3.0.1: - version "3.0.1" - resolved "https://registry.npmjs.org/dir-glob/-/dir-glob-3.0.1.tgz" - integrity sha512-WkrWp9GR4KXfKGYzOLmTuGVi1UWFfws377n9cc55/tb6DuqyF6pcQ5AbiHEshaDpY9v6oaSr2XCDidGmMwdzIA== - dependencies: - path-type "^4.0.0" - direction@^1.0.0: version "1.0.4" resolved "https://registry.npmjs.org/direction/-/direction-1.0.4.tgz" @@ -4745,14 +4811,6 @@ dom-converter@^0.2.0: dependencies: utila "~0.4" -dom-serializer@0: - version "0.2.2" - resolved "https://registry.npmjs.org/dom-serializer/-/dom-serializer-0.2.2.tgz" - integrity sha512-2/xPb3ORsQ42nHYiSunXkDjPLBaEj/xTwUO4B7XCZQTRk7EBtTOPaygh10YAAh2OI1Qrp6NWfpAhzswj0ydt9g== - dependencies: - domelementtype "^2.0.1" - entities "^2.0.0" - dom-serializer@^1.0.1: version "1.4.1" resolved "https://registry.npmjs.org/dom-serializer/-/dom-serializer-1.4.1.tgz" @@ -4779,7 +4837,15 @@ dom-serializer@~0.1.0: domelementtype "^1.3.0" entities "^1.1.1" -domelementtype@1, domelementtype@^1.3.0, domelementtype@^1.3.1: +dom-serializer@0: + version "0.2.2" + resolved "https://registry.npmjs.org/dom-serializer/-/dom-serializer-0.2.2.tgz" + integrity sha512-2/xPb3ORsQ42nHYiSunXkDjPLBaEj/xTwUO4B7XCZQTRk7EBtTOPaygh10YAAh2OI1Qrp6NWfpAhzswj0ydt9g== + dependencies: + domelementtype "^2.0.1" + entities "^2.0.0" + +domelementtype@^1.3.0, domelementtype@^1.3.1, domelementtype@1: version "1.3.1" resolved "https://registry.npmjs.org/domelementtype/-/domelementtype-1.3.1.tgz" integrity sha512-BSKB+TSpMpFI/HOxCNr1O8aMOTZ8hT3pM3GQ0w/mWRmkhEDSFJkkyzz4XQsBV44BChwGkrDfMyjVD0eA2aFV3w== @@ -4810,7 +4876,7 @@ 
domhandler@^5.0.2, domhandler@^5.0.3: dependencies: domelementtype "^2.3.0" -domutils@1.5.1, domutils@^1.5.1: +domutils@^1.5.1, domutils@1.5.1: version "1.5.1" resolved "https://registry.npmjs.org/domutils/-/domutils-1.5.1.tgz" integrity sha512-gSu5Oi/I+3wDENBsOWBiRK1eoGxcywYSqg3rR960/+EfY0CF4EX1VPkgHOZ3WiS/Jg2DtliF6BhWcHlfpYUcGw== @@ -4894,6 +4960,11 @@ download@^7.1.0: p-event "^2.1.0" pify "^3.0.0" +duplexer@^0.1.1, duplexer@^0.1.2: + version "0.1.2" + resolved "https://registry.npmjs.org/duplexer/-/duplexer-0.1.2.tgz" + integrity sha512-jtD6YG370ZCIi/9GTaJKQxWTZD045+4R4hTk/x1UyoqadyJ9x9CgSi1RlVDQF8U2sxLLSnFkCaMihqljHIWgMg== + duplexer2@~0.1.4: version "0.1.4" resolved "https://registry.npmjs.org/duplexer2/-/duplexer2-0.1.4.tgz" @@ -4906,11 +4977,6 @@ duplexer3@^0.1.4: resolved "https://registry.npmjs.org/duplexer3/-/duplexer3-0.1.5.tgz" integrity sha512-1A8za6ws41LQgv9HrE/66jyC5yuSjQ3L/KOpFtoBilsAK2iA2wuS5rTt1OCzIvtS2V7nVmedsUU+DGRcjBmOYA== -duplexer@^0.1.1, duplexer@^0.1.2: - version "0.1.2" - resolved "https://registry.npmjs.org/duplexer/-/duplexer-0.1.2.tgz" - integrity sha512-jtD6YG370ZCIi/9GTaJKQxWTZD045+4R4hTk/x1UyoqadyJ9x9CgSi1RlVDQF8U2sxLLSnFkCaMihqljHIWgMg== - eastasianwidth@^0.2.0: version "0.2.0" resolved "https://registry.npmjs.org/eastasianwidth/-/eastasianwidth-0.2.0.tgz" @@ -5025,7 +5091,7 @@ enzyme-shallow-equal@^1.0.1, enzyme-shallow-equal@^1.0.5: has "^1.0.3" object-is "^1.1.5" -enzyme@^3.10.0: +enzyme@^3.0.0, enzyme@^3.10.0: version "3.11.0" resolved "https://registry.npmjs.org/enzyme/-/enzyme-3.11.0.tgz" integrity sha512-Dw8/Gs4vRjxY6/6i9wU0V+utmQO9kvh9XLnz3LIudviOnVYDEe2ec+0k+NQoMamn1VrjKgCUOWj5jG/5M5M0Qw== @@ -5162,16 +5228,21 @@ escape-html@^1.0.3, escape-html@~1.0.3: resolved "https://registry.npmjs.org/escape-html/-/escape-html-1.0.3.tgz" integrity sha512-NiSupZ4OeuGwr68lGIeym/ksIZMJodUGOSCZ/FSnTxcrekbvqrgdUxlJOMpijaKZVjAJrWrGs/6Jy8OMuyj9ow== -escape-string-regexp@2.0.0, escape-string-regexp@^2.0.0: - version "2.0.0" - resolved 
"https://registry.npmjs.org/escape-string-regexp/-/escape-string-regexp-2.0.0.tgz" - integrity sha512-UpzcLCXolUWcNu5HtVMHYdXJjArjsF9C0aNnquZYY4uW/Vu0miy5YoWvbV345HauVvcAUnpRuhMMcqTcGOY2+w== - -escape-string-regexp@^1.0.2, escape-string-regexp@^1.0.5: +escape-string-regexp@^1.0.2: version "1.0.5" resolved "https://registry.npmjs.org/escape-string-regexp/-/escape-string-regexp-1.0.5.tgz" integrity sha512-vbRorB5FUQWvla16U8R/qgaFIya2qGzwDrNmCZuYKrbdSUMG6I1ZCGQRefkRVhuOkIGVne7BQ35DSfo1qvJqFg== +escape-string-regexp@^1.0.5: + version "1.0.5" + resolved "https://registry.npmjs.org/escape-string-regexp/-/escape-string-regexp-1.0.5.tgz" + integrity sha512-vbRorB5FUQWvla16U8R/qgaFIya2qGzwDrNmCZuYKrbdSUMG6I1ZCGQRefkRVhuOkIGVne7BQ35DSfo1qvJqFg== + +escape-string-regexp@^2.0.0, escape-string-regexp@2.0.0: + version "2.0.0" + resolved "https://registry.npmjs.org/escape-string-regexp/-/escape-string-regexp-2.0.0.tgz" + integrity sha512-UpzcLCXolUWcNu5HtVMHYdXJjArjsF9C0aNnquZYY4uW/Vu0miy5YoWvbV345HauVvcAUnpRuhMMcqTcGOY2+w== + escape-string-regexp@^4.0.0: version "4.0.0" resolved "https://registry.npmjs.org/escape-string-regexp/-/escape-string-regexp-4.0.0.tgz" @@ -5326,7 +5397,7 @@ expand-template@^2.0.3: express@^4.17.1, express@^4.17.3: version "4.19.2" - resolved "https://registry.yarnpkg.com/express/-/express-4.19.2.tgz#e25437827a3aa7f2a827bc8171bbbb664a356465" + resolved "https://registry.npmjs.org/express/-/express-4.19.2.tgz" integrity sha512-5T6nhjsT+EOMzuck8JjBHARTHfMht0POzlA60WV2pMD3gyXw2LZnZ+ueGdNxG+0calOJcWKbpFcuzLZ91YWq9Q== dependencies: accepts "~1.3.8" @@ -5383,7 +5454,15 @@ extend-shallow@^2.0.1: dependencies: is-extendable "^0.1.0" -extend-shallow@^3.0.0, extend-shallow@^3.0.2: +extend-shallow@^3.0.0: + version "3.0.2" + resolved "https://registry.npmjs.org/extend-shallow/-/extend-shallow-3.0.2.tgz" + integrity sha512-BwY5b5Ql4+qZoefgMj2NUmx+tehVTH/Kf4k1ZEtOHNFcm2wSxMRo992l6X3TIgni2eZVTZ85xMOjF31fwZAj6Q== + dependencies: + assign-symbols "^1.0.0" + is-extendable 
"^1.0.1" + +extend-shallow@^3.0.2: version "3.0.2" resolved "https://registry.npmjs.org/extend-shallow/-/extend-shallow-3.0.2.tgz" integrity sha512-BwY5b5Ql4+qZoefgMj2NUmx+tehVTH/Kf4k1ZEtOHNFcm2wSxMRo992l6X3TIgni2eZVTZ85xMOjF31fwZAj6Q== @@ -5410,7 +5489,7 @@ extglob@^2.0.4: snapdragon "^0.8.1" to-regex "^3.0.1" -extsprintf@1.3.0, extsprintf@^1.2.0: +extsprintf@^1.2.0, extsprintf@1.3.0: version "1.3.0" resolved "https://registry.npmjs.org/extsprintf/-/extsprintf-1.3.0.tgz" integrity sha512-11Ndz7Nv+mvAC1j0ktTa7fAb0vLyGGX+rMHNBYQviQDGU0Hw7lhctJANqbPhu9nV9/izT/IntTgZ7Im/9LJs9g== @@ -5542,7 +5621,7 @@ figures@^1.3.5: escape-string-regexp "^1.0.5" object-assign "^4.1.0" -file-loader@^6.2.0: +file-loader@*, file-loader@^6.2.0: version "6.2.0" resolved "https://registry.npmjs.org/file-loader/-/file-loader-6.2.0.tgz" integrity sha512-qo3glqyTa61Ytg4u73GultjHGjdRyig3tG6lPtyX/jOEJvHif9uB0/OCI2Kif6ctF3caQTW2G5gym21oAsI4pw== @@ -5550,11 +5629,6 @@ file-loader@^6.2.0: loader-utils "^2.0.0" schema-utils "^3.0.0" -file-type@5.2.0, file-type@^5.2.0: - version "5.2.0" - resolved "https://registry.npmjs.org/file-type/-/file-type-5.2.0.tgz" - integrity sha512-Iq1nJ6D2+yIO4c8HHg4fyVb8mAJieo1Oloy1mLLaB2PvezNedhBVm+QU7g0qM42aiMbRXTxKKwGD17rjKNJYVQ== - file-type@^10.4.0, file-type@^10.7.0: version "10.11.0" resolved "https://registry.npmjs.org/file-type/-/file-type-10.11.0.tgz" @@ -5570,6 +5644,11 @@ file-type@^4.2.0: resolved "https://registry.npmjs.org/file-type/-/file-type-4.4.0.tgz" integrity sha512-f2UbFQEk7LXgWpi5ntcO86OeA/cC80fuDDDaX/fZ2ZGel+AF7leRQqBBW1eJNiiQkrZlAoM6P+VYP5P6bOlDEQ== +file-type@^5.2.0: + version "5.2.0" + resolved "https://registry.npmjs.org/file-type/-/file-type-5.2.0.tgz" + integrity sha512-Iq1nJ6D2+yIO4c8HHg4fyVb8mAJieo1Oloy1mLLaB2PvezNedhBVm+QU7g0qM42aiMbRXTxKKwGD17rjKNJYVQ== + file-type@^6.1.0: version "6.2.0" resolved "https://registry.npmjs.org/file-type/-/file-type-6.2.0.tgz" @@ -5580,6 +5659,11 @@ file-type@^8.1.0: resolved 
"https://registry.npmjs.org/file-type/-/file-type-8.1.0.tgz" integrity sha512-qyQ0pzAy78gVoJsmYeNgl8uH8yKhr1lVhW7JbzJmnlRi0I4R2eEDEJZVKG8agpDnLpacwNbDhLNG/LMdxHD2YQ== +file-type@5.2.0: + version "5.2.0" + resolved "https://registry.npmjs.org/file-type/-/file-type-5.2.0.tgz" + integrity sha512-Iq1nJ6D2+yIO4c8HHg4fyVb8mAJieo1Oloy1mLLaB2PvezNedhBVm+QU7g0qM42aiMbRXTxKKwGD17rjKNJYVQ== + filename-reserved-regex@^2.0.0: version "2.0.0" resolved "https://registry.npmjs.org/filename-reserved-regex/-/filename-reserved-regex-2.0.0.tgz" @@ -5594,16 +5678,16 @@ filenamify@^2.0.0: strip-outer "^1.0.0" trim-repeated "^1.0.0" -filesize@6.1.0: - version "6.1.0" - resolved "https://registry.npmjs.org/filesize/-/filesize-6.1.0.tgz" - integrity sha512-LpCHtPQ3sFx67z+uh2HnSyWSLLu5Jxo21795uRDuar/EOuYWXib5EmPaGIBuSnRqH2IODiKA2k5re/K9OnN/Yg== - filesize@^8.0.6: version "8.0.7" resolved "https://registry.npmjs.org/filesize/-/filesize-8.0.7.tgz" integrity sha512-pjmC+bkIF8XI7fWaH8KxHcZL3DPybs1roSKP4rKDvy20tAWwIObE4+JIseG2byfGKhud5ZnM4YSGKBz7Sh0ndQ== +filesize@6.1.0: + version "6.1.0" + resolved "https://registry.npmjs.org/filesize/-/filesize-6.1.0.tgz" + integrity sha512-LpCHtPQ3sFx67z+uh2HnSyWSLLu5Jxo21795uRDuar/EOuYWXib5EmPaGIBuSnRqH2IODiKA2k5re/K9OnN/Yg== + fill-range@^2.1.0: version "2.2.4" resolved "https://registry.npmjs.org/fill-range/-/fill-range-2.2.4.tgz" @@ -5663,14 +5747,6 @@ find-cache-dir@^3.3.1: make-dir "^3.0.2" pkg-dir "^4.1.0" -find-up@4.1.0, find-up@^4.0.0: - version "4.1.0" - resolved "https://registry.npmjs.org/find-up/-/find-up-4.1.0.tgz" - integrity sha512-PpOwAdQ/YlXQ2vj8a3h8IipDuYRi3wceVQQGYWxNINccq40Anw7BlsEXCMbt1Zt+OLA6Fq9suIpIWD0OsnISlw== - dependencies: - locate-path "^5.0.0" - path-exists "^4.0.0" - find-up@^1.0.0: version "1.1.2" resolved "https://registry.npmjs.org/find-up/-/find-up-1.1.2.tgz" @@ -5686,6 +5762,14 @@ find-up@^3.0.0: dependencies: locate-path "^3.0.0" +find-up@^4.0.0, find-up@4.1.0: + version "4.1.0" + resolved 
"https://registry.npmjs.org/find-up/-/find-up-4.1.0.tgz" + integrity sha512-PpOwAdQ/YlXQ2vj8a3h8IipDuYRi3wceVQQGYWxNINccq40Anw7BlsEXCMbt1Zt+OLA6Fq9suIpIWD0OsnISlw== + dependencies: + locate-path "^5.0.0" + path-exists "^4.0.0" + find-up@^5.0.0: version "5.0.0" resolved "https://registry.npmjs.org/find-up/-/find-up-5.0.0.tgz" @@ -5711,7 +5795,7 @@ flux@^4.0.1: follow-redirects@^1.0.0, follow-redirects@^1.14.7: version "1.15.6" - resolved "https://registry.yarnpkg.com/follow-redirects/-/follow-redirects-1.15.6.tgz#7f815c0cda4249c74ff09e95ef97c23b5fd0399b" + resolved "https://registry.npmjs.org/follow-redirects/-/follow-redirects-1.15.6.tgz" integrity sha512-wWN62YITEaOpSK584EZXJafH1AGpO8RVgElfkuXbTOrPX4fIfOyEpW/CsiNd8JdYrAoOvafRTOEnvsO++qCqFA== for-each@^0.3.3: @@ -5731,19 +5815,6 @@ forever-agent@~0.6.1: resolved "https://registry.npmjs.org/forever-agent/-/forever-agent-0.6.1.tgz" integrity sha512-j0KLYPhm6zeac4lz3oJ3o65qvgQCcPubiyotZrXqEaG4hNagNYO8qdlUrX5vwqv9ohqeT/Z3j6+yW067yWWdUw== -fork-ts-checker-webpack-plugin@4.1.6: - version "4.1.6" - resolved "https://registry.npmjs.org/fork-ts-checker-webpack-plugin/-/fork-ts-checker-webpack-plugin-4.1.6.tgz" - integrity sha512-DUxuQaKoqfNne8iikd14SAkh5uw4+8vNifp6gmA73yYNS6ywLIWSLD/n/mBzHQRpW3J7rbATEakmiA8JvkTyZw== - dependencies: - "@babel/code-frame" "^7.5.5" - chalk "^2.4.1" - micromatch "^3.1.10" - minimatch "^3.0.4" - semver "^5.6.0" - tapable "^1.0.0" - worker-rpc "^0.1.0" - fork-ts-checker-webpack-plugin@^6.5.0: version "6.5.3" resolved "https://registry.npmjs.org/fork-ts-checker-webpack-plugin/-/fork-ts-checker-webpack-plugin-6.5.3.tgz" @@ -5763,6 +5834,19 @@ fork-ts-checker-webpack-plugin@^6.5.0: semver "^7.3.2" tapable "^1.0.0" +fork-ts-checker-webpack-plugin@4.1.6: + version "4.1.6" + resolved "https://registry.npmjs.org/fork-ts-checker-webpack-plugin/-/fork-ts-checker-webpack-plugin-4.1.6.tgz" + integrity sha512-DUxuQaKoqfNne8iikd14SAkh5uw4+8vNifp6gmA73yYNS6ywLIWSLD/n/mBzHQRpW3J7rbATEakmiA8JvkTyZw== + 
dependencies: + "@babel/code-frame" "^7.5.5" + chalk "^2.4.1" + micromatch "^3.1.10" + minimatch "^3.0.4" + semver "^5.6.0" + tapable "^1.0.0" + worker-rpc "^0.1.0" + form-data@~2.3.2: version "2.3.3" resolved "https://registry.npmjs.org/form-data/-/form-data-2.3.3.tgz" @@ -5816,7 +5900,17 @@ fs-extra@^10.1.0: jsonfile "^6.0.1" universalify "^2.0.0" -fs-extra@^9.0.0, fs-extra@^9.0.1: +fs-extra@^9.0.0: + version "9.1.0" + resolved "https://registry.npmjs.org/fs-extra/-/fs-extra-9.1.0.tgz" + integrity sha512-hcg3ZmepS30/7BSFqRvoo3DOMQu7IjqxO5nCDt+zM9XWjb33Wg7ziNT+Qvqbuc3+gWpzO02JubVyk2G4Zvo1OQ== + dependencies: + at-least-node "^1.0.0" + graceful-fs "^4.2.0" + jsonfile "^6.0.1" + universalify "^2.0.0" + +fs-extra@^9.0.1: version "9.1.0" resolved "https://registry.npmjs.org/fs-extra/-/fs-extra-9.1.0.tgz" integrity sha512-hcg3ZmepS30/7BSFqRvoo3DOMQu7IjqxO5nCDt+zM9XWjb33Wg7ziNT+Qvqbuc3+gWpzO02JubVyk2G4Zvo1OQ== @@ -5925,11 +6019,6 @@ get-stdin@^4.0.1: resolved "https://registry.npmjs.org/get-stdin/-/get-stdin-4.0.1.tgz" integrity sha512-F5aQMywwJ2n85s4hJPTT9RPxGmubonuB10MNYo17/xph174n2MIR33HRguhzVag10O/npM7SPk73LMZNP+FaWw== -get-stream@3.0.0, get-stream@^3.0.0: - version "3.0.0" - resolved "https://registry.npmjs.org/get-stream/-/get-stream-3.0.0.tgz" - integrity sha512-GlhdIUuVakc8SJ6kK0zAFbiGzRFzNnY4jUuEbV9UROo4Y+0Ny4fjvcZFVTeDA4odpFyOQzaw6hXukJSq/f28sQ== - get-stream@^2.2.0: version "2.3.1" resolved "https://registry.npmjs.org/get-stream/-/get-stream-2.3.1.tgz" @@ -5938,6 +6027,11 @@ get-stream@^2.2.0: object-assign "^4.0.1" pinkie-promise "^2.0.0" +get-stream@^3.0.0, get-stream@3.0.0: + version "3.0.0" + resolved "https://registry.npmjs.org/get-stream/-/get-stream-3.0.0.tgz" + integrity sha512-GlhdIUuVakc8SJ6kK0zAFbiGzRFzNnY4jUuEbV9UROo4Y+0Ny4fjvcZFVTeDA4odpFyOQzaw6hXukJSq/f28sQ== + get-stream@^4.0.0, get-stream@^4.1.0: version "4.1.0" resolved "https://registry.npmjs.org/get-stream/-/get-stream-4.1.0.tgz" @@ -6060,7 +6154,7 @@ global-dirs@^3.0.0: dependencies: ini 
"2.0.0" -global-modules@2.0.0, global-modules@^2.0.0: +global-modules@^2.0.0, global-modules@2.0.0: version "2.0.0" resolved "https://registry.npmjs.org/global-modules/-/global-modules-2.0.0.tgz" integrity sha512-NGbfmJBp9x8IxyJSd1P+otYK8vonoJactOogrVfFRIAEY1ukil8RSKDz2Yo7wh1oihl51l/r6W4epkeKJHqL8A== @@ -6088,18 +6182,6 @@ globalthis@^1.0.3: dependencies: define-properties "^1.1.3" -globby@11.0.1: - version "11.0.1" - resolved "https://registry.npmjs.org/globby/-/globby-11.0.1.tgz" - integrity sha512-iH9RmgwCmUJHi2z5o2l3eTtGBtXek1OYlHrbcxOYugyHLmAsZrPj43OtHThd62Buh/Vv6VyCBD2bdyWcGNQqoQ== - dependencies: - array-union "^2.1.0" - dir-glob "^3.0.1" - fast-glob "^3.1.1" - ignore "^5.1.4" - merge2 "^1.3.0" - slash "^3.0.0" - globby@^11.0.1, globby@^11.0.4, globby@^11.1.0: version "11.1.0" resolved "https://registry.npmjs.org/globby/-/globby-11.1.0.tgz" @@ -6136,6 +6218,18 @@ globby@^8.0.1: pify "^3.0.0" slash "^1.0.0" +globby@11.0.1: + version "11.0.1" + resolved "https://registry.npmjs.org/globby/-/globby-11.0.1.tgz" + integrity sha512-iH9RmgwCmUJHi2z5o2l3eTtGBtXek1OYlHrbcxOYugyHLmAsZrPj43OtHThd62Buh/Vv6VyCBD2bdyWcGNQqoQ== + dependencies: + array-union "^2.1.0" + dir-glob "^3.0.1" + fast-glob "^3.1.1" + ignore "^5.1.4" + merge2 "^1.3.0" + slash "^3.0.0" + globule@^1.0.0: version "1.3.4" resolved "https://registry.npmjs.org/globule/-/globule-1.3.4.tgz" @@ -6247,6 +6341,13 @@ gulp-header@^1.7.1: lodash.template "^4.4.0" through2 "^2.0.0" +gzip-size@^6.0.0: + version "6.0.0" + resolved "https://registry.npmjs.org/gzip-size/-/gzip-size-6.0.0.tgz" + integrity sha512-ax7ZYomf6jqPTQ4+XCpUGyXKHk5WweS+e05MBO4/y3WJ5RkmPXNKvX+bx1behVILVwr6JSQvZAku021CHPXG3Q== + dependencies: + duplexer "^0.1.2" + gzip-size@5.1.1: version "5.1.1" resolved "https://registry.npmjs.org/gzip-size/-/gzip-size-5.1.1.tgz" @@ -6255,13 +6356,6 @@ gzip-size@5.1.1: duplexer "^0.1.1" pify "^4.0.1" -gzip-size@^6.0.0: - version "6.0.0" - resolved "https://registry.npmjs.org/gzip-size/-/gzip-size-6.0.0.tgz" - 
integrity sha512-ax7ZYomf6jqPTQ4+XCpUGyXKHk5WweS+e05MBO4/y3WJ5RkmPXNKvX+bx1behVILVwr6JSQvZAku021CHPXG3Q== - dependencies: - duplexer "^0.1.2" - handle-thing@^2.0.0: version "2.0.1" resolved "https://registry.npmjs.org/handle-thing/-/handle-thing-2.0.1.tgz" @@ -6656,21 +6750,31 @@ htmlparser2@^8.0.1: domutils "^3.0.1" entities "^4.4.0" -http-cache-semantics@3.8.1: - version "3.8.1" - resolved "https://registry.npmjs.org/http-cache-semantics/-/http-cache-semantics-3.8.1.tgz" - integrity sha512-5ai2iksyV8ZXmnZhHH4rWPoxxistEexSi5936zIQ1bnNTW5VnA85B6P/VpXiRM017IgRvb2kKo1a//y+0wSp3w== - http-cache-semantics@^4.0.0: version "4.1.1" resolved "https://registry.npmjs.org/http-cache-semantics/-/http-cache-semantics-4.1.1.tgz" integrity sha512-er295DKPVsV82j5kw1Gjt+ADA/XYHsajl82cGNQG2eyoPkvgUhX+nDIyelzhIWbbsXP39EHcI6l5tYs2FYqYXQ== +http-cache-semantics@3.8.1: + version "3.8.1" + resolved "https://registry.npmjs.org/http-cache-semantics/-/http-cache-semantics-3.8.1.tgz" + integrity sha512-5ai2iksyV8ZXmnZhHH4rWPoxxistEexSi5936zIQ1bnNTW5VnA85B6P/VpXiRM017IgRvb2kKo1a//y+0wSp3w== + http-deceiver@^1.2.7: version "1.2.7" resolved "https://registry.npmjs.org/http-deceiver/-/http-deceiver-1.2.7.tgz" integrity sha512-LmpOGxTfbpgtGVxJrj5k7asXHCgNZp5nLfp+hWc8QQRqtb7fUy6kRY3BO1h9ddF6yIPYUARgxGOwB42DnxIaNw== +http-errors@~1.6.2: + version "1.6.3" + resolved "https://registry.npmjs.org/http-errors/-/http-errors-1.6.3.tgz" + integrity sha512-lks+lVC8dgGyh97jxvxeYTWQFvh4uw4yC12gVl63Cg30sjPX4wuGcdkICVXDAESr6OJGjqGA8Iz5mkeN6zlD7A== + dependencies: + depd "~1.1.2" + inherits "2.0.3" + setprototypeof "1.1.0" + statuses ">= 1.4.0 < 2" + http-errors@2.0.0: version "2.0.0" resolved "https://registry.npmjs.org/http-errors/-/http-errors-2.0.0.tgz" @@ -6682,16 +6786,6 @@ http-errors@2.0.0: statuses "2.0.1" toidentifier "1.0.1" -http-errors@~1.6.2: - version "1.6.3" - resolved "https://registry.npmjs.org/http-errors/-/http-errors-1.6.3.tgz" - integrity 
sha512-lks+lVC8dgGyh97jxvxeYTWQFvh4uw4yC12gVl63Cg30sjPX4wuGcdkICVXDAESr6OJGjqGA8Iz5mkeN6zlD7A== - dependencies: - depd "~1.1.2" - inherits "2.0.3" - setprototypeof "1.1.0" - statuses ">= 1.4.0 < 2" - http-parser-js@>=0.5.1: version "0.5.8" resolved "https://registry.npmjs.org/http-parser-js/-/http-parser-js-0.5.8.tgz" @@ -6817,16 +6911,16 @@ immediate@^3.2.3: resolved "https://registry.npmjs.org/immediate/-/immediate-3.3.0.tgz" integrity sha512-HR7EVodfFUdQCTIeySw+WDRFJlPcLOJbXfwwZ7Oom6tjsvZ3bOkCDJHehQC3nxJrv7+f9XecwazynjU8e4Vw3Q== -immer@8.0.1: - version "8.0.1" - resolved "https://registry.npmjs.org/immer/-/immer-8.0.1.tgz" - integrity sha512-aqXhGP7//Gui2+UrEtvxZxSquQVXTpZ7KDxfCcKAF3Vysvw0CViVaW9RZ1j1xlIYqaaaipBoqdqeibkc18PNvA== - immer@^9.0.7: version "9.0.21" resolved "https://registry.npmjs.org/immer/-/immer-9.0.21.tgz" integrity sha512-bc4NBHqOqSfRW7POMkHd51LvClaeMXpm8dx0e8oE2GORbq5aRK7Bxl4FyzVLdGtLmvLKL7BTDBG5ACQm4HWjTA== +immer@8.0.1: + version "8.0.1" + resolved "https://registry.npmjs.org/immer/-/immer-8.0.1.tgz" + integrity sha512-aqXhGP7//Gui2+UrEtvxZxSquQVXTpZ7KDxfCcKAF3Vysvw0CViVaW9RZ1j1xlIYqaaaipBoqdqeibkc18PNvA== + import-fresh@^2.0.0: version "2.0.0" resolved "https://registry.npmjs.org/import-fresh/-/import-fresh-2.0.0.tgz" @@ -6888,7 +6982,7 @@ inflight@^1.0.4: once "^1.3.0" wrappy "1" -inherits@2, inherits@2.0.4, inherits@^2.0.0, inherits@^2.0.1, inherits@^2.0.3, inherits@^2.0.4, inherits@~2.0.0, inherits@~2.0.3: +inherits@^2.0.0, inherits@^2.0.1, inherits@^2.0.3, inherits@^2.0.4, inherits@~2.0.0, inherits@~2.0.3, inherits@2, inherits@2.0.4: version "2.0.4" resolved "https://registry.npmjs.org/inherits/-/inherits-2.0.4.tgz" integrity sha512-k/vGaX4/Yla3WzyMCvTQOXYeIHvqOKtnqBduzTHpzpQZzAskKMhZ2K+EnBiSM9zGSoIFeMpXKxa4dYeZIQqewQ== @@ -6898,16 +6992,16 @@ inherits@2.0.3: resolved "https://registry.npmjs.org/inherits/-/inherits-2.0.3.tgz" integrity sha512-x00IRNXNy63jwGkJmzPigoySHbaqpNuzKbBOmzK+g2OdZpQ9w+sxCN+VSB3ja7IAge2OP2qpfxTjeNcyjmW1uw== 
-ini@2.0.0: - version "2.0.0" - resolved "https://registry.npmjs.org/ini/-/ini-2.0.0.tgz" - integrity sha512-7PnF4oN3CvZF23ADhA5wRaYEQpJ8qygSkbtTXWBeXWXmEVRXK+1ITciHWwHhsjv1TmW0MgacIv6hEi5pX5NQdA== - ini@^1.3.4, ini@^1.3.5, ini@~1.3.0: version "1.3.8" resolved "https://registry.npmjs.org/ini/-/ini-1.3.8.tgz" integrity sha512-JV/yugV2uzW5iMRSiZAyDtQd+nxtUnjeLt0acNdw98kKLrvuRVyB80tsREOE7yvGVgalhZ6RNXCmEHkUKBKxew== +ini@2.0.0: + version "2.0.0" + resolved "https://registry.npmjs.org/ini/-/ini-2.0.0.tgz" + integrity sha512-7PnF4oN3CvZF23ADhA5wRaYEQpJ8qygSkbtTXWBeXWXmEVRXK+1ITciHWwHhsjv1TmW0MgacIv6hEi5pX5NQdA== + inline-style-parser@0.1.1: version "0.1.1" resolved "https://registry.npmjs.org/inline-style-parser/-/inline-style-parser-0.1.1.tgz" @@ -6947,16 +7041,16 @@ ip-regex@^4.1.0: resolved "https://registry.npmjs.org/ip-regex/-/ip-regex-4.3.0.tgz" integrity sha512-B9ZWJxHHOHUhUjCPrMpLD4xEq35bUTClHM1S6CBU5ixQnkZmwipwgc96vAd7AAGM9TGHvJR+Uss+/Ak6UphK+Q== -ipaddr.js@1.9.1: - version "1.9.1" - resolved "https://registry.npmjs.org/ipaddr.js/-/ipaddr.js-1.9.1.tgz" - integrity sha512-0KI/607xoxSToH7GjN1FfSbLoU0+btTicjsQSWQlh/hZykN8KpmMf7uYwPW3R+akZ6R/w18ZlXSHBYXiYUPO3g== - ipaddr.js@^2.0.1: version "2.1.0" resolved "https://registry.npmjs.org/ipaddr.js/-/ipaddr.js-2.1.0.tgz" integrity sha512-LlbxQ7xKzfBusov6UMi4MFpEg0m+mAm9xyNGEduwXMEDuf4WfzB/RZwMVYEd7IKGvh4IUkEXYxtAVu9T3OelJQ== +ipaddr.js@1.9.1: + version "1.9.1" + resolved "https://registry.npmjs.org/ipaddr.js/-/ipaddr.js-1.9.1.tgz" + integrity sha512-0KI/607xoxSToH7GjN1FfSbLoU0+btTicjsQSWQlh/hZykN8KpmMf7uYwPW3R+akZ6R/w18ZlXSHBYXiYUPO3g== + is-absolute-url@^2.0.0: version "2.1.0" resolved "https://registry.npmjs.org/is-absolute-url/-/is-absolute-url-2.1.0.tgz" @@ -6969,7 +7063,7 @@ is-accessor-descriptor@^1.0.1: dependencies: hasown "^2.0.0" -is-alphabetical@1.0.4, is-alphabetical@^1.0.0: +is-alphabetical@^1.0.0, is-alphabetical@1.0.4: version "1.0.4" resolved 
"https://registry.npmjs.org/is-alphabetical/-/is-alphabetical-1.0.4.tgz" integrity sha512-DwzsA04LQ10FHTZuL0/grVDk4rFoVH1pjAToYwBrHSxcrBIGQuXrQMtD5U1b0U2XVgKZCTLLP8u2Qxqhy3l2Vg== @@ -7246,7 +7340,12 @@ is-path-inside@^3.0.2: resolved "https://registry.npmjs.org/is-path-inside/-/is-path-inside-3.0.3.tgz" integrity sha512-Fd4gABb+ycGAmKou8eMftCupSir5lRxqf4aD/vd0cD2qc4HL07OjCeuHMr8Ro4CoMaeCKDB0/ECBOVWjTwUvPQ== -is-plain-obj@^1.0.0, is-plain-obj@^1.1.0: +is-plain-obj@^1.0.0: + version "1.1.0" + resolved "https://registry.npmjs.org/is-plain-obj/-/is-plain-obj-1.1.0.tgz" + integrity sha512-yvkRyxmFKEOQ4pNXCmJG5AEQNlXJS5LaONXo5/cLdTZdWvsZ1ioJEonLGAosKlMWE8lwUy/bJzMjcw8az73+Fg== + +is-plain-obj@^1.1.0: version "1.1.0" resolved "https://registry.npmjs.org/is-plain-obj/-/is-plain-obj-1.1.0.tgz" integrity sha512-yvkRyxmFKEOQ4pNXCmJG5AEQNlXJS5LaONXo5/cLdTZdWvsZ1ioJEonLGAosKlMWE8lwUy/bJzMjcw8az73+Fg== @@ -7296,7 +7395,7 @@ is-retry-allowed@^1.0.0, is-retry-allowed@^1.1.0: resolved "https://registry.npmjs.org/is-retry-allowed/-/is-retry-allowed-1.2.0.tgz" integrity sha512-RUbUeKwvm3XG2VYamhJL1xFktgjvPzL0Hq8C+6yrWIswDy3BIXGqCxhxkc30N9jqK311gVU137K8Ei55/zVJRg== -is-root@2.1.0, is-root@^2.1.0: +is-root@^2.1.0, is-root@2.1.0: version "2.1.0" resolved "https://registry.npmjs.org/is-root/-/is-root-2.1.0.tgz" integrity sha512-AGOriNp96vNBd3HtU+RzFEc75FfR5ymiYv8E553I71SCeXBiMsVDUtdio1OEFvrPyLIQ9tVR5RxXIFe5PUFjMg== @@ -7308,7 +7407,12 @@ is-shared-array-buffer@^1.0.2: dependencies: call-bind "^1.0.2" -is-stream@^1.0.0, is-stream@^1.1.0: +is-stream@^1.0.0: + version "1.1.0" + resolved "https://registry.npmjs.org/is-stream/-/is-stream-1.1.0.tgz" + integrity sha512-uQPm8kcs47jx38atAcWTVxyltQYoPT68y9aWYdV6yWXSyW8mzSat0TL6CiWdZeCdF3KrAvpVtnHbTv4RN+rqdQ== + +is-stream@^1.1.0: version "1.1.0" resolved "https://registry.npmjs.org/is-stream/-/is-stream-1.1.0.tgz" integrity sha512-uQPm8kcs47jx38atAcWTVxyltQYoPT68y9aWYdV6yWXSyW8mzSat0TL6CiWdZeCdF3KrAvpVtnHbTv4RN+rqdQ== @@ -7409,21 +7513,26 @@ 
is2@^2.0.6: ip-regex "^4.1.0" is-url "^1.2.4" +isarray@^2.0.5: + version "2.0.5" + resolved "https://registry.npmjs.org/isarray/-/isarray-2.0.5.tgz" + integrity sha512-xHjhDr3cNBK0BzdUJSPXZntQUx/mwMS5Rw4A7lPJ90XGAO6ISP/ePDNuo0vhqOZU+UD5JoodwCAAoZQd3FeAKw== + +isarray@~1.0.0: + version "1.0.0" + resolved "https://registry.npmjs.org/isarray/-/isarray-1.0.0.tgz" + integrity sha512-VLghIWNM6ELQzo7zwmcg0NmTVyWKYjvIeM83yjp0wRDTmUnrM678fQbcKBo6n2CJEF0szoG//ytg+TKla89ALQ== + isarray@0.0.1: version "0.0.1" resolved "https://registry.npmjs.org/isarray/-/isarray-0.0.1.tgz" integrity sha512-D2S+3GLxWH+uhrNEcoh/fnmYeP8E8/zHl644d/jdA0g2uyXvy3sb0qxotE+ne0LtccHknQzWwZEzhak7oJ0COQ== -isarray@1.0.0, isarray@~1.0.0: +isarray@1.0.0: version "1.0.0" resolved "https://registry.npmjs.org/isarray/-/isarray-1.0.0.tgz" integrity sha512-VLghIWNM6ELQzo7zwmcg0NmTVyWKYjvIeM83yjp0wRDTmUnrM678fQbcKBo6n2CJEF0szoG//ytg+TKla89ALQ== -isarray@^2.0.5: - version "2.0.5" - resolved "https://registry.npmjs.org/isarray/-/isarray-2.0.5.tgz" - integrity sha512-xHjhDr3cNBK0BzdUJSPXZntQUx/mwMS5Rw4A7lPJ90XGAO6ISP/ePDNuo0vhqOZU+UD5JoodwCAAoZQd3FeAKw== - isexe@^2.0.0: version "2.0.0" resolved "https://registry.npmjs.org/isexe/-/isexe-2.0.0.tgz" @@ -7515,7 +7624,15 @@ jpegtran-bin@^4.0.0: resolved "https://registry.npmjs.org/js-tokens/-/js-tokens-4.0.0.tgz" integrity sha512-RdJUflcE3cUzKiMqQgsCu06FPu9UdIJO0beYbPhHN4k6apgJtifcoCtT9bcxOpYBtpD2kCM6Sbzg4CausW/PKQ== -js-yaml@^3.13.1, js-yaml@^3.8.1: +js-yaml@^3.13.1: + version "3.14.1" + resolved "https://registry.npmjs.org/js-yaml/-/js-yaml-3.14.1.tgz" + integrity sha512-okMH7OXXJ7YrN9Ok3/SXrnu4iX9yOk+25nqX4imS2npuvTYDmo/QEZoqwZkYaIDk3jVvBOTOIEgEhaLOynBS9g== + dependencies: + argparse "^1.0.7" + esprima "^4.0.0" + +js-yaml@^3.8.1: version "3.14.1" resolved "https://registry.npmjs.org/js-yaml/-/js-yaml-3.14.1.tgz" integrity sha512-okMH7OXXJ7YrN9Ok3/SXrnu4iX9yOk+25nqX4imS2npuvTYDmo/QEZoqwZkYaIDk3jVvBOTOIEgEhaLOynBS9g== @@ -7604,13 +7721,6 @@ jsprim@^1.2.2: json-schema 
"0.4.0" verror "1.10.0" -keyv@3.0.0: - version "3.0.0" - resolved "https://registry.npmjs.org/keyv/-/keyv-3.0.0.tgz" - integrity sha512-eguHnq22OE3uVoSYG0LVWNP+4ppamWr9+zWBe1bsNcovIMy6huUJFPgy4mGwCd/rnl3vOLGW1MTlu4c57CT1xA== - dependencies: - json-buffer "3.0.0" - keyv@^3.0.0: version "3.1.0" resolved "https://registry.npmjs.org/keyv/-/keyv-3.1.0.tgz" @@ -7618,7 +7728,28 @@ keyv@^3.0.0: dependencies: json-buffer "3.0.0" -kind-of@^3.0.2, kind-of@^3.0.3, kind-of@^3.2.0: +keyv@3.0.0: + version "3.0.0" + resolved "https://registry.npmjs.org/keyv/-/keyv-3.0.0.tgz" + integrity sha512-eguHnq22OE3uVoSYG0LVWNP+4ppamWr9+zWBe1bsNcovIMy6huUJFPgy4mGwCd/rnl3vOLGW1MTlu4c57CT1xA== + dependencies: + json-buffer "3.0.0" + +kind-of@^3.0.2: + version "3.2.2" + resolved "https://registry.npmjs.org/kind-of/-/kind-of-3.2.2.tgz" + integrity sha512-NOW9QQXMoZGg/oqnVNoNTTIFEIid1627WCffUBJEdMxYApq7mNE7CpzucIPc+ZQg25Phej7IJSmX3hO+oblOtQ== + dependencies: + is-buffer "^1.1.5" + +kind-of@^3.0.3: + version "3.2.2" + resolved "https://registry.npmjs.org/kind-of/-/kind-of-3.2.2.tgz" + integrity sha512-NOW9QQXMoZGg/oqnVNoNTTIFEIid1627WCffUBJEdMxYApq7mNE7CpzucIPc+ZQg25Phej7IJSmX3hO+oblOtQ== + dependencies: + is-buffer "^1.1.5" + +kind-of@^3.2.0: version "3.2.2" resolved "https://registry.npmjs.org/kind-of/-/kind-of-3.2.2.tgz" integrity sha512-NOW9QQXMoZGg/oqnVNoNTTIFEIid1627WCffUBJEdMxYApq7mNE7CpzucIPc+ZQg25Phej7IJSmX3hO+oblOtQ== @@ -7715,15 +7846,6 @@ loader-runner@^4.2.0: resolved "https://registry.npmjs.org/loader-runner/-/loader-runner-4.3.0.tgz" integrity sha512-3R/1M+yS3j5ou80Me59j7F9IMs4PXs3VqRrm0TU3AbKPxlmpoY1TNscJV/oGJXo8qCatFGTfDbY6W6ipGOYXfg== -loader-utils@2.0.0: - version "2.0.0" - resolved "https://registry.npmjs.org/loader-utils/-/loader-utils-2.0.0.tgz" - integrity sha512-rP4F0h2RaWSvPEkD7BLDFQnvSf+nK+wr3ESUjNTyAGobqrijmW92zc+SO6d4p4B1wh7+B/Jg1mkQe5NYUEHtHQ== - dependencies: - big.js "^5.2.2" - emojis-list "^3.0.0" - json5 "^2.1.2" - loader-utils@^2.0.0: version "2.0.4" resolved 
"https://registry.npmjs.org/loader-utils/-/loader-utils-2.0.4.tgz" @@ -7738,6 +7860,15 @@ loader-utils@^3.2.0: resolved "https://registry.npmjs.org/loader-utils/-/loader-utils-3.2.1.tgz" integrity sha512-ZvFw1KWS3GVyYBYb7qkmRM/WwL2TQQBxgCK62rlvm4WpVQ23Nb4tYjApUlfjrEGvOs7KHEsmyUn75OHZrJMWPw== +loader-utils@2.0.0: + version "2.0.0" + resolved "https://registry.npmjs.org/loader-utils/-/loader-utils-2.0.0.tgz" + integrity sha512-rP4F0h2RaWSvPEkD7BLDFQnvSf+nK+wr3ESUjNTyAGobqrijmW92zc+SO6d4p4B1wh7+B/Jg1mkQe5NYUEHtHQ== + dependencies: + big.js "^5.2.2" + emojis-list "^3.0.0" + json5 "^2.1.2" + locate-path@^3.0.0: version "3.0.0" resolved "https://registry.npmjs.org/locate-path/-/locate-path-3.0.0.tgz" @@ -7890,7 +8021,7 @@ lodash.templatesettings@^4.0.0: dependencies: lodash._reinterpolate "^3.0.0" -lodash.uniq@4.5.0, lodash.uniq@^4.5.0: +lodash.uniq@^4.5.0, lodash.uniq@4.5.0: version "4.5.0" resolved "https://registry.npmjs.org/lodash.uniq/-/lodash.uniq-4.5.0.tgz" integrity sha512-xfBaXQd9ryd9dlSDvnvI0lvxfLJlYAZzXomUYzLKtUeOQvOP5piqAWuGtrhWeqaXK9hhoM/iyJc5AV+XfsX3HQ== @@ -7935,11 +8066,6 @@ lower-case@^2.0.2: dependencies: tslib "^2.0.3" -lowercase-keys@1.0.0: - version "1.0.0" - resolved "https://registry.npmjs.org/lowercase-keys/-/lowercase-keys-1.0.0.tgz" - integrity sha512-RPlX0+PHuvxVDZ7xX+EBVAp4RsVxP/TdDSN2mJYdiq1Lc4Hz7EUSjUI7RZrKKlmrIzVhf6Jo2stj7++gVarS0A== - lowercase-keys@^1.0.0, lowercase-keys@^1.0.1: version "1.0.1" resolved "https://registry.npmjs.org/lowercase-keys/-/lowercase-keys-1.0.1.tgz" @@ -7950,6 +8076,11 @@ lowercase-keys@^2.0.0: resolved "https://registry.npmjs.org/lowercase-keys/-/lowercase-keys-2.0.0.tgz" integrity sha512-tqNXrS78oMOE73NMxK4EMLQsQowWf8jKooH9g7xPavRT706R6bkQJ6DY2Te7QukaZsulxa30wQ7bk0pm4XiHmA== +lowercase-keys@1.0.0: + version "1.0.0" + resolved "https://registry.npmjs.org/lowercase-keys/-/lowercase-keys-1.0.0.tgz" + integrity sha512-RPlX0+PHuvxVDZ7xX+EBVAp4RsVxP/TdDSN2mJYdiq1Lc4Hz7EUSjUI7RZrKKlmrIzVhf6Jo2stj7++gVarS0A== + 
lpad-align@^1.0.1: version "1.1.2" resolved "https://registry.npmjs.org/lpad-align/-/lpad-align-1.1.2.tgz" @@ -7992,7 +8123,14 @@ lunr@^2.3.8: resolved "https://registry.npmjs.org/lunr/-/lunr-2.3.9.tgz" integrity sha512-zTU3DaZaF3Rt9rhN3uBMGQD3dD2/vFQqnvZCDv4dl5iOzq2IZQqTxu90r4E5J+nP70J3ilqVCrbho2eWaeW8Ow== -make-dir@^1.0.0, make-dir@^1.2.0: +make-dir@^1.0.0: + version "1.3.0" + resolved "https://registry.npmjs.org/make-dir/-/make-dir-1.3.0.tgz" + integrity sha512-2w31R7SJtieJJnQtGc7RVL2StM2vGYVfqUOvUDxH6bC6aJTxPxTF0GnIgCyu7tjockiUWAYQRbxa7vKn34s5sQ== + dependencies: + pify "^3.0.0" + +make-dir@^1.2.0: version "1.3.0" resolved "https://registry.npmjs.org/make-dir/-/make-dir-1.3.0.tgz" integrity sha512-2w31R7SJtieJJnQtGc7RVL2StM2vGYVfqUOvUDxH6bC6aJTxPxTF0GnIgCyu7tjockiUWAYQRbxa7vKn34s5sQ== @@ -8192,24 +8330,57 @@ micromatch@^4.0.2, micromatch@^4.0.4, micromatch@^4.0.5: braces "^3.0.2" picomatch "^2.3.1" -mime-db@1.52.0, "mime-db@>= 1.43.0 < 2": - version "1.52.0" - resolved "https://registry.npmjs.org/mime-db/-/mime-db-1.52.0.tgz" - integrity sha512-sPU4uV7dYlvtWJxwwxHD0PuihVNiE7TyAbQ5SWxDCB9mUYvOgroQOwYQQOKPJ8CIbE+1ETVlOoK1UC2nU3gYvg== - mime-db@^1.28.0, mime-db@~1.33.0: version "1.33.0" resolved "https://registry.npmjs.org/mime-db/-/mime-db-1.33.0.tgz" integrity sha512-BHJ/EKruNIqJf/QahvxwQZXKygOQ256myeN/Ew+THcAa5q+PjyTTMMeNQC4DZw5AwfvelsUrA6B67NKMqXDbzQ== -mime-types@2.1.18, mime-types@^2.1.12, mime-types@~2.1.17: +"mime-db@>= 1.43.0 < 2": + version "1.52.0" + resolved "https://registry.npmjs.org/mime-db/-/mime-db-1.52.0.tgz" + integrity sha512-sPU4uV7dYlvtWJxwwxHD0PuihVNiE7TyAbQ5SWxDCB9mUYvOgroQOwYQQOKPJ8CIbE+1ETVlOoK1UC2nU3gYvg== + +mime-db@1.52.0: + version "1.52.0" + resolved "https://registry.npmjs.org/mime-db/-/mime-db-1.52.0.tgz" + integrity sha512-sPU4uV7dYlvtWJxwwxHD0PuihVNiE7TyAbQ5SWxDCB9mUYvOgroQOwYQQOKPJ8CIbE+1ETVlOoK1UC2nU3gYvg== + +mime-types@^2.1.12, mime-types@~2.1.17, mime-types@2.1.18: version "2.1.18" resolved 
"https://registry.npmjs.org/mime-types/-/mime-types-2.1.18.tgz" integrity sha512-lc/aahn+t4/SWV/qcmumYjymLsWfN3ELhpmVuUFjgsORruuZPVSwAQryq+HHGvO/SI2KVX26bx+En+zhM8g8hQ== dependencies: mime-db "~1.33.0" -mime-types@^2.1.27, mime-types@^2.1.31, mime-types@~2.1.19, mime-types@~2.1.24, mime-types@~2.1.34: +mime-types@^2.1.27: + version "2.1.35" + resolved "https://registry.npmjs.org/mime-types/-/mime-types-2.1.35.tgz" + integrity sha512-ZDY+bPm5zTTF+YpCrAU9nK0UgICYPT0QtT1NZWFv4s++TNkcgVaT0g6+4R2uI4MjQjzysHB1zxuWL50hzaeXiw== + dependencies: + mime-db "1.52.0" + +mime-types@^2.1.31: + version "2.1.35" + resolved "https://registry.npmjs.org/mime-types/-/mime-types-2.1.35.tgz" + integrity sha512-ZDY+bPm5zTTF+YpCrAU9nK0UgICYPT0QtT1NZWFv4s++TNkcgVaT0g6+4R2uI4MjQjzysHB1zxuWL50hzaeXiw== + dependencies: + mime-db "1.52.0" + +mime-types@~2.1.19: + version "2.1.35" + resolved "https://registry.npmjs.org/mime-types/-/mime-types-2.1.35.tgz" + integrity sha512-ZDY+bPm5zTTF+YpCrAU9nK0UgICYPT0QtT1NZWFv4s++TNkcgVaT0g6+4R2uI4MjQjzysHB1zxuWL50hzaeXiw== + dependencies: + mime-db "1.52.0" + +mime-types@~2.1.24: + version "2.1.35" + resolved "https://registry.npmjs.org/mime-types/-/mime-types-2.1.35.tgz" + integrity sha512-ZDY+bPm5zTTF+YpCrAU9nK0UgICYPT0QtT1NZWFv4s++TNkcgVaT0g6+4R2uI4MjQjzysHB1zxuWL50hzaeXiw== + dependencies: + mime-db "1.52.0" + +mime-types@~2.1.34: version "2.1.35" resolved "https://registry.npmjs.org/mime-types/-/mime-types-2.1.35.tgz" integrity sha512-ZDY+bPm5zTTF+YpCrAU9nK0UgICYPT0QtT1NZWFv4s++TNkcgVaT0g6+4R2uI4MjQjzysHB1zxuWL50hzaeXiw== @@ -8248,14 +8419,7 @@ minimalistic-assert@^1.0.0: resolved "https://registry.npmjs.org/minimalistic-assert/-/minimalistic-assert-1.0.1.tgz" integrity sha512-UtJcAD4yEaGtjPezWuO9wC4nwUnVH/8/Im3yEHQP4b67cXlD/Qr9hdITCU1xDbSEXg2XKNaP8jsReV7vQd00/A== -minimatch@3.0.4: - version "3.0.4" - resolved "https://registry.npmjs.org/minimatch/-/minimatch-3.0.4.tgz" - integrity 
sha512-yJHVQEhyqPLUTgt9B83PXu6W3rx4MvvHvSUvToogpwoGDOUQ+yDrR0HRot+yOCdCO7u4hX3pWft6kWBBcqh0UA== - dependencies: - brace-expansion "^1.1.7" - -minimatch@3.1.2, minimatch@^3.0.4, minimatch@^3.0.5, minimatch@^3.1.1: +minimatch@^3.0.4, minimatch@^3.0.5, minimatch@^3.1.1, minimatch@3.1.2: version "3.1.2" resolved "https://registry.npmjs.org/minimatch/-/minimatch-3.1.2.tgz" integrity sha512-J7p63hRiAjw1NDEww1W7i37+ByIrOWO5XQQAzZ3VOcL0PNybwpfmV/N05zFAzwQ9USyEcX6t3UO+K5aqBQOIHw== @@ -8269,6 +8433,13 @@ minimatch@~3.0.2: dependencies: brace-expansion "^1.1.7" +minimatch@3.0.4: + version "3.0.4" + resolved "https://registry.npmjs.org/minimatch/-/minimatch-3.0.4.tgz" + integrity sha512-yJHVQEhyqPLUTgt9B83PXu6W3rx4MvvHvSUvToogpwoGDOUQ+yDrR0HRot+yOCdCO7u4hX3pWft6kWBBcqh0UA== + dependencies: + brace-expansion "^1.1.7" + minimist@^1.1.3, minimist@^1.2.0, minimist@^1.2.3, minimist@^1.2.5, minimist@^1.2.6: version "1.2.8" resolved "https://registry.npmjs.org/minimist/-/minimist-1.2.8.tgz" @@ -8287,18 +8458,32 @@ mkdirp-classic@^0.5.2, mkdirp-classic@^0.5.3: resolved "https://registry.npmjs.org/mkdirp-classic/-/mkdirp-classic-0.5.3.tgz" integrity sha512-gKLcREMhtuZRwRAfqP3RFW+TK4JqApVBtOIftVgjuABpAtpxhPGaDcfvbhNvD0B8iD1oUr/txX35NjcaY6Ns/A== -mkdirp@0.3.0: - version "0.3.0" - resolved "https://registry.npmjs.org/mkdirp/-/mkdirp-0.3.0.tgz" - integrity sha512-OHsdUcVAQ6pOtg5JYWpCBo9W/GySVuwvP9hueRMW7UqshC0tbfzLv8wjySTPm3tfUZ/21CE9E1pJagOA91Pxew== - -"mkdirp@>=0.5 0", mkdirp@^0.5.1, mkdirp@^0.5.6, mkdirp@~0.5.1: +mkdirp@^0.5.1, mkdirp@~0.5.1: version "0.5.6" resolved "https://registry.npmjs.org/mkdirp/-/mkdirp-0.5.6.tgz" integrity sha512-FP+p8RB8OWpF3YZBCrP5gtADmtXApB5AMLn+vdyA+PyxCjrCs00mjyUozssO33cwDeT3wNGdLxJ5M//YqtHAJw== dependencies: minimist "^1.2.6" +mkdirp@^0.5.6: + version "0.5.6" + resolved "https://registry.npmjs.org/mkdirp/-/mkdirp-0.5.6.tgz" + integrity sha512-FP+p8RB8OWpF3YZBCrP5gtADmtXApB5AMLn+vdyA+PyxCjrCs00mjyUozssO33cwDeT3wNGdLxJ5M//YqtHAJw== + dependencies: + minimist 
"^1.2.6" + +"mkdirp@>=0.5 0": + version "0.5.6" + resolved "https://registry.npmjs.org/mkdirp/-/mkdirp-0.5.6.tgz" + integrity sha512-FP+p8RB8OWpF3YZBCrP5gtADmtXApB5AMLn+vdyA+PyxCjrCs00mjyUozssO33cwDeT3wNGdLxJ5M//YqtHAJw== + dependencies: + minimist "^1.2.6" + +mkdirp@0.3.0: + version "0.3.0" + resolved "https://registry.npmjs.org/mkdirp/-/mkdirp-0.3.0.tgz" + integrity sha512-OHsdUcVAQ6pOtg5JYWpCBo9W/GySVuwvP9hueRMW7UqshC0tbfzLv8wjySTPm3tfUZ/21CE9E1pJagOA91Pxew== + moo@^0.5.0: version "0.5.2" resolved "https://registry.npmjs.org/moo/-/moo-0.5.2.tgz" @@ -8309,16 +8494,16 @@ mrmime@^1.0.0: resolved "https://registry.npmjs.org/mrmime/-/mrmime-1.0.1.tgz" integrity sha512-hzzEagAgDyoU1Q6yg5uI+AorQgdvMCur3FcKf7NhMKWsaYg+RnbTyHRa/9IlLF9rf455MOCtcqqrQQ83pPP7Uw== +ms@^2.1.1, ms@2.1.2: + version "2.1.2" + resolved "https://registry.npmjs.org/ms/-/ms-2.1.2.tgz" + integrity sha512-sGkPx+VjMtmA6MX27oA4FBFELFCZZ4S4XqeGOXCv68tT+jb3vk/RyaKWP0PTKyWtmLSM0b+adUTEvbs1PEaH2w== + ms@2.0.0: version "2.0.0" resolved "https://registry.npmjs.org/ms/-/ms-2.0.0.tgz" integrity sha512-Tpp60P6IUJDTuOq/5Z8cdskzJujfwqfOTkrwIwj7IRISpnkJnT6SyJ4PCPnGMoFjC9ddhal5KVIYtAt97ix05A== -ms@2.1.2, ms@^2.1.1: - version "2.1.2" - resolved "https://registry.npmjs.org/ms/-/ms-2.1.2.tgz" - integrity sha512-sGkPx+VjMtmA6MX27oA4FBFELFCZZ4S4XqeGOXCv68tT+jb3vk/RyaKWP0PTKyWtmLSM0b+adUTEvbs1PEaH2w== - ms@2.1.3: version "2.1.3" resolved "https://registry.npmjs.org/ms/-/ms-2.1.3.tgz" @@ -8465,15 +8650,6 @@ normalize-range@^0.1.2: resolved "https://registry.npmjs.org/normalize-range/-/normalize-range-0.1.2.tgz" integrity sha512-bdok/XvKII3nUpklnV6P2hxtMNrCboOjAcyBuQnWEhO665FwrSNRxU+AqpsyvO6LgGYPspN+lu5CLtw4jPRKNA== -normalize-url@2.0.1: - version "2.0.1" - resolved "https://registry.npmjs.org/normalize-url/-/normalize-url-2.0.1.tgz" - integrity sha512-D6MUW4K/VzoJ4rJ01JFKxDrtY1v9wrgzCX5f2qj/lzH1m/lW6MhUZFKerVsnyjOhOsYzI9Kqqak+10l4LvLpMw== - dependencies: - prepend-http "^2.0.0" - query-string "^5.0.1" - sort-keys "^2.0.0" - 
normalize-url@^3.0.0: version "3.3.0" resolved "https://registry.npmjs.org/normalize-url/-/normalize-url-3.3.0.tgz" @@ -8489,6 +8665,15 @@ normalize-url@^6.0.1: resolved "https://registry.npmjs.org/normalize-url/-/normalize-url-6.1.0.tgz" integrity sha512-DlL+XwOy3NxAQ8xuC0okPgK46iuVNAK01YN7RueYBqqFeGsBjV9XmCAzAdgt+667bCl5kPh9EqKKDwnaPG1I7A== +normalize-url@2.0.1: + version "2.0.1" + resolved "https://registry.npmjs.org/normalize-url/-/normalize-url-2.0.1.tgz" + integrity sha512-D6MUW4K/VzoJ4rJ01JFKxDrtY1v9wrgzCX5f2qj/lzH1m/lW6MhUZFKerVsnyjOhOsYzI9Kqqak+10l4LvLpMw== + dependencies: + prepend-http "^2.0.0" + query-string "^5.0.1" + sort-keys "^2.0.0" + not@^0.1.0: version "0.1.0" resolved "https://registry.npmjs.org/not/-/not-0.1.0.tgz" @@ -8521,7 +8706,7 @@ nprogress@^0.2.0: resolved "https://registry.npmjs.org/nprogress/-/nprogress-0.2.0.tgz" integrity sha512-I19aIingLgR1fmhftnbWWO3dXc0hSxqHQHQb3H8m+K3TnEn/iSeTZZOyvKXWqQESMwuUVnatlCnZdLBZZt2VSA== -nth-check@^1.0.2, nth-check@~1.0.1: +nth-check@^1.0.2: version "1.0.2" resolved "https://registry.npmjs.org/nth-check/-/nth-check-1.0.2.tgz" integrity sha512-WeBOdju8SnzPN5vTUJYxYUxLeXpCaVP5i5e0LF8fg7WORF2Wd7wFX/pk0tYZk7s8T+J7VLy0Da6J1+wCT0AtHg== @@ -8535,6 +8720,13 @@ nth-check@^2.0.0, nth-check@^2.0.1: dependencies: boolbase "^1.0.0" +nth-check@~1.0.1: + version "1.0.2" + resolved "https://registry.npmjs.org/nth-check/-/nth-check-1.0.2.tgz" + integrity sha512-WeBOdju8SnzPN5vTUJYxYUxLeXpCaVP5i5e0LF8fg7WORF2Wd7wFX/pk0tYZk7s8T+J7VLy0Da6J1+wCT0AtHg== + dependencies: + boolbase "~1.0.0" + num2fraction@^1.2.2: version "1.2.2" resolved "https://registry.npmjs.org/num2fraction/-/num2fraction-1.2.2.tgz" @@ -8985,6 +9177,13 @@ path-parse@^1.0.7: resolved "https://registry.npmjs.org/path-parse/-/path-parse-1.0.7.tgz" integrity sha512-LDJzPVEEEPR+y48z93A0Ed0yXb8pAByGWo/k5YYdYgpY2/2EsOsksJrq7lOHxryrVOn1ejG6oAp8ahvOIQD8sw== +path-to-regexp@^1.7.0: + version "1.8.0" + resolved 
"https://registry.npmjs.org/path-to-regexp/-/path-to-regexp-1.8.0.tgz" + integrity sha512-n43JRhlUKUAlibEJhPeir1ncUID16QnEjNpwzNdO3Lm4ywrBpBZ5oLD0I6br9evr1Y9JTqwRtAh7JLoOzAQdVA== + dependencies: + isarray "0.0.1" + path-to-regexp@0.1.7: version "0.1.7" resolved "https://registry.npmjs.org/path-to-regexp/-/path-to-regexp-0.1.7.tgz" @@ -8995,13 +9194,6 @@ path-to-regexp@2.2.1: resolved "https://registry.npmjs.org/path-to-regexp/-/path-to-regexp-2.2.1.tgz" integrity sha512-gu9bD6Ta5bwGrrU8muHzVOBFFREpp2iRkVfhBJahwJ6p6Xw20SjT0MxLnwkjOibQmGSYhiUnf2FLe7k+jcFmGQ== -path-to-regexp@^1.7.0: - version "1.8.0" - resolved "https://registry.npmjs.org/path-to-regexp/-/path-to-regexp-1.8.0.tgz" - integrity sha512-n43JRhlUKUAlibEJhPeir1ncUID16QnEjNpwzNdO3Lm4ywrBpBZ5oLD0I6br9evr1Y9JTqwRtAh7JLoOzAQdVA== - dependencies: - isarray "0.0.1" - path-type@^1.0.0: version "1.1.0" resolved "https://registry.npmjs.org/path-type/-/path-type-1.1.0.tgz" @@ -9048,7 +9240,17 @@ picomatch@^2.0.4, picomatch@^2.2.1, picomatch@^2.2.3, picomatch@^2.3.1: resolved "https://registry.npmjs.org/picomatch/-/picomatch-2.3.1.tgz" integrity sha512-JU3teHTNjmE2VCGFzuY8EXzCDVwEqB2a8fsIvwaStHhAWJEeVd1o1QD80CU6+ZdEXXSLbSsuLwJjkCBWqRQUVA== -pify@^2.0.0, pify@^2.2.0, pify@^2.3.0: +pify@^2.0.0: + version "2.3.0" + resolved "https://registry.npmjs.org/pify/-/pify-2.3.0.tgz" + integrity sha512-udgsAY+fTnvv7kI7aaxbqwWNb0AHiB0qBO89PZKPkoTmGOgdbrHDKD+0B2X4uTfJ/FT1R09r9gTsjUjNJotuog== + +pify@^2.2.0: + version "2.3.0" + resolved "https://registry.npmjs.org/pify/-/pify-2.3.0.tgz" + integrity sha512-udgsAY+fTnvv7kI7aaxbqwWNb0AHiB0qBO89PZKPkoTmGOgdbrHDKD+0B2X4uTfJ/FT1R09r9gTsjUjNJotuog== + +pify@^2.3.0: version "2.3.0" resolved "https://registry.npmjs.org/pify/-/pify-2.3.0.tgz" integrity sha512-udgsAY+fTnvv7kI7aaxbqwWNb0AHiB0qBO89PZKPkoTmGOgdbrHDKD+0B2X4uTfJ/FT1R09r9gTsjUjNJotuog== @@ -9094,7 +9296,7 @@ pkg-dir@^4.1.0: dependencies: find-up "^4.0.0" -pkg-up@3.1.0, pkg-up@^3.1.0: +pkg-up@^3.1.0, pkg-up@3.1.0: version "3.1.0" 
resolved "https://registry.npmjs.org/pkg-up/-/pkg-up-3.1.0.tgz" integrity sha512-nDywThFk1i4BQK4twPQ6TA4RT8bDY96yeuCVBWL3ePARCiEKDRSrNGbFIgUJpLp+XeIR65v8ra7WuJOFUBtkMA== @@ -9656,15 +9858,7 @@ postcss-zindex@^5.1.0: resolved "https://registry.npmjs.org/postcss-zindex/-/postcss-zindex-5.1.0.tgz" integrity sha512-fgFMf0OtVSBR1va1JNHYgMxYk73yhn/qb4uQDq1DLGYolz8gHCyr/sesEuGUaYs58E3ZJRcpoGuPVoB7Meiq9A== -postcss@^7.0.0, postcss@^7.0.1, postcss@^7.0.23, postcss@^7.0.27, postcss@^7.0.32: - version "7.0.39" - resolved "https://registry.npmjs.org/postcss/-/postcss-7.0.39.tgz" - integrity sha512-yioayjNbHn6z1/Bywyb2Y4s3yvDAeXGOyxqD+LnVOinq6Mdmd++SW2wUNVzavyyHxd6+DxzWGIuosg6P1Rj8uA== - dependencies: - picocolors "^0.2.1" - source-map "^0.6.1" - -postcss@^8.3.11, postcss@^8.4.14, postcss@^8.4.17, postcss@^8.4.21: +"postcss@^7.0.0 || ^8.0.1", postcss@^8.0.9, postcss@^8.1.0, postcss@^8.2.15, postcss@^8.2.2, postcss@^8.3.11, postcss@^8.4.14, postcss@^8.4.16, postcss@^8.4.17, postcss@^8.4.21: version "8.4.31" resolved "https://registry.npmjs.org/postcss/-/postcss-8.4.31.tgz" integrity sha512-PS08Iboia9mts/2ygV3eLpY5ghnUcfLV/EXTOW1E2qYxJKGGBUtNjN76FYHnMs36RmARn41bC0AZmn+rR0OVpQ== @@ -9673,6 +9867,14 @@ postcss@^8.3.11, postcss@^8.4.14, postcss@^8.4.17, postcss@^8.4.21: picocolors "^1.0.0" source-map-js "^1.0.2" +postcss@^7.0.0, postcss@^7.0.1, postcss@^7.0.23, postcss@^7.0.27, postcss@^7.0.32: + version "7.0.39" + resolved "https://registry.npmjs.org/postcss/-/postcss-7.0.39.tgz" + integrity sha512-yioayjNbHn6z1/Bywyb2Y4s3yvDAeXGOyxqD+LnVOinq6Mdmd++SW2wUNVzavyyHxd6+DxzWGIuosg6P1Rj8uA== + dependencies: + picocolors "^0.2.1" + source-map "^0.6.1" + prebuild-install@^7.1.1: version "7.1.1" resolved "https://registry.npmjs.org/prebuild-install/-/prebuild-install-7.1.1.tgz" @@ -9741,14 +9943,6 @@ promise@^7.1.1: dependencies: asap "~2.0.3" -prompts@2.4.0: - version "2.4.0" - resolved "https://registry.npmjs.org/prompts/-/prompts-2.4.0.tgz" - integrity 
sha512-awZAKrk3vN6CroQukBL+R9051a4R3zCZBlJm/HBfrSZ8iTpYix3VX1vU4mveiLpiwmOJT4wokTF9m6HUk4KqWQ== - dependencies: - kleur "^3.0.3" - sisteransi "^1.0.5" - prompts@^2.4.2: version "2.4.2" resolved "https://registry.npmjs.org/prompts/-/prompts-2.4.2.tgz" @@ -9757,6 +9951,14 @@ prompts@^2.4.2: kleur "^3.0.3" sisteransi "^1.0.5" +prompts@2.4.0: + version "2.4.0" + resolved "https://registry.npmjs.org/prompts/-/prompts-2.4.0.tgz" + integrity sha512-awZAKrk3vN6CroQukBL+R9051a4R3zCZBlJm/HBfrSZ8iTpYix3VX1vU4mveiLpiwmOJT4wokTF9m6HUk4KqWQ== + dependencies: + kleur "^3.0.3" + sisteransi "^1.0.5" + prop-types-exact@^1.2.0: version "1.2.0" resolved "https://registry.npmjs.org/prop-types-exact/-/prop-types-exact-1.2.0.tgz" @@ -9766,7 +9968,7 @@ prop-types-exact@^1.2.0: object.assign "^4.1.0" reflect.ownkeys "^0.2.0" -prop-types@^15.0.0, prop-types@^15.6.2, prop-types@^15.7.2, prop-types@^15.8.1: +prop-types@^15.0.0, prop-types@^15.6.2, prop-types@^15.7.2, prop-types@^15.8.1, prop-types@>=15: version "15.8.1" resolved "https://registry.npmjs.org/prop-types/-/prop-types-15.8.1.tgz" integrity sha512-oj87CgZICdulUohogVAR7AjlC0327U4el4L6eAvOqCeudMDVU0NThNaV+b9Df4dXgSP1gXMTnPdhfe/2qDH5cg== @@ -9818,7 +10020,12 @@ punycode@^1.3.2: resolved "https://registry.npmjs.org/punycode/-/punycode-1.4.1.tgz" integrity sha512-jmYNElW7yvO7TV33CjSmvSiE2yco3bV2czu/OzDKdMNVZQWfxCblURLhf+47syQRBntjfLdd/H0egrzIG+oaFQ== -punycode@^2.1.0, punycode@^2.1.1: +punycode@^2.1.0: + version "2.3.1" + resolved "https://registry.npmjs.org/punycode/-/punycode-2.3.1.tgz" + integrity sha512-vYt7UD1U9Wg6138shLtLOvdAu+8DsC/ilFtEVHcH+wydcSpNE20AfSOduf6MkRFahL5FY7X1oU7nKVZFtfq8Fg== + +punycode@^2.1.1: version "2.3.1" resolved "https://registry.npmjs.org/punycode/-/punycode-2.3.1.tgz" integrity sha512-vYt7UD1U9Wg6138shLtLOvdAu+8DsC/ilFtEVHcH+wydcSpNE20AfSOduf6MkRFahL5FY7X1oU7nKVZFtfq8Fg== @@ -9840,7 +10047,7 @@ q@^1.1.2: resolved "https://registry.npmjs.org/q/-/q-1.5.1.tgz" integrity 
sha512-kV/CThkXo6xyFEZUugw/+pIOywXcDbFYgSct5cT3gqlbkBE1SJdwy6UQoZvodiWF/ckQLZyDE/Bu1M6gVu5lVw== -qs@6.11.0, qs@^6.4.0: +qs@^6.4.0, qs@6.11.0: version "6.11.0" resolved "https://registry.npmjs.org/qs/-/qs-6.11.0.tgz" integrity sha512-MvjoMCJwEarSbUYk5O+nmoSzSutSsTwF85zcHPQ9OrlFoZOYIjaqBAJIqIXjptyD5vThxGq52Xu/MaJzRkIk4Q== @@ -9914,25 +10121,20 @@ randombytes@^2.1.0: dependencies: safe-buffer "^5.1.0" -range-parser@1.2.0: - version "1.2.0" - resolved "https://registry.npmjs.org/range-parser/-/range-parser-1.2.0.tgz" - integrity sha512-kA5WQoNVo4t9lNx2kQNFCxKeBl5IbbSNBl1M/tLkw9WCn+hxNBAW5Qh8gdhs63CJnhjJ2zQWFoqPJP2sK1AV5A== - -range-parser@^1.2.1, range-parser@~1.2.1: +range-parser@^1.2.1: version "1.2.1" resolved "https://registry.npmjs.org/range-parser/-/range-parser-1.2.1.tgz" integrity sha512-Hrgsx+orqoygnmhFbKaHE6c296J+HTAQXoxEF6gNupROmmGJRoyzfG3ccAveqCBrwr/2yxQ5BVd/GTl5agOwSg== -raw-body@2.5.2: - version "2.5.2" - resolved "https://registry.yarnpkg.com/raw-body/-/raw-body-2.5.2.tgz#99febd83b90e08975087e8f1f9419a149366b68a" - integrity sha512-8zGqypfENjCIqGhgXToC8aB2r7YrBX+AQAfIPs/Mlk+BtPTztOvTS01NRW/3Eh60J+a48lt8qsCzirQ6loCVfA== - dependencies: - bytes "3.1.2" - http-errors "2.0.0" - iconv-lite "0.4.24" - unpipe "1.0.0" +range-parser@~1.2.1: + version "1.2.1" + resolved "https://registry.npmjs.org/range-parser/-/range-parser-1.2.1.tgz" + integrity sha512-Hrgsx+orqoygnmhFbKaHE6c296J+HTAQXoxEF6gNupROmmGJRoyzfG3ccAveqCBrwr/2yxQ5BVd/GTl5agOwSg== + +range-parser@1.2.0: + version "1.2.0" + resolved "https://registry.npmjs.org/range-parser/-/range-parser-1.2.0.tgz" + integrity sha512-kA5WQoNVo4t9lNx2kQNFCxKeBl5IbbSNBl1M/tLkw9WCn+hxNBAW5Qh8gdhs63CJnhjJ2zQWFoqPJP2sK1AV5A== raw-body@~1.1.0: version "1.1.7" @@ -9942,7 +10144,17 @@ raw-body@~1.1.0: bytes "1" string_decoder "0.10" -rc@1.2.8, rc@^1.2.7, rc@^1.2.8: +raw-body@2.5.2: + version "2.5.2" + resolved "https://registry.npmjs.org/raw-body/-/raw-body-2.5.2.tgz" + integrity 
sha512-8zGqypfENjCIqGhgXToC8aB2r7YrBX+AQAfIPs/Mlk+BtPTztOvTS01NRW/3Eh60J+a48lt8qsCzirQ6loCVfA== + dependencies: + bytes "3.1.2" + http-errors "2.0.0" + iconv-lite "0.4.24" + unpipe "1.0.0" + +rc@^1.2.7, rc@^1.2.8, rc@1.2.8: version "1.2.8" resolved "https://registry.npmjs.org/rc/-/rc-1.2.8.tgz" integrity sha512-y3bGgqKj3QBdxLbLkomlohkvsA8gdAiUQlSBJnBhfn+BPxg4bc62d8TcBW15wavDfgexCgccckhcZvywyQYPOw== @@ -10022,7 +10234,16 @@ react-dev-utils@^12.0.1: strip-ansi "^6.0.1" text-table "^0.2.0" -react-dom@^16.8.4: +react-dom@*, "react-dom@^16.6.0 || ^17.0.0 || ^18.0.0", "react-dom@^16.8.4 || ^17", "react-dom@^16.8.4 || ^17.0.0", "react-dom@^17.0.0 || ^16.3.0 || ^15.5.4", react-dom@^17.0.2, "react-dom@>= 16.8.0 < 19.0.0": + version "17.0.2" + resolved "https://registry.npmjs.org/react-dom/-/react-dom-17.0.2.tgz" + integrity sha512-s4h96KtLDUQlsENhMn1ar8t2bEa+q/YAtj8pPPdIjPDGBDIVNsrD9aXNWqspUe6AzKCIG0C1HZZLqLV7qpOBGA== + dependencies: + loose-envify "^1.1.0" + object-assign "^4.1.1" + scheduler "^0.20.2" + +react-dom@^16.0.0-0, react-dom@^16.8.4: version "16.14.0" resolved "https://registry.npmjs.org/react-dom/-/react-dom-16.14.0.tgz" integrity sha512-1gCeQXDLoIqMgqD3IO2Ah9bnf0w9kzhwN5q4FGnHZ67hBm9yePzB5JJAIQCc8x3pFnNlwFq4RidZggNAAkzWWw== @@ -10032,15 +10253,6 @@ react-dom@^16.8.4: prop-types "^15.6.2" scheduler "^0.19.1" -react-dom@^17.0.2: - version "17.0.2" - resolved "https://registry.npmjs.org/react-dom/-/react-dom-17.0.2.tgz" - integrity sha512-s4h96KtLDUQlsENhMn1ar8t2bEa+q/YAtj8pPPdIjPDGBDIVNsrD9aXNWqspUe6AzKCIG0C1HZZLqLV7qpOBGA== - dependencies: - loose-envify "^1.1.0" - object-assign "^4.1.1" - scheduler "^0.20.2" - react-error-overlay@^6.0.11, react-error-overlay@^6.0.9: version "6.0.11" resolved "https://registry.npmjs.org/react-error-overlay/-/react-error-overlay-6.0.11.tgz" @@ -10094,6 +10306,14 @@ react-loadable-ssr-addon-v5-slorber@^1.0.1: dependencies: "@babel/runtime" "^7.10.3" +react-loadable@*, "react-loadable@npm:@docusaurus/react-loadable@5.5.2": + 
version "5.5.2" + resolved "https://registry.npmjs.org/@docusaurus/react-loadable/-/react-loadable-5.5.2.tgz" + integrity sha512-A3dYjdBGuy0IGT+wyLIGIKLRE+sAk1iNk0f1HjNDysO7u8lhL4N3VEm+FAubmJbAztn94F7MxBTPmnixbiyFdQ== + dependencies: + "@types/react" "*" + prop-types "^15.6.2" + react-router-config@^5.1.1: version "5.1.1" resolved "https://registry.npmjs.org/react-router-config/-/react-router-config-5.1.1.tgz" @@ -10114,7 +10334,7 @@ react-router-dom@^5.3.3: tiny-invariant "^1.0.2" tiny-warning "^1.0.0" -react-router@5.3.4, react-router@^5.3.3: +react-router@^5.3.3, react-router@>=5, react-router@5.3.4: version "5.3.4" resolved "https://registry.npmjs.org/react-router/-/react-router-5.3.4.tgz" integrity sha512-Ys9K+ppnJah3QuaRiLxk+jDWOR1MekYQrlytiXxC1RyfbdsZkS5pvKAzCCr031xHixZwpnsYNT5xysdFHQaYsA== @@ -10148,7 +10368,7 @@ react-textarea-autosize@^8.3.2: use-composed-ref "^1.3.0" use-latest "^1.2.1" -react-waypoint@^10.3.0: +react-waypoint@^10.3.0, react-waypoint@>=9.0.2: version "10.3.0" resolved "https://registry.npmjs.org/react-waypoint/-/react-waypoint-10.3.0.tgz" integrity sha512-iF1y2c1BsoXuEGz08NoahaLFIGI9gTUAAOKip96HUmylRT6DUtpgoBPjk/Y8dfcFVmfVDvUzWjNXpZyKTOV0SQ== @@ -10158,7 +10378,15 @@ react-waypoint@^10.3.0: prop-types "^15.0.0" react-is "^17.0.1 || ^18.0.0" -react@^16.8.4: +react@*, "react@^15.0.2 || ^16.0.0 || ^17.0.0", "react@^15.3.0 || ^16.0.0 || ^17.0.0 || ^18.0.0", "react@^16.13.1 || ^17.0.0", "react@^16.6.0 || ^17.0.0 || ^18.0.0", "react@^16.8.0 || ^17.0.0 || ^18.0.0", "react@^16.8.4 || ^17", "react@^16.8.4 || ^17.0.0", "react@^17.0.0 || ^16.3.0 || ^15.5.4", react@^17.0.2, "react@>= 16.8.0 < 19.0.0", react@>=0.14.9, react@>=0.14.x, react@>=15, react@17.0.2: + version "17.0.2" + resolved "https://registry.npmjs.org/react/-/react-17.0.2.tgz" + integrity sha512-gnhPt75i/dq/z3/6q/0asP78D0u592D5L1pd7M8P+dck6Fu/jJeL6iVVK23fptSUZj8Vjf++7wXA8UNclGQcbA== + dependencies: + loose-envify "^1.1.0" + object-assign "^4.1.1" + +"react@^0.14 || ^15.0.0 || 
^16.0.0-alpha", react@^16.0.0-0, react@^16.14.0, react@^16.8.4, "react@0.13.x || 0.14.x || ^15.0.0-0 || ^16.0.0-0": version "16.14.0" resolved "https://registry.npmjs.org/react/-/react-16.14.0.tgz" integrity sha512-0X2CImDkJGApiAlcf0ODKIneSwBPhqJawOa5wCtKbu7ZECrmS26NvtSILynQ66cgkT/RJ4LidJOc3bUESwmU8g== @@ -10167,14 +10395,6 @@ react@^16.8.4: object-assign "^4.1.1" prop-types "^15.6.2" -react@^17.0.2: - version "17.0.2" - resolved "https://registry.npmjs.org/react/-/react-17.0.2.tgz" - integrity sha512-gnhPt75i/dq/z3/6q/0asP78D0u592D5L1pd7M8P+dck6Fu/jJeL6iVVK23fptSUZj8Vjf++7wXA8UNclGQcbA== - dependencies: - loose-envify "^1.1.0" - object-assign "^4.1.1" - read-pkg-up@^1.0.1: version "1.0.1" resolved "https://registry.npmjs.org/read-pkg-up/-/read-pkg-up-1.0.1.tgz" @@ -10192,7 +10412,59 @@ read-pkg@^1.0.0: normalize-package-data "^2.3.2" path-type "^1.0.0" -readable-stream@^2.0.0, readable-stream@^2.0.1, readable-stream@^2.0.2, readable-stream@^2.2.2, readable-stream@^2.3.0, readable-stream@^2.3.5, readable-stream@~2.3.6: +readable-stream@^2.0.0: + version "2.3.8" + resolved "https://registry.npmjs.org/readable-stream/-/readable-stream-2.3.8.tgz" + integrity sha512-8p0AUk4XODgIewSi0l8Epjs+EVnWiK7NoDIEGU0HhE7+ZyY8D1IMY7odu5lRrFXGg71L15KG8QrPmum45RTtdA== + dependencies: + core-util-is "~1.0.0" + inherits "~2.0.3" + isarray "~1.0.0" + process-nextick-args "~2.0.0" + safe-buffer "~5.1.1" + string_decoder "~1.1.1" + util-deprecate "~1.0.1" + +readable-stream@^2.0.1: + version "2.3.8" + resolved "https://registry.npmjs.org/readable-stream/-/readable-stream-2.3.8.tgz" + integrity sha512-8p0AUk4XODgIewSi0l8Epjs+EVnWiK7NoDIEGU0HhE7+ZyY8D1IMY7odu5lRrFXGg71L15KG8QrPmum45RTtdA== + dependencies: + core-util-is "~1.0.0" + inherits "~2.0.3" + isarray "~1.0.0" + process-nextick-args "~2.0.0" + safe-buffer "~5.1.1" + string_decoder "~1.1.1" + util-deprecate "~1.0.1" + +readable-stream@^2.0.2: + version "2.3.8" + resolved 
"https://registry.npmjs.org/readable-stream/-/readable-stream-2.3.8.tgz" + integrity sha512-8p0AUk4XODgIewSi0l8Epjs+EVnWiK7NoDIEGU0HhE7+ZyY8D1IMY7odu5lRrFXGg71L15KG8QrPmum45RTtdA== + dependencies: + core-util-is "~1.0.0" + inherits "~2.0.3" + isarray "~1.0.0" + process-nextick-args "~2.0.0" + safe-buffer "~5.1.1" + string_decoder "~1.1.1" + util-deprecate "~1.0.1" + +readable-stream@^2.2.2: + version "2.3.8" + resolved "https://registry.npmjs.org/readable-stream/-/readable-stream-2.3.8.tgz" + integrity sha512-8p0AUk4XODgIewSi0l8Epjs+EVnWiK7NoDIEGU0HhE7+ZyY8D1IMY7odu5lRrFXGg71L15KG8QrPmum45RTtdA== + dependencies: + core-util-is "~1.0.0" + inherits "~2.0.3" + isarray "~1.0.0" + process-nextick-args "~2.0.0" + safe-buffer "~5.1.1" + string_decoder "~1.1.1" + util-deprecate "~1.0.1" + +readable-stream@^2.3.0, readable-stream@^2.3.5: version "2.3.8" resolved "https://registry.npmjs.org/readable-stream/-/readable-stream-2.3.8.tgz" integrity sha512-8p0AUk4XODgIewSi0l8Epjs+EVnWiK7NoDIEGU0HhE7+ZyY8D1IMY7odu5lRrFXGg71L15KG8QrPmum45RTtdA== @@ -10214,6 +10486,19 @@ readable-stream@^3.0.6, readable-stream@^3.1.1, readable-stream@^3.4.0: string_decoder "^1.1.1" util-deprecate "^1.0.1" +readable-stream@~2.3.6: + version "2.3.8" + resolved "https://registry.npmjs.org/readable-stream/-/readable-stream-2.3.8.tgz" + integrity sha512-8p0AUk4XODgIewSi0l8Epjs+EVnWiK7NoDIEGU0HhE7+ZyY8D1IMY7odu5lRrFXGg71L15KG8QrPmum45RTtdA== + dependencies: + core-util-is "~1.0.0" + inherits "~2.0.3" + isarray "~1.0.0" + process-nextick-args "~2.0.0" + safe-buffer "~5.1.1" + string_decoder "~1.1.1" + util-deprecate "~1.0.1" + readdirp@~3.6.0: version "3.6.0" resolved "https://registry.npmjs.org/readdirp/-/readdirp-3.6.0.tgz" @@ -10233,13 +10518,6 @@ rechoir@^0.6.2: dependencies: resolve "^1.1.6" -recursive-readdir@2.2.2: - version "2.2.2" - resolved "https://registry.npmjs.org/recursive-readdir/-/recursive-readdir-2.2.2.tgz" - integrity 
sha512-nRCcW9Sj7NuZwa2XvH9co8NPeXUBhZP7CRKJtU+cS6PW9FpCIFoI5ib0NT1ZrbNuPoRy0ylyCaUL8Gih4LSyFg== - dependencies: - minimatch "3.0.4" - recursive-readdir@^2.2.2: version "2.2.3" resolved "https://registry.npmjs.org/recursive-readdir/-/recursive-readdir-2.2.3.tgz" @@ -10247,6 +10525,13 @@ recursive-readdir@^2.2.2: dependencies: minimatch "^3.0.5" +recursive-readdir@2.2.2: + version "2.2.2" + resolved "https://registry.npmjs.org/recursive-readdir/-/recursive-readdir-2.2.2.tgz" + integrity sha512-nRCcW9Sj7NuZwa2XvH9co8NPeXUBhZP7CRKJtU+cS6PW9FpCIFoI5ib0NT1ZrbNuPoRy0ylyCaUL8Gih4LSyFg== + dependencies: + minimatch "3.0.4" + redent@^1.0.0: version "1.0.0" resolved "https://registry.npmjs.org/redent/-/redent-1.0.0.tgz" @@ -10528,7 +10813,7 @@ resolve@^1.1.6, resolve@^1.10.0, resolve@^1.14.2, resolve@^1.3.2: path-parse "^1.0.7" supports-preserve-symlinks-flag "^1.0.0" -responselike@1.0.2, responselike@^1.0.2: +responselike@^1.0.2, responselike@1.0.2: version "1.0.2" resolved "https://registry.npmjs.org/responselike/-/responselike-1.0.2.tgz" integrity sha512-/Fpe5guzJk1gPqdJLJR5u7eG/gNY4nImjbRDaVWVMRhne55TCmj2i9Q+54PBRfatRC8v/rIiv9BN0pMd9OV5EQ== @@ -10560,7 +10845,7 @@ rgba-regex@^1.0.0: resolved "https://registry.npmjs.org/rgba-regex/-/rgba-regex-1.0.0.tgz" integrity sha512-zgn5OjNQXLUTdq8m17KdaicF6w89TZs8ZU8y0AYENIU6wG8GG6LLm0yLSiPY8DmaYmHdgRW8rnApjoT0fQRfMg== -rimraf@2, rimraf@^2.5.4: +rimraf@^2.5.4: version "2.7.1" resolved "https://registry.npmjs.org/rimraf/-/rimraf-2.7.1.tgz" integrity sha512-uWjbaKIK3T1OSVptzX7Nl6PvQ3qAGtKEtVRjRuazjfL3Bx5eI409VZSqgND+4UNnmzLVdPj9FqFJNPqBZFve4w== @@ -10574,6 +10859,13 @@ rimraf@^3.0.2: dependencies: glob "^7.1.3" +rimraf@2: + version "2.7.1" + resolved "https://registry.npmjs.org/rimraf/-/rimraf-2.7.1.tgz" + integrity sha512-uWjbaKIK3T1OSVptzX7Nl6PvQ3qAGtKEtVRjRuazjfL3Bx5eI409VZSqgND+4UNnmzLVdPj9FqFJNPqBZFve4w== + dependencies: + glob "^7.1.3" + rst-selector-parser@^2.2.3: version "2.2.3" resolved 
"https://registry.npmjs.org/rst-selector-parser/-/rst-selector-parser-2.2.3.tgz" @@ -10621,15 +10913,20 @@ safe-array-concat@^1.0.0, safe-array-concat@^1.0.1: has-symbols "^1.0.3" isarray "^2.0.5" -safe-buffer@5.1.2, safe-buffer@~5.1.0, safe-buffer@~5.1.1: +safe-buffer@^5.0.1, safe-buffer@^5.1.0, safe-buffer@^5.1.1, safe-buffer@^5.1.2, safe-buffer@>=5.1.0, safe-buffer@~5.2.0, safe-buffer@5.2.1: + version "5.2.1" + resolved "https://registry.npmjs.org/safe-buffer/-/safe-buffer-5.2.1.tgz" + integrity sha512-rp3So07KcdmmKbGvgaNxQSJr7bGVSVk5S9Eq1F+ppbRo70+YeaDxkw5Dd8NPN+GD6bjnYm2VuPuCXmpuYvmCXQ== + +safe-buffer@~5.1.0, safe-buffer@~5.1.1: version "5.1.2" resolved "https://registry.npmjs.org/safe-buffer/-/safe-buffer-5.1.2.tgz" integrity sha512-Gd2UZBJDkXlY7GbJxfsE8/nvKkUEU1G38c1siN6QP6a9PT9MmHB8GnpscSmMJSoF8LOIrt8ud/wPtojys4G6+g== -safe-buffer@5.2.1, safe-buffer@>=5.1.0, safe-buffer@^5.0.1, safe-buffer@^5.1.0, safe-buffer@^5.1.1, safe-buffer@^5.1.2, safe-buffer@~5.2.0: - version "5.2.1" - resolved "https://registry.npmjs.org/safe-buffer/-/safe-buffer-5.2.1.tgz" - integrity sha512-rp3So07KcdmmKbGvgaNxQSJr7bGVSVk5S9Eq1F+ppbRo70+YeaDxkw5Dd8NPN+GD6bjnYm2VuPuCXmpuYvmCXQ== +safe-buffer@5.1.2: + version "5.1.2" + resolved "https://registry.npmjs.org/safe-buffer/-/safe-buffer-5.1.2.tgz" + integrity sha512-Gd2UZBJDkXlY7GbJxfsE8/nvKkUEU1G38c1siN6QP6a9PT9MmHB8GnpscSmMJSoF8LOIrt8ud/wPtojys4G6+g== safe-json-parse@~1.0.1: version "1.0.1" @@ -10652,7 +10949,7 @@ safe-regex@^1.1.0: dependencies: ret "~0.1.10" -"safer-buffer@>= 2.1.2 < 3", safer-buffer@^2.0.2, safer-buffer@^2.1.0, safer-buffer@~2.1.0: +safer-buffer@^2.0.2, safer-buffer@^2.1.0, "safer-buffer@>= 2.1.2 < 3", safer-buffer@~2.1.0: version "2.1.2" resolved "https://registry.npmjs.org/safer-buffer/-/safer-buffer-2.1.2.tgz" integrity sha512-YZo3K82SD7Riyi0E1EQPojLz7kpepnSQI9IyPbHHg1XXXevb5dJI7tpyN2ADxGcQbHG7vcyRHk0cbwqcQriUtg== @@ -10678,15 +10975,6 @@ scheduler@^0.20.2: loose-envify "^1.1.0" object-assign "^4.1.1" 
-schema-utils@2.7.0: - version "2.7.0" - resolved "https://registry.npmjs.org/schema-utils/-/schema-utils-2.7.0.tgz" - integrity sha512-0ilKFI6QQF5nxDZLFn2dMjvc4hjg/Wkg7rHd3jK6/A4a1Hl9VFdQWvgB1UMGoU94pad1P/8N7fMcEnLnSiju8A== - dependencies: - "@types/json-schema" "^7.0.4" - ajv "^6.12.2" - ajv-keywords "^3.4.1" - schema-utils@^2.6.5: version "2.7.1" resolved "https://registry.npmjs.org/schema-utils/-/schema-utils-2.7.1.tgz" @@ -10696,7 +10984,25 @@ schema-utils@^2.6.5: ajv "^6.12.4" ajv-keywords "^3.5.2" -schema-utils@^3.0.0, schema-utils@^3.1.1, schema-utils@^3.2.0: +schema-utils@^3.0.0: + version "3.3.0" + resolved "https://registry.npmjs.org/schema-utils/-/schema-utils-3.3.0.tgz" + integrity sha512-pN/yOAvcC+5rQ5nERGuwrjLlYvLTbCibnZ1I7B1LaiAz9BRBlE9GMgE/eqV30P7aJQUf7Ddimy/RsbYO/GrVGg== + dependencies: + "@types/json-schema" "^7.0.8" + ajv "^6.12.5" + ajv-keywords "^3.5.2" + +schema-utils@^3.1.1: + version "3.3.0" + resolved "https://registry.npmjs.org/schema-utils/-/schema-utils-3.3.0.tgz" + integrity sha512-pN/yOAvcC+5rQ5nERGuwrjLlYvLTbCibnZ1I7B1LaiAz9BRBlE9GMgE/eqV30P7aJQUf7Ddimy/RsbYO/GrVGg== + dependencies: + "@types/json-schema" "^7.0.8" + ajv "^6.12.5" + ajv-keywords "^3.5.2" + +schema-utils@^3.2.0: version "3.3.0" resolved "https://registry.npmjs.org/schema-utils/-/schema-utils-3.3.0.tgz" integrity sha512-pN/yOAvcC+5rQ5nERGuwrjLlYvLTbCibnZ1I7B1LaiAz9BRBlE9GMgE/eqV30P7aJQUf7Ddimy/RsbYO/GrVGg== @@ -10715,6 +11021,20 @@ schema-utils@^4.0.0: ajv-formats "^2.1.1" ajv-keywords "^5.1.0" +schema-utils@2.7.0: + version "2.7.0" + resolved "https://registry.npmjs.org/schema-utils/-/schema-utils-2.7.0.tgz" + integrity sha512-0ilKFI6QQF5nxDZLFn2dMjvc4hjg/Wkg7rHd3jK6/A4a1Hl9VFdQWvgB1UMGoU94pad1P/8N7fMcEnLnSiju8A== + dependencies: + "@types/json-schema" "^7.0.4" + ajv "^6.12.2" + ajv-keywords "^3.4.1" + +"search-insights@>= 1 < 3": + version "2.7.0" + resolved "https://registry.npmjs.org/search-insights/-/search-insights-2.7.0.tgz" + integrity 
sha512-GLbVaGgzYEKMvuJbHRhLi1qoBFnjXZGZ6l4LxOYPCp4lI2jDRB3jPU9/XNhMwv6kvnA9slTreq6pvK+b3o3aqg== + section-matter@^1.0.0: version "1.0.0" resolved "https://registry.npmjs.org/section-matter/-/section-matter-1.0.0.tgz" @@ -10761,12 +11081,42 @@ semver-truncate@^1.1.2: dependencies: semver "^5.3.0" -"semver@2 || 3 || 4 || 5", semver@^5.3.0, semver@^5.4.1, semver@^5.5.0, semver@^5.6.0, semver@^5.7.0, semver@^5.7.1: +semver@^5.3.0: version "5.7.2" resolved "https://registry.npmjs.org/semver/-/semver-5.7.2.tgz" integrity sha512-cBznnQ9KjJqU67B52RMC65CMarK2600WFnbkcaiwWq3xy/5haFJlshgnpjovMVJ+Hff49d8GEn0b87C5pDQ10g== -semver@^6.0.0, semver@^6.2.0, semver@^6.3.0, semver@^6.3.1: +semver@^5.4.1: + version "5.7.2" + resolved "https://registry.npmjs.org/semver/-/semver-5.7.2.tgz" + integrity sha512-cBznnQ9KjJqU67B52RMC65CMarK2600WFnbkcaiwWq3xy/5haFJlshgnpjovMVJ+Hff49d8GEn0b87C5pDQ10g== + +semver@^5.5.0: + version "5.7.2" + resolved "https://registry.npmjs.org/semver/-/semver-5.7.2.tgz" + integrity sha512-cBznnQ9KjJqU67B52RMC65CMarK2600WFnbkcaiwWq3xy/5haFJlshgnpjovMVJ+Hff49d8GEn0b87C5pDQ10g== + +semver@^5.6.0, semver@^5.7.0, semver@^5.7.1: + version "5.7.2" + resolved "https://registry.npmjs.org/semver/-/semver-5.7.2.tgz" + integrity sha512-cBznnQ9KjJqU67B52RMC65CMarK2600WFnbkcaiwWq3xy/5haFJlshgnpjovMVJ+Hff49d8GEn0b87C5pDQ10g== + +semver@^6.0.0: + version "6.3.1" + resolved "https://registry.npmjs.org/semver/-/semver-6.3.1.tgz" + integrity sha512-BR7VvDCVHO+q2xBEWskxS6DJE1qRnb7DxzUrogb71CWoSficBxYsiAGd+Kl0mmq/MprG9yArRkyrQxTO6XjMzA== + +semver@^6.2.0: + version "6.3.1" + resolved "https://registry.npmjs.org/semver/-/semver-6.3.1.tgz" + integrity sha512-BR7VvDCVHO+q2xBEWskxS6DJE1qRnb7DxzUrogb71CWoSficBxYsiAGd+Kl0mmq/MprG9yArRkyrQxTO6XjMzA== + +semver@^6.3.0: + version "6.3.1" + resolved "https://registry.npmjs.org/semver/-/semver-6.3.1.tgz" + integrity sha512-BR7VvDCVHO+q2xBEWskxS6DJE1qRnb7DxzUrogb71CWoSficBxYsiAGd+Kl0mmq/MprG9yArRkyrQxTO6XjMzA== + +semver@^6.3.1: version "6.3.1" 
resolved "https://registry.npmjs.org/semver/-/semver-6.3.1.tgz" integrity sha512-BR7VvDCVHO+q2xBEWskxS6DJE1qRnb7DxzUrogb71CWoSficBxYsiAGd+Kl0mmq/MprG9yArRkyrQxTO6XjMzA== @@ -10778,6 +11128,11 @@ semver@^7.3.2, semver@^7.3.4, semver@^7.3.5, semver@^7.3.7, semver@^7.3.8, semve dependencies: lru-cache "^6.0.0" +"semver@2 || 3 || 4 || 5": + version "5.7.2" + resolved "https://registry.npmjs.org/semver/-/semver-5.7.2.tgz" + integrity sha512-cBznnQ9KjJqU67B52RMC65CMarK2600WFnbkcaiwWq3xy/5haFJlshgnpjovMVJ+Hff49d8GEn0b87C5pDQ10g== + send@0.18.0: version "0.18.0" resolved "https://registry.npmjs.org/send/-/send-0.18.0.tgz" @@ -10904,6 +11259,20 @@ shallowequal@^1.1.0: resolved "https://registry.npmjs.org/shallowequal/-/shallowequal-1.1.0.tgz" integrity sha512-y0m1JoUZSlPAjXVtPPW70aZWfIL/dSP7AFkRnniLCrK/8MDKog3TySTBmckD+RObVxH0v4Tox67+F14PdED2oQ== +sharp@*, sharp@^0.32.6: + version "0.32.6" + resolved "https://registry.npmjs.org/sharp/-/sharp-0.32.6.tgz" + integrity sha512-KyLTWwgcR9Oe4d9HwCwNM2l7+J0dUQwn/yf7S0EnTtb0eVS4RxO0eUSvxPtzT4F3SY+C4K6fqdv/DO27sJ/v/w== + dependencies: + color "^4.2.3" + detect-libc "^2.0.2" + node-addon-api "^6.1.0" + prebuild-install "^7.1.1" + semver "^7.5.4" + simple-get "^4.0.1" + tar-fs "^3.0.4" + tunnel-agent "^0.6.0" + sharp@^0.30.7: version "0.30.7" resolved "https://registry.npmjs.org/sharp/-/sharp-0.30.7.tgz" @@ -10918,20 +11287,6 @@ sharp@^0.30.7: tar-fs "^2.1.1" tunnel-agent "^0.6.0" -sharp@^0.32.6: - version "0.32.6" - resolved "https://registry.npmjs.org/sharp/-/sharp-0.32.6.tgz" - integrity sha512-KyLTWwgcR9Oe4d9HwCwNM2l7+J0dUQwn/yf7S0EnTtb0eVS4RxO0eUSvxPtzT4F3SY+C4K6fqdv/DO27sJ/v/w== - dependencies: - color "^4.2.3" - detect-libc "^2.0.2" - node-addon-api "^6.1.0" - prebuild-install "^7.1.1" - semver "^7.5.4" - simple-get "^4.0.1" - tar-fs "^3.0.4" - tunnel-agent "^0.6.0" - shebang-command@^1.2.0: version "1.2.0" resolved "https://registry.npmjs.org/shebang-command/-/shebang-command-1.2.0.tgz" @@ -10956,16 +11311,16 @@ 
shebang-regex@^3.0.0: resolved "https://registry.npmjs.org/shebang-regex/-/shebang-regex-3.0.0.tgz" integrity sha512-7++dFhtcx3353uBaq8DDR4NuxBetBzC7ZQOhmTQInHEd6bSrXdiEyzCvG07Z44UYdLShWUyXt5M/yhz8ekcb1A== -shell-quote@1.7.2: - version "1.7.2" - resolved "https://registry.npmjs.org/shell-quote/-/shell-quote-1.7.2.tgz" - integrity sha512-mRz/m/JVscCrkMyPqHc/bczi3OQHkLTqXHEFu0zDhK/qfv3UcOA4SVmRCLmos4bhjr9ekVQubj/R7waKapmiQg== - shell-quote@^1.7.3: version "1.8.1" resolved "https://registry.npmjs.org/shell-quote/-/shell-quote-1.8.1.tgz" integrity sha512-6j1W9l1iAs/4xYBI1SYOVZyFcCis9b4KCLQ8fgAGG07QvzaRLVVRQvAy85yNmmZSjYjg4MWh4gNvlPujU/5LpA== +shell-quote@1.7.2: + version "1.7.2" + resolved "https://registry.npmjs.org/shell-quote/-/shell-quote-1.7.2.tgz" + integrity sha512-mRz/m/JVscCrkMyPqHc/bczi3OQHkLTqXHEFu0zDhK/qfv3UcOA4SVmRCLmos4bhjr9ekVQubj/R7waKapmiQg== + shelljs@^0.8.4, shelljs@^0.8.5: version "0.8.5" resolved "https://registry.npmjs.org/shelljs/-/shelljs-0.8.5.tgz" @@ -11153,7 +11508,12 @@ source-map-url@^0.4.0: resolved "https://registry.npmjs.org/source-map-url/-/source-map-url-0.4.1.tgz" integrity sha512-cPiFOTLUKvJFIg4SKVScy4ilPPW6rFgMgfuZJPNoDuMs3nC1HbMUycBoJw77xFIp6z1UJQJOfx6C9GMH80DiTw== -source-map@^0.5.0, source-map@^0.5.6: +source-map@^0.5.0: + version "0.5.7" + resolved "https://registry.npmjs.org/source-map/-/source-map-0.5.7.tgz" + integrity sha512-LbrmJOMUSdEVxIKvdcJzQC+nQhe8FUZQTXQy6+I75skNgn3OoQ0DZA8YnFa7gp8tqtL3KPf1kmo0R5DoApeSGQ== + +source-map@^0.5.6: version "0.5.7" resolved "https://registry.npmjs.org/source-map/-/source-map-0.5.7.tgz" integrity sha512-LbrmJOMUSdEVxIKvdcJzQC+nQhe8FUZQTXQy6+I75skNgn3OoQ0DZA8YnFa7gp8tqtL3KPf1kmo0R5DoApeSGQ== @@ -11271,16 +11631,16 @@ static-extend@^0.1.1: define-property "^0.2.5" object-copy "^0.1.0" -statuses@2.0.1: - version "2.0.1" - resolved "https://registry.npmjs.org/statuses/-/statuses-2.0.1.tgz" - integrity sha512-RwNA9Z/7PrK06rYLIzFMlaF+l73iwpzsqRIFgbMLbTcLD6cOao82TaWefPXQvB2fOC4AjuYSEndS7N/mTCbkdQ== 
- "statuses@>= 1.4.0 < 2": version "1.5.0" resolved "https://registry.npmjs.org/statuses/-/statuses-1.5.0.tgz" integrity sha512-OpZ3zP+jT1PI7I8nemJX4AKmAX070ZkYPVWV/AaKTJl+tXCTGyVdC1a4SL8RUQYEwk/f34ZX8UTykN68FwrqAA== +statuses@2.0.1: + version "2.0.1" + resolved "https://registry.npmjs.org/statuses/-/statuses-2.0.1.tgz" + integrity sha512-RwNA9Z/7PrK06rYLIzFMlaF+l73iwpzsqRIFgbMLbTcLD6cOao82TaWefPXQvB2fOC4AjuYSEndS7N/mTCbkdQ== + std-env@^3.0.1: version "3.3.3" resolved "https://registry.npmjs.org/std-env/-/std-env-3.3.3.tgz" @@ -11299,12 +11659,58 @@ strict-uri-encode@^1.0.0: resolved "https://registry.npmjs.org/strict-uri-encode/-/strict-uri-encode-1.1.0.tgz" integrity sha512-R3f198pcvnB+5IpnBlRkphuE9n46WyVl8I39W/ZUTZLz4nqSP/oLYUrcnJrw462Ds8he4YKMov2efsTIw1BDGQ== +string_decoder@^1.1.1: + version "1.3.0" + resolved "https://registry.npmjs.org/string_decoder/-/string_decoder-1.3.0.tgz" + integrity sha512-hkRX8U1WjJFd8LsDJ2yQ/wWWxaopEsABU1XfkM8A+j0+85JAGppt16cr1Whg6KIbb4okU6Mql6BOj+uup/wKeA== + dependencies: + safe-buffer "~5.2.0" + +string_decoder@~1.1.1: + version "1.1.1" + resolved "https://registry.npmjs.org/string_decoder/-/string_decoder-1.1.1.tgz" + integrity sha512-n/ShnvDi6FHbbVfviro+WojiFzv+s8MPMHBczVePfUpDJLwoLT0ht1l4YwBCbi8pJAveEEdnkHyPyTP/mzRfwg== + dependencies: + safe-buffer "~5.1.0" + +string_decoder@0.10: + version "0.10.31" + resolved "https://registry.npmjs.org/string_decoder/-/string_decoder-0.10.31.tgz" + integrity sha512-ev2QzSzWPYmy9GuqfIVildA4OdcGLeFZQrq5ys6RtiuF+RQQiZWr8TZNyAcuVXyQRYfEO+MsoB/1BuQVhOJuoQ== + string-template@~0.2.1: version "0.2.1" resolved "https://registry.npmjs.org/string-template/-/string-template-0.2.1.tgz" integrity sha512-Yptehjogou2xm4UJbxJ4CxgZx12HBfeystp0y3x7s4Dj32ltVVG1Gg8YhKjHZkHicuKpZX/ffilA8505VbUbpw== -"string-width@^1.0.2 || 2 || 3 || 4", string-width@^4.0.0, string-width@^4.1.0, string-width@^4.2.0, string-width@^4.2.2, string-width@^4.2.3: +"string-width@^1.0.2 || 2 || 3 || 4": + version "4.2.3" + resolved 
"https://registry.npmjs.org/string-width/-/string-width-4.2.3.tgz" + integrity sha512-wKyQRQpjJ0sIp62ErSZdGsjMJWsap5oRNihHhu6G7JVO/9jIB6UyevL+tXuOqrng8j/cxKTWyWUwvSTriiZz/g== + dependencies: + emoji-regex "^8.0.0" + is-fullwidth-code-point "^3.0.0" + strip-ansi "^6.0.1" + +string-width@^4.0.0, string-width@^4.1.0, string-width@^4.2.2: + version "4.2.3" + resolved "https://registry.npmjs.org/string-width/-/string-width-4.2.3.tgz" + integrity sha512-wKyQRQpjJ0sIp62ErSZdGsjMJWsap5oRNihHhu6G7JVO/9jIB6UyevL+tXuOqrng8j/cxKTWyWUwvSTriiZz/g== + dependencies: + emoji-regex "^8.0.0" + is-fullwidth-code-point "^3.0.0" + strip-ansi "^6.0.1" + +string-width@^4.2.0: + version "4.2.3" + resolved "https://registry.npmjs.org/string-width/-/string-width-4.2.3.tgz" + integrity sha512-wKyQRQpjJ0sIp62ErSZdGsjMJWsap5oRNihHhu6G7JVO/9jIB6UyevL+tXuOqrng8j/cxKTWyWUwvSTriiZz/g== + dependencies: + emoji-regex "^8.0.0" + is-fullwidth-code-point "^3.0.0" + strip-ansi "^6.0.1" + +string-width@^4.2.3: version "4.2.3" resolved "https://registry.npmjs.org/string-width/-/string-width-4.2.3.tgz" integrity sha512-wKyQRQpjJ0sIp62ErSZdGsjMJWsap5oRNihHhu6G7JVO/9jIB6UyevL+tXuOqrng8j/cxKTWyWUwvSTriiZz/g== @@ -11349,25 +11755,6 @@ string.prototype.trimstart@^1.0.7: define-properties "^1.2.0" es-abstract "^1.22.1" -string_decoder@0.10: - version "0.10.31" - resolved "https://registry.npmjs.org/string_decoder/-/string_decoder-0.10.31.tgz" - integrity sha512-ev2QzSzWPYmy9GuqfIVildA4OdcGLeFZQrq5ys6RtiuF+RQQiZWr8TZNyAcuVXyQRYfEO+MsoB/1BuQVhOJuoQ== - -string_decoder@^1.1.1: - version "1.3.0" - resolved "https://registry.npmjs.org/string_decoder/-/string_decoder-1.3.0.tgz" - integrity sha512-hkRX8U1WjJFd8LsDJ2yQ/wWWxaopEsABU1XfkM8A+j0+85JAGppt16cr1Whg6KIbb4okU6Mql6BOj+uup/wKeA== - dependencies: - safe-buffer "~5.2.0" - -string_decoder@~1.1.1: - version "1.1.1" - resolved "https://registry.npmjs.org/string_decoder/-/string_decoder-1.1.1.tgz" - integrity 
sha512-n/ShnvDi6FHbbVfviro+WojiFzv+s8MPMHBczVePfUpDJLwoLT0ht1l4YwBCbi8pJAveEEdnkHyPyTP/mzRfwg== - dependencies: - safe-buffer "~5.1.0" - stringify-object@^3.3.0: version "3.3.0" resolved "https://registry.npmjs.org/stringify-object/-/stringify-object-3.3.0.tgz" @@ -11377,13 +11764,6 @@ stringify-object@^3.3.0: is-obj "^1.0.1" is-regexp "^1.0.0" -strip-ansi@6.0.0: - version "6.0.0" - resolved "https://registry.npmjs.org/strip-ansi/-/strip-ansi-6.0.0.tgz" - integrity sha512-AuvKTrTfQNYNIctbR1K/YGTR1756GycPsg7b9bdV9Duqur4gv6aKqHXah67Z8ImS7WEz5QVcOtlfW2rZEugt6w== - dependencies: - ansi-regex "^5.0.0" - strip-ansi@^3.0.0: version "3.0.1" resolved "https://registry.npmjs.org/strip-ansi/-/strip-ansi-3.0.1.tgz" @@ -11405,6 +11785,13 @@ strip-ansi@^7.0.1: dependencies: ansi-regex "^6.0.1" +strip-ansi@6.0.0: + version "6.0.0" + resolved "https://registry.npmjs.org/strip-ansi/-/strip-ansi-6.0.0.tgz" + integrity sha512-AuvKTrTfQNYNIctbR1K/YGTR1756GycPsg7b9bdV9Duqur4gv6aKqHXah67Z8ImS7WEz5QVcOtlfW2rZEugt6w== + dependencies: + ansi-regex "^5.0.0" + strip-bom-string@^1.0.0: version "1.0.0" resolved "https://registry.npmjs.org/strip-bom-string/-/strip-bom-string-1.0.0.tgz" @@ -11468,7 +11855,7 @@ strnum@^1.0.5: resolved "https://registry.npmjs.org/strnum/-/strnum-1.0.5.tgz" integrity sha512-J8bbNyKKXl5qYcR36TIO8W3mVGVHrmmxsd5PAItGkmyzwJvybiw2IVq5nqd0i4LSNSkB/sx9VHllbfFdr9k1JA== -style-to-object@0.3.0, style-to-object@^0.3.0: +style-to-object@^0.3.0, style-to-object@0.3.0: version "0.3.0" resolved "https://registry.npmjs.org/style-to-object/-/style-to-object-0.3.0.tgz" integrity sha512-CzFnRRXhzWIdItT3OmF8SQfWyahHhjq3HwcMNCNLn+N7klOOqPjMeG/4JSu77D7ypZdGvSzvkrbyeTMizz2VrA== @@ -11528,7 +11915,26 @@ svg-parser@^2.0.4: resolved "https://registry.npmjs.org/svg-parser/-/svg-parser-2.0.4.tgz" integrity sha512-e4hG1hRwoOdRb37cIMSgzNsxyzKfayW6VOflrwvR+/bzrkyxY/31WkbgnQpgtrNp1SdpJvpUAGTa/ZoiPNDuRQ== -svgo@^1.0.0, svgo@^1.3.2: +svgo@^1.0.0: + version "1.3.2" + resolved 
"https://registry.npmjs.org/svgo/-/svgo-1.3.2.tgz" + integrity sha512-yhy/sQYxR5BkC98CY7o31VGsg014AKLEPxdfhora76l36hD9Rdy5NZA/Ocn6yayNPgSamYdtX2rFJdcv07AYVw== + dependencies: + chalk "^2.4.1" + coa "^2.0.2" + css-select "^2.0.0" + css-select-base-adapter "^0.1.1" + css-tree "1.0.0-alpha.37" + csso "^4.0.2" + js-yaml "^3.13.1" + mkdirp "~0.5.1" + object.values "^1.1.0" + sax "~1.2.4" + stable "^0.1.8" + unquote "~1.1.1" + util.promisify "~1.0.0" + +svgo@^1.3.2: version "1.3.2" resolved "https://registry.npmjs.org/svgo/-/svgo-1.3.2.tgz" integrity sha512-yhy/sQYxR5BkC98CY7o31VGsg014AKLEPxdfhora76l36hD9Rdy5NZA/Ocn6yayNPgSamYdtX2rFJdcv07AYVw== @@ -11664,11 +12070,16 @@ terser@^5.10.0, terser@^5.16.8: commander "^2.20.0" source-map-support "~0.5.20" -text-table@0.2.0, text-table@^0.2.0: +text-table@^0.2.0, text-table@0.2.0: version "0.2.0" resolved "https://registry.npmjs.org/text-table/-/text-table-0.2.0.tgz" integrity sha512-N+8UisAXDGk8PFXP4HAzVR9nbfmVJ3zYLAWiTIoqC5v5isinhr+r5uaO8+7r3BMfuNIufIsA7RdpVgacC2cSpw== +through@^2.3.8: + version "2.3.8" + resolved "https://registry.npmjs.org/through/-/through-2.3.8.tgz" + integrity sha512-w89qg7PI8wAdvX60bMDP+bFoD5Dvhm9oLheFp5O4a2QF0cSBGsBX4qZmadPMvVqlLJBBci+WqGGOAPvcDeNSVg== + through2@^2.0.0: version "2.0.5" resolved "https://registry.npmjs.org/through2/-/through2-2.0.5.tgz" @@ -11677,11 +12088,6 @@ through2@^2.0.0: readable-stream "~2.3.6" xtend "~4.0.1" -through@^2.3.8: - version "2.3.8" - resolved "https://registry.npmjs.org/through/-/through-2.3.8.tgz" - integrity sha512-w89qg7PI8wAdvX60bMDP+bFoD5Dvhm9oLheFp5O4a2QF0cSBGsBX4qZmadPMvVqlLJBBci+WqGGOAPvcDeNSVg== - thunky@^1.0.2: version "1.1.0" resolved "https://registry.npmjs.org/thunky/-/thunky-1.1.0.tgz" @@ -11944,6 +12350,11 @@ typedarray@^0.0.6: resolved "https://registry.npmjs.org/typedarray/-/typedarray-0.0.6.tgz" integrity sha512-/aCDEGatGvZ2BIk+HmLf4ifCJFwvKFNb9/JeZPMulfgFracn9QFcAf5GO8B/mweUjSoblS5In0cWhqpfs/5PQA== +"typescript@>= 2.7": + version "5.1.6" + 
resolved "https://registry.npmjs.org/typescript/-/typescript-5.1.6.tgz" + integrity sha512-zaWCozRZ6DLEWAWFrVDz1H6FVXzUSfTy5FUMWsQlU8Ym5JP9eO4xkTIROFCQvhQf61z6O/G6ugw3SgAnvvm+HA== + ua-parser-js@^1.0.35: version "1.0.35" resolved "https://registry.npmjs.org/ua-parser-js/-/ua-parser-js-1.0.35.tgz" @@ -11998,10 +12409,10 @@ unicode-property-aliases-ecmascript@^2.0.0: resolved "https://registry.npmjs.org/unicode-property-aliases-ecmascript/-/unicode-property-aliases-ecmascript-2.1.0.tgz" integrity sha512-6t3foTQI9qne+OZoVQB/8x8rk2k1eVy1gRXhV3oFQ5T6R1dqQ1xtin3XqSlx3+ATBkliTaR/hHyJBm+LVPNM8w== -unified@9.2.0: - version "9.2.0" - resolved "https://registry.npmjs.org/unified/-/unified-9.2.0.tgz" - integrity sha512-vx2Z0vY+a3YoTj8+pttM3tiJHCwY5UFbYdiWrwBEbHmK8pvsPj2rtAX2BFfgXen8T39CJWblWRDT4L5WGXtDdg== +unified@^9.0.0, unified@^9.2.2: + version "9.2.2" + resolved "https://registry.npmjs.org/unified/-/unified-9.2.2.tgz" + integrity sha512-Sg7j110mtefBD+qunSLO1lqOEKdrwBFBrR6Qd8f4uwkhWNlbkaqwHse6e7QvD3AP/MNoJdEDLaf8OxYyoWgorQ== dependencies: bail "^1.0.0" extend "^3.0.0" @@ -12010,10 +12421,10 @@ unified@9.2.0: trough "^1.0.0" vfile "^4.0.0" -unified@^9.0.0, unified@^9.2.2: - version "9.2.2" - resolved "https://registry.npmjs.org/unified/-/unified-9.2.2.tgz" - integrity sha512-Sg7j110mtefBD+qunSLO1lqOEKdrwBFBrR6Qd8f4uwkhWNlbkaqwHse6e7QvD3AP/MNoJdEDLaf8OxYyoWgorQ== +unified@9.2.0: + version "9.2.0" + resolved "https://registry.npmjs.org/unified/-/unified-9.2.0.tgz" + integrity sha512-vx2Z0vY+a3YoTj8+pttM3tiJHCwY5UFbYdiWrwBEbHmK8pvsPj2rtAX2BFfgXen8T39CJWblWRDT4L5WGXtDdg== dependencies: bail "^1.0.0" extend "^3.0.0" @@ -12049,7 +12460,7 @@ unique-string@^2.0.0: dependencies: crypto-random-string "^2.0.0" -unist-builder@2.0.3, unist-builder@^2.0.0: +unist-builder@^2.0.0, unist-builder@2.0.3: version "2.0.3" resolved "https://registry.npmjs.org/unist-builder/-/unist-builder-2.0.3.tgz" integrity 
sha512-f98yt5pnlMWlzP539tPc4grGMsFaQQlP/vM396b00jngsiINumNmsY8rkXjfoi1c6QaM8nQ3vaGDuoKWbe/1Uw== @@ -12105,7 +12516,7 @@ unist-util-visit-parents@^3.0.0: "@types/unist" "^2.0.0" unist-util-is "^4.0.0" -unist-util-visit@2.0.3, unist-util-visit@^2.0.0, unist-util-visit@^2.0.3: +unist-util-visit@^2.0.0, unist-util-visit@^2.0.3, unist-util-visit@2.0.3: version "2.0.3" resolved "https://registry.npmjs.org/unist-util-visit/-/unist-util-visit-2.0.3.tgz" integrity sha512-iJ4/RczbJMkD0712mGktuGpm/U4By4FfDonL7N/9tATGIF4imikjOuagyMY53tnZq3NP6BcmlrHhEKAfGWjh7Q== @@ -12119,7 +12530,7 @@ universalify@^2.0.0: resolved "https://registry.npmjs.org/universalify/-/universalify-2.0.0.tgz" integrity sha512-hAZsKq7Yy11Zu1DE0OzWjw7nnLZmJZYTDZZyEFHZdUhV8FkH5MCfoU1XMaxXovpyW5nq5scPqq0ZDP9Zyl04oQ== -unpipe@1.0.0, unpipe@~1.0.0: +unpipe@~1.0.0, unpipe@1.0.0: version "1.0.0" resolved "https://registry.npmjs.org/unpipe/-/unpipe-1.0.0.tgz" integrity sha512-pjy2bYhSsufwWlKwPc+l3cN7+wuJlK6uz0YdJEOlQDbl6jo/YlPi4mb8agUkVC8BF7V8NuzeyPNqRksA3hztKQ== @@ -12278,7 +12689,12 @@ utils-merge@1.0.1: resolved "https://registry.npmjs.org/utils-merge/-/utils-merge-1.0.1.tgz" integrity sha512-pMZTvIkT1d+TFGvDOqodOclx0QWkkgi6Tdoa8gC8ffGAAqz9pzPTZWAybbsHHoED/ztMtkv/VoYTYyShUn81hA== -uuid@^3.0.1, uuid@^3.3.2: +uuid@^3.0.1: + version "3.4.0" + resolved "https://registry.npmjs.org/uuid/-/uuid-3.4.0.tgz" + integrity sha512-HjSDRw6gZE5JMggctHBcjVak08+KEVhSIiDzFnT9S9aegmp85S/bReBVTb4QTFaRNptJ9kuYaNhnbNEOkbKb/A== + +uuid@^3.3.2: version "3.4.0" resolved "https://registry.npmjs.org/uuid/-/uuid-3.4.0.tgz" integrity sha512-HjSDRw6gZE5JMggctHBcjVak08+KEVhSIiDzFnT9S9aegmp85S/bReBVTb4QTFaRNptJ9kuYaNhnbNEOkbKb/A== @@ -12407,7 +12823,7 @@ webpack-bundle-analyzer@^4.5.0: webpack-dev-middleware@^5.3.1: version "5.3.4" - resolved "https://registry.yarnpkg.com/webpack-dev-middleware/-/webpack-dev-middleware-5.3.4.tgz#eb7b39281cbce10e104eb2b8bf2b63fce49a3517" + resolved 
"https://registry.npmjs.org/webpack-dev-middleware/-/webpack-dev-middleware-5.3.4.tgz" integrity sha512-BVdTqhhs+0IfoeAf7EoH5WE+exCmqGerHfDM0IL096Px60Tq2Mn9MAbnaGUe6HiMa41KMCYF19gyzZmBcq/o4Q== dependencies: colorette "^2.0.10" @@ -12465,7 +12881,7 @@ webpack-sources@^3.2.2, webpack-sources@^3.2.3: resolved "https://registry.npmjs.org/webpack-sources/-/webpack-sources-3.2.3.tgz" integrity sha512-/DyMEOrDgLKKIG0fmvtz+4dUX/3Ghozwgm6iPp8KRhvn+eQf9+Q7GWxVNMk3+uCPWfdXYC4ExGBckIXdFEfH1w== -webpack@^5.73.0: +"webpack@^4.0.0 || ^5.0.0", "webpack@^4.37.0 || ^5.0.0", webpack@^5.0.0, webpack@^5.1.0, webpack@^5.20.0, webpack@^5.73.0, "webpack@>= 4", webpack@>=2, "webpack@>=4.41.1 || 5.x", "webpack@3 || 4 || 5": version "5.88.2" resolved "https://registry.npmjs.org/webpack/-/webpack-5.88.2.tgz" integrity sha512-JmcgNZ1iKj+aiR0OvTYtWQqJwq37Pf683dY9bVORwVbUrDhLhdn/PlO2sHsFHPkj7sHNQF3JwaAkp49V+Sq1tQ== @@ -12505,7 +12921,7 @@ webpackbar@^5.0.2: pretty-time "^1.1.0" std-env "^3.0.1" -websocket-driver@>=0.5.1, websocket-driver@^0.7.4: +websocket-driver@^0.7.4, websocket-driver@>=0.5.1: version "0.7.4" resolved "https://registry.npmjs.org/websocket-driver/-/websocket-driver-0.7.4.tgz" integrity sha512-b17KeDIQVjvb0ssuSDF2cYXSg2iztliJ4B9WdsuB6J952qCPKmnVq4DyW5motImXHDC1cBT/1UezrJVsKw5zjg== @@ -12558,7 +12974,14 @@ which-typed-array@^1.1.11, which-typed-array@^1.1.13: gopd "^1.0.1" has-tostringtag "^1.0.0" -which@^1.2.9, which@^1.3.1: +which@^1.2.9: + version "1.3.1" + resolved "https://registry.npmjs.org/which/-/which-1.3.1.tgz" + integrity sha512-HxJdYWq1MTIQbJ3nw0cqssHoTNU267KlrDuGZ1WYlxDStUtKUhOaJmh112/TZmHxxUfuJqPXSOm7tDyas0OSIQ== + dependencies: + isexe "^2.0.0" + +which@^1.3.1: version "1.3.1" resolved "https://registry.npmjs.org/which/-/which-1.3.1.tgz" integrity sha512-HxJdYWq1MTIQbJ3nw0cqssHoTNU267KlrDuGZ1WYlxDStUtKUhOaJmh112/TZmHxxUfuJqPXSOm7tDyas0OSIQ== diff --git a/enterprise/enterprise_callbacks/example_logging_api.py b/enterprise/enterprise_callbacks/example_logging_api.py 
index 57ea99a674..c3d3f5e63f 100644 --- a/enterprise/enterprise_callbacks/example_logging_api.py +++ b/enterprise/enterprise_callbacks/example_logging_api.py @@ -18,10 +18,6 @@ async def log_event(request: Request): return {"message": "Request received successfully"} except Exception as e: - print(f"Error processing request: {str(e)}") - import traceback - - traceback.print_exc() raise HTTPException(status_code=500, detail="Internal Server Error") diff --git a/enterprise/enterprise_callbacks/generic_api_callback.py b/enterprise/enterprise_callbacks/generic_api_callback.py index cf1d22e8f8..ba189b149c 100644 --- a/enterprise/enterprise_callbacks/generic_api_callback.py +++ b/enterprise/enterprise_callbacks/generic_api_callback.py @@ -120,6 +120,5 @@ class GenericAPILogger: ) return response except Exception as e: - traceback.print_exc() - verbose_logger.debug(f"Generic - {str(e)}\n{traceback.format_exc()}") + verbose_logger.error(f"Generic - {str(e)}\n{traceback.format_exc()}") pass diff --git a/enterprise/enterprise_hooks/banned_keywords.py b/enterprise/enterprise_hooks/banned_keywords.py index acd390d798..4cf68b2fd9 100644 --- a/enterprise/enterprise_hooks/banned_keywords.py +++ b/enterprise/enterprise_hooks/banned_keywords.py @@ -82,7 +82,7 @@ class _ENTERPRISE_BannedKeywords(CustomLogger): except HTTPException as e: raise e except Exception as e: - traceback.print_exc() + verbose_proxy_logger.error(traceback.format_exc()) async def async_post_call_success_hook( self, diff --git a/enterprise/enterprise_hooks/blocked_user_list.py b/enterprise/enterprise_hooks/blocked_user_list.py index cbc14d2c2b..8e642a026f 100644 --- a/enterprise/enterprise_hooks/blocked_user_list.py +++ b/enterprise/enterprise_hooks/blocked_user_list.py @@ -118,4 +118,4 @@ class _ENTERPRISE_BlockedUserList(CustomLogger): except HTTPException as e: raise e except Exception as e: - traceback.print_exc() + verbose_proxy_logger.error(traceback.format_exc()) diff --git 
a/enterprise/enterprise_hooks/llm_guard.py b/enterprise/enterprise_hooks/llm_guard.py index 3a15ca52b9..9db10cf79c 100644 --- a/enterprise/enterprise_hooks/llm_guard.py +++ b/enterprise/enterprise_hooks/llm_guard.py @@ -92,7 +92,7 @@ class _ENTERPRISE_LLMGuard(CustomLogger): }, ) except Exception as e: - traceback.print_exc() + verbose_proxy_logger.error(traceback.format_exc()) raise e def should_proceed(self, user_api_key_dict: UserAPIKeyAuth, data: dict) -> bool: diff --git a/litellm/__init__.py b/litellm/__init__.py index fe0dd2a56b..ba30366744 100644 --- a/litellm/__init__.py +++ b/litellm/__init__.py @@ -60,6 +60,7 @@ _async_failure_callback: List[Callable] = ( pre_call_rules: List[Callable] = [] post_call_rules: List[Callable] = [] turn_off_message_logging: Optional[bool] = False +store_audit_logs = False # Enterprise feature, allow users to see audit logs ## end of callbacks ############# email: Optional[str] = ( @@ -808,6 +809,7 @@ from .exceptions import ( APIConnectionError, APIResponseValidationError, UnprocessableEntityError, + InternalServerError, LITELLM_EXCEPTION_TYPES, ) from .budget_manager import BudgetManager diff --git a/litellm/_logging.py b/litellm/_logging.py index 1ff6e45ddb..ab7a08f976 100644 --- a/litellm/_logging.py +++ b/litellm/_logging.py @@ -1,5 +1,6 @@ import logging, os, json from logging import Formatter +import traceback set_verbose = False json_logs = bool(os.getenv("JSON_LOGS", False)) diff --git a/litellm/caching.py b/litellm/caching.py index c8c1736d86..d1f3387ee4 100644 --- a/litellm/caching.py +++ b/litellm/caching.py @@ -253,7 +253,6 @@ class RedisCache(BaseCache): str(e), value, ) - traceback.print_exc() raise e async def async_scan_iter(self, pattern: str, count: int = 100) -> list: @@ -313,7 +312,6 @@ class RedisCache(BaseCache): str(e), value, ) - traceback.print_exc() key = self.check_and_fix_namespace(key=key) async with _redis_client as redis_client: @@ -352,7 +350,6 @@ class RedisCache(BaseCache): str(e), value, ) - 
traceback.print_exc() async def async_set_cache_pipeline(self, cache_list, ttl=None): """ @@ -413,7 +410,6 @@ class RedisCache(BaseCache): str(e), cache_value, ) - traceback.print_exc() async def batch_cache_write(self, key, value, **kwargs): print_verbose( @@ -458,7 +454,6 @@ class RedisCache(BaseCache): str(e), value, ) - traceback.print_exc() raise e async def flush_cache_buffer(self): @@ -495,8 +490,9 @@ class RedisCache(BaseCache): return self._get_cache_logic(cached_response=cached_response) except Exception as e: # NON blocking - notify users Redis is throwing an exception - traceback.print_exc() - logging.debug("LiteLLM Caching: get() - Got exception from REDIS: ", e) + verbose_logger.error( + "LiteLLM Caching: get() - Got exception from REDIS: ", e + ) def batch_get_cache(self, key_list) -> dict: """ @@ -646,10 +642,9 @@ class RedisCache(BaseCache): error=e, call_type="sync_ping", ) - print_verbose( + verbose_logger.error( f"LiteLLM Redis Cache PING: - Got exception from REDIS : {str(e)}" ) - traceback.print_exc() raise e async def ping(self) -> bool: @@ -683,10 +678,9 @@ class RedisCache(BaseCache): call_type="async_ping", ) ) - print_verbose( + verbose_logger.error( f"LiteLLM Redis Cache PING: - Got exception from REDIS : {str(e)}" ) - traceback.print_exc() raise e async def delete_cache_keys(self, keys): @@ -1138,22 +1132,23 @@ class S3Cache(BaseCache): cached_response = ast.literal_eval(cached_response) if type(cached_response) is not dict: cached_response = dict(cached_response) - print_verbose( + verbose_logger.debug( f"Got S3 Cache: key: {key}, cached_response {cached_response}. Type Response {type(cached_response)}" ) return cached_response except botocore.exceptions.ClientError as e: if e.response["Error"]["Code"] == "NoSuchKey": - print_verbose( + verbose_logger.error( f"S3 Cache: The specified key '{key}' does not exist in the S3 bucket." 
) return None except Exception as e: # NON blocking - notify users S3 is throwing an exception - traceback.print_exc() - print_verbose(f"S3 Caching: get_cache() - Got exception from S3: {e}") + verbose_logger.error( + f"S3 Caching: get_cache() - Got exception from S3: {e}" + ) async def async_get_cache(self, key, **kwargs): return self.get_cache(key=key, **kwargs) @@ -1234,8 +1229,7 @@ class DualCache(BaseCache): return result except Exception as e: - print_verbose(f"LiteLLM Cache: Excepton async add_cache: {str(e)}") - traceback.print_exc() + verbose_logger.error(f"LiteLLM Cache: Excepton async add_cache: {str(e)}") raise e def get_cache(self, key, local_only: bool = False, **kwargs): @@ -1262,7 +1256,7 @@ class DualCache(BaseCache): print_verbose(f"get cache: cache result: {result}") return result except Exception as e: - traceback.print_exc() + verbose_logger.error(traceback.format_exc()) def batch_get_cache(self, keys: list, local_only: bool = False, **kwargs): try: @@ -1295,7 +1289,7 @@ class DualCache(BaseCache): print_verbose(f"async batch get cache: cache result: {result}") return result except Exception as e: - traceback.print_exc() + verbose_logger.error(traceback.format_exc()) async def async_get_cache(self, key, local_only: bool = False, **kwargs): # Try to fetch from in-memory cache first @@ -1328,7 +1322,7 @@ class DualCache(BaseCache): print_verbose(f"get cache: cache result: {result}") return result except Exception as e: - traceback.print_exc() + verbose_logger.error(traceback.format_exc()) async def async_batch_get_cache( self, keys: list, local_only: bool = False, **kwargs @@ -1368,7 +1362,7 @@ class DualCache(BaseCache): return result except Exception as e: - traceback.print_exc() + verbose_logger.error(traceback.format_exc()) async def async_set_cache(self, key, value, local_only: bool = False, **kwargs): print_verbose( @@ -1381,8 +1375,8 @@ class DualCache(BaseCache): if self.redis_cache is not None and local_only == False: await 
self.redis_cache.async_set_cache(key, value, **kwargs) except Exception as e: - print_verbose(f"LiteLLM Cache: Excepton async add_cache: {str(e)}") - traceback.print_exc() + verbose_logger.error(f"LiteLLM Cache: Excepton async add_cache: {str(e)}") + verbose_logger.debug(traceback.format_exc()) async def async_batch_set_cache( self, cache_list: list, local_only: bool = False, **kwargs @@ -1404,8 +1398,8 @@ class DualCache(BaseCache): cache_list=cache_list, ttl=kwargs.get("ttl", None) ) except Exception as e: - print_verbose(f"LiteLLM Cache: Excepton async add_cache: {str(e)}") - traceback.print_exc() + verbose_logger.error(f"LiteLLM Cache: Excepton async add_cache: {str(e)}") + verbose_logger.debug(traceback.format_exc()) async def async_increment_cache( self, key, value: float, local_only: bool = False, **kwargs @@ -1429,8 +1423,8 @@ class DualCache(BaseCache): return result except Exception as e: - print_verbose(f"LiteLLM Cache: Excepton async add_cache: {str(e)}") - traceback.print_exc() + verbose_logger.error(f"LiteLLM Cache: Excepton async add_cache: {str(e)}") + verbose_logger.debug(traceback.format_exc()) raise e def flush_cache(self): @@ -1846,8 +1840,8 @@ class Cache: ) self.cache.set_cache(cache_key, cached_data, **kwargs) except Exception as e: - print_verbose(f"LiteLLM Cache: Excepton add_cache: {str(e)}") - traceback.print_exc() + verbose_logger.error(f"LiteLLM Cache: Excepton add_cache: {str(e)}") + verbose_logger.debug(traceback.format_exc()) pass async def async_add_cache(self, result, *args, **kwargs): @@ -1864,8 +1858,8 @@ class Cache: ) await self.cache.async_set_cache(cache_key, cached_data, **kwargs) except Exception as e: - print_verbose(f"LiteLLM Cache: Excepton add_cache: {str(e)}") - traceback.print_exc() + verbose_logger.error(f"LiteLLM Cache: Excepton add_cache: {str(e)}") + verbose_logger.debug(traceback.format_exc()) async def async_add_cache_pipeline(self, result, *args, **kwargs): """ @@ -1897,8 +1891,8 @@ class Cache: ) await 
asyncio.gather(*tasks) except Exception as e: - print_verbose(f"LiteLLM Cache: Excepton add_cache: {str(e)}") - traceback.print_exc() + verbose_logger.error(f"LiteLLM Cache: Excepton add_cache: {str(e)}") + verbose_logger.debug(traceback.format_exc()) async def batch_cache_write(self, result, *args, **kwargs): cache_key, cached_data, kwargs = self._add_cache_logic( diff --git a/litellm/exceptions.py b/litellm/exceptions.py index f84cf31668..484e843b6d 100644 --- a/litellm/exceptions.py +++ b/litellm/exceptions.py @@ -638,6 +638,7 @@ LITELLM_EXCEPTION_TYPES = [ APIConnectionError, APIResponseValidationError, OpenAIError, + InternalServerError, ] diff --git a/litellm/integrations/aispend.py b/litellm/integrations/aispend.py index 2fe8ea0dfa..ca284e62e5 100644 --- a/litellm/integrations/aispend.py +++ b/litellm/integrations/aispend.py @@ -169,6 +169,5 @@ class AISpendLogger: print_verbose(f"AISpend Logging - final data object: {data}") except: - # traceback.print_exc() print_verbose(f"AISpend Logging Error - {traceback.format_exc()}") pass diff --git a/litellm/integrations/berrispend.py b/litellm/integrations/berrispend.py index 7d30b706c8..d428fb54d8 100644 --- a/litellm/integrations/berrispend.py +++ b/litellm/integrations/berrispend.py @@ -178,6 +178,5 @@ class BerriSpendLogger: print_verbose(f"BerriSpend Logging - final data object: {data}") response = requests.post(url, headers=headers, json=data) except: - # traceback.print_exc() print_verbose(f"BerriSpend Logging Error - {traceback.format_exc()}") pass diff --git a/litellm/integrations/clickhouse.py b/litellm/integrations/clickhouse.py index 0c38b86267..f8b6b1bbf0 100644 --- a/litellm/integrations/clickhouse.py +++ b/litellm/integrations/clickhouse.py @@ -297,6 +297,5 @@ class ClickhouseLogger: # make request to endpoint with payload verbose_logger.debug(f"Clickhouse Logger - final response = {response}") except Exception as e: - traceback.print_exc() verbose_logger.debug(f"Clickhouse - 
{str(e)}\n{traceback.format_exc()}") pass diff --git a/litellm/integrations/custom_logger.py b/litellm/integrations/custom_logger.py index e192cdaea7..1d447da1f2 100644 --- a/litellm/integrations/custom_logger.py +++ b/litellm/integrations/custom_logger.py @@ -115,7 +115,6 @@ class CustomLogger: # https://docs.litellm.ai/docs/observability/custom_callbac ) print_verbose(f"Custom Logger - model call details: {kwargs}") except: - traceback.print_exc() print_verbose(f"Custom Logger Error - {traceback.format_exc()}") async def async_log_input_event( @@ -130,7 +129,6 @@ class CustomLogger: # https://docs.litellm.ai/docs/observability/custom_callbac ) print_verbose(f"Custom Logger - model call details: {kwargs}") except: - traceback.print_exc() print_verbose(f"Custom Logger Error - {traceback.format_exc()}") def log_event( @@ -146,7 +144,6 @@ class CustomLogger: # https://docs.litellm.ai/docs/observability/custom_callbac end_time, ) except: - # traceback.print_exc() print_verbose(f"Custom Logger Error - {traceback.format_exc()}") pass @@ -163,6 +160,5 @@ class CustomLogger: # https://docs.litellm.ai/docs/observability/custom_callbac end_time, ) except: - # traceback.print_exc() print_verbose(f"Custom Logger Error - {traceback.format_exc()}") pass diff --git a/litellm/integrations/datadog.py b/litellm/integrations/datadog.py index 6d5e08faff..d835b3d670 100644 --- a/litellm/integrations/datadog.py +++ b/litellm/integrations/datadog.py @@ -134,7 +134,6 @@ class DataDogLogger: f"Datadog Layer Logging - final response object: {response_obj}" ) except Exception as e: - traceback.print_exc() verbose_logger.debug( f"Datadog Layer Error - {str(e)}\n{traceback.format_exc()}" ) diff --git a/litellm/integrations/dynamodb.py b/litellm/integrations/dynamodb.py index 21ccabe4b7..847f930ece 100644 --- a/litellm/integrations/dynamodb.py +++ b/litellm/integrations/dynamodb.py @@ -85,6 +85,5 @@ class DyanmoDBLogger: ) return response except: - traceback.print_exc() 
print_verbose(f"DynamoDB Layer Error - {traceback.format_exc()}") pass diff --git a/litellm/integrations/helicone.py b/litellm/integrations/helicone.py index 85e73258ea..8ea18a7d5b 100644 --- a/litellm/integrations/helicone.py +++ b/litellm/integrations/helicone.py @@ -112,6 +112,5 @@ class HeliconeLogger: ) print_verbose(f"Helicone Logging - Error {response.text}") except: - # traceback.print_exc() print_verbose(f"Helicone Logging Error - {traceback.format_exc()}") pass diff --git a/litellm/integrations/langfuse.py b/litellm/integrations/langfuse.py index 4d580f6666..7fe2e9f227 100644 --- a/litellm/integrations/langfuse.py +++ b/litellm/integrations/langfuse.py @@ -69,6 +69,43 @@ class LangFuseLogger: else: self.upstream_langfuse = None + @staticmethod + def add_metadata_from_header(litellm_params: dict, metadata: dict) -> dict: + """ + Adds metadata from proxy request headers to Langfuse logging if keys start with "langfuse_" + and overwrites litellm_params.metadata if already included. + + For example if you want to append your trace to an existing `trace_id` via header, send + `headers: { ..., langfuse_existing_trace_id: your-existing-trace-id }` via proxy request. 
+ """ + if litellm_params is None: + return metadata + + if litellm_params.get("proxy_server_request") is None: + return metadata + + if metadata is None: + metadata = {} + + proxy_headers = ( + litellm_params.get("proxy_server_request", {}).get("headers", {}) or {} + ) + + for metadata_param_key in proxy_headers: + if metadata_param_key.startswith("langfuse_"): + trace_param_key = metadata_param_key.replace("langfuse_", "", 1) + if trace_param_key in metadata: + verbose_logger.warning( + f"Overwriting Langfuse `{trace_param_key}` from request header" + ) + else: + verbose_logger.debug( + f"Found Langfuse `{trace_param_key}` in request header" + ) + metadata[trace_param_key] = proxy_headers.get(metadata_param_key) + + return metadata + # def log_error(kwargs, response_obj, start_time, end_time): # generation = trace.generation( # level ="ERROR" # can be any of DEBUG, DEFAULT, WARNING or ERROR @@ -97,6 +134,7 @@ class LangFuseLogger: metadata = ( litellm_params.get("metadata", {}) or {} ) # if litellm_params['metadata'] == None + metadata = self.add_metadata_from_header(litellm_params, metadata) optional_params = copy.deepcopy(kwargs.get("optional_params", {})) prompt = {"messages": kwargs.get("messages")} @@ -182,9 +220,11 @@ class LangFuseLogger: verbose_logger.info(f"Langfuse Layer Logging - logging success") return {"trace_id": trace_id, "generation_id": generation_id} - except: - traceback.print_exc() - verbose_logger.debug(f"Langfuse Layer Error - {traceback.format_exc()}") + except Exception as e: + verbose_logger.error( + "Langfuse Layer Error(): Exception occured - {}".format(str(e)) + ) + verbose_logger.debug(traceback.format_exc()) return {"trace_id": None, "generation_id": None} async def _async_log_event( diff --git a/litellm/integrations/langsmith.py b/litellm/integrations/langsmith.py index 3e25b4ee77..48185afeea 100644 --- a/litellm/integrations/langsmith.py +++ b/litellm/integrations/langsmith.py @@ -44,7 +44,9 @@ class LangsmithLogger: 
print_verbose( f"Langsmith Logging - project_name: {project_name}, run_name {run_name}" ) - langsmith_base_url = os.getenv("LANGSMITH_BASE_URL", "https://api.smith.langchain.com") + langsmith_base_url = os.getenv( + "LANGSMITH_BASE_URL", "https://api.smith.langchain.com" + ) try: print_verbose( @@ -89,9 +91,7 @@ class LangsmithLogger: } url = f"{langsmith_base_url}/runs" - print_verbose( - f"Langsmith Logging - About to send data to {url} ..." - ) + print_verbose(f"Langsmith Logging - About to send data to {url} ...") response = requests.post( url=url, json=data, @@ -106,6 +106,5 @@ class LangsmithLogger: f"Langsmith Layer Logging - final response object: {response_obj}" ) except: - # traceback.print_exc() print_verbose(f"Langsmith Layer Error - {traceback.format_exc()}") pass diff --git a/litellm/integrations/logfire_logger.py b/litellm/integrations/logfire_logger.py index e27d848fb4..b4ab00820e 100644 --- a/litellm/integrations/logfire_logger.py +++ b/litellm/integrations/logfire_logger.py @@ -171,7 +171,6 @@ class LogfireLogger: f"Logfire Layer Logging - final response object: {response_obj}" ) except Exception as e: - traceback.print_exc() verbose_logger.debug( f"Logfire Layer Error - {str(e)}\n{traceback.format_exc()}" ) diff --git a/litellm/integrations/lunary.py b/litellm/integrations/lunary.py index 2e16e44a14..141ea64884 100644 --- a/litellm/integrations/lunary.py +++ b/litellm/integrations/lunary.py @@ -14,6 +14,7 @@ def parse_usage(usage): "prompt": usage["prompt_tokens"] if "prompt_tokens" in usage else 0, } + def parse_tool_calls(tool_calls): if tool_calls is None: return None @@ -26,13 +27,13 @@ def parse_tool_calls(tool_calls): "function": { "name": tool_call.function.name, "arguments": tool_call.function.arguments, - } + }, } return serialized - + return [clean_tool_call(tool_call) for tool_call in tool_calls] - + def parse_messages(input): @@ -176,6 +177,5 @@ class LunaryLogger: ) except: - # traceback.print_exc() print_verbose(f"Lunary Logging 
Error - {traceback.format_exc()}") pass diff --git a/litellm/integrations/prometheus.py b/litellm/integrations/prometheus.py index 6fbc6ca4ce..af0d1d310b 100644 --- a/litellm/integrations/prometheus.py +++ b/litellm/integrations/prometheus.py @@ -109,8 +109,8 @@ class PrometheusLogger: end_user_id, user_api_key, model, user_api_team, user_id ).inc() except Exception as e: - traceback.print_exc() - verbose_logger.debug( - f"prometheus Layer Error - {str(e)}\n{traceback.format_exc()}" + verbose_logger.error( + "prometheus Layer Error(): Exception occured - {}".format(str(e)) ) + verbose_logger.debug(traceback.format_exc()) pass diff --git a/litellm/integrations/s3.py b/litellm/integrations/s3.py index d131e44f0e..0796d1048b 100644 --- a/litellm/integrations/s3.py +++ b/litellm/integrations/s3.py @@ -180,6 +180,5 @@ class S3Logger: print_verbose(f"s3 Layer Logging - final response object: {response_obj}") return response except Exception as e: - traceback.print_exc() verbose_logger.debug(f"s3 Layer Error - {str(e)}\n{traceback.format_exc()}") pass diff --git a/litellm/integrations/supabase.py b/litellm/integrations/supabase.py index 4e6bf517f3..7309342e4c 100644 --- a/litellm/integrations/supabase.py +++ b/litellm/integrations/supabase.py @@ -110,6 +110,5 @@ class Supabase: ) except: - # traceback.print_exc() print_verbose(f"Supabase Logging Error - {traceback.format_exc()}") pass diff --git a/litellm/integrations/weights_biases.py b/litellm/integrations/weights_biases.py index a56233b22f..1ac535c4f2 100644 --- a/litellm/integrations/weights_biases.py +++ b/litellm/integrations/weights_biases.py @@ -217,6 +217,5 @@ class WeightsBiasesLogger: f"W&B Logging Logging - final response object: {response_obj}" ) except: - # traceback.print_exc() print_verbose(f"W&B Logging Layer Error - {traceback.format_exc()}") pass diff --git a/litellm/llms/gemini.py b/litellm/llms/gemini.py index a55b39aef9..cfdf39eca2 100644 --- a/litellm/llms/gemini.py +++ b/litellm/llms/gemini.py @@ 
-1,13 +1,14 @@ -import os, types, traceback, copy, asyncio -import json -from enum import Enum +import types +import traceback +import copy import time from typing import Callable, Optional -from litellm.utils import ModelResponse, get_secret, Choices, Message, Usage +from litellm.utils import ModelResponse, Choices, Message, Usage import litellm -import sys, httpx +import httpx from .prompt_templates.factory import prompt_factory, custom_prompt, get_system_prompt from packaging.version import Version +from litellm import verbose_logger class GeminiError(Exception): @@ -264,7 +265,8 @@ def completion( choices_list.append(choice_obj) model_response["choices"] = choices_list except Exception as e: - traceback.print_exc() + verbose_logger.error("LiteLLM.gemini.py: Exception occured - {}".format(str(e))) + verbose_logger.debug(traceback.format_exc()) raise GeminiError( message=traceback.format_exc(), status_code=response.status_code ) @@ -356,7 +358,8 @@ async def async_completion( choices_list.append(choice_obj) model_response["choices"] = choices_list except Exception as e: - traceback.print_exc() + verbose_logger.error("LiteLLM.gemini.py: Exception occured - {}".format(str(e))) + verbose_logger.debug(traceback.format_exc()) raise GeminiError( message=traceback.format_exc(), status_code=response.status_code ) diff --git a/litellm/llms/ollama.py b/litellm/llms/ollama.py index 283878056f..e7dd1d5f55 100644 --- a/litellm/llms/ollama.py +++ b/litellm/llms/ollama.py @@ -2,10 +2,12 @@ from itertools import chain import requests, types, time # type: ignore import json, uuid import traceback -from typing import Optional +from typing import Optional, List import litellm +from litellm.types.utils import ProviderField import httpx, aiohttp, asyncio # type: ignore from .prompt_templates.factory import prompt_factory, custom_prompt +from litellm import verbose_logger class OllamaError(Exception): @@ -124,6 +126,19 @@ class OllamaConfig: ) and v is not None } + + def 
get_required_params(self) -> List[ProviderField]: + """For a given provider, return it's required fields with a description""" + return [ + ProviderField( + field_name="base_url", + field_type="string", + field_description="Your Ollama API Base", + field_value="http://10.10.11.249:11434", + ) + ] + + def get_supported_openai_params( self, ): @@ -138,10 +153,12 @@ class OllamaConfig: "response_format", ] + # ollama wants plain base64 jpeg/png files as images. strip any leading dataURI # and convert to jpeg if necessary. def _convert_image(image): import base64, io + try: from PIL import Image except: @@ -391,7 +408,13 @@ async def ollama_async_streaming(url, data, model_response, encoding, logging_ob async for transformed_chunk in streamwrapper: yield transformed_chunk except Exception as e: - traceback.print_exc() + verbose_logger.error( + "LiteLLM.ollama.py::ollama_async_streaming(): Exception occured - {}".format( + str(e) + ) + ) + verbose_logger.debug(traceback.format_exc()) + raise e @@ -455,7 +478,12 @@ async def ollama_acompletion(url, data, model_response, encoding, logging_obj): ) return model_response except Exception as e: - traceback.print_exc() + verbose_logger.error( + "LiteLLM.ollama.py::ollama_acompletion(): Exception occured - {}".format( + str(e) + ) + ) + verbose_logger.debug(traceback.format_exc()) raise e diff --git a/litellm/llms/ollama_chat.py b/litellm/llms/ollama_chat.py index a058077227..a7439bbcc0 100644 --- a/litellm/llms/ollama_chat.py +++ b/litellm/llms/ollama_chat.py @@ -1,11 +1,15 @@ from itertools import chain -import requests, types, time -import json, uuid +import requests +import types +import time +import json +import uuid import traceback from typing import Optional +from litellm import verbose_logger import litellm -import httpx, aiohttp, asyncio -from .prompt_templates.factory import prompt_factory, custom_prompt +import httpx +import aiohttp class OllamaError(Exception): @@ -299,7 +303,10 @@ def get_ollama_response( 
tool_calls=[ { "id": f"call_{str(uuid.uuid4())}", - "function": {"name": function_call["name"], "arguments": json.dumps(function_call["arguments"])}, + "function": { + "name": function_call["name"], + "arguments": json.dumps(function_call["arguments"]), + }, "type": "function", } ], @@ -307,7 +314,9 @@ def get_ollama_response( model_response["choices"][0]["message"] = message model_response["choices"][0]["finish_reason"] = "tool_calls" else: - model_response["choices"][0]["message"]["content"] = response_json["message"]["content"] + model_response["choices"][0]["message"]["content"] = response_json["message"][ + "content" + ] model_response["created"] = int(time.time()) model_response["model"] = "ollama/" + model prompt_tokens = response_json.get("prompt_eval_count", litellm.token_counter(messages=messages)) # type: ignore @@ -361,7 +370,10 @@ def ollama_completion_stream(url, api_key, data, logging_obj): tool_calls=[ { "id": f"call_{str(uuid.uuid4())}", - "function": {"name": function_call["name"], "arguments": json.dumps(function_call["arguments"])}, + "function": { + "name": function_call["name"], + "arguments": json.dumps(function_call["arguments"]), + }, "type": "function", } ], @@ -410,9 +422,10 @@ async def ollama_async_streaming( first_chunk_content = first_chunk.choices[0].delta.content or "" response_content = first_chunk_content + "".join( [ - chunk.choices[0].delta.content - async for chunk in streamwrapper - if chunk.choices[0].delta.content] + chunk.choices[0].delta.content + async for chunk in streamwrapper + if chunk.choices[0].delta.content + ] ) function_call = json.loads(response_content) delta = litellm.utils.Delta( @@ -420,7 +433,10 @@ async def ollama_async_streaming( tool_calls=[ { "id": f"call_{str(uuid.uuid4())}", - "function": {"name": function_call["name"], "arguments": json.dumps(function_call["arguments"])}, + "function": { + "name": function_call["name"], + "arguments": json.dumps(function_call["arguments"]), + }, "type": "function", } 
], @@ -433,7 +449,8 @@ async def ollama_async_streaming( async for transformed_chunk in streamwrapper: yield transformed_chunk except Exception as e: - traceback.print_exc() + verbose_logger.error("LiteLLM.gemini(): Exception occured - {}".format(str(e))) + verbose_logger.debug(traceback.format_exc()) async def ollama_acompletion( @@ -483,7 +500,10 @@ async def ollama_acompletion( tool_calls=[ { "id": f"call_{str(uuid.uuid4())}", - "function": {"name": function_call["name"], "arguments": json.dumps(function_call["arguments"])}, + "function": { + "name": function_call["name"], + "arguments": json.dumps(function_call["arguments"]), + }, "type": "function", } ], @@ -491,7 +511,9 @@ async def ollama_acompletion( model_response["choices"][0]["message"] = message model_response["choices"][0]["finish_reason"] = "tool_calls" else: - model_response["choices"][0]["message"]["content"] = response_json["message"]["content"] + model_response["choices"][0]["message"]["content"] = response_json[ + "message" + ]["content"] model_response["created"] = int(time.time()) model_response["model"] = "ollama_chat/" + data["model"] @@ -509,5 +531,9 @@ async def ollama_acompletion( ) return model_response except Exception as e: - traceback.print_exc() + verbose_logger.error( + "LiteLLM.ollama_acompletion(): Exception occured - {}".format(str(e)) + ) + verbose_logger.debug(traceback.format_exc()) + raise e diff --git a/litellm/llms/palm.py b/litellm/llms/palm.py index f15be43db4..4d9953e77a 100644 --- a/litellm/llms/palm.py +++ b/litellm/llms/palm.py @@ -1,11 +1,12 @@ -import os, types, traceback, copy -import json -from enum import Enum +import types +import traceback +import copy import time from typing import Callable, Optional -from litellm.utils import ModelResponse, get_secret, Choices, Message, Usage +from litellm.utils import ModelResponse, Choices, Message, Usage import litellm -import sys, httpx +import httpx +from litellm import verbose_logger class PalmError(Exception): @@ -165,7 
+166,10 @@ def completion( choices_list.append(choice_obj) model_response["choices"] = choices_list except Exception as e: - traceback.print_exc() + verbose_logger.error( + "litellm.llms.palm.py::completion(): Exception occured - {}".format(str(e)) + ) + verbose_logger.debug(traceback.format_exc()) raise PalmError( message=traceback.format_exc(), status_code=response.status_code ) diff --git a/litellm/llms/prompt_templates/factory.py b/litellm/llms/prompt_templates/factory.py index ddd0e1909f..e5c8a79958 100644 --- a/litellm/llms/prompt_templates/factory.py +++ b/litellm/llms/prompt_templates/factory.py @@ -826,7 +826,7 @@ def anthropic_messages_pt_xml(messages: list): ) # either string or none if messages[msg_i].get( "tool_calls", [] - ): # support assistant tool invoke convertion + ): # support assistant tool invoke conversion assistant_text += convert_to_anthropic_tool_invoke_xml( # type: ignore messages[msg_i]["tool_calls"] ) @@ -1217,7 +1217,7 @@ def anthropic_messages_pt(messages: list): if messages[msg_i].get( "tool_calls", [] - ): # support assistant tool invoke convertion + ): # support assistant tool invoke conversion assistant_content.extend( convert_to_anthropic_tool_invoke(messages[msg_i]["tool_calls"]) ) diff --git a/litellm/llms/vertex_ai.py b/litellm/llms/vertex_ai.py index 5171b1efcc..aabe084b8e 100644 --- a/litellm/llms/vertex_ai.py +++ b/litellm/llms/vertex_ai.py @@ -297,24 +297,29 @@ def _convert_gemini_role(role: str) -> Literal["user", "model"]: def _process_gemini_image(image_url: str) -> PartType: try: - if "gs://" in image_url: - # Case 1: Images with Cloud Storage URIs + if ".mp4" in image_url and "gs://" in image_url: + # Case 1: Videos with Cloud Storage URIs + part_mime = "video/mp4" + _file_data = FileDataType(mime_type=part_mime, file_uri=image_url) + return PartType(file_data=_file_data) + elif ".pdf" in image_url and "gs://" in image_url: + # Case 2: PDF's with Cloud Storage URIs + part_mime = "application/pdf" + _file_data = 
FileDataType(mime_type=part_mime, file_uri=image_url) + return PartType(file_data=_file_data) + elif "gs://" in image_url: + # Case 3: Images with Cloud Storage URIs # The supported MIME types for images include image/png and image/jpeg. part_mime = "image/png" if "png" in image_url else "image/jpeg" _file_data = FileDataType(mime_type=part_mime, file_uri=image_url) return PartType(file_data=_file_data) elif "https:/" in image_url: - # Case 2: Images with direct links + # Case 4: Images with direct links image = _load_image_from_url(image_url) _blob = BlobType(data=image.data, mime_type=image._mime_type) return PartType(inline_data=_blob) - elif ".mp4" in image_url and "gs://" in image_url: - # Case 3: Videos with Cloud Storage URIs - part_mime = "video/mp4" - _file_data = FileDataType(mime_type=part_mime, file_uri=image_url) - return PartType(file_data=_file_data) elif "base64" in image_url: - # Case 4: Images with base64 encoding + # Case 5: Images with base64 encoding import base64, re # base 64 is passed as data:image/jpeg;base64, @@ -390,7 +395,7 @@ def _gemini_convert_messages_with_history(messages: list) -> List[ContentType]: assistant_content.extend(_parts) elif messages[msg_i].get( "tool_calls", [] - ): # support assistant tool invoke convertion + ): # support assistant tool invoke conversion assistant_content.extend( convert_to_gemini_tool_call_invoke(messages[msg_i]["tool_calls"]) ) @@ -642,9 +647,9 @@ def completion( prompt = " ".join( [ - message["content"] + message.get("content") for message in messages - if isinstance(message["content"], str) + if isinstance(message.get("content", None), str) ] ) diff --git a/litellm/main.py b/litellm/main.py index 15334d0414..5012ef1662 100644 --- a/litellm/main.py +++ b/litellm/main.py @@ -365,7 +365,10 @@ async def acompletion( ) # sets the logging event loop if the user does sync streaming (e.g. 
on proxy for sagemaker calls) return response except Exception as e: - traceback.print_exc() + verbose_logger.error( + "litellm.acompletion(): Exception occured - {}".format(str(e)) + ) + verbose_logger.debug(traceback.format_exc()) custom_llm_provider = custom_llm_provider or "openai" raise exception_type( model=model, @@ -478,7 +481,10 @@ def mock_completion( except Exception as e: if isinstance(e, openai.APIError): raise e - traceback.print_exc() + verbose_logger.error( + "litellm.mock_completion(): Exception occured - {}".format(str(e)) + ) + verbose_logger.debug(traceback.format_exc()) raise Exception("Mock completion response failed") @@ -4449,7 +4455,10 @@ async def ahealth_check( response = {} # args like remaining ratelimit etc. return response except Exception as e: - traceback.print_exc() + verbose_logger.error( + "litellm.ahealth_check(): Exception occured - {}".format(str(e)) + ) + verbose_logger.debug(traceback.format_exc()) stack_trace = traceback.format_exc() if isinstance(stack_trace, str): stack_trace = stack_trace[:1000] diff --git a/litellm/proxy/_logging.py b/litellm/proxy/_logging.py index 22cbd88cb7..f453cef395 100644 --- a/litellm/proxy/_logging.py +++ b/litellm/proxy/_logging.py @@ -1,6 +1,7 @@ import json import logging from logging import Formatter +import sys class JsonFormatter(Formatter): diff --git a/litellm/proxy/_super_secret_config.yaml b/litellm/proxy/_super_secret_config.yaml index 72479bd5d5..7fa1bbc19c 100644 --- a/litellm/proxy/_super_secret_config.yaml +++ b/litellm/proxy/_super_secret_config.yaml @@ -56,8 +56,10 @@ router_settings: litellm_settings: success_callback: ["langfuse"] - json_logs: true general_settings: alerting: ["email"] + key_management_system: "aws_kms" + key_management_settings: + hosted_keys: ["LITELLM_MASTER_KEY"] diff --git a/litellm/proxy/_types.py b/litellm/proxy/_types.py index f54bee399b..c19fd7e69e 100644 --- a/litellm/proxy/_types.py +++ b/litellm/proxy/_types.py @@ -76,6 +76,17 @@ class 
LitellmUserRoles(str, enum.Enum): return ui_labels.get(self.value, "") +class LitellmTableNames(str, enum.Enum): + """ + Enum for Table Names used by LiteLLM + """ + + TEAM_TABLE_NAME: str = "LiteLLM_TeamTable" + USER_TABLE_NAME: str = "LiteLLM_UserTable" + KEY_TABLE_NAME: str = "LiteLLM_VerificationToken" + PROXY_MODEL_TABLE_NAME: str = "LiteLLM_ModelTable" + + AlertType = Literal[ "llm_exceptions", "llm_too_slow", @@ -935,6 +946,7 @@ class KeyManagementSystem(enum.Enum): AZURE_KEY_VAULT = "azure_key_vault" AWS_SECRET_MANAGER = "aws_secret_manager" LOCAL = "local" + AWS_KMS = "aws_kms" class KeyManagementSettings(LiteLLMBase): @@ -1276,6 +1288,22 @@ class LiteLLM_ErrorLogs(LiteLLMBase): endTime: Union[str, datetime, None] +class LiteLLM_AuditLogs(LiteLLMBase): + id: str + updated_at: datetime + changed_by: str + action: Literal["created", "updated", "deleted"] + table_name: Literal[ + LitellmTableNames.TEAM_TABLE_NAME, + LitellmTableNames.USER_TABLE_NAME, + LitellmTableNames.KEY_TABLE_NAME, + LitellmTableNames.PROXY_MODEL_TABLE_NAME, + ] + object_id: str + before_value: Optional[Json] = None + updated_values: Optional[Json] = None + + class LiteLLM_SpendLogs_ResponseObject(LiteLLMBase): response: Optional[List[Union[LiteLLM_SpendLogs, Any]]] = None diff --git a/litellm/proxy/hooks/azure_content_safety.py b/litellm/proxy/hooks/azure_content_safety.py index 5b5139f8c7..47ba36a683 100644 --- a/litellm/proxy/hooks/azure_content_safety.py +++ b/litellm/proxy/hooks/azure_content_safety.py @@ -88,7 +88,7 @@ class _PROXY_AzureContentSafety( verbose_proxy_logger.debug( "Error in Azure Content-Safety: %s", traceback.format_exc() ) - traceback.print_exc() + verbose_proxy_logger.debug(traceback.format_exc()) raise result = self._compute_result(response) @@ -123,7 +123,12 @@ class _PROXY_AzureContentSafety( except HTTPException as e: raise e except Exception as e: - traceback.print_exc() + verbose_proxy_logger.error( + 
"litellm.proxy.hooks.azure_content_safety.py::async_pre_call_hook(): Exception occured - {}".format( + str(e) + ) + ) + verbose_proxy_logger.debug(traceback.format_exc()) async def async_post_call_success_hook( self, diff --git a/litellm/proxy/hooks/batch_redis_get.py b/litellm/proxy/hooks/batch_redis_get.py index 64541c1bff..d506109b81 100644 --- a/litellm/proxy/hooks/batch_redis_get.py +++ b/litellm/proxy/hooks/batch_redis_get.py @@ -94,7 +94,12 @@ class _PROXY_BatchRedisRequests(CustomLogger): except HTTPException as e: raise e except Exception as e: - traceback.print_exc() + verbose_proxy_logger.error( + "litellm.proxy.hooks.batch_redis_get.py::async_pre_call_hook(): Exception occured - {}".format( + str(e) + ) + ) + verbose_proxy_logger.debug(traceback.format_exc()) async def async_get_cache(self, *args, **kwargs): """ diff --git a/litellm/proxy/hooks/cache_control_check.py b/litellm/proxy/hooks/cache_control_check.py index 3160fe97ad..89971a0bf7 100644 --- a/litellm/proxy/hooks/cache_control_check.py +++ b/litellm/proxy/hooks/cache_control_check.py @@ -1,13 +1,13 @@ # What this does? 
## Checks if key is allowed to use the cache controls passed in to the completion() call -from typing import Optional import litellm +from litellm import verbose_logger from litellm.caching import DualCache from litellm.proxy._types import UserAPIKeyAuth from litellm.integrations.custom_logger import CustomLogger from fastapi import HTTPException -import json, traceback +import traceback class _PROXY_CacheControlCheck(CustomLogger): @@ -54,4 +54,9 @@ class _PROXY_CacheControlCheck(CustomLogger): except HTTPException as e: raise e except Exception as e: - traceback.print_exc() + verbose_logger.error( + "litellm.proxy.hooks.cache_control_check.py::async_pre_call_hook(): Exception occured - {}".format( + str(e) + ) + ) + verbose_logger.debug(traceback.format_exc()) diff --git a/litellm/proxy/hooks/max_budget_limiter.py b/litellm/proxy/hooks/max_budget_limiter.py index 442cc53e37..c4b328bab0 100644 --- a/litellm/proxy/hooks/max_budget_limiter.py +++ b/litellm/proxy/hooks/max_budget_limiter.py @@ -1,10 +1,10 @@ -from typing import Optional +from litellm import verbose_logger import litellm from litellm.caching import DualCache from litellm.proxy._types import UserAPIKeyAuth from litellm.integrations.custom_logger import CustomLogger from fastapi import HTTPException -import json, traceback +import traceback class _PROXY_MaxBudgetLimiter(CustomLogger): @@ -44,4 +44,9 @@ class _PROXY_MaxBudgetLimiter(CustomLogger): except HTTPException as e: raise e except Exception as e: - traceback.print_exc() + verbose_logger.error( + "litellm.proxy.hooks.max_budget_limiter.py::async_pre_call_hook(): Exception occured - {}".format( + str(e) + ) + ) + verbose_logger.debug(traceback.format_exc()) diff --git a/litellm/proxy/hooks/presidio_pii_masking.py b/litellm/proxy/hooks/presidio_pii_masking.py index 95a6e9c3c8..e64e69c457 100644 --- a/litellm/proxy/hooks/presidio_pii_masking.py +++ b/litellm/proxy/hooks/presidio_pii_masking.py @@ -8,8 +8,8 @@ # Tell us how we can improve! 
- Krrish & Ishaan -from typing import Optional, Literal, Union -import litellm, traceback, sys, uuid, json +from typing import Optional, Union +import litellm, traceback, uuid, json # noqa: E401 from litellm.caching import DualCache from litellm.proxy._types import UserAPIKeyAuth from litellm.integrations.custom_logger import CustomLogger @@ -21,8 +21,8 @@ from litellm.utils import ( ImageResponse, StreamingChoices, ) -from datetime import datetime -import aiohttp, asyncio +import aiohttp +import asyncio class _OPTIONAL_PresidioPIIMasking(CustomLogger): @@ -138,7 +138,12 @@ class _OPTIONAL_PresidioPIIMasking(CustomLogger): else: raise Exception(f"Invalid anonymizer response: {redacted_text}") except Exception as e: - traceback.print_exc() + verbose_proxy_logger.error( + "litellm.proxy.hooks.presidio_pii_masking.py::async_pre_call_hook(): Exception occured - {}".format( + str(e) + ) + ) + verbose_proxy_logger.debug(traceback.format_exc()) raise e async def async_pre_call_hook( diff --git a/litellm/proxy/hooks/prompt_injection_detection.py b/litellm/proxy/hooks/prompt_injection_detection.py index 08dbedd8c8..ed33e3b519 100644 --- a/litellm/proxy/hooks/prompt_injection_detection.py +++ b/litellm/proxy/hooks/prompt_injection_detection.py @@ -204,7 +204,12 @@ class _OPTIONAL_PromptInjectionDetection(CustomLogger): return e.detail["error"] raise e except Exception as e: - traceback.print_exc() + verbose_proxy_logger.error( + "litellm.proxy.hooks.prompt_injection_detection.py::async_pre_call_hook(): Exception occured - {}".format( + str(e) + ) + ) + verbose_proxy_logger.debug(traceback.format_exc()) async def async_moderation_hook( self, diff --git a/litellm/proxy/proxy_config.yaml b/litellm/proxy/proxy_config.yaml index e3d4effe80..88fc0e9136 100644 --- a/litellm/proxy/proxy_config.yaml +++ b/litellm/proxy/proxy_config.yaml @@ -23,4 +23,5 @@ general_settings: master_key: sk-1234 litellm_settings: - callbacks: ["otel"] \ No newline at end of file + callbacks: ["otel"] + 
store_audit_logs: true \ No newline at end of file diff --git a/litellm/proxy/proxy_server.py b/litellm/proxy/proxy_server.py index 25035a016f..9d1688815c 100644 --- a/litellm/proxy/proxy_server.py +++ b/litellm/proxy/proxy_server.py @@ -103,6 +103,7 @@ from litellm.proxy.utils import ( update_spend, encrypt_value, decrypt_value, + get_error_message_str, ) from litellm import ( CreateBatchRequest, @@ -112,7 +113,10 @@ from litellm import ( CreateFileRequest, ) from litellm.proxy.secret_managers.google_kms import load_google_kms -from litellm.proxy.secret_managers.aws_secret_manager import load_aws_secret_manager +from litellm.proxy.secret_managers.aws_secret_manager import ( + load_aws_secret_manager, + load_aws_kms, +) import pydantic from litellm.proxy._types import * from litellm.caching import DualCache, RedisCache @@ -125,7 +129,10 @@ from litellm.router import ( AssistantsTypedDict, ) from litellm.router import ModelInfo as RouterModelInfo -from litellm._logging import verbose_router_logger, verbose_proxy_logger +from litellm._logging import ( + verbose_router_logger, + verbose_proxy_logger, +) from litellm.proxy.auth.handle_jwt import JWTHandler from litellm.proxy.auth.litellm_license import LicenseCheck from litellm.proxy.auth.model_checks import ( @@ -1471,7 +1478,12 @@ async def user_api_key_auth( else: raise Exception() except Exception as e: - traceback.print_exc() + verbose_proxy_logger.error( + "litellm.proxy.proxy_server.user_api_key_auth(): Exception occured - {}".format( + str(e) + ) + ) + verbose_proxy_logger.debug(traceback.format_exc()) if isinstance(e, litellm.BudgetExceededError): raise ProxyException( message=e.message, type="auth_error", param=None, code=400 @@ -2736,10 +2748,12 @@ class ProxyConfig: load_google_kms(use_google_kms=True) elif ( key_management_system - == KeyManagementSystem.AWS_SECRET_MANAGER.value + == KeyManagementSystem.AWS_SECRET_MANAGER.value # noqa: F405 ): ### LOAD FROM AWS SECRET MANAGER ### 
load_aws_secret_manager(use_aws_secret_manager=True) + elif key_management_system == KeyManagementSystem.AWS_KMS.value: + load_aws_kms(use_aws_kms=True) else: raise ValueError("Invalid Key Management System selected") key_management_settings = general_settings.get( @@ -2773,6 +2787,7 @@ class ProxyConfig: master_key = general_settings.get( "master_key", litellm.get_secret("LITELLM_MASTER_KEY", None) ) + if master_key and master_key.startswith("os.environ/"): master_key = litellm.get_secret(master_key) if not isinstance(master_key, str): @@ -3476,7 +3491,12 @@ async def generate_key_helper_fn( ) key_data["token_id"] = getattr(create_key_response, "token", None) except Exception as e: - traceback.print_exc() + verbose_proxy_logger.error( + "litellm.proxy.proxy_server.generate_key_helper_fn(): Exception occured - {}".format( + str(e) + ) + ) + verbose_proxy_logger.debug(traceback.format_exc()) if isinstance(e, HTTPException): raise e raise HTTPException( @@ -3515,7 +3535,12 @@ async def delete_verification_token(tokens: List, user_id: Optional[str] = None) else: raise Exception("DB not connected. 
prisma_client is None") except Exception as e: - traceback.print_exc() + verbose_proxy_logger.error( + "litellm.proxy.proxy_server.delete_verification_token(): Exception occured - {}".format( + str(e) + ) + ) + verbose_proxy_logger.debug(traceback.format_exc()) raise e return deleted_tokens @@ -3676,7 +3701,12 @@ async def async_assistants_data_generator( done_message = "[DONE]" yield f"data: {done_message}\n\n" except Exception as e: - traceback.print_exc() + verbose_proxy_logger.error( + "litellm.proxy.proxy_server.async_assistants_data_generator(): Exception occured - {}".format( + str(e) + ) + ) + verbose_proxy_logger.debug(traceback.format_exc()) await proxy_logging_obj.post_call_failure_hook( user_api_key_dict=user_api_key_dict, original_exception=e, @@ -3686,9 +3716,6 @@ async def async_assistants_data_generator( f"\033[1;31mAn error occurred: {e}\n\n Debug this by setting `--debug`, e.g. `litellm --model gpt-3.5-turbo --debug`" ) router_model_names = llm_router.model_names if llm_router is not None else [] - if user_debug: - traceback.print_exc() - if isinstance(e, HTTPException): raise e else: @@ -3728,7 +3755,12 @@ async def async_data_generator( done_message = "[DONE]" yield f"data: {done_message}\n\n" except Exception as e: - traceback.print_exc() + verbose_proxy_logger.error( + "litellm.proxy.proxy_server.async_data_generator(): Exception occured - {}".format( + str(e) + ) + ) + verbose_proxy_logger.debug(traceback.format_exc()) await proxy_logging_obj.post_call_failure_hook( user_api_key_dict=user_api_key_dict, original_exception=e, @@ -3738,8 +3770,6 @@ async def async_data_generator( f"\033[1;31mAn error occurred: {e}\n\n Debug this by setting `--debug`, e.g. `litellm --model gpt-3.5-turbo --debug`" ) router_model_names = llm_router.model_names if llm_router is not None else [] - if user_debug: - traceback.print_exc() if isinstance(e, HTTPException): raise e @@ -3800,6 +3830,18 @@ def on_backoff(details): verbose_proxy_logger.debug("Backing off... 
this was attempt # %s", details["tries"]) +def giveup(e): + result = not ( + isinstance(e, ProxyException) + and getattr(e, "message", None) is not None + and isinstance(e.message, str) + and "Max parallel request limit reached" in e.message + ) + if result: + verbose_proxy_logger.info(json.dumps({"event": "giveup", "exception": str(e)})) + return result + + @router.on_event("startup") async def startup_event(): global prisma_client, master_key, use_background_health_checks, llm_router, llm_model_list, general_settings, proxy_budget_rescheduler_min_time, proxy_budget_rescheduler_max_time, litellm_proxy_admin_name, db_writer_client, store_model_in_db @@ -4084,12 +4126,8 @@ def model_list( max_tries=litellm.num_retries or 3, # maximum number of retries max_time=litellm.request_timeout or 60, # maximum total time to retry for on_backoff=on_backoff, # specifying the function to call on backoff - giveup=lambda e: not ( - isinstance(e, ProxyException) - and getattr(e, "message", None) is not None - and isinstance(e.message, str) - and "Max parallel request limit reached" in e.message - ), # the result of the logical expression is on the second position + giveup=giveup, + logger=verbose_proxy_logger, ) async def chat_completion( request: Request, @@ -4098,6 +4136,7 @@ async def chat_completion( user_api_key_dict: UserAPIKeyAuth = Depends(user_api_key_auth), ): global general_settings, user_debug, proxy_logging_obj, llm_model_list + data = {} try: body = await request.body() @@ -4386,7 +4425,12 @@ async def chat_completion( return _chat_response except Exception as e: data["litellm_status"] = "fail" # used for alerting - traceback.print_exc() + verbose_proxy_logger.error( + "litellm.proxy.proxy_server.chat_completion(): Exception occured - {}".format( + get_error_message_str(e=e) + ) + ) + verbose_proxy_logger.debug(traceback.format_exc()) await proxy_logging_obj.post_call_failure_hook( user_api_key_dict=user_api_key_dict, original_exception=e, request_data=data ) @@ 
-4397,8 +4441,6 @@ async def chat_completion( litellm_debug_info, ) router_model_names = llm_router.model_names if llm_router is not None else [] - if user_debug: - traceback.print_exc() if isinstance(e, HTTPException): raise ProxyException( @@ -4630,15 +4672,12 @@ async def completion( await proxy_logging_obj.post_call_failure_hook( user_api_key_dict=user_api_key_dict, original_exception=e, request_data=data ) - verbose_proxy_logger.debug("EXCEPTION RAISED IN PROXY MAIN.PY") - litellm_debug_info = getattr(e, "litellm_debug_info", "") - verbose_proxy_logger.debug( - "\033[1;31mAn error occurred: %s %s\n\n Debug this by setting `--debug`, e.g. `litellm --model gpt-3.5-turbo --debug`", - e, - litellm_debug_info, + verbose_proxy_logger.error( + "litellm.proxy.proxy_server.completion(): Exception occured - {}".format( + str(e) + ) ) - traceback.print_exc() - error_traceback = traceback.format_exc() + verbose_proxy_logger.debug(traceback.format_exc()) error_msg = f"{str(e)}" raise ProxyException( message=getattr(e, "message", error_msg), @@ -4848,7 +4887,12 @@ async def embeddings( e, litellm_debug_info, ) - traceback.print_exc() + verbose_proxy_logger.error( + "litellm.proxy.proxy_server.embeddings(): Exception occured - {}".format( + str(e) + ) + ) + verbose_proxy_logger.debug(traceback.format_exc()) if isinstance(e, HTTPException): raise ProxyException( message=getattr(e, "message", str(e)), @@ -5027,7 +5071,12 @@ async def image_generation( await proxy_logging_obj.post_call_failure_hook( user_api_key_dict=user_api_key_dict, original_exception=e, request_data=data ) - traceback.print_exc() + verbose_proxy_logger.error( + "litellm.proxy.proxy_server.image_generation(): Exception occured - {}".format( + str(e) + ) + ) + verbose_proxy_logger.debug(traceback.format_exc()) if isinstance(e, HTTPException): raise ProxyException( message=getattr(e, "message", str(e)), @@ -5205,7 +5254,12 @@ async def audio_speech( ) except Exception as e: - traceback.print_exc() + 
verbose_proxy_logger.error( + "litellm.proxy.proxy_server.audio_speech(): Exception occured - {}".format( + str(e) + ) + ) + verbose_proxy_logger.debug(traceback.format_exc()) raise e @@ -5394,7 +5448,12 @@ async def audio_transcriptions( await proxy_logging_obj.post_call_failure_hook( user_api_key_dict=user_api_key_dict, original_exception=e, request_data=data ) - traceback.print_exc() + verbose_proxy_logger.error( + "litellm.proxy.proxy_server.audio_transcription(): Exception occured - {}".format( + str(e) + ) + ) + verbose_proxy_logger.debug(traceback.format_exc()) if isinstance(e, HTTPException): raise ProxyException( message=getattr(e, "message", str(e.detail)), @@ -5403,7 +5462,6 @@ async def audio_transcriptions( code=getattr(e, "status_code", status.HTTP_400_BAD_REQUEST), ) else: - error_traceback = traceback.format_exc() error_msg = f"{str(e)}" raise ProxyException( message=getattr(e, "message", error_msg), @@ -5531,7 +5589,12 @@ async def get_assistants( await proxy_logging_obj.post_call_failure_hook( user_api_key_dict=user_api_key_dict, original_exception=e, request_data=data ) - traceback.print_exc() + verbose_proxy_logger.error( + "litellm.proxy.proxy_server.get_assistants(): Exception occured - {}".format( + str(e) + ) + ) + verbose_proxy_logger.debug(traceback.format_exc()) if isinstance(e, HTTPException): raise ProxyException( message=getattr(e, "message", str(e.detail)), @@ -5540,7 +5603,6 @@ async def get_assistants( code=getattr(e, "status_code", status.HTTP_400_BAD_REQUEST), ) else: - error_traceback = traceback.format_exc() error_msg = f"{str(e)}" raise ProxyException( message=getattr(e, "message", error_msg), @@ -5660,7 +5722,12 @@ async def create_threads( await proxy_logging_obj.post_call_failure_hook( user_api_key_dict=user_api_key_dict, original_exception=e, request_data=data ) - traceback.print_exc() + verbose_proxy_logger.error( + "litellm.proxy.proxy_server.create_threads(): Exception occured - {}".format( + str(e) + ) + ) + 
verbose_proxy_logger.debug(traceback.format_exc()) if isinstance(e, HTTPException): raise ProxyException( message=getattr(e, "message", str(e.detail)), @@ -5669,7 +5736,6 @@ async def create_threads( code=getattr(e, "status_code", status.HTTP_400_BAD_REQUEST), ) else: - error_traceback = traceback.format_exc() error_msg = f"{str(e)}" raise ProxyException( message=getattr(e, "message", error_msg), @@ -5788,7 +5854,12 @@ async def get_thread( await proxy_logging_obj.post_call_failure_hook( user_api_key_dict=user_api_key_dict, original_exception=e, request_data=data ) - traceback.print_exc() + verbose_proxy_logger.error( + "litellm.proxy.proxy_server.get_thread(): Exception occured - {}".format( + str(e) + ) + ) + verbose_proxy_logger.debug(traceback.format_exc()) if isinstance(e, HTTPException): raise ProxyException( message=getattr(e, "message", str(e.detail)), @@ -5797,7 +5868,6 @@ async def get_thread( code=getattr(e, "status_code", status.HTTP_400_BAD_REQUEST), ) else: - error_traceback = traceback.format_exc() error_msg = f"{str(e)}" raise ProxyException( message=getattr(e, "message", error_msg), @@ -5919,7 +5989,12 @@ async def add_messages( await proxy_logging_obj.post_call_failure_hook( user_api_key_dict=user_api_key_dict, original_exception=e, request_data=data ) - traceback.print_exc() + verbose_proxy_logger.error( + "litellm.proxy.proxy_server.add_messages(): Exception occured - {}".format( + str(e) + ) + ) + verbose_proxy_logger.debug(traceback.format_exc()) if isinstance(e, HTTPException): raise ProxyException( message=getattr(e, "message", str(e.detail)), @@ -5928,7 +6003,6 @@ async def add_messages( code=getattr(e, "status_code", status.HTTP_400_BAD_REQUEST), ) else: - error_traceback = traceback.format_exc() error_msg = f"{str(e)}" raise ProxyException( message=getattr(e, "message", error_msg), @@ -6046,7 +6120,12 @@ async def get_messages( await proxy_logging_obj.post_call_failure_hook( user_api_key_dict=user_api_key_dict, original_exception=e, 
request_data=data ) - traceback.print_exc() + verbose_proxy_logger.error( + "litellm.proxy.proxy_server.get_messages(): Exception occured - {}".format( + str(e) + ) + ) + verbose_proxy_logger.debug(traceback.format_exc()) if isinstance(e, HTTPException): raise ProxyException( message=getattr(e, "message", str(e.detail)), @@ -6055,7 +6134,6 @@ async def get_messages( code=getattr(e, "status_code", status.HTTP_400_BAD_REQUEST), ) else: - error_traceback = traceback.format_exc() error_msg = f"{str(e)}" raise ProxyException( message=getattr(e, "message", error_msg), @@ -6187,7 +6265,12 @@ async def run_thread( await proxy_logging_obj.post_call_failure_hook( user_api_key_dict=user_api_key_dict, original_exception=e, request_data=data ) - traceback.print_exc() + verbose_proxy_logger.error( + "litellm.proxy.proxy_server.run_thread(): Exception occured - {}".format( + str(e) + ) + ) + verbose_proxy_logger.debug(traceback.format_exc()) if isinstance(e, HTTPException): raise ProxyException( message=getattr(e, "message", str(e.detail)), @@ -6196,7 +6279,6 @@ async def run_thread( code=getattr(e, "status_code", status.HTTP_400_BAD_REQUEST), ) else: - error_traceback = traceback.format_exc() error_msg = f"{str(e)}" raise ProxyException( message=getattr(e, "message", error_msg), @@ -6335,7 +6417,12 @@ async def create_batch( await proxy_logging_obj.post_call_failure_hook( user_api_key_dict=user_api_key_dict, original_exception=e, request_data=data ) - traceback.print_exc() + verbose_proxy_logger.error( + "litellm.proxy.proxy_server.create_batch(): Exception occured - {}".format( + str(e) + ) + ) + verbose_proxy_logger.debug(traceback.format_exc()) if isinstance(e, HTTPException): raise ProxyException( message=getattr(e, "message", str(e.detail)), @@ -6344,7 +6431,6 @@ async def create_batch( code=getattr(e, "status_code", status.HTTP_400_BAD_REQUEST), ) else: - error_traceback = traceback.format_exc() error_msg = f"{str(e)}" raise ProxyException( message=getattr(e, "message", 
error_msg), @@ -6478,7 +6564,12 @@ async def retrieve_batch( await proxy_logging_obj.post_call_failure_hook( user_api_key_dict=user_api_key_dict, original_exception=e, request_data=data ) - traceback.print_exc() + verbose_proxy_logger.error( + "litellm.proxy.proxy_server.retrieve_batch(): Exception occured - {}".format( + str(e) + ) + ) + verbose_proxy_logger.debug(traceback.format_exc()) if isinstance(e, HTTPException): raise ProxyException( message=getattr(e, "message", str(e.detail)), @@ -6631,7 +6722,12 @@ async def create_file( await proxy_logging_obj.post_call_failure_hook( user_api_key_dict=user_api_key_dict, original_exception=e, request_data=data ) - traceback.print_exc() + verbose_proxy_logger.error( + "litellm.proxy.proxy_server.create_file(): Exception occured - {}".format( + str(e) + ) + ) + verbose_proxy_logger.debug(traceback.format_exc()) if isinstance(e, HTTPException): raise ProxyException( message=getattr(e, "message", str(e.detail)), @@ -6640,7 +6736,6 @@ async def create_file( code=getattr(e, "status_code", status.HTTP_400_BAD_REQUEST), ) else: - error_traceback = traceback.format_exc() error_msg = f"{str(e)}" raise ProxyException( message=getattr(e, "message", error_msg), @@ -6816,7 +6911,12 @@ async def moderations( await proxy_logging_obj.post_call_failure_hook( user_api_key_dict=user_api_key_dict, original_exception=e, request_data=data ) - traceback.print_exc() + verbose_proxy_logger.error( + "litellm.proxy.proxy_server.moderations(): Exception occured - {}".format( + str(e) + ) + ) + verbose_proxy_logger.debug(traceback.format_exc()) if isinstance(e, HTTPException): raise ProxyException( message=getattr(e, "message", str(e)), @@ -6825,7 +6925,6 @@ async def moderations( code=getattr(e, "status_code", status.HTTP_400_BAD_REQUEST), ) else: - error_traceback = traceback.format_exc() error_msg = f"{str(e)}" raise ProxyException( message=getattr(e, "message", error_msg), @@ -7115,9 +7214,33 @@ async def generate_key_fn( ) ) + # Enterprise 
Feature - Audit Logging. Enable with litellm.store_audit_logs = True + if litellm.store_audit_logs is True: + _updated_values = json.dumps(response) + asyncio.create_task( + create_audit_log_for_update( + request_data=LiteLLM_AuditLogs( + id=str(uuid.uuid4()), + updated_at=datetime.now(timezone.utc), + changed_by=user_api_key_dict.user_id + or litellm_proxy_admin_name, + table_name=LitellmTableNames.KEY_TABLE_NAME, + object_id=response.get("token_id", ""), + action="created", + updated_values=_updated_values, + before_value=None, + ) + ) + ) + return GenerateKeyResponse(**response) except Exception as e: - traceback.print_exc() + verbose_proxy_logger.error( + "litellm.proxy.proxy_server.generate_key_fn(): Exception occured - {}".format( + str(e) + ) + ) + verbose_proxy_logger.debug(traceback.format_exc()) if isinstance(e, HTTPException): raise ProxyException( message=getattr(e, "detail", f"Authentication Error({str(e)})"), @@ -7138,7 +7261,11 @@ async def generate_key_fn( @router.post( "/key/update", tags=["key management"], dependencies=[Depends(user_api_key_auth)] ) -async def update_key_fn(request: Request, data: UpdateKeyRequest): +async def update_key_fn( + request: Request, + data: UpdateKeyRequest, + user_api_key_dict: UserAPIKeyAuth = Depends(user_api_key_auth), +): """ Update an existing key """ @@ -7150,6 +7277,16 @@ async def update_key_fn(request: Request, data: UpdateKeyRequest): if prisma_client is None: raise Exception("Not connected to DB!") + existing_key_row = await prisma_client.get_data( + token=data.key, table_name="key", query_type="find_unique" + ) + + if existing_key_row is None: + raise HTTPException( + status_code=404, + detail={"error": f"Team not found, passed team_id={data.team_id}"}, + ) + # get non default values for key non_default_values = {} for k, v in data_json.items(): @@ -7176,6 +7313,29 @@ async def update_key_fn(request: Request, data: UpdateKeyRequest): hashed_token = hash_token(key) 
user_api_key_cache.delete_cache(hashed_token) + # Enterprise Feature - Audit Logging. Enable with litellm.store_audit_logs = True + if litellm.store_audit_logs is True: + _updated_values = json.dumps(data_json) + + _before_value = existing_key_row.json(exclude_none=True) + _before_value = json.dumps(_before_value) + + asyncio.create_task( + create_audit_log_for_update( + request_data=LiteLLM_AuditLogs( + id=str(uuid.uuid4()), + updated_at=datetime.now(timezone.utc), + changed_by=user_api_key_dict.user_id + or litellm_proxy_admin_name, + table_name=LitellmTableNames.KEY_TABLE_NAME, + object_id=data.key, + action="updated", + updated_values=_updated_values, + before_value=_before_value, + ) + ) + ) + return {"key": key, **response["data"]} # update based on remaining passed in values except Exception as e: @@ -7238,6 +7398,34 @@ async def delete_key_fn( ): user_id = None # unless they're admin + # Enterprise Feature - Audit Logging. Enable with litellm.store_audit_logs = True + # we do this after the first for loop, since first for loop is for validation. 
we only want this inserted after validation passes + if litellm.store_audit_logs is True: + # make an audit log for each team deleted + for key in data.keys: + key_row = await prisma_client.get_data( # type: ignore + token=key, table_name="key", query_type="find_unique" + ) + + key_row = key_row.json(exclude_none=True) + _key_row = json.dumps(key_row) + + asyncio.create_task( + create_audit_log_for_update( + request_data=LiteLLM_AuditLogs( + id=str(uuid.uuid4()), + updated_at=datetime.now(timezone.utc), + changed_by=user_api_key_dict.user_id + or litellm_proxy_admin_name, + table_name=LitellmTableNames.KEY_TABLE_NAME, + object_id=key, + action="deleted", + updated_values="{}", + before_value=_key_row, + ) + ) + ) + number_deleted_keys = await delete_verification_token( tokens=keys, user_id=user_id ) @@ -9507,7 +9695,12 @@ async def user_info( } return response_data except Exception as e: - traceback.print_exc() + verbose_proxy_logger.error( + "litellm.proxy.proxy_server.user_info(): Exception occured - {}".format( + str(e) + ) + ) + verbose_proxy_logger.debug(traceback.format_exc()) if isinstance(e, HTTPException): raise ProxyException( message=getattr(e, "detail", f"Authentication Error({str(e)})"), @@ -9602,7 +9795,12 @@ async def user_update(data: UpdateUserRequest): return response # update based on remaining passed in values except Exception as e: - traceback.print_exc() + verbose_proxy_logger.error( + "litellm.proxy.proxy_server.user_update(): Exception occured - {}".format( + str(e) + ) + ) + verbose_proxy_logger.debug(traceback.format_exc()) if isinstance(e, HTTPException): raise ProxyException( message=getattr(e, "detail", f"Authentication Error({str(e)})"), @@ -9655,7 +9853,12 @@ async def user_request_model(request: Request): return {"status": "success"} # update based on remaining passed in values except Exception as e: - traceback.print_exc() + verbose_proxy_logger.error( + "litellm.proxy.proxy_server.user_request_model(): Exception occured - 
{}".format( + str(e) + ) + ) + verbose_proxy_logger.debug(traceback.format_exc()) if isinstance(e, HTTPException): raise ProxyException( message=getattr(e, "detail", f"Authentication Error({str(e)})"), @@ -9697,7 +9900,12 @@ async def user_get_requests(): return {"requests": response} # update based on remaining passed in values except Exception as e: - traceback.print_exc() + verbose_proxy_logger.error( + "litellm.proxy.proxy_server.user_get_requests(): Exception occured - {}".format( + str(e) + ) + ) + verbose_proxy_logger.debug(traceback.format_exc()) if isinstance(e, HTTPException): raise ProxyException( message=getattr(e, "detail", f"Authentication Error({str(e)})"), @@ -10087,7 +10295,12 @@ async def update_end_user( # update based on remaining passed in values except Exception as e: - traceback.print_exc() + verbose_proxy_logger.error( + "litellm.proxy.proxy_server.update_end_user(): Exception occured - {}".format( + str(e) + ) + ) + verbose_proxy_logger.debug(traceback.format_exc()) if isinstance(e, HTTPException): raise ProxyException( message=getattr(e, "detail", f"Internal Server Error({str(e)})"), @@ -10171,7 +10384,12 @@ async def delete_end_user( # update based on remaining passed in values except Exception as e: - traceback.print_exc() + verbose_proxy_logger.error( + "litellm.proxy.proxy_server.delete_end_user(): Exception occured - {}".format( + str(e) + ) + ) + verbose_proxy_logger.debug(traceback.format_exc()) if isinstance(e, HTTPException): raise ProxyException( message=getattr(e, "detail", f"Internal Server Error({str(e)})"), @@ -10365,12 +10583,65 @@ async def new_team( } }, ) + + # Enterprise Feature - Audit Logging. 
Enable with litellm.store_audit_logs = True + if litellm.store_audit_logs is True: + _updated_values = complete_team_data.json(exclude_none=True) + _updated_values = json.dumps(_updated_values) + + asyncio.create_task( + create_audit_log_for_update( + request_data=LiteLLM_AuditLogs( + id=str(uuid.uuid4()), + updated_at=datetime.now(timezone.utc), + changed_by=user_api_key_dict.user_id or litellm_proxy_admin_name, + table_name=LitellmTableNames.TEAM_TABLE_NAME, + object_id=data.team_id, + action="created", + updated_values=_updated_values, + before_value=None, + ) + ) + ) + try: return team_row.model_dump() except Exception as e: return team_row.dict() +async def create_audit_log_for_update(request_data: LiteLLM_AuditLogs): + if premium_user is not True: + return + + if litellm.store_audit_logs is not True: + return + if prisma_client is None: + raise Exception("prisma_client is None, no DB connected") + + verbose_proxy_logger.debug("creating audit log for %s", request_data) + + if isinstance(request_data.updated_values, dict): + request_data.updated_values = json.dumps(request_data.updated_values) + + if isinstance(request_data.before_value, dict): + request_data.before_value = json.dumps(request_data.before_value) + + _request_data = request_data.dict(exclude_none=True) + + try: + await prisma_client.db.litellm_auditlog.create( + data={ + **_request_data, # type: ignore + } + ) + except Exception as e: + # [Non-Blocking Exception. Do not allow blocking LLM API call] + verbose_proxy_logger.error(f"Failed Creating audit log {e}") + + return + + @router.post( "/team/update", tags=["team management"], dependencies=[Depends(user_api_key_auth)] ) @@ -10443,6 +10714,27 @@ async def update_team( team_id=data.team_id, ) + # Enterprise Feature - Audit Logging. 
Enable with litellm.store_audit_logs = True + if litellm.store_audit_logs is True: + _before_value = existing_team_row.json(exclude_none=True) + _before_value = json.dumps(_before_value) + _after_value: str = json.dumps(updated_kv) + + asyncio.create_task( + create_audit_log_for_update( + request_data=LiteLLM_AuditLogs( + id=str(uuid.uuid4()), + updated_at=datetime.now(timezone.utc), + changed_by=user_api_key_dict.user_id or litellm_proxy_admin_name, + table_name=LitellmTableNames.TEAM_TABLE_NAME, + object_id=data.team_id, + action="updated", + updated_values=_after_value, + before_value=_before_value, + ) + ) + ) + return team_row @@ -10714,6 +11006,35 @@ async def delete_team( detail={"error": f"Team not found, passed team_id={team_id}"}, ) + # Enterprise Feature - Audit Logging. Enable with litellm.store_audit_logs = True + # we do this after the first for loop, since first for loop is for validation. we only want this inserted after validation passes + if litellm.store_audit_logs is True: + # make an audit log for each team deleted + for team_id in data.team_ids: + team_row = await prisma_client.get_data( # type: ignore + team_id=team_id, table_name="team", query_type="find_unique" + ) + + _team_row = team_row.json(exclude_none=True) + + asyncio.create_task( + create_audit_log_for_update( + request_data=LiteLLM_AuditLogs( + id=str(uuid.uuid4()), + updated_at=datetime.now(timezone.utc), + changed_by=user_api_key_dict.user_id + or litellm_proxy_admin_name, + table_name=LitellmTableNames.TEAM_TABLE_NAME, + object_id=team_id, + action="deleted", + updated_values="{}", + before_value=_team_row, + ) + ) + ) + + # End of Audit logging + ## DELETE ASSOCIATED KEYS await prisma_client.delete_data(team_id_list=data.team_ids, table_name="key") ## DELETE TEAMS @@ -11371,7 +11692,12 @@ async def add_new_model( return model_response except Exception as e: - traceback.print_exc() + verbose_proxy_logger.error( + "litellm.proxy.proxy_server.add_new_model(): Exception occured - 
{}".format( + str(e) + ) + ) + verbose_proxy_logger.debug(traceback.format_exc()) if isinstance(e, HTTPException): raise ProxyException( message=getattr(e, "detail", f"Authentication Error({str(e)})"), @@ -11485,7 +11811,12 @@ async def update_model( return model_response except Exception as e: - traceback.print_exc() + verbose_proxy_logger.error( + "litellm.proxy.proxy_server.update_model(): Exception occured - {}".format( + str(e) + ) + ) + verbose_proxy_logger.debug(traceback.format_exc()) if isinstance(e, HTTPException): raise ProxyException( message=getattr(e, "detail", f"Authentication Error({str(e)})"), @@ -13719,7 +14050,12 @@ async def update_config(config_info: ConfigYAML): return {"message": "Config updated successfully"} except Exception as e: - traceback.print_exc() + verbose_proxy_logger.error( + "litellm.proxy.proxy_server.update_config(): Exception occured - {}".format( + str(e) + ) + ) + verbose_proxy_logger.debug(traceback.format_exc()) if isinstance(e, HTTPException): raise ProxyException( message=getattr(e, "detail", f"Authentication Error({str(e)})"), @@ -14192,7 +14528,12 @@ async def get_config(): "available_callbacks": all_available_callbacks, } except Exception as e: - traceback.print_exc() + verbose_proxy_logger.error( + "litellm.proxy.proxy_server.get_config(): Exception occured - {}".format( + str(e) + ) + ) + verbose_proxy_logger.debug(traceback.format_exc()) if isinstance(e, HTTPException): raise ProxyException( message=getattr(e, "detail", f"Authentication Error({str(e)})"), @@ -14443,7 +14784,12 @@ async def health_services_endpoint( } except Exception as e: - traceback.print_exc() + verbose_proxy_logger.error( + "litellm.proxy.proxy_server.health_services_endpoint(): Exception occured - {}".format( + str(e) + ) + ) + verbose_proxy_logger.debug(traceback.format_exc()) if isinstance(e, HTTPException): raise ProxyException( message=getattr(e, "detail", f"Authentication Error({str(e)})"), @@ -14522,7 +14868,12 @@ async def 
health_endpoint( "unhealthy_count": len(unhealthy_endpoints), } except Exception as e: - traceback.print_exc() + verbose_proxy_logger.error( + "litellm.proxy.proxy_server.py::health_endpoint(): Exception occured - {}".format( + str(e) + ) + ) + verbose_proxy_logger.debug(traceback.format_exc()) raise e diff --git a/litellm/proxy/schema.prisma b/litellm/proxy/schema.prisma index 243f063376..7cc688ee8e 100644 --- a/litellm/proxy/schema.prisma +++ b/litellm/proxy/schema.prisma @@ -243,4 +243,16 @@ model LiteLLM_InvitationLink { liteLLM_user_table_user LiteLLM_UserTable @relation("UserId", fields: [user_id], references: [user_id]) liteLLM_user_table_created LiteLLM_UserTable @relation("CreatedBy", fields: [created_by], references: [user_id]) liteLLM_user_table_updated LiteLLM_UserTable @relation("UpdatedBy", fields: [updated_by], references: [user_id]) +} + + +model LiteLLM_AuditLog { + id String @id @default(uuid()) + updated_at DateTime @default(now()) + changed_by String // user or system that performed the action + action String // create, update, delete + table_name String // on of LitellmTableNames.TEAM_TABLE_NAME, LitellmTableNames.USER_TABLE_NAME, LitellmTableNames.PROXY_MODEL_TABLE_NAME, + object_id String // id of the object being audited. This can be the key id, team id, user id, model id + before_value Json? // value of the row + updated_values Json? 
// value of the row after change } \ No newline at end of file diff --git a/litellm/proxy/secret_managers/aws_secret_manager.py b/litellm/proxy/secret_managers/aws_secret_manager.py index a40b1dffa2..8dd6772cf7 100644 --- a/litellm/proxy/secret_managers/aws_secret_manager.py +++ b/litellm/proxy/secret_managers/aws_secret_manager.py @@ -8,7 +8,8 @@ Requires: * `pip install boto3>=1.28.57` """ -import litellm, os +import litellm +import os from typing import Optional from litellm.proxy._types import KeyManagementSystem @@ -38,3 +39,21 @@ def load_aws_secret_manager(use_aws_secret_manager: Optional[bool]): except Exception as e: raise e + + +def load_aws_kms(use_aws_kms: Optional[bool]): + if use_aws_kms is None or use_aws_kms is False: + return + try: + import boto3 + + validate_environment() + + # Create a Secrets Manager client + kms_client = boto3.client("kms", region_name=os.getenv("AWS_REGION_NAME")) + + litellm.secret_manager_client = kms_client + litellm._key_management_system = KeyManagementSystem.AWS_KMS + + except Exception as e: + raise e diff --git a/litellm/proxy/utils.py b/litellm/proxy/utils.py index eed59664b1..b3d00e8e74 100644 --- a/litellm/proxy/utils.py +++ b/litellm/proxy/utils.py @@ -2709,13 +2709,15 @@ def decrypt_value(value: bytes, master_key: str) -> str: # LiteLLM Admin UI - Non SSO Login -html_form = """ +url_to_redirect_to = os.getenv("PROXY_BASE_URL", "") +url_to_redirect_to += "/login" +html_form = f""" LiteLLM Login -
+

LiteLLM Login

By default Username is "admin" and Password is your set LiteLLM Proxy `MASTER_KEY`

@@ -2771,8 +2773,6 @@ html_form = """
- - """ @@ -2837,3 +2837,17 @@ missing_keys_html_form = """ """ + + +def get_error_message_str(e: Exception) -> str: + error_message = "" + if isinstance(e, HTTPException): + if isinstance(e.detail, str): + error_message = e.detail + elif isinstance(e.detail, dict): + error_message = json.dumps(e.detail) + else: + error_message = str(e) + else: + error_message = str(e) + return error_message diff --git a/litellm/router.py b/litellm/router.py index e3fed496f1..0187b441f7 100644 --- a/litellm/router.py +++ b/litellm/router.py @@ -220,8 +220,6 @@ class Router: [] ) # names of models under litellm_params. ex. azure/chatgpt-v-2 self.deployment_latency_map = {} - ### SCHEDULER ### - self.scheduler = Scheduler(polling_interval=polling_interval) ### CACHING ### cache_type: Literal["local", "redis"] = "local" # default to an in-memory cache redis_cache = None @@ -259,6 +257,10 @@ class Router: redis_cache=redis_cache, in_memory_cache=InMemoryCache() ) # use a dual cache (Redis+In-Memory) for tracking cooldowns, usage, etc. + ### SCHEDULER ### + self.scheduler = Scheduler( + polling_interval=polling_interval, redis_cache=redis_cache + ) self.default_deployment = None # use this to track the users default deployment, when they want to use model = * self.default_max_parallel_requests = default_max_parallel_requests @@ -2096,8 +2098,8 @@ class Router: except Exception as e: raise e except Exception as e: - verbose_router_logger.debug(f"An exception occurred - {str(e)}") - traceback.print_exc() + verbose_router_logger.error(f"An exception occurred - {str(e)}") + verbose_router_logger.debug(traceback.format_exc()) raise original_exception async def async_function_with_retries(self, *args, **kwargs): @@ -4048,6 +4050,12 @@ class Router: for idx in reversed(invalid_model_indices): _returned_deployments.pop(idx) + ## ORDER FILTERING ## -> if user set 'order' in deployments, return deployments with lowest order (e.g. 
order=1 > order=2) + if len(_returned_deployments) > 0: + _returned_deployments = litellm.utils._get_order_filtered_deployments( + _returned_deployments + ) + return _returned_deployments def _common_checks_available_deployment( diff --git a/litellm/router_strategy/lowest_cost.py b/litellm/router_strategy/lowest_cost.py index 1670490e16..46cbb2181e 100644 --- a/litellm/router_strategy/lowest_cost.py +++ b/litellm/router_strategy/lowest_cost.py @@ -1,11 +1,9 @@ #### What this does #### # picks based on response time (for streaming, this is time to first token) -from pydantic import BaseModel, Extra, Field, root_validator -import os, requests, random # type: ignore +from pydantic import BaseModel from typing import Optional, Union, List, Dict from datetime import datetime, timedelta -import random - +from litellm import verbose_logger import traceback from litellm.caching import DualCache from litellm.integrations.custom_logger import CustomLogger @@ -119,7 +117,12 @@ class LowestCostLoggingHandler(CustomLogger): if self.test_flag: self.logged_success += 1 except Exception as e: - traceback.print_exc() + verbose_logger.error( + "litellm.proxy.hooks.prompt_injection_detection.py::async_pre_call_hook(): Exception occured - {}".format( + str(e) + ) + ) + verbose_logger.debug(traceback.format_exc()) pass async def async_log_success_event(self, kwargs, response_obj, start_time, end_time): @@ -201,7 +204,12 @@ class LowestCostLoggingHandler(CustomLogger): if self.test_flag: self.logged_success += 1 except Exception as e: - traceback.print_exc() + verbose_logger.error( + "litellm.proxy.hooks.prompt_injection_detection.py::async_pre_call_hook(): Exception occured - {}".format( + str(e) + ) + ) + verbose_logger.debug(traceback.format_exc()) pass async def async_get_available_deployments( diff --git a/litellm/router_strategy/lowest_latency.py b/litellm/router_strategy/lowest_latency.py index 1e4b151ada..5d71847510 100644 --- a/litellm/router_strategy/lowest_latency.py +++ 
b/litellm/router_strategy/lowest_latency.py @@ -1,16 +1,16 @@ #### What this does #### # picks based on response time (for streaming, this is time to first token) -from pydantic import BaseModel, Extra, Field, root_validator # type: ignore -import dotenv, os, requests, random # type: ignore +from pydantic import BaseModel +import random from typing import Optional, Union, List, Dict from datetime import datetime, timedelta -import random import traceback from litellm.caching import DualCache from litellm.integrations.custom_logger import CustomLogger from litellm import ModelResponse from litellm import token_counter import litellm +from litellm import verbose_logger class LiteLLMBase(BaseModel): @@ -165,7 +165,12 @@ class LowestLatencyLoggingHandler(CustomLogger): if self.test_flag: self.logged_success += 1 except Exception as e: - traceback.print_exc() + verbose_logger.error( + "litellm.proxy.hooks.prompt_injection_detection.py::async_pre_call_hook(): Exception occured - {}".format( + str(e) + ) + ) + verbose_logger.debug(traceback.format_exc()) pass async def async_log_failure_event(self, kwargs, response_obj, start_time, end_time): @@ -229,7 +234,12 @@ class LowestLatencyLoggingHandler(CustomLogger): # do nothing if it's not a timeout error return except Exception as e: - traceback.print_exc() + verbose_logger.error( + "litellm.proxy.hooks.prompt_injection_detection.py::async_pre_call_hook(): Exception occured - {}".format( + str(e) + ) + ) + verbose_logger.debug(traceback.format_exc()) pass async def async_log_success_event(self, kwargs, response_obj, start_time, end_time): @@ -352,7 +362,12 @@ class LowestLatencyLoggingHandler(CustomLogger): if self.test_flag: self.logged_success += 1 except Exception as e: - traceback.print_exc() + verbose_logger.error( + "litellm.router_strategy.lowest_latency.py::async_log_success_event(): Exception occured - {}".format( + str(e) + ) + ) + verbose_logger.debug(traceback.format_exc()) pass def get_available_deployments( 
diff --git a/litellm/router_strategy/lowest_tpm_rpm.py b/litellm/router_strategy/lowest_tpm_rpm.py index 15460051b8..a21c69abf7 100644 --- a/litellm/router_strategy/lowest_tpm_rpm.py +++ b/litellm/router_strategy/lowest_tpm_rpm.py @@ -11,6 +11,7 @@ from litellm.integrations.custom_logger import CustomLogger from litellm._logging import verbose_router_logger from litellm.utils import print_verbose + class LiteLLMBase(BaseModel): """ Implements default functions, all pydantic objects should have. @@ -23,16 +24,20 @@ class LiteLLMBase(BaseModel): # if using pydantic v1 return self.dict() + class RoutingArgs(LiteLLMBase): - ttl: int = 1 * 60 # 1min (RPM/TPM expire key) - + ttl: int = 1 * 60 # 1min (RPM/TPM expire key) + + class LowestTPMLoggingHandler(CustomLogger): test_flag: bool = False logged_success: int = 0 logged_failure: int = 0 default_cache_time_seconds: int = 1 * 60 * 60 # 1 hour - def __init__(self, router_cache: DualCache, model_list: list, routing_args: dict = {}): + def __init__( + self, router_cache: DualCache, model_list: list, routing_args: dict = {} + ): self.router_cache = router_cache self.model_list = model_list self.routing_args = RoutingArgs(**routing_args) @@ -72,19 +77,28 @@ class LowestTPMLoggingHandler(CustomLogger): request_count_dict = self.router_cache.get_cache(key=tpm_key) or {} request_count_dict[id] = request_count_dict.get(id, 0) + total_tokens - self.router_cache.set_cache(key=tpm_key, value=request_count_dict, ttl=self.routing_args.ttl) + self.router_cache.set_cache( + key=tpm_key, value=request_count_dict, ttl=self.routing_args.ttl + ) ## RPM request_count_dict = self.router_cache.get_cache(key=rpm_key) or {} request_count_dict[id] = request_count_dict.get(id, 0) + 1 - self.router_cache.set_cache(key=rpm_key, value=request_count_dict, ttl=self.routing_args.ttl) + self.router_cache.set_cache( + key=rpm_key, value=request_count_dict, ttl=self.routing_args.ttl + ) ### TESTING ### if self.test_flag: self.logged_success += 1 except 
Exception as e: - traceback.print_exc() + verbose_router_logger.error( + "litellm.router_strategy.lowest_tpm_rpm.py::async_log_success_event(): Exception occured - {}".format( + str(e) + ) + ) + verbose_router_logger.debug(traceback.format_exc()) pass async def async_log_success_event(self, kwargs, response_obj, start_time, end_time): @@ -123,19 +137,28 @@ class LowestTPMLoggingHandler(CustomLogger): request_count_dict = self.router_cache.get_cache(key=tpm_key) or {} request_count_dict[id] = request_count_dict.get(id, 0) + total_tokens - self.router_cache.set_cache(key=tpm_key, value=request_count_dict, ttl=self.routing_args.ttl) + self.router_cache.set_cache( + key=tpm_key, value=request_count_dict, ttl=self.routing_args.ttl + ) ## RPM request_count_dict = self.router_cache.get_cache(key=rpm_key) or {} request_count_dict[id] = request_count_dict.get(id, 0) + 1 - self.router_cache.set_cache(key=rpm_key, value=request_count_dict, ttl=self.routing_args.ttl) + self.router_cache.set_cache( + key=rpm_key, value=request_count_dict, ttl=self.routing_args.ttl + ) ### TESTING ### if self.test_flag: self.logged_success += 1 except Exception as e: - traceback.print_exc() + verbose_router_logger.error( + "litellm.router_strategy.lowest_tpm_rpm.py::async_log_success_event(): Exception occured - {}".format( + str(e) + ) + ) + verbose_router_logger.debug(traceback.format_exc()) pass def get_available_deployments( diff --git a/litellm/router_strategy/lowest_tpm_rpm_v2.py b/litellm/router_strategy/lowest_tpm_rpm_v2.py index 40e75031ad..e3b8c8b770 100644 --- a/litellm/router_strategy/lowest_tpm_rpm_v2.py +++ b/litellm/router_strategy/lowest_tpm_rpm_v2.py @@ -1,19 +1,19 @@ #### What this does #### # identifies lowest tpm deployment from pydantic import BaseModel -import dotenv, os, requests, random +import random from typing import Optional, Union, List, Dict -import datetime as datetime_og -from datetime import datetime -import traceback, asyncio, httpx +import traceback +import 
httpx import litellm from litellm import token_counter from litellm.caching import DualCache from litellm.integrations.custom_logger import CustomLogger -from litellm._logging import verbose_router_logger +from litellm._logging import verbose_router_logger, verbose_logger from litellm.utils import print_verbose, get_utc_datetime from litellm.types.router import RouterErrors + class LiteLLMBase(BaseModel): """ Implements default functions, all pydantic objects should have. @@ -22,12 +22,14 @@ class LiteLLMBase(BaseModel): def json(self, **kwargs): try: return self.model_dump() # noqa - except: + except Exception as e: # if using pydantic v1 return self.dict() + class RoutingArgs(LiteLLMBase): - ttl: int = 1 * 60 # 1min (RPM/TPM expire key) + ttl: int = 1 * 60 # 1min (RPM/TPM expire key) + class LowestTPMLoggingHandler_v2(CustomLogger): """ @@ -47,7 +49,9 @@ class LowestTPMLoggingHandler_v2(CustomLogger): logged_failure: int = 0 default_cache_time_seconds: int = 1 * 60 * 60 # 1 hour - def __init__(self, router_cache: DualCache, model_list: list, routing_args: dict = {}): + def __init__( + self, router_cache: DualCache, model_list: list, routing_args: dict = {} + ): self.router_cache = router_cache self.model_list = model_list self.routing_args = RoutingArgs(**routing_args) @@ -104,7 +108,9 @@ class LowestTPMLoggingHandler_v2(CustomLogger): ) else: # if local result below limit, check redis ## prevent unnecessary redis checks - result = self.router_cache.increment_cache(key=rpm_key, value=1, ttl=self.routing_args.ttl) + result = self.router_cache.increment_cache( + key=rpm_key, value=1, ttl=self.routing_args.ttl + ) if result is not None and result > deployment_rpm: raise litellm.RateLimitError( message="Deployment over defined rpm limit={}. 
current usage={}".format( @@ -244,12 +250,19 @@ class LowestTPMLoggingHandler_v2(CustomLogger): # update cache ## TPM - self.router_cache.increment_cache(key=tpm_key, value=total_tokens, ttl=self.routing_args.ttl) + self.router_cache.increment_cache( + key=tpm_key, value=total_tokens, ttl=self.routing_args.ttl + ) ### TESTING ### if self.test_flag: self.logged_success += 1 except Exception as e: - traceback.print_exc() + verbose_logger.error( + "litellm.proxy.hooks.prompt_injection_detection.py::async_pre_call_hook(): Exception occured - {}".format( + str(e) + ) + ) + verbose_logger.debug(traceback.format_exc()) pass async def async_log_success_event(self, kwargs, response_obj, start_time, end_time): @@ -295,7 +308,12 @@ class LowestTPMLoggingHandler_v2(CustomLogger): if self.test_flag: self.logged_success += 1 except Exception as e: - traceback.print_exc() + verbose_logger.error( + "litellm.proxy.hooks.prompt_injection_detection.py::async_pre_call_hook(): Exception occured - {}".format( + str(e) + ) + ) + verbose_logger.debug(traceback.format_exc()) pass def _common_checks_available_deployment( diff --git a/litellm/scheduler.py b/litellm/scheduler.py index aad4ba4e3e..8ee7f0e078 100644 --- a/litellm/scheduler.py +++ b/litellm/scheduler.py @@ -1,13 +1,14 @@ -import heapq, time +import heapq from pydantic import BaseModel from typing import Optional import enum -from litellm.caching import DualCache +from litellm.caching import DualCache, RedisCache from litellm import print_verbose class SchedulerCacheKeys(enum.Enum): queue = "scheduler:queue" + default_in_memory_ttl = 5 # cache queue in-memory for 5s when redis cache available class DefaultPriorities(enum.Enum): @@ -25,18 +26,24 @@ class FlowItem(BaseModel): class Scheduler: cache: DualCache - def __init__(self, polling_interval: Optional[float] = None): + def __init__( + self, + polling_interval: Optional[float] = None, + redis_cache: Optional[RedisCache] = None, + ): """ polling_interval: float or null - 
frequency of polling queue. Default is 3ms. """ self.queue: list = [] - self.cache = DualCache() + default_in_memory_ttl: Optional[float] = None + if redis_cache is not None: + # if redis-cache available frequently poll that instead of using in-memory. + default_in_memory_ttl = SchedulerCacheKeys.default_in_memory_ttl.value + self.cache = DualCache( + redis_cache=redis_cache, default_in_memory_ttl=default_in_memory_ttl + ) self.polling_interval = polling_interval or 0.03 # default to 3ms - def update_variables(self, cache: Optional[DualCache] = None): - if cache is not None: - self.cache = cache - async def add_request(self, request: FlowItem): # We use the priority directly, as lower values indicate higher priority # get the queue diff --git a/litellm/tests/test_assistants.py b/litellm/tests/test_assistants.py index 5f565f67cc..d5f047a092 100644 --- a/litellm/tests/test_assistants.py +++ b/litellm/tests/test_assistants.py @@ -198,7 +198,11 @@ async def test_aarun_thread_litellm(sync_mode, provider, is_streaming): ) assert isinstance(messages.data[0], Message) else: - pytest.fail("An unexpected error occurred when running the thread") + pytest.fail( + "An unexpected error occurred when running the thread, {}".format( + run + ) + ) else: added_message = await litellm.a_add_message(**data) @@ -226,4 +230,8 @@ async def test_aarun_thread_litellm(sync_mode, provider, is_streaming): ) assert isinstance(messages.data[0], Message) else: - pytest.fail("An unexpected error occurred when running the thread") + pytest.fail( + "An unexpected error occurred when running the thread, {}".format( + run + ) + ) diff --git a/litellm/tests/test_completion.py b/litellm/tests/test_completion.py index bcbe4944c5..f3e62338c7 100644 --- a/litellm/tests/test_completion.py +++ b/litellm/tests/test_completion.py @@ -2169,6 +2169,7 @@ def test_completion_azure_key_completion_arg(): logprobs=True, max_tokens=10, ) + print(f"response: {response}") print("Hidden Params", response._hidden_params) 
@@ -2544,6 +2545,8 @@ def test_replicate_custom_prompt_dict(): "content": "what is yc write 1 paragraph", } ], + mock_response="Hello world", + mock_response="hello world", repetition_penalty=0.1, num_retries=3, ) diff --git a/litellm/tests/test_image_generation.py b/litellm/tests/test_image_generation.py index fea014e67a..1d3d53f70a 100644 --- a/litellm/tests/test_image_generation.py +++ b/litellm/tests/test_image_generation.py @@ -76,7 +76,7 @@ def test_image_generation_azure_dall_e_3(): ) print(f"response: {response}") assert len(response.data) > 0 - except litellm.RateLimitError as e: + except litellm.InternalServerError as e: pass except litellm.ContentPolicyViolationError: pass # OpenAI randomly raises these errors - skip when they occur diff --git a/litellm/tests/test_lowest_cost_routing.py b/litellm/tests/test_lowest_cost_routing.py index 217b4a9700..a793ba0a2c 100644 --- a/litellm/tests/test_lowest_cost_routing.py +++ b/litellm/tests/test_lowest_cost_routing.py @@ -102,18 +102,18 @@ async def test_get_available_deployments_custom_price(): @pytest.mark.asyncio async def test_lowest_cost_routing(): """ - Test if router returns model with the lowest cost + Test if router, returns model with the lowest cost """ model_list = [ { - "model_name": "gpt-3.5-turbo", + "model_name": "gpt-4", "litellm_params": {"model": "gpt-4"}, "model_info": {"id": "openai-gpt-4"}, }, { "model_name": "gpt-3.5-turbo", - "litellm_params": {"model": "groq/llama3-8b-8192"}, - "model_info": {"id": "groq-llama"}, + "litellm_params": {"model": "gpt-3.5-turbo"}, + "model_info": {"id": "gpt-3.5-turbo"}, }, ] @@ -127,7 +127,7 @@ async def test_lowest_cost_routing(): print( response._hidden_params["model_id"] ) # expect groq-llama, since groq/llama has lowest cost - assert "groq-llama" == response._hidden_params["model_id"] + assert "gpt-3.5-turbo" == response._hidden_params["model_id"] async def _deploy(lowest_cost_logger, deployment_id, tokens_used, duration): diff --git 
a/litellm/tests/test_router.py b/litellm/tests/test_router.py index d76dec25c7..02bf9a16b8 100644 --- a/litellm/tests/test_router.py +++ b/litellm/tests/test_router.py @@ -38,6 +38,48 @@ def test_router_sensitive_keys(): assert "special-key" not in str(e) +def test_router_order(): + """ + Asserts for 2 models in a model group, model with order=1 always called first + """ + router = Router( + model_list=[ + { + "model_name": "gpt-3.5-turbo", + "litellm_params": { + "model": "gpt-4o", + "api_key": os.getenv("OPENAI_API_KEY"), + "mock_response": "Hello world", + "order": 1, + }, + "model_info": {"id": "1"}, + }, + { + "model_name": "gpt-3.5-turbo", + "litellm_params": { + "model": "gpt-4o", + "api_key": "bad-key", + "mock_response": Exception("this is a bad key"), + "order": 2, + }, + "model_info": {"id": "2"}, + }, + ], + num_retries=0, + allowed_fails=0, + enable_pre_call_checks=True, + ) + + for _ in range(100): + response = router.completion( + model="gpt-3.5-turbo", + messages=[{"role": "user", "content": "Hey, how's it going?"}], + ) + + assert isinstance(response, litellm.ModelResponse) + assert response._hidden_params["model_id"] == "1" + + @pytest.mark.parametrize("num_retries", [None, 2]) @pytest.mark.parametrize("max_retries", [None, 4]) def test_router_num_retries_init(num_retries, max_retries): diff --git a/litellm/tests/test_token_counter.py b/litellm/tests/test_token_counter.py index 194dfb8af3..a6f7cd7616 100644 --- a/litellm/tests/test_token_counter.py +++ b/litellm/tests/test_token_counter.py @@ -186,3 +186,13 @@ def test_load_test_token_counter(model): total_time = end_time - start_time print("model={}, total test time={}".format(model, total_time)) assert total_time < 10, f"Total encoding time > 10s, {total_time}" + +def test_openai_token_with_image_and_text(): + model = "gpt-4o" + full_request = {'model': 'gpt-4o', 'tools': [{'type': 'function', 'function': {'name': 'json', 'parameters': {'type': 'object', 'required': ['clause'], 'properties': 
{'clause': {'type': 'string'}}}, 'description': 'Respond with a JSON object.'}}], 'logprobs': False, 'messages': [{'role': 'user', 'content': [{'text': '\n Just some long text, long long text, and you know it will be longer than 7 tokens definetly.', 'type': 'text'}]}], 'tool_choice': {'type': 'function', 'function': {'name': 'json'}}, 'exclude_models': [], 'disable_fallback': False, 'exclude_providers': []} + messages = full_request.get("messages", []) + + token_count = token_counter(model=model, messages=messages) + print(token_count) + +test_openai_token_with_image_and_text() \ No newline at end of file diff --git a/litellm/utils.py b/litellm/utils.py index 728173f383..dfee72ea7d 100644 --- a/litellm/utils.py +++ b/litellm/utils.py @@ -1374,8 +1374,12 @@ class Logging: callback_func=callback, ) except Exception as e: - traceback.print_exc() - print_verbose( + verbose_logger.error( + "litellm.Logging.pre_call(): Exception occured - {}".format( + str(e) + ) + ) + verbose_logger.debug( f"LiteLLM.LoggingError: [Non-Blocking] Exception occurred while input logging with integrations {traceback.format_exc()}" ) print_verbose( @@ -4062,6 +4066,7 @@ def openai_token_counter( for c in value: if c["type"] == "text": text += c["text"] + num_tokens += len(encoding.encode(c["text"], disallowed_special=())) elif c["type"] == "image_url": if isinstance(c["image_url"], dict): image_url_dict = c["image_url"] @@ -6194,6 +6199,27 @@ def calculate_max_parallel_requests( return None +def _get_order_filtered_deployments(healthy_deployments: List[Dict]) -> List: + min_order = min( + ( + deployment["litellm_params"]["order"] + for deployment in healthy_deployments + if "order" in deployment["litellm_params"] + ), + default=None, + ) + + if min_order is not None: + filtered_deployments = [ + deployment + for deployment in healthy_deployments + if deployment["litellm_params"].get("order") == min_order + ] + + return filtered_deployments + return healthy_deployments + + def 
_get_model_region( custom_llm_provider: str, litellm_params: LiteLLM_Params ) -> Optional[str]: @@ -7336,6 +7362,10 @@ def get_provider_fields(custom_llm_provider: str) -> List[ProviderField]: if custom_llm_provider == "databricks": return litellm.DatabricksConfig().get_required_params() + + elif custom_llm_provider == "ollama": + return litellm.OllamaConfig().get_required_params() + else: return [] @@ -9782,8 +9812,7 @@ def exception_type( elif custom_llm_provider == "azure": if "Internal server error" in error_str: exception_mapping_worked = True - raise APIError( - status_code=500, + raise litellm.InternalServerError( message=f"AzureException Internal server error - {original_exception.message}", llm_provider="azure", model=model, @@ -10033,6 +10062,8 @@ def get_secret( ): key_management_system = litellm._key_management_system key_management_settings = litellm._key_management_settings + args = locals() + if secret_name.startswith("os.environ/"): secret_name = secret_name.replace("os.environ/", "") @@ -10120,13 +10151,13 @@ def get_secret( key_manager = "local" if ( - key_manager == KeyManagementSystem.AZURE_KEY_VAULT + key_manager == KeyManagementSystem.AZURE_KEY_VAULT.value or type(client).__module__ + "." + type(client).__name__ == "azure.keyvault.secrets._client.SecretClient" ): # support Azure Secret Client - from azure.keyvault.secrets import SecretClient secret = client.get_secret(secret_name).value elif ( - key_manager == KeyManagementSystem.GOOGLE_KMS + key_manager == KeyManagementSystem.GOOGLE_KMS.value or client.__class__.__name__ == "KeyManagementServiceClient" ): encrypted_secret: Any = os.getenv(secret_name) @@ -10154,6 +10185,25 @@ def get_secret( secret = response.plaintext.decode( "utf-8" ) # assumes the original value was encoded with utf-8 + elif key_manager == KeyManagementSystem.AWS_KMS.value: + """ + Only check the tokens which start with 'aws_kms/'. This prevents latency impact caused by checking all keys. 
+ """ + encrypted_value = os.getenv(secret_name, None) + if encrypted_value is None: + raise Exception("encrypted value for AWS KMS cannot be None.") + # Decode the base64 encoded ciphertext + ciphertext_blob = base64.b64decode(encrypted_value) + + # Set up the parameters for the decrypt call + params = {"CiphertextBlob": ciphertext_blob} + + # Perform the decryption + response = client.decrypt(**params) + + # Extract and decode the plaintext + plaintext = response["Plaintext"] + secret = plaintext.decode("utf-8") elif key_manager == KeyManagementSystem.AWS_SECRET_MANAGER.value: try: get_secret_value_response = client.get_secret_value( @@ -10174,10 +10224,14 @@ def get_secret( for k, v in secret_dict.items(): secret = v print_verbose(f"secret: {secret}") + elif key_manager == "local": + secret = os.getenv(secret_name) else: # assume the default is infisicial client secret = client.get_secret(secret_name).secret_value except Exception as e: # check if it's in os.environ - print_verbose(f"An exception occurred - {str(e)}") + verbose_logger.error( + f"An exception occurred - {str(e)}\n\n{traceback.format_exc()}" + ) secret = os.getenv(secret_name) try: secret_value_as_bool = ast.literal_eval(secret) @@ -10511,7 +10565,12 @@ class CustomStreamWrapper: "finish_reason": finish_reason, } except Exception as e: - traceback.print_exc() + verbose_logger.error( + "litellm.CustomStreamWrapper.handle_predibase_chunk(): Exception occured - {}".format( + str(e) + ) + ) + verbose_logger.debug(traceback.format_exc()) raise e def handle_huggingface_chunk(self, chunk): @@ -10555,7 +10614,12 @@ class CustomStreamWrapper: "finish_reason": finish_reason, } except Exception as e: - traceback.print_exc() + verbose_logger.error( + "litellm.CustomStreamWrapper.handle_huggingface_chunk(): Exception occured - {}".format( + str(e) + ) + ) + verbose_logger.debug(traceback.format_exc()) raise e def handle_ai21_chunk(self, chunk): # fake streaming @@ -10790,7 +10854,12 @@ class 
CustomStreamWrapper: "usage": usage, } except Exception as e: - traceback.print_exc() + verbose_logger.error( + "litellm.CustomStreamWrapper.handle_openai_chat_completion_chunk(): Exception occured - {}".format( + str(e) + ) + ) + verbose_logger.debug(traceback.format_exc()) raise e def handle_azure_text_completion_chunk(self, chunk): @@ -10871,7 +10940,12 @@ class CustomStreamWrapper: else: return "" except: - traceback.print_exc() + verbose_logger.error( + "litellm.CustomStreamWrapper.handle_baseten_chunk(): Exception occured - {}".format( + str(e) + ) + ) + verbose_logger.debug(traceback.format_exc()) return "" def handle_cloudlfare_stream(self, chunk): @@ -11070,7 +11144,12 @@ class CustomStreamWrapper: "is_finished": True, } except: - traceback.print_exc() + verbose_logger.error( + "litellm.CustomStreamWrapper.handle_clarifai_chunk(): Exception occured - {}".format( + str(e) + ) + ) + verbose_logger.debug(traceback.format_exc()) return "" def model_response_creator(self): @@ -11557,7 +11636,12 @@ class CustomStreamWrapper: tool["type"] = "function" model_response.choices[0].delta = Delta(**_json_delta) except Exception as e: - traceback.print_exc() + verbose_logger.error( + "litellm.CustomStreamWrapper.chunk_creator(): Exception occured - {}".format( + str(e) + ) + ) + verbose_logger.debug(traceback.format_exc()) model_response.choices[0].delta = Delta() else: try: diff --git a/poetry.lock b/poetry.lock index ed4c4138d7..64896fdb82 100644 --- a/poetry.lock +++ b/poetry.lock @@ -1,4 +1,4 @@ -# This file is automatically @generated by Poetry 1.7.1 and should not be changed by hand. +# This file is automatically @generated by Poetry 1.8.3 and should not be changed by hand. 
[[package]] name = "aiohttp" @@ -2114,6 +2114,7 @@ files = [ {file = "PyYAML-6.0.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:69b023b2b4daa7548bcfbd4aa3da05b3a74b772db9e23b982788168117739938"}, {file = "PyYAML-6.0.1-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:81e0b275a9ecc9c0c0c07b4b90ba548307583c125f54d5b6946cfee6360c733d"}, {file = "PyYAML-6.0.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ba336e390cd8e4d1739f42dfe9bb83a3cc2e80f567d8805e11b46f4a943f5515"}, + {file = "PyYAML-6.0.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:326c013efe8048858a6d312ddd31d56e468118ad4cdeda36c719bf5bb6192290"}, {file = "PyYAML-6.0.1-cp310-cp310-win32.whl", hash = "sha256:bd4af7373a854424dabd882decdc5579653d7868b8fb26dc7d0e99f823aa5924"}, {file = "PyYAML-6.0.1-cp310-cp310-win_amd64.whl", hash = "sha256:fd1592b3fdf65fff2ad0004b5e363300ef59ced41c2e6b3a99d4089fa8c5435d"}, {file = "PyYAML-6.0.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:6965a7bc3cf88e5a1c3bd2e0b5c22f8d677dc88a455344035f03399034eb3007"}, @@ -2121,8 +2122,15 @@ files = [ {file = "PyYAML-6.0.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:42f8152b8dbc4fe7d96729ec2b99c7097d656dc1213a3229ca5383f973a5ed6d"}, {file = "PyYAML-6.0.1-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:062582fca9fabdd2c8b54a3ef1c978d786e0f6b3a1510e0ac93ef59e0ddae2bc"}, {file = "PyYAML-6.0.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d2b04aac4d386b172d5b9692e2d2da8de7bfb6c387fa4f801fbf6fb2e6ba4673"}, + {file = "PyYAML-6.0.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:e7d73685e87afe9f3b36c799222440d6cf362062f78be1013661b00c5c6f678b"}, {file = "PyYAML-6.0.1-cp311-cp311-win32.whl", hash = "sha256:1635fd110e8d85d55237ab316b5b011de701ea0f29d07611174a1b42f1444741"}, {file = "PyYAML-6.0.1-cp311-cp311-win_amd64.whl", hash = 
"sha256:bf07ee2fef7014951eeb99f56f39c9bb4af143d8aa3c21b1677805985307da34"}, + {file = "PyYAML-6.0.1-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:855fb52b0dc35af121542a76b9a84f8d1cd886ea97c84703eaa6d88e37a2ad28"}, + {file = "PyYAML-6.0.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:40df9b996c2b73138957fe23a16a4f0ba614f4c0efce1e9406a184b6d07fa3a9"}, + {file = "PyYAML-6.0.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6c22bec3fbe2524cde73d7ada88f6566758a8f7227bfbf93a408a9d86bcc12a0"}, + {file = "PyYAML-6.0.1-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:8d4e9c88387b0f5c7d5f281e55304de64cf7f9c0021a3525bd3b1c542da3b0e4"}, + {file = "PyYAML-6.0.1-cp312-cp312-win32.whl", hash = "sha256:d483d2cdf104e7c9fa60c544d92981f12ad66a457afae824d146093b8c294c54"}, + {file = "PyYAML-6.0.1-cp312-cp312-win_amd64.whl", hash = "sha256:0d3304d8c0adc42be59c5f8a4d9e3d7379e6955ad754aa9d6ab7a398b59dd1df"}, {file = "PyYAML-6.0.1-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:50550eb667afee136e9a77d6dc71ae76a44df8b3e51e41b77f6de2932bfe0f47"}, {file = "PyYAML-6.0.1-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1fe35611261b29bd1de0070f0b2f47cb6ff71fa6595c077e42bd0c419fa27b98"}, {file = "PyYAML-6.0.1-cp36-cp36m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:704219a11b772aea0d8ecd7058d0082713c3562b4e271b849ad7dc4a5c90c13c"}, @@ -2139,6 +2147,7 @@ files = [ {file = "PyYAML-6.0.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a0cd17c15d3bb3fa06978b4e8958dcdc6e0174ccea823003a106c7d4d7899ac5"}, {file = "PyYAML-6.0.1-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:28c119d996beec18c05208a8bd78cbe4007878c6dd15091efb73a30e90539696"}, {file = "PyYAML-6.0.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7e07cbde391ba96ab58e532ff4803f79c4129397514e1413a7dc761ccd755735"}, + {file = "PyYAML-6.0.1-cp38-cp38-musllinux_1_1_x86_64.whl", 
hash = "sha256:49a183be227561de579b4a36efbb21b3eab9651dd81b1858589f796549873dd6"}, {file = "PyYAML-6.0.1-cp38-cp38-win32.whl", hash = "sha256:184c5108a2aca3c5b3d3bf9395d50893a7ab82a38004c8f61c258d4428e80206"}, {file = "PyYAML-6.0.1-cp38-cp38-win_amd64.whl", hash = "sha256:1e2722cc9fbb45d9b87631ac70924c11d3a401b2d7f410cc0e3bbf249f2dca62"}, {file = "PyYAML-6.0.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:9eb6caa9a297fc2c2fb8862bc5370d0303ddba53ba97e71f08023b6cd73d16a8"}, @@ -2146,6 +2155,7 @@ files = [ {file = "PyYAML-6.0.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5773183b6446b2c99bb77e77595dd486303b4faab2b086e7b17bc6bef28865f6"}, {file = "PyYAML-6.0.1-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:b786eecbdf8499b9ca1d697215862083bd6d2a99965554781d0d8d1ad31e13a0"}, {file = "PyYAML-6.0.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bc1bf2925a1ecd43da378f4db9e4f799775d6367bdb94671027b73b393a7c42c"}, + {file = "PyYAML-6.0.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:04ac92ad1925b2cff1db0cfebffb6ffc43457495c9b3c39d3fcae417d7125dc5"}, {file = "PyYAML-6.0.1-cp39-cp39-win32.whl", hash = "sha256:faca3bdcf85b2fc05d06ff3fbc1f83e1391b3e724afa3feba7d13eeab355484c"}, {file = "PyYAML-6.0.1-cp39-cp39-win_amd64.whl", hash = "sha256:510c9deebc5c0225e8c96813043e62b680ba2f9c50a08d3724c7f28a747d1486"}, {file = "PyYAML-6.0.1.tar.gz", hash = "sha256:bfdf460b1736c775f2ba9f6a92bca30bc2095067b8a9d77876d1fad6cc3b4a43"}, @@ -3140,4 +3150,4 @@ proxy = ["PyJWT", "apscheduler", "backoff", "cryptography", "fastapi", "fastapi- [metadata] lock-version = "2.0" python-versions = ">=3.8.1,<4.0, !=3.9.7" -content-hash = "a54d969a1a707413e7cd3ce869d14ef73dd41bb9d36ebf0fb878d9e929bc15b3" +content-hash = "6a37992b63b11d254f5f40687bd96898b1d9515728f663f30dcc81c4ef8df7b7" diff --git a/pyproject.toml b/pyproject.toml index 2b9a81ae4c..c346640396 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -1,6 +1,6 
@@ [tool.poetry] name = "litellm" -version = "1.40.3" +version = "1.40.5" description = "Library to easily interface with LLM API providers" authors = ["BerriAI"] license = "MIT" @@ -62,7 +62,8 @@ extra_proxy = [ "azure-identity", "azure-keyvault-secrets", "google-cloud-kms", - "resend" + "resend", + "pynacl" ] [tool.poetry.scripts] @@ -79,7 +80,7 @@ requires = ["poetry-core", "wheel"] build-backend = "poetry.core.masonry.api" [tool.commitizen] -version = "1.40.3" +version = "1.40.5" version_files = [ "pyproject.toml:^version" ] diff --git a/schema.prisma b/schema.prisma index 243f063376..7cc688ee8e 100644 --- a/schema.prisma +++ b/schema.prisma @@ -243,4 +243,16 @@ model LiteLLM_InvitationLink { liteLLM_user_table_user LiteLLM_UserTable @relation("UserId", fields: [user_id], references: [user_id]) liteLLM_user_table_created LiteLLM_UserTable @relation("CreatedBy", fields: [created_by], references: [user_id]) liteLLM_user_table_updated LiteLLM_UserTable @relation("UpdatedBy", fields: [updated_by], references: [user_id]) +} + + +model LiteLLM_AuditLog { + id String @id @default(uuid()) + updated_at DateTime @default(now()) + changed_by String // user or system that performed the action + action String // create, update, delete + table_name String // on of LitellmTableNames.TEAM_TABLE_NAME, LitellmTableNames.USER_TABLE_NAME, LitellmTableNames.PROXY_MODEL_TABLE_NAME, + object_id String // id of the object being audited. This can be the key id, team id, user id, model id + before_value Json? // value of the row + updated_values Json? 
// value of the row after change } \ No newline at end of file diff --git a/ui/litellm-dashboard/src/components/model_dashboard.tsx b/ui/litellm-dashboard/src/components/model_dashboard.tsx index adb45346b7..73e5a7a8f1 100644 --- a/ui/litellm-dashboard/src/components/model_dashboard.tsx +++ b/ui/litellm-dashboard/src/components/model_dashboard.tsx @@ -145,6 +145,7 @@ enum Providers { OpenAI_Compatible = "OpenAI-Compatible Endpoints (Groq, Together AI, Mistral AI, etc.)", Vertex_AI = "Vertex AI (Anthropic, Gemini, etc.)", Databricks = "Databricks", + Ollama = "Ollama", } const provider_map: Record = { @@ -156,6 +157,7 @@ const provider_map: Record = { OpenAI_Compatible: "openai", Vertex_AI: "vertex_ai", Databricks: "databricks", + Ollama: "ollama", }; const retry_policy_map: Record = { @@ -1747,6 +1749,7 @@ const ModelDashboard: React.FC = ({ )} {selectedProvider != Providers.Bedrock && selectedProvider != Providers.Vertex_AI && + selectedProvider != Providers.Ollama && (dynamicProviderForm === undefined || dynamicProviderForm.fields.length == 0) && (