From 4e63bca21e6cd617f5ae11716bda963d055c14ee Mon Sep 17 00:00:00 2001
From: ishaan-jaff
Date: Tue, 27 Feb 2024 12:52:20 -0800
Subject: [PATCH 01/12] (feat) clickhouse run view setup

---
 litellm/integrations/clickhouse.py | 110 +++++++++++++++++++++++++++++
 litellm/proxy/proxy_config.yaml    |   2 +-
 2 files changed, 111 insertions(+), 1 deletion(-)

diff --git a/litellm/integrations/clickhouse.py b/litellm/integrations/clickhouse.py
index 280d64c4a2..1533dd6c61 100644
--- a/litellm/integrations/clickhouse.py
+++ b/litellm/integrations/clickhouse.py
@@ -27,6 +27,111 @@ import litellm, uuid
 from litellm._logging import print_verbose, verbose_logger


def _create_clickhouse_material_views(client=None, table_names=[]):
    # Create Materialized Views if they don't exist
    # Materialized Views send new inserted rows to the aggregate tables

    verbose_logger.debug("Clickhouse: Creating Materialized Views")
    if "daily_aggregated_spend_per_model_mv" not in table_names:
        verbose_logger.debug("Clickhouse: Creating daily_aggregated_spend_per_model_mv")
        client.command(
            """
            CREATE MATERIALIZED VIEW daily_aggregated_spend_per_model_mv
            TO daily_aggregated_spend_per_model
            AS
            SELECT
                toDate(startTime) as day,
                sumState(spend) AS DailySpend,
                model as model
            FROM spend_logs
            GROUP BY
                day, model
            """
        )
    if "daily_aggregated_spend_per_api_key_mv" not in table_names:
        verbose_logger.debug(
            "Clickhouse: Creating daily_aggregated_spend_per_api_key_mv"
        )
        client.command(
            """
            CREATE MATERIALIZED VIEW daily_aggregated_spend_per_api_key_mv
            TO daily_aggregated_spend_per_api_key
            AS
            SELECT
                toDate(startTime) as day,
                sumState(spend) AS DailySpend,
                api_key as api_key
            FROM spend_logs
            GROUP BY
                day, api_key
            """
        )
    if "daily_aggregated_spend_mv" not in table_names:
        verbose_logger.debug("Clickhouse: Creating daily_aggregated_spend_mv")
        client.command(
            """
            CREATE MATERIALIZED VIEW daily_aggregated_spend_mv
            TO daily_aggregated_spend
            AS
            SELECT
                toDate(startTime) as day,
                sumState(spend) AS DailySpend
            FROM spend_logs
            GROUP BY
                day
            """
        )


def _create_clickhouse_aggregate_tables(client=None, table_names=[]):
    # Basic Logging works without this - this is only used for low latency reporting apis
    verbose_logger.debug("Clickhouse: Creating Aggregate Tables")

    # Create Aggregate Tables if they don't exist
    if "daily_aggregated_spend_per_model" not in table_names:
        verbose_logger.debug("Clickhouse: Creating daily_aggregated_spend_per_model")
        client.command(
            """
            CREATE TABLE daily_aggregated_spend_per_model
            (
                `day` Date,
                `DailySpend` AggregateFunction(sum, Float64),
                `model` String
            )
            ENGINE = SummingMergeTree()
            ORDER BY (day, model);
            """
        )
    if "daily_aggregated_spend_per_api_key" not in table_names:
        verbose_logger.debug("Clickhouse: Creating daily_aggregated_spend_per_api_key")
        client.command(
            """
            CREATE TABLE daily_aggregated_spend_per_api_key
            (
                `day` Date,
                `DailySpend` AggregateFunction(sum, Float64),
                `api_key` String
            )
            ENGINE = SummingMergeTree()
            ORDER BY (day, api_key);
            """
        )
    if "daily_aggregated_spend" not in table_names:
        verbose_logger.debug("Clickhouse: Creating daily_aggregated_spend")
        client.command(
            """
            CREATE TABLE daily_aggregated_spend
            (
                `day` Date,
                `DailySpend` AggregateFunction(sum, Float64)
            )
            ENGINE = SummingMergeTree()
            ORDER BY (day);
            """
        )
    return


def _start_clickhouse():
    import clickhouse_connect

@@ -86,6 +191,11 @@ def _start_clickhouse():
    response = client.query("DESCRIBE default.spend_logs")
    verbose_logger.debug(f"spend logs schema ={response.result_rows}")

    # RUN Enterprise Clickhouse Setup
    # TLDR: For Enterprise - we create views / aggregate tables for low latency reporting APIs
    _create_clickhouse_aggregate_tables(client=client, table_names=table_names)
    _create_clickhouse_material_views(client=client, table_names=table_names)


class ClickhouseLogger:
    # Class variables or attributes

diff --git a/litellm/proxy/proxy_config.yaml b/litellm/proxy/proxy_config.yaml
index 74a780c716..5755293e00 100644
--- a/litellm/proxy/proxy_config.yaml
+++ b/litellm/proxy/proxy_config.yaml
@@ -43,7 +43,7 @@ model_list:
       api_key: os.environ/OPENAI_API_KEY
 litellm_settings:
   fallbacks: [{"openai-gpt-3.5": ["azure-gpt-3.5"]}]
-  success_callback: ['langfuse']
+  success_callback: ['clickhouse', 'langfuse']
   # setting callback class
   # callbacks: custom_callbacks.proxy_handler_instance # sets litellm.callbacks = [proxy_handler_instance]
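Since `sumState()` writes partial aggregate states rather than plain floats into these tables, the reporting side has to finalize them with `sumMerge()` at query time. A minimal sketch of that read path — the connection settings here are assumptions, so match them to however `_start_clickhouse` builds its client:

```python
# Hypothetical read path for the aggregate tables defined above.
# sumMerge() finalizes the AggregateFunction(sum, Float64) states that the
# materialized views populate via sumState().
import os

import clickhouse_connect

client = clickhouse_connect.get_client(
    host=os.getenv("CLICKHOUSE_HOST", "localhost"),
    port=int(os.getenv("CLICKHOUSE_PORT", "8123")),
    username=os.getenv("CLICKHOUSE_USERNAME", "default"),
    password=os.getenv("CLICKHOUSE_PASSWORD", ""),
)

response = client.query(
    """
    SELECT day, model, sumMerge(DailySpend) AS daily_spend
    FROM daily_aggregated_spend_per_model
    GROUP BY day, model
    ORDER BY day DESC
    """
)
for day, model, daily_spend in response.result_rows:
    print(day, model, daily_spend)
```

The same pattern applies to the per-key and overall tables by swapping the table name and group keys.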
From 16ff445e7a321405219c687a2d3e6c61853b96ac Mon Sep 17 00:00:00 2001
From: ishaan-jaff
Date: Tue, 27 Feb 2024 12:55:50 -0800
Subject: [PATCH 02/12] (feat) clickhouse update views / aggregate tables

---
 litellm/integrations/clickhouse.py | 30 ++++++++++++++++++++++++++++++
 1 file changed, 30 insertions(+)

diff --git a/litellm/integrations/clickhouse.py b/litellm/integrations/clickhouse.py
index 1533dd6c61..82ecd3256d 100644
--- a/litellm/integrations/clickhouse.py
+++ b/litellm/integrations/clickhouse.py
@@ -66,6 +66,22 @@ def _create_clickhouse_material_views(client=None, table_names=[]):
                 day, api_key
             """
         )
    if "daily_aggregated_spend_per_user_mv" not in table_names:
        verbose_logger.debug("Clickhouse: Creating daily_aggregated_spend_per_user_mv")
        client.command(
            """
            CREATE MATERIALIZED VIEW daily_aggregated_spend_per_user_mv
            TO daily_aggregated_spend_per_user
            AS
            SELECT
                toDate(startTime) as day,
                sumState(spend) AS DailySpend,
                user as user
            FROM spend_logs
            GROUP BY
                day, user
            """
        )
    if "daily_aggregated_spend_mv" not in table_names:
        verbose_logger.debug("Clickhouse: Creating daily_aggregated_spend_mv")
        client.command(
@@ -116,6 +132,20 @@ def _create_clickhouse_aggregate_tables(client=None, table_names=[]):
             ORDER BY (day, api_key);
             """
         )
    if "daily_aggregated_spend_per_user" not in table_names:
        verbose_logger.debug("Clickhouse: Creating daily_aggregated_spend_per_user")
        client.command(
            """
            CREATE TABLE daily_aggregated_spend_per_user
            (
                `day` Date,
                `DailySpend` AggregateFunction(sum, Float64),
                `user` String
            )
            ENGINE = SummingMergeTree()
            ORDER BY (day, user);
            """
        )
    if "daily_aggregated_spend" not in table_names:
        verbose_logger.debug("Clickhouse: Creating daily_aggregated_spend")
        client.command(

From 468cd960ac53b302d90f7eebbf6366e718683b91 Mon Sep 17 00:00:00 2001
From: Krish Dholakia
Date: Tue, 27 Feb 2024 14:36:33 -0800
Subject: [PATCH 03/12] Update model_prices_and_context_window.json

---
 model_prices_and_context_window.json | 11 +++++++++++
 1 file changed, 11 insertions(+)

diff --git a/model_prices_and_context_window.json b/model_prices_and_context_window.json
index c55c164d38..6c663200d3 100644
--- a/model_prices_and_context_window.json
+++ b/model_prices_and_context_window.json
@@ -706,6 +706,17 @@
     "litellm_provider": "vertex_ai-vision-models",
     "mode": "chat"
   },
   "gemini-1.0-pro-vision-001": {
     "max_tokens": 16384,
     "max_output_tokens": 2048,
     "max_images_per_prompt": 16,
     "max_videos_per_prompt": 1,
     "max_video_length": 2,
     "input_cost_per_token": 
0.00000025, + "output_cost_per_token": 0.0000005, + "litellm_provider": "vertex_ai-vision-models", + "mode": "chat" + }, "gemini-1.5-pro-vision": { "max_tokens": 8192, "max_input_tokens": 1000000, From 9f7b322ae2adce2dfbdfcd7d7d1f0af04cf4c542 Mon Sep 17 00:00:00 2001 From: Krrish Dholakia Date: Tue, 27 Feb 2024 14:45:34 -0800 Subject: [PATCH 04/12] fix(utils.py): map optional params for gemini pro vision --- litellm/utils.py | 1 + 1 file changed, 1 insertion(+) diff --git a/litellm/utils.py b/litellm/utils.py index 6bc229e96e..0ef50f20c3 100644 --- a/litellm/utils.py +++ b/litellm/utils.py @@ -4351,6 +4351,7 @@ def get_optional_params( or model in litellm.vertex_code_text_models or model in litellm.vertex_language_models or model in litellm.vertex_embedding_models + or model in litellm.vertex_vision_models ): print_verbose(f"(start) INSIDE THE VERTEX AI OPTIONAL PARAM BLOCK") ## check if unsupported param passed in From d8669dea64b08baa983aa2ec4ed1563c474adbbd Mon Sep 17 00:00:00 2001 From: Krrish Dholakia Date: Tue, 27 Feb 2024 14:46:14 -0800 Subject: [PATCH 05/12] =?UTF-8?q?bump:=20version=201.27.11=20=E2=86=92=201?= =?UTF-8?q?.27.12?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- litellm/model_prices_and_context_window_backup.json | 11 +++++++++++ pyproject.toml | 4 ++-- 2 files changed, 13 insertions(+), 2 deletions(-) diff --git a/litellm/model_prices_and_context_window_backup.json b/litellm/model_prices_and_context_window_backup.json index c55c164d38..6c663200d3 100644 --- a/litellm/model_prices_and_context_window_backup.json +++ b/litellm/model_prices_and_context_window_backup.json @@ -706,6 +706,17 @@ "litellm_provider": "vertex_ai-vision-models", "mode": "chat" }, + "gemini-1.0-pro-vision-001": { + "max_tokens": 16384, + "max_output_tokens": 2048, + "max_images_per_prompt": 16, + "max_videos_per_prompt": 1, + "max_video_length": 2, + "input_cost_per_token": 0.00000025, + "output_cost_per_token": 0.0000005, + "litellm_provider": "vertex_ai-vision-models", + "mode": "chat" + }, "gemini-1.5-pro-vision": { "max_tokens": 8192, "max_input_tokens": 1000000, diff --git a/pyproject.toml b/pyproject.toml index e914e69414..295d7e902d 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -1,6 +1,6 @@ [tool.poetry] name = "litellm" -version = "1.27.11" +version = "1.27.12" description = "Library to easily interface with LLM API providers" authors = ["BerriAI"] license = "MIT" @@ -74,7 +74,7 @@ requires = ["poetry-core", "wheel"] build-backend = "poetry.core.masonry.api" [tool.commitizen] -version = "1.27.11" +version = "1.27.12" version_files = [ "pyproject.toml:^version" ] From 411787963bb8f795b6875668bc5eb41fb916d43a Mon Sep 17 00:00:00 2001 From: Krrish Dholakia Date: Tue, 27 Feb 2024 14:52:45 -0800 Subject: [PATCH 06/12] test(test_proxy_custom_logger.py): fix proxy test --- litellm/tests/test_proxy_custom_logger.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/litellm/tests/test_proxy_custom_logger.py b/litellm/tests/test_proxy_custom_logger.py index 34e427ef4d..64ed08897f 100644 --- a/litellm/tests/test_proxy_custom_logger.py +++ b/litellm/tests/test_proxy_custom_logger.py @@ -274,7 +274,7 @@ def test_chat_completion_stream(client): print("\n\n decode_data", data) # Access the content of choices[0]['message']['content'] - content = data["choices"][0]["delta"]["content"] or "" + content = data["choices"][0]["delta"].get("content", None) or "" # Process the content as needed print("Content:", content) From 
e7b88c2134a013f527304de29358238a5593f91f Mon Sep 17 00:00:00 2001 From: Krish Dholakia Date: Tue, 27 Feb 2024 16:47:09 -0800 Subject: [PATCH 07/12] Update model_prices_and_context_window.json --- model_prices_and_context_window.json | 9 +++++++++ 1 file changed, 9 insertions(+) diff --git a/model_prices_and_context_window.json b/model_prices_and_context_window.json index 6c663200d3..5bd0bcdff1 100644 --- a/model_prices_and_context_window.json +++ b/model_prices_and_context_window.json @@ -687,6 +687,15 @@ "litellm_provider": "vertex_ai-language-models", "mode": "chat" }, + "gemini-1.5-pro-preview-0215": { + "max_tokens": 8192, + "max_input_tokens": 1000000, + "max_output_tokens": 8192, + "input_cost_per_token": 0, + "output_cost_per_token": 0, + "litellm_provider": "vertex_ai-language-models", + "mode": "chat" + }, "gemini-pro-vision": { "max_tokens": 16384, "max_output_tokens": 2048, From f3144dd9cf790eda0c1ee070c01af087668e589a Mon Sep 17 00:00:00 2001 From: ishaan-jaff Date: Tue, 27 Feb 2024 17:44:35 -0800 Subject: [PATCH 08/12] (docs) vertex ai --- docs/my-website/docs/providers/vertex.md | 19 +++++++++++-------- ...odel_prices_and_context_window_backup.json | 9 +++++++++ 2 files changed, 20 insertions(+), 8 deletions(-) diff --git a/docs/my-website/docs/providers/vertex.md b/docs/my-website/docs/providers/vertex.md index 70ee9eca93..20225473b9 100644 --- a/docs/my-website/docs/providers/vertex.md +++ b/docs/my-website/docs/providers/vertex.md @@ -25,14 +25,17 @@ response = litellm.completion(model="gemini-pro", messages=[{"role": "user", "co 1. Modify the config.yaml ```yaml -litellm_settings: - vertex_project: "hardy-device-38811" # Your Project ID - vertex_location: "us-central1" # proj location - -model_list: - -model_name: team1-gemini-pro - litellm_params: - model: gemini-pro +model_list: + - model_name: gemini-vision + litellm_params: + model: vertex_ai/gemini-1.0-pro-vision-001 + vertex_project: "project-id" + vertex_location: "us-central1" + - model_name: gemini-vision + litellm_params: + model: vertex_ai/gemini-1.0-pro-vision-001 + vertex_project: "project-id2" + vertex_location: "us-east" ``` 2. 
Start the proxy

diff --git a/litellm/model_prices_and_context_window_backup.json b/litellm/model_prices_and_context_window_backup.json
index 6c663200d3..5bd0bcdff1 100644
--- a/litellm/model_prices_and_context_window_backup.json
+++ b/litellm/model_prices_and_context_window_backup.json
@@ -687,6 +687,15 @@
     "litellm_provider": "vertex_ai-language-models",
     "mode": "chat"
   },
+  "gemini-1.5-pro-preview-0215": {
+    "max_tokens": 8192,
+    "max_input_tokens": 1000000,
+    "max_output_tokens": 8192,
+    "input_cost_per_token": 0,
+    "output_cost_per_token": 0,
+    "litellm_provider": "vertex_ai-language-models",
+    "mode": "chat"
+  },
   "gemini-pro-vision": {
     "max_tokens": 16384,
     "max_output_tokens": 2048,

From 7485fa797c7d2eee715eebbcc7de2e7cf7a8b265 Mon Sep 17 00:00:00 2001
From: ishaan-jaff
Date: Tue, 27 Feb 2024 17:55:05 -0800
Subject: [PATCH 09/12] (docs) vertex ai litellm proxy

---
 docs/my-website/docs/providers/vertex.md | 32 ++++++++++++++++++++++++
 1 file changed, 32 insertions(+)

diff --git a/docs/my-website/docs/providers/vertex.md b/docs/my-website/docs/providers/vertex.md
index 20225473b9..d959498ce7 100644
--- a/docs/my-website/docs/providers/vertex.md
+++ b/docs/my-website/docs/providers/vertex.md
@@ -1,3 +1,6 @@
+import Tabs from '@theme/Tabs';
+import TabItem from '@theme/TabItem';
+
 # VertexAI - Google [Gemini, Model Garden]

@@ -22,8 +25,16 @@

 ## OpenAI Proxy Usage

+Here's how to use Vertex AI with the LiteLLM Proxy Server
+
 1. Modify the config.yaml

+<Tabs>
+
+<TabItem value="different-locations" label="Different location per model">
+
+Use this when you need to set a different location for each vertex model
+
 ```yaml
 model_list:
   - model_name: gemini-vision
     litellm_params:
       model: vertex_ai/gemini-1.0-pro-vision-001
       vertex_project: "project-id"
       vertex_location: "us-central1"
   - model_name: gemini-vision
     litellm_params:
       model: vertex_ai/gemini-1.0-pro-vision-001
       vertex_project: "project-id2"
       vertex_location: "us-east"
 ```

+</TabItem>
+
+<TabItem value="one-location" label="One location for all models">
+
+Use this when you have one vertex location for all models
+
 ```yaml
 litellm_settings:
   vertex_project: "hardy-device-38811" # Your Project ID
   vertex_location: "us-central1" # proj location

 model_list:
   - model_name: team1-gemini-pro
     litellm_params:
       model: gemini-pro
 ```

+</TabItem>
+
+</Tabs>
+
 2. Start the proxy

 ```bash
 litellm --config /path/to/config.yaml
 ```
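With either config above, the two `gemini-vision` entries form one deployment group that the proxy load-balances across. A sketch of a client-side call against it, assuming the OpenAI v1 SDK and a locally running proxy (the address and key are placeholders):

```python
# Hypothetical client call; the proxy routes "gemini-vision" to one of the
# Vertex AI deployments defined in the config above.
import openai

client = openai.OpenAI(
    api_key="sk-1234",               # placeholder proxy key
    base_url="http://0.0.0.0:4000",  # adjust to where your proxy listens
)

response = client.chat.completions.create(
    model="gemini-vision",
    messages=[{"role": "user", "content": "Describe this image setup in one sentence."}],
)
print(response.choices[0].message.content)
```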
From 6352c8869cbb2522aff0df5f5177a72070c0c5e5 Mon Sep 17 00:00:00 2001
From: Krrish Dholakia
Date: Tue, 27 Feb 2024 18:34:07 -0800
Subject: [PATCH 10/12] refactor(vertex_ai.py): add more logging to show the
 vertex ai client init params

---
 litellm/llms/vertex_ai.py | 4 ++++
 1 file changed, 4 insertions(+)

diff --git a/litellm/llms/vertex_ai.py b/litellm/llms/vertex_ai.py
index f4447a9e91..18c06d4d67 100644
--- a/litellm/llms/vertex_ai.py
+++ b/litellm/llms/vertex_ai.py
@@ -278,7 +278,11 @@ def completion(
         import google.auth

         ## Load credentials with the correct quota project ref: https://github.com/googleapis/python-aiplatform/issues/2557#issuecomment-1709284744
+        print_verbose(
+            f"VERTEX AI: vertex_project={vertex_project}; vertex_location={vertex_location}"
+        )
         creds, _ = google.auth.default(quota_project_id=vertex_project)
+        print_verbose(f"VERTEX AI: creds={creds}")
         vertexai.init(
             project=vertex_project, location=vertex_location, credentials=creds
         )
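The `print_verbose()` calls added here only emit when verbose mode is on, so a quick way to surface the new client init params while debugging credentials is something like the following sketch (the project and location values are illustrative):

```python
# Turn on verbose logging so the new "VERTEX AI: ..." lines are printed.
import litellm

litellm.set_verbose = True  # print_verbose() output is gated on this flag

response = litellm.completion(
    model="vertex_ai/gemini-pro",
    messages=[{"role": "user", "content": "hi"}],
    vertex_project="my-project",    # illustrative project id
    vertex_location="us-central1",  # illustrative region
)
```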
table_names=[]):
    # Basic Logging works without this - this is only used for low latency reporting apis
    verbose_logger.debug("Clickhouse: Creating Aggregate Tables")

    # Create Aggregate Tables if they don't exist
    if "daily_aggregated_spend_per_model" not in table_names:
        verbose_logger.debug("Clickhouse: Creating daily_aggregated_spend_per_model")
        client.command(
            """
            CREATE TABLE daily_aggregated_spend_per_model
            (
                `day` Date,
                `DailySpend` AggregateFunction(sum, Float64),
                `model` String
            )
            ENGINE = SummingMergeTree()
            ORDER BY (day, model);
            """
        )
    if "daily_aggregated_spend_per_api_key" not in table_names:
        verbose_logger.debug("Clickhouse: Creating daily_aggregated_spend_per_api_key")
        client.command(
            """
            CREATE TABLE daily_aggregated_spend_per_api_key
            (
                `day` Date,
                `DailySpend` AggregateFunction(sum, Float64),
                `api_key` String
            )
            ENGINE = SummingMergeTree()
            ORDER BY (day, api_key);
            """
        )
    if "daily_aggregated_spend_per_user" not in table_names:
        verbose_logger.debug("Clickhouse: Creating daily_aggregated_spend_per_user")
        client.command(
            """
            CREATE TABLE daily_aggregated_spend_per_user
            (
                `day` Date,
                `DailySpend` AggregateFunction(sum, Float64),
                `user` String
            )
            ENGINE = SummingMergeTree()
            ORDER BY (day, user);
            """
        )
    if "daily_aggregated_spend" not in table_names:
        verbose_logger.debug("Clickhouse: Creating daily_aggregated_spend")
        client.command(
            """
            CREATE TABLE daily_aggregated_spend
            (
                `day` Date,
                `DailySpend` AggregateFunction(sum, Float64)
            )
            ENGINE = SummingMergeTree()
            ORDER BY (day);
            """
        )
    return

diff --git a/litellm/integrations/clickhouse.py b/litellm/integrations/clickhouse.py
index 82ecd3256d..a2a0b800e4 100644
--- a/litellm/integrations/clickhouse.py
+++ b/litellm/integrations/clickhouse.py
@@ -27,141 +27,6 @@ import litellm, uuid
 from litellm._logging import print_verbose, verbose_logger


def _create_clickhouse_material_views(client=None, table_names=[]):
    # Create Materialized Views if they don't exist
    # Materialized Views send new inserted rows to the aggregate tables

    verbose_logger.debug("Clickhouse: Creating Materialized Views")
    if "daily_aggregated_spend_per_model_mv" not in table_names:
        verbose_logger.debug("Clickhouse: Creating daily_aggregated_spend_per_model_mv")
        client.command(
            """
            CREATE MATERIALIZED VIEW daily_aggregated_spend_per_model_mv
            TO daily_aggregated_spend_per_model
            AS
            SELECT
                toDate(startTime) as day,
                sumState(spend) AS DailySpend,
                model as model
            FROM spend_logs
            GROUP BY
                day, model
            """
        )
    if "daily_aggregated_spend_per_api_key_mv" not in table_names:
        verbose_logger.debug(
            "Clickhouse: Creating daily_aggregated_spend_per_api_key_mv"
        )
        client.command(
            """
            CREATE MATERIALIZED VIEW daily_aggregated_spend_per_api_key_mv
            TO daily_aggregated_spend_per_api_key
            AS
            SELECT
                toDate(startTime) as day,
                sumState(spend) AS DailySpend,
                api_key as api_key
            FROM spend_logs
            GROUP BY
                day, api_key
            """
        )
    if "daily_aggregated_spend_per_user_mv" not in table_names:
        verbose_logger.debug("Clickhouse: Creating daily_aggregated_spend_per_user_mv")
        client.command(
            """
            CREATE MATERIALIZED VIEW daily_aggregated_spend_per_user_mv
            TO daily_aggregated_spend_per_user
            AS
            SELECT
                toDate(startTime) as day,
                sumState(spend) AS DailySpend,
                user as user
            FROM spend_logs
            GROUP BY
                day, user
            """
        )
    if "daily_aggregated_spend_mv" not in table_names:
        verbose_logger.debug("Clickhouse: Creating daily_aggregated_spend_mv")
        client.command(
            """
            CREATE MATERIALIZED VIEW daily_aggregated_spend_mv
            TO daily_aggregated_spend
            AS
            SELECT
                toDate(startTime) as day,
                sumState(spend) AS DailySpend
            FROM spend_logs
            GROUP BY
                day
            """
        )


def _create_clickhouse_aggregate_tables(client=None, table_names=[]):
    # Basic Logging works without this - this is only used for low latency reporting apis
    verbose_logger.debug("Clickhouse: Creating Aggregate Tables")

    # Create Aggregate Tables if they don't exist
    if "daily_aggregated_spend_per_model" not in table_names:
        verbose_logger.debug("Clickhouse: Creating daily_aggregated_spend_per_model")
        client.command(
            """
            CREATE TABLE daily_aggregated_spend_per_model
            (
                `day` Date,
                `DailySpend` AggregateFunction(sum, Float64),
                `model` String
            )
            ENGINE = SummingMergeTree()
            ORDER BY (day, model);
            """
        )
    if "daily_aggregated_spend_per_api_key" not in table_names:
        verbose_logger.debug("Clickhouse: Creating daily_aggregated_spend_per_api_key")
        client.command(
            """
            CREATE TABLE daily_aggregated_spend_per_api_key
            (
                `day` Date,
                `DailySpend` AggregateFunction(sum, Float64),
                `api_key` String
            )
            ENGINE = SummingMergeTree()
            ORDER BY (day, api_key);
            """
        )
    if "daily_aggregated_spend_per_user" not in table_names:
        verbose_logger.debug("Clickhouse: Creating daily_aggregated_spend_per_user")
        client.command(
            """
            CREATE TABLE daily_aggregated_spend_per_user
            (
                `day` Date,
                `DailySpend` AggregateFunction(sum, Float64),
                `user` String
            )
            ENGINE = SummingMergeTree()
            ORDER BY (day, user);
            """
        )
    if "daily_aggregated_spend" not in table_names:
        verbose_logger.debug("Clickhouse: Creating daily_aggregated_spend")
        client.command(
            """
            CREATE TABLE daily_aggregated_spend
            (
                `day` Date,
                `DailySpend` AggregateFunction(sum, Float64)
            )
            ENGINE = SummingMergeTree()
            ORDER BY (day);
            """
        )
    return


def _start_clickhouse():
    import clickhouse_connect

@@ -223,6 +88,9 @@ def _start_clickhouse():
     # RUN Enterprise Clickhouse Setup
     # TLDR: For Enterprise - we create views / aggregate tables for low latency reporting APIs
+    from litellm.proxy.enterprise.utils import _create_clickhouse_aggregate_tables
+    from litellm.proxy.enterprise.utils import _create_clickhouse_material_views
+
     _create_clickhouse_aggregate_tables(client=client, table_names=table_names)
     _create_clickhouse_material_views(client=client, table_names=table_names)
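After the move, `clickhouse.py` imports the helpers lazily from the enterprise module. A smoke check along these lines can confirm the new import path resolves and the DDL helpers still run; the stub client is an assumption, and depending on how the `enterprise/` folder is mounted the path may resolve as `enterprise.utils` instead:

```python
# Hypothetical smoke test for the relocated helpers: feed them a stub client
# and an empty table list so every CREATE statement is exercised.
from litellm.proxy.enterprise.utils import (
    _create_clickhouse_aggregate_tables,
    _create_clickhouse_material_views,
)

class StubClient:
    def command(self, sql: str) -> None:
        # Print the first line of each DDL statement instead of executing it
        print(sql.strip().splitlines()[0])

stub = StubClient()
_create_clickhouse_aggregate_tables(client=stub, table_names=[])
_create_clickhouse_material_views(client=stub, table_names=[])
```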
daily_aggregated_spend_mv") - client.command( - """ - CREATE MATERIALIZED VIEW daily_aggregated_spend_mv - TO daily_aggregated_spend - AS - SELECT - toDate(startTime) as day, - sumState(spend) AS DailySpend - FROM spend_logs - GROUP BY - day - """ - ) - - -def _create_clickhouse_aggregate_tables(client=None, table_names=[]): - # Basic Logging works without this - this is only used for low latency reporting apis - verbose_logger.debug("Clickhouse: Creating Aggregate Tables") - - # Create Aggregeate Tables if they don't exist - if "daily_aggregated_spend_per_model" not in table_names: - verbose_logger.debug("Clickhouse: Creating daily_aggregated_spend_per_model") - client.command( - """ - CREATE TABLE daily_aggregated_spend_per_model - ( - `day` Date, - `DailySpend` AggregateFunction(sum, Float64), - `model` String - ) - ENGINE = SummingMergeTree() - ORDER BY (day, model); - """ - ) - if "daily_aggregated_spend_per_api_key" not in table_names: - verbose_logger.debug("Clickhouse: Creating daily_aggregated_spend_per_api_key") - client.command( - """ - CREATE TABLE daily_aggregated_spend_per_api_key - ( - `day` Date, - `DailySpend` AggregateFunction(sum, Float64), - `api_key` String - ) - ENGINE = SummingMergeTree() - ORDER BY (day, api_key); - """ - ) - if "daily_aggregated_spend_per_user" not in table_names: - verbose_logger.debug("Clickhouse: Creating daily_aggregated_spend_per_user") - client.command( - """ - CREATE TABLE daily_aggregated_spend_per_user - ( - `day` Date, - `DailySpend` AggregateFunction(sum, Float64), - `user` String - ) - ENGINE = SummingMergeTree() - ORDER BY (day, user); - """ - ) - if "daily_aggregated_spend" not in table_names: - verbose_logger.debug("Clickhouse: Creating daily_aggregated_spend") - client.command( - """ - CREATE TABLE daily_aggregated_spend - ( - `day` Date, - `DailySpend` AggregateFunction(sum, Float64), - ) - ENGINE = SummingMergeTree() - ORDER BY (day); - """ - ) - return - - def _start_clickhouse(): import clickhouse_connect @@ -223,6 +88,9 @@ def _start_clickhouse(): # RUN Enterprise Clickhouse Setup # TLDR: For Enterprise - we create views / aggregate tables for low latency reporting APIs + from litellm.proxy.enterprise.utils import _create_clickhouse_aggregate_tables + from litellm.proxy.enterprise.utils import _create_clickhouse_material_views + _create_clickhouse_aggregate_tables(client=client, table_names=table_names) _create_clickhouse_material_views(client=client, table_names=table_names) From 6f60acefa5df625000a672066fd4830e89a3d94b Mon Sep 17 00:00:00 2001 From: Krrish Dholakia Date: Tue, 27 Feb 2024 19:50:40 -0800 Subject: [PATCH 12/12] build(config.yml): install fastapi during docker build test --- .circleci/config.yml | 1 + 1 file changed, 1 insertion(+) diff --git a/.circleci/config.yml b/.circleci/config.yml index a24ae1d8eb..daa4d59ec4 100644 --- a/.circleci/config.yml +++ b/.circleci/config.yml @@ -130,6 +130,7 @@ jobs: pip install "langfuse>=2.0.0" pip install numpydoc pip install prisma + pip install fastapi pip install "httpx==0.24.1" pip install "gunicorn==21.2.0" pip install "anyio==3.7.1"