fix(litellm_proxy_extras): add baselining db script (#9942)

* fix(litellm_proxy_extras): add baselining db script

Fixes https://github.com/BerriAI/litellm/issues/9885
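
In short (a simplified sketch of the flow this commit adds in ProxyExtrasDBManager, shown in full in the diff below): when the target database already has a schema, generate a 0_init baseline migration from the Prisma schema and mark it as applied, so that prisma migrate deploy can run against the existing database instead of failing.

import subprocess
from pathlib import Path

def baseline_existing_db(schema_path: str, migrations_dir: str) -> None:
    """Sketch: baseline a non-empty database for Prisma Migrate."""
    init_dir = Path(migrations_dir) / "migrations" / "0_init"
    init_dir.mkdir(parents=True, exist_ok=True)
    # Generate SQL that recreates the current schema from scratch
    with open(init_dir / "migration.sql", "w") as f:
        subprocess.run(
            ["prisma", "migrate", "diff", "--from-empty",
             "--to-schema-datamodel", schema_path, "--script"],
            stdout=f, check=True, timeout=30,
        )
    # Record the baseline as already applied so deploy won't try to re-run it
    subprocess.run(
        ["prisma", "migrate", "resolve", "--applied", "0_init"],
        check=True, timeout=30,
    )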

* fix(prisma_client.py): fix ruff errors

* ci(config.yml): add publish_proxy_extras step

* fix(config.yml): compare contents between versions to check for changes

* fix(config.yml): fix check

* fix: install toml

* fix: update check

* fix: ensure versions in sync

* fix: fix version compare

* fix: correct the cost for 'gemini/gemini-2.5-pro-preview-03-25' (#9896)

* fix: Typo in the cost 'gemini/gemini-2.5-pro-preview-03-25', closes #9854

* chore: update in backup file as well
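
For scale (illustrative numbers only, not the actual gemini pricing): a misplaced decimal in the per-token price multiplies every computed cost.

input_tokens = 500_000
correct_price_per_1m_tokens = 1.25   # assumed correct value, for illustration
typo_price_per_1m_tokens = 12.5      # assumed typo: decimal point shifted
print("correct cost:", input_tokens / 1_000_000 * correct_price_per_1m_tokens)  # 0.625
print("typo cost:   ", input_tokens / 1_000_000 * typo_price_per_1m_tokens)     # 6.25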

* Litellm add managed files db (#9930)

* fix(openai.py): ensure openai file object shows up on logs

* fix(managed_files.py): return unified file id as b64 str

allows the retrieve file endpoint to work as expected
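
Roughly the idea (a minimal sketch; the helper names and the encoded format are assumptions, not the actual managed_files.py implementation): the proxy hands the client a base64-encoded "unified" id that it can later decode back to the provider-specific file id.

import base64

def encode_unified_file_id(provider: str, provider_file_id: str) -> str:
    # Assumed format, for illustration only
    raw = f"litellm_managed_file;{provider};{provider_file_id}"
    return base64.urlsafe_b64encode(raw.encode()).decode()

def decode_unified_file_id(unified_id: str) -> str:
    return base64.urlsafe_b64decode(unified_id.encode()).decode()

# e.g. encode_unified_file_id("openai", "file-abc123") -> opaque b64 string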

* fix(managed_files.py): apply decoded file id transformation

* fix: add unit test for file id + decode logic

* fix: initial commit for litellm_proxy support with CRUD Endpoints

* fix(managed_files.py): support retrieve file operation

* fix(managed_files.py): support for DELETE endpoint for files

* fix(managed_files.py): retrieve file content support

supports the retrieve file content API from OpenAI

* fix: fix linting error

* test: update tests

* fix: fix linting error

* feat(managed_files.py): support reading / writing files in DB

* feat(managed_files.py): support deleting file from DB on delete

* test: update testing

* fix(spend_tracking_utils.py): ensure each file create request is logged correctly

* fix(managed_files.py): fix storing / returning managed file object from cache

* fix(files/main.py): pass litellm params to azure route

* test: fix test

* build: add new prisma migration

* build: bump requirements

* test: add more testing

* refactor: cleanup post merge w/ main

* fix: fix code qa errors

* [DB / Infra] Add new column team_member_permissions (#9941)

* add team_member_permissions to team table

* add migration.sql file

* fix poetry lock

* fix prisma migrations

* fix poetry lock

* fix migration

* ui new build

* fix(factory.py): correct indentation for message index increment in ollama; this fixes bug #9822 (#9943)

* fix(factory.py): correct indentation for message index increment in ollama_pt function

* test: add unit tests for ollama_pt function handling various message types
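
A minimal sketch of the class of bug being fixed (illustrative only, not the actual ollama_pt code): in a nested while-loop message parser, the index increment must sit inside the inner loop, otherwise the inner loop never advances.

messages = [
    {"role": "user", "content": "hi"},
    {"role": "user", "content": "one more thing"},
    {"role": "assistant", "content": "hello"},
]

msg_i = 0
merged = []
while msg_i < len(messages):
    role = messages[msg_i]["role"]
    chunk = []
    while msg_i < len(messages) and messages[msg_i]["role"] == role:
        chunk.append(messages[msg_i]["content"])
        msg_i += 1  # correct placement: advance for every message consumed
    merged.append({"role": role, "content": " ".join(chunk)})
    # If the increment above were dedented to this level, the inner loop
    # would spin forever once it stopped matching the current role.

print(merged)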

* ci: update test

* fix: fix check

* ci: see what dir looks like

* ci: more checks

* ci: fix filepath

* ci: cleanup

* ci: fix ci

---------

Co-authored-by: Nilanjan De <nilanjan.de@gmail.com>
Co-authored-by: Ishaan Jaff <ishaanjaffer0324@gmail.com>
Co-authored-by: Dan Shaw <dan@danieljshaw.com>
Krish Dholakia 2025-04-12 10:29:34 -07:00 committed by GitHub
parent cead9114a9
commit d71aa2284b
3 changed files with 215 additions and 89 deletions

File: .circleci/config.yml

@@ -2390,6 +2390,108 @@ jobs:
            echo "triggering load testing server for version ${VERSION} and commit ${CIRCLE_SHA1}"
            curl -X POST "https://proxyloadtester-production.up.railway.app/start/load/test?version=${VERSION}&commit_hash=${CIRCLE_SHA1}&release_type=nightly"
  publish_proxy_extras:
    docker:
      - image: cimg/python:3.8
    working_directory: ~/project/litellm-proxy-extras
    environment:
      TWINE_USERNAME: __token__
    steps:
      - checkout:
          path: ~/project
      - run:
          name: Check if litellm-proxy-extras dir or pyproject.toml was modified
          command: |
            echo "Install TOML package."
            python -m pip install toml
            # Get current version from pyproject.toml
            CURRENT_VERSION=$(python -c "import toml; print(toml.load('pyproject.toml')['tool']['poetry']['version'])")
            # Get last published version from PyPI
            LAST_VERSION=$(curl -s https://pypi.org/pypi/litellm-proxy-extras/json | python -c "import json, sys; print(json.load(sys.stdin)['info']['version'])")
            echo "Current version: $CURRENT_VERSION"
            echo "Last published version: $LAST_VERSION"
            # Compare versions using Python's packaging.version
            VERSION_COMPARE=$(python -c "from packaging import version; print(1 if version.parse('$CURRENT_VERSION') < version.parse('$LAST_VERSION') else 0)")
            echo "Version compare: $VERSION_COMPARE"
            if [ "$VERSION_COMPARE" = "1" ]; then
              echo "Error: Current version ($CURRENT_VERSION) is less than last published version ($LAST_VERSION)"
              exit 1
            fi
            # If versions are equal or current is greater, check contents
            pip download --no-deps litellm-proxy-extras==$LAST_VERSION -d /tmp
            # Find the downloaded file
            DOWNLOADED_FILE=$(ls /tmp/litellm_proxy_extras-*.tar.gz)
            tar -xzf "$DOWNLOADED_FILE" -C /tmp
            echo "Downloaded file: $DOWNLOADED_FILE"
            echo "Contents of extracted package:"
            ls -R /tmp/litellm_proxy_extras-$LAST_VERSION
            # Compare contents
            if ! diff -r /tmp/litellm_proxy_extras-$LAST_VERSION/litellm_proxy_extras ./litellm_proxy_extras; then
              if [ "$CURRENT_VERSION" = "$LAST_VERSION" ]; then
                echo "Error: Changes detected in litellm-proxy-extras but version was not bumped"
                echo "Current version: $CURRENT_VERSION"
                echo "Last published version: $LAST_VERSION"
                echo "Changes:"
                diff -r /tmp/litellm_proxy_extras-$LAST_VERSION/litellm_proxy_extras ./litellm_proxy_extras
                exit 1
              fi
            else
              echo "No changes detected in litellm-proxy-extras. Skipping PyPI publish."
              circleci step halt
            fi
            # Check if there are changes
            if [ -n "$(git diff --name-only $CIRCLE_SHA1^..$CIRCLE_SHA1 | grep -E 'litellm-proxy-extras/|litellm-proxy-extras/pyproject\.toml')" ]; then
              echo "litellm-proxy-extras or its pyproject.toml updated"
            else
              echo "No changes to litellm-proxy-extras. Skipping PyPI publish."
              circleci step halt
            fi
      - run:
          name: Get new version
          command: |
            cd litellm-proxy-extras
            NEW_VERSION=$(python -c "import toml; print(toml.load('pyproject.toml')['tool']['poetry']['version'])")
            echo "export NEW_VERSION=$NEW_VERSION" >> $BASH_ENV
      - run:
          name: Check if versions match
          command: |
            cd ~/project
            # Check pyproject.toml
            CURRENT_VERSION=$(python -c "import toml; print(toml.load('pyproject.toml')['tool']['poetry']['dependencies']['litellm-proxy-extras'].split('\"')[1])")
            if [ "$CURRENT_VERSION" != "$NEW_VERSION" ]; then
              echo "Error: Version in pyproject.toml ($CURRENT_VERSION) doesn't match new version ($NEW_VERSION)"
              exit 1
            fi
            # Check requirements.txt
            REQ_VERSION=$(grep -oP 'litellm-proxy-extras==\K[0-9.]+' requirements.txt)
            if [ "$REQ_VERSION" != "$NEW_VERSION" ]; then
              echo "Error: Version in requirements.txt ($REQ_VERSION) doesn't match new version ($NEW_VERSION)"
              exit 1
            fi
      - run:
          name: Publish to PyPI
          command: |
            cd litellm-proxy-extras
            echo -e "[pypi]\nusername = $PYPI_PUBLISH_USERNAME\npassword = $PYPI_PUBLISH_PASSWORD" > ~/.pypirc
            python -m pip install --upgrade pip build twine setuptools wheel
            rm -rf build dist
            python -m build
            twine upload --verbose dist/*
  e2e_ui_testing:
    machine:
      image: ubuntu-2204:2023.10.1
@@ -2785,6 +2887,16 @@ workflows:
              only:
                - main
                - /litellm_.*/
      - publish_proxy_extras:
          filters:
            branches:
              only:
                - main
                - /litellm_.*/
          # filters:
          #   branches:
          #     only:
          #       - main
      - publish_to_pypi:
          requires:
            - local_testing
@@ -2819,7 +2931,5 @@ workflows:
            - proxy_build_from_pip_tests
            - proxy_pass_through_endpoint_tests
            - check_code_and_doc_quality
            - publish_proxy_extras
          filters:
            branches:
              only:
                - main
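
The version gate in the publish_proxy_extras job above relies on PEP 440 ordering via packaging.version rather than string comparison; a quick standalone illustration:

from packaging import version  # pip install packaging

current, last_published = "0.1.10", "0.1.9"
print("0.1.10" < "0.1.9")                                      # True  (lexicographic, misleading)
print(version.parse(current) < version.parse(last_published))  # False (numeric, correct)
# The CI step exits with an error only when current < last published.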

File: litellm-proxy-extras DB setup script (ProxyExtrasDBManager)

@@ -1,7 +1,9 @@
import glob
import os
import random
import subprocess
import time
from pathlib import Path
from typing import Optional
from litellm_proxy_extras._logging import logger
@@ -14,6 +16,94 @@ def str_to_bool(value: Optional[str]) -> bool:
class ProxyExtrasDBManager:
    @staticmethod
    def _get_prisma_dir() -> str:
        """Get the path to the migrations directory"""
        migrations_dir = os.path.dirname(__file__)
        return migrations_dir

    @staticmethod
    def _create_baseline_migration(schema_path: str) -> bool:
        """Create a baseline migration for an existing database"""
        prisma_dir = ProxyExtrasDBManager._get_prisma_dir()
        prisma_dir_path = Path(prisma_dir)
        init_dir = prisma_dir_path / "migrations" / "0_init"

        # Create migrations/0_init directory
        init_dir.mkdir(parents=True, exist_ok=True)

        # Generate migration SQL file
        migration_file = init_dir / "migration.sql"

        try:
            # Generate migration diff with increased timeout
            subprocess.run(
                [
                    "prisma",
                    "migrate",
                    "diff",
                    "--from-empty",
                    "--to-schema-datamodel",
                    str(schema_path),
                    "--script",
                ],
                stdout=open(migration_file, "w"),
                check=True,
                timeout=30,
            )  # 30 second timeout

            # Mark migration as applied with increased timeout
            subprocess.run(
                [
                    "prisma",
                    "migrate",
                    "resolve",
                    "--applied",
                    "0_init",
                ],
                check=True,
                timeout=30,
            )

            return True
        except subprocess.TimeoutExpired:
            logger.warning(
                "Migration timed out - the database might be under heavy load."
            )
            return False
        except subprocess.CalledProcessError as e:
            logger.warning(f"Error creating baseline migration: {e}")
            return False

    @staticmethod
    def _get_migration_names(migrations_dir: str) -> list:
        """Get all migration directory names from the migrations folder"""
        migration_paths = glob.glob(f"{migrations_dir}/migrations/*/migration.sql")
        logger.info(f"Found {len(migration_paths)} migrations at {migrations_dir}")
        return [Path(p).parent.name for p in migration_paths]

    @staticmethod
    def _resolve_all_migrations(migrations_dir: str):
        """Mark all existing migrations as applied"""
        migration_names = ProxyExtrasDBManager._get_migration_names(migrations_dir)
        logger.info(f"Resolving {len(migration_names)} migrations")
        for migration_name in migration_names:
            try:
                logger.info(f"Resolving migration: {migration_name}")
                subprocess.run(
                    ["prisma", "migrate", "resolve", "--applied", migration_name],
                    timeout=60,
                    check=True,
                    capture_output=True,
                    text=True,
                )
                logger.debug(f"Resolved migration: {migration_name}")
            except subprocess.CalledProcessError as e:
                if "is already recorded as applied in the database." not in e.stderr:
                    logger.warning(
                        f"Failed to resolve migration {migration_name}: {e.stderr}"
                    )
    @staticmethod
    def setup_database(schema_path: str, use_migrate: bool = False) -> bool:
        """
@@ -30,7 +120,7 @@ class ProxyExtrasDBManager:
        use_migrate = str_to_bool(os.getenv("USE_PRISMA_MIGRATE")) or use_migrate
        for attempt in range(4):
            original_dir = os.getcwd()
            migrations_dir = os.path.dirname(__file__)
            migrations_dir = ProxyExtrasDBManager._get_prisma_dir()
            os.chdir(migrations_dir)
            try:
@@ -55,8 +145,16 @@ class ProxyExtrasDBManager:
                            "P3005" in e.stderr
                            and "database schema is not empty" in e.stderr
                        ):
                            logger.info("Error: Database schema is not empty")
                            return False
                            logger.info(
                                "Database schema is not empty, creating baseline migration"
                            )
                            ProxyExtrasDBManager._create_baseline_migration(schema_path)
                            logger.info(
                                "Baseline migration created, resolving all migrations"
                            )
                            ProxyExtrasDBManager._resolve_all_migrations(migrations_dir)
                            logger.info("✅ All migrations resolved.")
                            return True
                else:
                    # Use prisma db push with increased timeout
                    subprocess.run(
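
Roughly how this new path gets exercised (usage sketch only; the import path is an assumption): on a database that already has tables, prisma migrate deploy fails with P3005, and setup_database now baselines (0_init) and marks the bundled migrations as applied instead of returning False.

from litellm_proxy_extras.utils import ProxyExtrasDBManager  # import path assumed

ok = ProxyExtrasDBManager.setup_database(
    schema_path="schema.prisma", use_migrate=True
)
print("database ready:", ok)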

File: prisma_client.py

@@ -3,7 +3,6 @@ This file contains the PrismaWrapper class, which is used to wrap the Prisma cli
"""
import asyncio
import glob
import os
import random
import subprocess
@@ -11,7 +10,6 @@ import time
import urllib
import urllib.parse
from datetime import datetime, timedelta
from pathlib import Path
from typing import Any, Optional, Union
from litellm._logging import verbose_proxy_logger
@@ -126,86 +124,6 @@ class PrismaManager:
        dname = os.path.dirname(os.path.dirname(abspath))
        return dname
    @staticmethod
    def _create_baseline_migration(schema_path: str) -> bool:
        """Create a baseline migration for an existing database"""
        prisma_dir = PrismaManager._get_prisma_dir()
        prisma_dir_path = Path(prisma_dir)
        init_dir = prisma_dir_path / "migrations" / "0_init"

        # Create migrations/0_init directory
        init_dir.mkdir(parents=True, exist_ok=True)

        # Generate migration SQL file
        migration_file = init_dir / "migration.sql"

        try:
            # Generate migration diff with increased timeout
            subprocess.run(
                [
                    "prisma",
                    "migrate",
                    "diff",
                    "--from-empty",
                    "--to-schema-datamodel",
                    str(schema_path),
                    "--script",
                ],
                stdout=open(migration_file, "w"),
                check=True,
                timeout=30,
            )  # 30 second timeout

            # Mark migration as applied with increased timeout
            subprocess.run(
                [
                    "prisma",
                    "migrate",
                    "resolve",
                    "--applied",
                    "0_init",
                ],
                check=True,
                timeout=30,
            )

            return True
        except subprocess.TimeoutExpired:
            verbose_proxy_logger.warning(
                "Migration timed out - the database might be under heavy load."
            )
            return False
        except subprocess.CalledProcessError as e:
            verbose_proxy_logger.warning(f"Error creating baseline migration: {e}")
            return False

    @staticmethod
    def _get_migration_names(migrations_dir: str) -> list:
        """Get all migration directory names from the migrations folder"""
        migration_paths = glob.glob(f"{migrations_dir}/*/migration.sql")
        return [Path(p).parent.name for p in migration_paths]

    @staticmethod
    def _resolve_all_migrations(migrations_dir: str):
        """Mark all existing migrations as applied"""
        migration_names = PrismaManager._get_migration_names(migrations_dir)
        for migration_name in migration_names:
            try:
                verbose_proxy_logger.info(f"Resolving migration: {migration_name}")
                subprocess.run(
                    ["prisma", "migrate", "resolve", "--applied", migration_name],
                    timeout=60,
                    check=True,
                    capture_output=True,
                    text=True,
                )
                verbose_proxy_logger.debug(f"Resolved migration: {migration_name}")
            except subprocess.CalledProcessError as e:
                if "is already recorded as applied in the database." not in e.stderr:
                    verbose_proxy_logger.warning(
                        f"Failed to resolve migration {migration_name}: {e.stderr}"
                    )
    @staticmethod
    def setup_database(use_migrate: bool = False) -> bool:
        """