version: 2.1

orbs:
  codecov: codecov/codecov@4.0.1
  node: circleci/node@5.1.0 # declare the node orb

commands:
  setup_google_dns:
    steps:
      - run:
          name: "Configure Google DNS"
          command: |
            # Back up the original resolv.conf
            sudo cp /etc/resolv.conf /etc/resolv.conf.backup
            # Add both the local and Google DNS servers
            echo "nameserver 127.0.0.11" | sudo tee /etc/resolv.conf
            echo "nameserver 8.8.8.8" | sudo tee -a /etc/resolv.conf
            echo "nameserver 8.8.4.4" | sudo tee -a /etc/resolv.conf
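# The codecov orb declared above is not exercised anywhere in this section of the
# config. A minimal, commented-out sketch of how the coverage reports persisted by
# the test jobs below could be uploaded with it (the job name and exact orb
# parameters here are assumptions, not part of this config):
#
#   upload_coverage:
#     docker:
#       - image: cimg/python:3.11
#     steps:
#       - attach_workspace:
#           at: .
#       - codecov/upload:
#           file: ./local_testing_coverage.xml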
jobs:
  local_testing:
    docker:
      - image: cimg/python:3.11
        auth:
          username: ${DOCKERHUB_USERNAME}
          password: ${DOCKERHUB_PASSWORD}
    working_directory: ~/project

    steps:
      - checkout
      - setup_google_dns
      - run:
          name: Show git commit hash
          command: |
            echo "Git commit hash: $CIRCLE_SHA1"

      - restore_cache:
          keys:
            - v1-dependencies-{{ checksum ".circleci/requirements.txt" }}
      - run:
          name: Install Dependencies
          command: |
            python -m pip install --upgrade pip
            python -m pip install -r .circleci/requirements.txt
            pip install "pytest==7.3.1"
            pip install "pytest-retry==1.6.3"
            pip install "pytest-asyncio==0.21.1"
            pip install "pytest-cov==5.0.0"
            pip install mypy
            pip install "google-generativeai==0.3.2"
            pip install "google-cloud-aiplatform==1.43.0"
            pip install pyarrow
            pip install "boto3==1.34.34"
            pip install "aioboto3==12.3.0"
            pip install langchain
            pip install lunary==0.2.5
            pip install "azure-identity==1.16.1"
            pip install "langfuse==2.45.0"
            pip install "logfire==0.29.0"
            pip install numpydoc
            pip install traceloop-sdk==0.21.1
            pip install opentelemetry-api==1.25.0
            pip install opentelemetry-sdk==1.25.0
            pip install opentelemetry-exporter-otlp==1.25.0
            pip install openai==1.68.2
            pip install prisma==0.11.0
            pip install "detect_secrets==1.5.0"
            pip install "httpx==0.24.1"
            pip install "respx==0.21.1"
            pip install fastapi
            pip install "gunicorn==21.2.0"
            pip install "anyio==4.2.0"
            pip install "aiodynamo==23.10.1"
            pip install "asyncio==3.4.3"
            pip install "apscheduler==3.10.4"
            pip install "PyGithub==1.59.1"
            pip install argon2-cffi
            pip install "pytest-mock==3.12.0"
            pip install python-multipart
            pip install google-cloud-aiplatform
            pip install prometheus-client==0.20.0
            pip install "pydantic==2.10.2"
            pip install "diskcache==5.6.1"
            pip install "Pillow==10.3.0"
            pip install "jsonschema==4.22.0"
            pip install "pytest-xdist==3.6.1"
            pip install "websockets==13.1.0"
            pip uninstall posthog -y
      - save_cache:
          paths:
            - ./venv
          key: v1-dependencies-{{ checksum ".circleci/requirements.txt" }}
      - run:
          name: Run prisma ./docker/entrypoint.sh
          command: |
            set +e
            chmod +x docker/entrypoint.sh
            ./docker/entrypoint.sh
            set -e
      - run:
          name: Black Formatting
          command: |
            cd litellm
            python -m pip install black
            python -m black .
            cd ..
      - run:
          name: Linting Testing
          command: |
            cd litellm
            python -m pip install types-requests types-setuptools types-redis types-PyYAML
            if ! python -m mypy . --ignore-missing-imports; then
              echo "mypy detected errors"
              exit 1
            fi
            cd ..

      # Run pytest and generate JUnit XML report
      - run:
          name: Run tests
          command: |
            pwd
            ls
            python -m pytest -vv tests/local_testing --cov=litellm --cov-report=xml -x --junitxml=test-results/junit.xml --durations=5 -k "not test_python_38.py and not test_basic_python_version.py and not router and not assistants and not langfuse and not caching and not cache" -n 4
          no_output_timeout: 120m
      - run:
          name: Rename the coverage files
          command: |
            mv coverage.xml local_testing_coverage.xml
            mv .coverage local_testing_coverage

      # Store test results
      - store_test_results:
          path: test-results
      - persist_to_workspace:
          root: .
          paths:
            - local_testing_coverage.xml
            - local_testing_coverage
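  # Each test job renames its raw coverage data file (e.g. local_testing_coverage)
  # and persists it to the workspace, presumably so a downstream job can merge the
  # reports. A minimal sketch of such a merge step, under that assumption (the step
  # below is illustrative and not part of this config):
  #
  #   - attach_workspace:
  #       at: .
  #   - run: |
  #       pip install coverage
  #       coverage combine local_testing_coverage langfuse_coverage caching_coverage
  #       coverage xml -o combined_coverage.xml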
  langfuse_logging_unit_tests:
    docker:
      - image: cimg/python:3.11
        auth:
          username: ${DOCKERHUB_USERNAME}
          password: ${DOCKERHUB_PASSWORD}
    working_directory: ~/project

    steps:
      - checkout
      - setup_google_dns
      - run:
          name: Show git commit hash
          command: |
            echo "Git commit hash: $CIRCLE_SHA1"

      - restore_cache:
          keys:
            - v1-dependencies-{{ checksum ".circleci/requirements.txt" }}
      - run:
          name: Install Dependencies
          command: |
            python -m pip install --upgrade pip
            python -m pip install -r .circleci/requirements.txt
            pip install "pytest==7.3.1"
            pip install "pytest-retry==1.6.3"
            pip install "pytest-asyncio==0.21.1"
            pip install "pytest-cov==5.0.0"
            pip install mypy
            pip install "google-generativeai==0.3.2"
            pip install "google-cloud-aiplatform==1.43.0"
            pip install pyarrow
            pip install "boto3==1.34.34"
            pip install "aioboto3==12.3.0"
            pip install langchain
            pip install lunary==0.2.5
            pip install "azure-identity==1.16.1"
            pip install "langfuse==2.45.0"
            pip install "logfire==0.29.0"
            pip install numpydoc
            pip install traceloop-sdk==0.21.1
            pip install opentelemetry-api==1.25.0
            pip install opentelemetry-sdk==1.25.0
            pip install opentelemetry-exporter-otlp==1.25.0
            pip install openai==1.68.2
            pip install prisma==0.11.0
            pip install "detect_secrets==1.5.0"
            pip install "httpx==0.24.1"
            pip install "respx==0.21.1"
            pip install fastapi
            pip install "gunicorn==21.2.0"
            pip install "anyio==4.2.0"
            pip install "aiodynamo==23.10.1"
            pip install "asyncio==3.4.3"
            pip install "apscheduler==3.10.4"
            pip install "PyGithub==1.59.1"
            pip install argon2-cffi
            pip install "pytest-mock==3.12.0"
            pip install python-multipart
            pip install google-cloud-aiplatform
            pip install prometheus-client==0.20.0
            pip install "pydantic==2.10.2"
            pip install "diskcache==5.6.1"
            pip install "Pillow==10.3.0"
            pip install "jsonschema==4.22.0"
            pip install "websockets==13.1.0"
      - save_cache:
          paths:
            - ./venv
          key: v1-dependencies-{{ checksum ".circleci/requirements.txt" }}
      - run:
          name: Run prisma ./docker/entrypoint.sh
          command: |
            set +e
            chmod +x docker/entrypoint.sh
            ./docker/entrypoint.sh
            set -e

      # Run pytest and generate JUnit XML report
      - run:
          name: Run tests
          command: |
            pwd
            ls
            python -m pytest -vv tests/local_testing --cov=litellm --cov-report=xml -x --junitxml=test-results/junit.xml --durations=5 -k "langfuse"
          no_output_timeout: 120m
      - run:
          name: Rename the coverage files
          command: |
            mv coverage.xml langfuse_coverage.xml
            mv .coverage langfuse_coverage

      # Store test results
      - store_test_results:
          path: test-results
      - persist_to_workspace:
          root: .
          paths:
            - langfuse_coverage.xml
            - langfuse_coverage
  caching_unit_tests:
    docker:
      - image: cimg/python:3.11
        auth:
          username: ${DOCKERHUB_USERNAME}
          password: ${DOCKERHUB_PASSWORD}
    working_directory: ~/project

    steps:
      - checkout
      - setup_google_dns
      - run:
          name: DNS lookup for Redis host
          command: |
            sudo apt-get update
            sudo apt-get install -y dnsutils
            dig redis-19899.c239.us-east-1-2.ec2.redns.redis-cloud.com +short
      - run:
          name: Show git commit hash
          command: |
            echo "Git commit hash: $CIRCLE_SHA1"

      - restore_cache:
          keys:
            - v1-dependencies-{{ checksum ".circleci/requirements.txt" }}
      - run:
          name: Install Dependencies
          command: |
            python -m pip install --upgrade pip
            python -m pip install -r .circleci/requirements.txt
            pip install "pytest==7.3.1"
            pip install "pytest-retry==1.6.3"
            pip install "pytest-asyncio==0.21.1"
            pip install "pytest-cov==5.0.0"
            pip install mypy
            pip install "google-generativeai==0.3.2"
            pip install "google-cloud-aiplatform==1.43.0"
            pip install pyarrow
            pip install "boto3==1.34.34"
            pip install "aioboto3==12.3.0"
            pip install langchain
            pip install lunary==0.2.5
            pip install "azure-identity==1.16.1"
            pip install "langfuse==2.45.0"
            pip install "logfire==0.29.0"
            pip install numpydoc
            pip install traceloop-sdk==0.21.1
            pip install opentelemetry-api==1.25.0
            pip install opentelemetry-sdk==1.25.0
            pip install opentelemetry-exporter-otlp==1.25.0
            pip install openai==1.68.2
            pip install prisma==0.11.0
            pip install "detect_secrets==1.5.0"
            pip install "httpx==0.24.1"
            pip install "respx==0.21.1"
            pip install fastapi
            pip install "gunicorn==21.2.0"
            pip install "anyio==4.2.0"
            pip install "aiodynamo==23.10.1"
            pip install "asyncio==3.4.3"
            pip install "apscheduler==3.10.4"
            pip install "PyGithub==1.59.1"
            pip install argon2-cffi
            pip install "pytest-mock==3.12.0"
            pip install python-multipart
            pip install google-cloud-aiplatform
            pip install prometheus-client==0.20.0
            pip install "pydantic==2.10.2"
            pip install "diskcache==5.6.1"
            pip install "Pillow==10.3.0"
            pip install "jsonschema==4.22.0"
            pip install "websockets==13.1.0"
      - save_cache:
          paths:
            - ./venv
          key: v1-dependencies-{{ checksum ".circleci/requirements.txt" }}
      - run:
          name: Run prisma ./docker/entrypoint.sh
          command: |
            set +e
            chmod +x docker/entrypoint.sh
            ./docker/entrypoint.sh
            set -e

      # Run pytest and generate JUnit XML report
      - run:
          name: Run tests
          command: |
            pwd
            ls
            python -m pytest -vv tests/local_testing --cov=litellm --cov-report=xml -x --junitxml=test-results/junit.xml --durations=5 -k "caching or cache"
          no_output_timeout: 120m
      - run:
          name: Rename the coverage files
          command: |
            mv coverage.xml caching_coverage.xml
            mv .coverage caching_coverage

      # Store test results
      - store_test_results:
          path: test-results
      - persist_to_workspace:
          root: .
          paths:
            - caching_coverage.xml
            - caching_coverage
  auth_ui_unit_tests:
    docker:
      - image: cimg/python:3.11
        auth:
          username: ${DOCKERHUB_USERNAME}
          password: ${DOCKERHUB_PASSWORD}
    working_directory: ~/project

    steps:
      - checkout
      - setup_google_dns
      - run:
          name: Install Dependencies
          command: |
            python -m pip install --upgrade pip
            python -m pip install -r requirements.txt
            pip install "pytest==7.3.1"
            pip install "pytest-retry==1.6.3"
            pip install "pytest-asyncio==0.21.1"
            pip install "pytest-cov==5.0.0"
      - save_cache:
          paths:
            - ./venv
          key: v1-dependencies-{{ checksum ".circleci/requirements.txt" }}
      - run:
          name: Run prisma ./docker/entrypoint.sh
          command: |
            set +e
            chmod +x docker/entrypoint.sh
            ./docker/entrypoint.sh
            set -e
      # Run pytest and generate JUnit XML report
      - run:
          name: Run tests
          command: |
            pwd
            ls
            python -m pytest -vv tests/proxy_admin_ui_tests -x --cov=litellm --cov-report=xml --junitxml=test-results/junit.xml --durations=5
          no_output_timeout: 120m

      - run:
          name: Rename the coverage files
          command: |
            mv coverage.xml auth_ui_unit_tests_coverage.xml
            mv .coverage auth_ui_unit_tests_coverage

      # Store test results
      - store_test_results:
          path: test-results

      - persist_to_workspace:
          root: .
          paths:
            - auth_ui_unit_tests_coverage.xml
            - auth_ui_unit_tests_coverage
  litellm_router_testing: # Runs all tests with the "router" keyword
    docker:
      - image: cimg/python:3.11
        auth:
          username: ${DOCKERHUB_USERNAME}
          password: ${DOCKERHUB_PASSWORD}
    working_directory: ~/project

    steps:
      - checkout
      - setup_google_dns
      - run:
          name: Install Dependencies
          command: |
            python -m pip install --upgrade pip
            python -m pip install -r requirements.txt
            pip install "pytest==7.3.1"
            pip install "respx==0.21.1"
            pip install "pytest-cov==5.0.0"
            pip install "pytest-retry==1.6.3"
            pip install "pytest-asyncio==0.21.1"
      # Run pytest and generate JUnit XML report
      - run:
          name: Run tests
          command: |
            pwd
            ls
            python -m pytest tests/local_testing tests/router_unit_tests --cov=litellm --cov-report=xml -vv -k "router" -x -v --junitxml=test-results/junit.xml --durations=5
          no_output_timeout: 120m
      - run:
          name: Rename the coverage files
          command: |
            mv coverage.xml litellm_router_coverage.xml
            mv .coverage litellm_router_coverage
      # Store test results
      - store_test_results:
          path: test-results

      - persist_to_workspace:
          root: .
          paths:
            - litellm_router_coverage.xml
            - litellm_router_coverage
  litellm_proxy_security_tests:
    docker:
      - image: cimg/python:3.11
        auth:
          username: ${DOCKERHUB_USERNAME}
          password: ${DOCKERHUB_PASSWORD}
    working_directory: ~/project
    steps:
      - checkout
      - setup_google_dns
      - run:
          name: Show git commit hash
          command: |
            echo "Git commit hash: $CIRCLE_SHA1"
      - run:
          name: Install Dependencies
          command: |
            python -m pip install --upgrade pip
            python -m pip install -r requirements.txt
            pip install "pytest==7.3.1"
            pip install "pytest-retry==1.6.3"
            pip install "pytest-asyncio==0.21.1"
            pip install "pytest-cov==5.0.0"
      - run:
          name: Run prisma ./docker/entrypoint.sh
          command: |
            set +e
            chmod +x docker/entrypoint.sh
            ./docker/entrypoint.sh
            set -e
      # Run pytest and generate JUnit XML report
      - run:
          name: Run tests
          command: |
            pwd
            ls
            python -m pytest tests/proxy_security_tests --cov=litellm --cov-report=xml -vv -x -v --junitxml=test-results/junit.xml --durations=5
          no_output_timeout: 120m
      - run:
          name: Rename the coverage files
          command: |
            mv coverage.xml litellm_proxy_security_tests_coverage.xml
            mv .coverage litellm_proxy_security_tests_coverage
      # Store test results
      - store_test_results:
          path: test-results
      - persist_to_workspace:
          root: .
          paths:
            - litellm_proxy_security_tests_coverage.xml
            - litellm_proxy_security_tests_coverage
  litellm_proxy_unit_testing: # Runs tests from files with "proxy", "key", or "jwt" in the filename
    docker:
      - image: cimg/python:3.11
        auth:
          username: ${DOCKERHUB_USERNAME}
          password: ${DOCKERHUB_PASSWORD}
    working_directory: ~/project
    steps:
      - checkout
      - run:
          name: Install PostgreSQL
          command: |
            sudo apt-get update
            sudo apt-get install postgresql postgresql-contrib
            echo 'export PATH=/usr/lib/postgresql/*/bin:$PATH' >> $BASH_ENV
      - setup_google_dns
      - run:
          name: Show git commit hash
          command: |
            echo "Git commit hash: $CIRCLE_SHA1"

      - restore_cache:
          keys:
            - v1-dependencies-{{ checksum ".circleci/requirements.txt" }}
      - run:
          name: Install Dependencies
          command: |
            python -m pip install --upgrade pip
            python -m pip install -r .circleci/requirements.txt
            pip install "pytest==7.3.1"
            pip install "pytest-retry==1.6.3"
            pip install "pytest-asyncio==0.21.1"
            pip install "pytest-cov==5.0.0"
            pip install mypy
            pip install "google-generativeai==0.3.2"
            pip install "google-cloud-aiplatform==1.43.0"
            pip install pyarrow
            pip install "boto3==1.34.34"
            pip install "aioboto3==12.3.0"
            pip install langchain
            pip install lunary==0.2.5
            pip install "azure-identity==1.16.1"
            pip install "langfuse==2.45.0"
            pip install "logfire==0.29.0"
            pip install numpydoc
            pip install traceloop-sdk==0.21.1
            pip install opentelemetry-api==1.25.0
            pip install opentelemetry-sdk==1.25.0
            pip install opentelemetry-exporter-otlp==1.25.0
            pip install openai==1.68.2
            pip install prisma==0.11.0
            pip install "detect_secrets==1.5.0"
            pip install "httpx==0.24.1"
            pip install "respx==0.21.1"
            pip install fastapi
            pip install "gunicorn==21.2.0"
            pip install "anyio==4.2.0"
            pip install "aiodynamo==23.10.1"
            pip install "asyncio==3.4.3"
            pip install "apscheduler==3.10.4"
            pip install "PyGithub==1.59.1"
            pip install argon2-cffi
            pip install "pytest-mock==3.12.0"
            pip install python-multipart
            pip install google-cloud-aiplatform
            pip install prometheus-client==0.20.0
            pip install "pydantic==2.10.2"
            pip install "diskcache==5.6.1"
            pip install "Pillow==10.3.0"
            pip install "jsonschema==4.22.0"
            pip install "pytest-postgresql==7.0.1"
      - save_cache:
          paths:
            - ./venv
          key: v1-dependencies-{{ checksum ".circleci/requirements.txt" }}
      - run:
          name: Run prisma ./docker/entrypoint.sh
          command: |
            set +e
            chmod +x docker/entrypoint.sh
            ./docker/entrypoint.sh
            set -e
      # Run pytest and generate JUnit XML report
      - run:
          name: Run tests
          command: |
            pwd
            ls
            python -m pytest tests/proxy_unit_tests --cov=litellm --cov-report=xml -vv -x -v --junitxml=test-results/junit.xml --durations=5
          no_output_timeout: 120m
      - run:
          name: Rename the coverage files
          command: |
            mv coverage.xml litellm_proxy_unit_tests_coverage.xml
            mv .coverage litellm_proxy_unit_tests_coverage
      # Store test results
      - store_test_results:
          path: test-results

      - persist_to_workspace:
          root: .
          paths:
            - litellm_proxy_unit_tests_coverage.xml
            - litellm_proxy_unit_tests_coverage
  litellm_assistants_api_testing: # Runs all tests with the "assistants" keyword
    docker:
      - image: cimg/python:3.13.1
        auth:
          username: ${DOCKERHUB_USERNAME}
          password: ${DOCKERHUB_PASSWORD}
    working_directory: ~/project

    steps:
      - checkout
      - setup_google_dns
      - run:
          name: Install Dependencies
          command: |
            python -m pip install --upgrade pip
            pip install wheel
            pip install --upgrade pip wheel setuptools
            python -m pip install -r requirements.txt
            pip install "pytest==7.3.1"
            pip install "respx==0.21.1"
            pip install "pytest-retry==1.6.3"
            pip install "pytest-asyncio==0.21.1"
            pip install "pytest-cov==5.0.0"
      # Run pytest and generate JUnit XML report
      - run:
          name: Run tests
          command: |
            pwd
            ls
            python -m pytest tests/local_testing/ -vv -k "assistants" --cov=litellm --cov-report=xml -x -s -v --junitxml=test-results/junit.xml --durations=5
          no_output_timeout: 120m
      - run:
          name: Rename the coverage files
          command: |
            mv coverage.xml litellm_assistants_api_coverage.xml
            mv .coverage litellm_assistants_api_coverage
      # Store test results
      - store_test_results:
          path: test-results
      - persist_to_workspace:
          root: .
          paths:
            - litellm_assistants_api_coverage.xml
            - litellm_assistants_api_coverage
  load_testing:
    docker:
      - image: cimg/python:3.11
        auth:
          username: ${DOCKERHUB_USERNAME}
          password: ${DOCKERHUB_PASSWORD}
    working_directory: ~/project

    steps:
      - checkout
      - setup_google_dns
      - run:
          name: Install Dependencies
          command: |
            python -m pip install --upgrade pip
            python -m pip install -r requirements.txt
            pip install "pytest==7.3.1"
            pip install "pytest-retry==1.6.3"
            pip install "pytest-cov==5.0.0"
            pip install "pytest-asyncio==0.21.1"
            pip install "respx==0.21.1"
      - run:
          name: Show current pydantic version
          command: |
            python -m pip show pydantic
      # Run pytest and generate JUnit XML report
      - run:
          name: Run tests
          command: |
            pwd
            ls
            python -m pytest -vv tests/load_tests -x -s -v --junitxml=test-results/junit.xml --durations=5
          no_output_timeout: 120m

      # Store test results
      - store_test_results:
          path: test-results
  llm_translation_testing:
    docker:
      - image: cimg/python:3.11
        auth:
          username: ${DOCKERHUB_USERNAME}
          password: ${DOCKERHUB_PASSWORD}
    working_directory: ~/project

    steps:
      - checkout
      - setup_google_dns
      - run:
          name: Install Dependencies
          command: |
            python -m pip install --upgrade pip
            python -m pip install -r requirements.txt
            pip install "pytest==7.3.1"
            pip install "pytest-retry==1.6.3"
            pip install "pytest-cov==5.0.0"
            pip install "pytest-asyncio==0.21.1"
            pip install "respx==0.21.1"
      # Run pytest and generate JUnit XML report
      - run:
          name: Run tests
          command: |
            pwd
            ls
            python -m pytest -vv tests/llm_translation --cov=litellm --cov-report=xml -x -v --junitxml=test-results/junit.xml --durations=5
          no_output_timeout: 120m
      - run:
          name: Rename the coverage files
          command: |
            mv coverage.xml llm_translation_coverage.xml
            mv .coverage llm_translation_coverage

      # Store test results
      - store_test_results:
          path: test-results
      - persist_to_workspace:
          root: .
          paths:
            - llm_translation_coverage.xml
            - llm_translation_coverage
  mcp_testing:
    docker:
      - image: cimg/python:3.11
        auth:
          username: ${DOCKERHUB_USERNAME}
          password: ${DOCKERHUB_PASSWORD}
    working_directory: ~/project

    steps:
      - checkout
      - setup_google_dns
      - run:
          name: Install Dependencies
          command: |
            python -m pip install --upgrade pip
            python -m pip install -r requirements.txt
            pip install "pytest==7.3.1"
            pip install "pytest-retry==1.6.3"
            pip install "pytest-cov==5.0.0"
            pip install "pytest-asyncio==0.21.1"
            pip install "respx==0.21.1"
            pip install "pydantic==2.10.2"
            pip install "mcp==1.5.0"
      # Run pytest and generate JUnit XML report
      - run:
          name: Run tests
          command: |
            pwd
            ls
            python -m pytest -vv tests/mcp_tests --cov=litellm --cov-report=xml -x -s -v --junitxml=test-results/junit.xml --durations=5
          no_output_timeout: 120m
      - run:
          name: Rename the coverage files
          command: |
            mv coverage.xml mcp_coverage.xml
            mv .coverage mcp_coverage

      # Store test results
      - store_test_results:
          path: test-results
      - persist_to_workspace:
          root: .
          paths:
            - mcp_coverage.xml
            - mcp_coverage
  llm_responses_api_testing:
    docker:
      - image: cimg/python:3.11
        auth:
          username: ${DOCKERHUB_USERNAME}
          password: ${DOCKERHUB_PASSWORD}
    working_directory: ~/project

    steps:
      - checkout
      - setup_google_dns
      - run:
          name: Install Dependencies
          command: |
            python -m pip install --upgrade pip
            python -m pip install -r requirements.txt
            pip install "pytest==7.3.1"
            pip install "pytest-retry==1.6.3"
            pip install "pytest-cov==5.0.0"
            pip install "pytest-asyncio==0.21.1"
            pip install "respx==0.21.1"
      # Run pytest and generate JUnit XML report
      - run:
          name: Run tests
          command: |
            pwd
            ls
            python -m pytest -vv tests/llm_responses_api_testing --cov=litellm --cov-report=xml -x -s -v --junitxml=test-results/junit.xml --durations=5
          no_output_timeout: 120m
      - run:
          name: Rename the coverage files
          command: |
            mv coverage.xml llm_responses_api_coverage.xml
            mv .coverage llm_responses_api_coverage

      # Store test results
      - store_test_results:
          path: test-results
      - persist_to_workspace:
          root: .
          paths:
            - llm_responses_api_coverage.xml
            - llm_responses_api_coverage
  litellm_mapped_tests:
    docker:
      - image: cimg/python:3.11
        auth:
          username: ${DOCKERHUB_USERNAME}
          password: ${DOCKERHUB_PASSWORD}
    working_directory: ~/project

    steps:
      - checkout
      - setup_google_dns
      - run:
          name: Install Dependencies
          command: |
            python -m pip install --upgrade pip
            python -m pip install -r requirements.txt
            pip install "pytest-mock==3.12.0"
            pip install "pytest==7.3.1"
            pip install "pytest-retry==1.6.3"
            pip install "pytest-cov==5.0.0"
            pip install "pytest-asyncio==0.21.1"
            pip install "respx==0.21.1"
            pip install "hypercorn==0.17.3"
            pip install "pydantic==2.10.2"
            pip install "mcp==1.5.0"
      # Run pytest and generate JUnit XML report
      - run:
          name: Run tests
          command: |
            pwd
            ls
            python -m pytest -vv tests/litellm --cov=litellm --cov-report=xml -x -s -v --junitxml=test-results/junit.xml --durations=5
          no_output_timeout: 120m
      - run:
          name: Rename the coverage files
          command: |
            mv coverage.xml litellm_mapped_tests_coverage.xml
            mv .coverage litellm_mapped_tests_coverage

      # Store test results
      - store_test_results:
          path: test-results
      - persist_to_workspace:
          root: .
          paths:
            - litellm_mapped_tests_coverage.xml
            - litellm_mapped_tests_coverage
  batches_testing:
    docker:
      - image: cimg/python:3.11
        auth:
          username: ${DOCKERHUB_USERNAME}
          password: ${DOCKERHUB_PASSWORD}
    working_directory: ~/project

    steps:
      - checkout
      - setup_google_dns
      - run:
          name: Install Dependencies
          command: |
            python -m pip install --upgrade pip
            python -m pip install -r requirements.txt
            pip install "respx==0.21.1"
            pip install "pytest==7.3.1"
            pip install "pytest-retry==1.6.3"
            pip install "pytest-asyncio==0.21.1"
            pip install "pytest-cov==5.0.0"
            pip install "google-generativeai==0.3.2"
            pip install "google-cloud-aiplatform==1.43.0"
      # Run pytest and generate JUnit XML report
      - run:
          name: Run tests
          command: |
            pwd
            ls
            python -m pytest -vv tests/batches_tests --cov=litellm --cov-report=xml -x -s -v --junitxml=test-results/junit.xml --durations=5
          no_output_timeout: 120m
      - run:
          name: Rename the coverage files
          command: |
            mv coverage.xml batches_coverage.xml
            mv .coverage batches_coverage

      # Store test results
      - store_test_results:
          path: test-results
      - persist_to_workspace:
          root: .
          paths:
            - batches_coverage.xml
            - batches_coverage
  litellm_utils_testing:
    docker:
      - image: cimg/python:3.11
        auth:
          username: ${DOCKERHUB_USERNAME}
          password: ${DOCKERHUB_PASSWORD}
    working_directory: ~/project

    steps:
      - checkout
      - setup_google_dns
      - run:
          name: Install Dependencies
          command: |
            python -m pip install --upgrade pip
            pip install numpydoc
            python -m pip install -r requirements.txt
            pip install "respx==0.21.1"
            pip install "pytest==7.3.1"
            pip install "pytest-retry==1.6.3"
            pip install "pytest-asyncio==0.21.1"
            pip install "pytest-cov==5.0.0"
            pip install "google-generativeai==0.3.2"
            pip install "google-cloud-aiplatform==1.43.0"
      # Run pytest and generate JUnit XML report
      - run:
          name: Run tests
          command: |
            pwd
            ls
            python -m pytest -vv tests/litellm_utils_tests --cov=litellm --cov-report=xml -x -s -v --junitxml=test-results/junit.xml --durations=5
          no_output_timeout: 120m
      - run:
          name: Rename the coverage files
          command: |
            mv coverage.xml litellm_utils_coverage.xml
            mv .coverage litellm_utils_coverage

      # Store test results
      - store_test_results:
          path: test-results
      - persist_to_workspace:
          root: .
          paths:
            - litellm_utils_coverage.xml
            - litellm_utils_coverage

  pass_through_unit_testing:
    docker:
      - image: cimg/python:3.11
        auth:
          username: ${DOCKERHUB_USERNAME}
          password: ${DOCKERHUB_PASSWORD}
    working_directory: ~/project

    steps:
      - checkout
      - setup_google_dns
      - run:
          name: Install Dependencies
          command: |
            python -m pip install --upgrade pip
            python -m pip install -r requirements.txt
            pip install "pytest==7.3.1"
            pip install "pytest-retry==1.6.3"
            pip install "pytest-cov==5.0.0"
            pip install "pytest-asyncio==0.21.1"
            pip install "respx==0.21.1"
      # Run pytest and generate JUnit XML report
      - run:
          name: Run tests
          command: |
            pwd
            ls
            python -m pytest -vv tests/pass_through_unit_tests --cov=litellm --cov-report=xml -x -s -v --junitxml=test-results/junit.xml --durations=5
          no_output_timeout: 120m
      - run:
          name: Rename the coverage files
          command: |
            mv coverage.xml pass_through_unit_tests_coverage.xml
            mv .coverage pass_through_unit_tests_coverage

      # Store test results
      - store_test_results:
          path: test-results
      - persist_to_workspace:
          root: .
          paths:
            - pass_through_unit_tests_coverage.xml
            - pass_through_unit_tests_coverage
  image_gen_testing:
    docker:
      - image: cimg/python:3.11
        auth:
          username: ${DOCKERHUB_USERNAME}
          password: ${DOCKERHUB_PASSWORD}
    working_directory: ~/project

    steps:
      - checkout
      - setup_google_dns
      - run:
          name: Install Dependencies
          command: |
            python -m pip install --upgrade pip
            python -m pip install -r requirements.txt
            pip install "pytest==7.3.1"
            pip install "pytest-retry==1.6.3"
            pip install "pytest-cov==5.0.0"
            pip install "pytest-asyncio==0.21.1"
            pip install "respx==0.21.1"
      # Run pytest and generate JUnit XML report
      - run:
          name: Run tests
          command: |
            pwd
            ls
            python -m pytest -vv tests/image_gen_tests --cov=litellm --cov-report=xml -x -s -v --junitxml=test-results/junit.xml --durations=5
          no_output_timeout: 120m
      - run:
          name: Rename the coverage files
          command: |
            mv coverage.xml image_gen_coverage.xml
            mv .coverage image_gen_coverage

      # Store test results
      - store_test_results:
          path: test-results
      - persist_to_workspace:
          root: .
          paths:
            - image_gen_coverage.xml
            - image_gen_coverage
  logging_testing:
    docker:
      - image: cimg/python:3.11
        auth:
          username: ${DOCKERHUB_USERNAME}
          password: ${DOCKERHUB_PASSWORD}
    working_directory: ~/project

    steps:
      - checkout
      - setup_google_dns
      - run:
          name: Install Dependencies
          command: |
            python -m pip install --upgrade pip
            python -m pip install -r requirements.txt
            pip install "pytest==7.3.1"
            pip install "pytest-retry==1.6.3"
            pip install "pytest-cov==5.0.0"
            pip install "pytest-asyncio==0.21.1"
            pip install pytest-mock
            pip install "respx==0.21.1"
            pip install "google-generativeai==0.3.2"
            pip install "google-cloud-aiplatform==1.43.0"
            pip install "mlflow==2.17.2"
      # Run pytest and generate JUnit XML report
      - run:
          name: Run tests
          command: |
            pwd
            ls
            python -m pytest -vv tests/logging_callback_tests --cov=litellm --cov-report=xml -x -s -v --junitxml=test-results/junit.xml --durations=5
          no_output_timeout: 120m
      - run:
          name: Rename the coverage files
          command: |
            mv coverage.xml logging_coverage.xml
            mv .coverage logging_coverage

      # Store test results
      - store_test_results:
          path: test-results
      - persist_to_workspace:
          root: .
          paths:
            - logging_coverage.xml
            - logging_coverage
  installing_litellm_on_python:
    docker:
      - image: circleci/python:3.8
        auth:
          username: ${DOCKERHUB_USERNAME}
          password: ${DOCKERHUB_PASSWORD}
    working_directory: ~/project

    steps:
      - checkout
      - setup_google_dns
      - run:
          name: Install Dependencies
          command: |
            python -m pip install --upgrade pip
            pip install python-dotenv
            pip install pytest
            pip install tiktoken
            pip install aiohttp
            pip install openai
            pip install click
            pip install "boto3==1.34.34"
            pip install jinja2
            pip install "tokenizers==0.20.0"
            pip install "uvloop==0.21.0"
            pip install jsonschema
      - run:
          name: Run tests
          command: |
            pwd
            ls
            python -m pytest -vv tests/local_testing/test_basic_python_version.py

  installing_litellm_on_python_3_13:
    docker:
      - image: cimg/python:3.13.1
        auth:
          username: ${DOCKERHUB_USERNAME}
          password: ${DOCKERHUB_PASSWORD}
    working_directory: ~/project

    steps:
      - checkout
      - setup_google_dns
      - run:
          name: Install Dependencies
          command: |
            python -m pip install --upgrade pip
            python -m pip install wheel setuptools
            python -m pip install -r requirements.txt
            pip install "pytest==7.3.1"
            pip install "pytest-retry==1.6.3"
            pip install "pytest-asyncio==0.21.1"
            pip install "pytest-cov==5.0.0"
            pip install "tomli==2.2.1"
      - run:
          name: Run tests
          command: |
            pwd
            ls
            python -m pytest -vv tests/local_testing/test_basic_python_version.py
  helm_chart_testing:
    machine:
      image: ubuntu-2204:2023.10.1 # Use machine executor instead of docker
    resource_class: medium
    working_directory: ~/project

    steps:
      - checkout
      - setup_google_dns
      # Install Helm
      - run:
          name: Install Helm
          command: |
            curl https://raw.githubusercontent.com/helm/helm/main/scripts/get-helm-3 | bash

      # Install kind
      - run:
          name: Install Kind
          command: |
            curl -Lo ./kind https://kind.sigs.k8s.io/dl/v0.20.0/kind-linux-amd64
            chmod +x ./kind
            sudo mv ./kind /usr/local/bin/kind

      # Install kubectl
      - run:
          name: Install kubectl
          command: |
            curl -LO "https://dl.k8s.io/release/$(curl -L -s https://dl.k8s.io/release/stable.txt)/bin/linux/amd64/kubectl"
            chmod +x kubectl
            sudo mv kubectl /usr/local/bin/

      # Create kind cluster
      - run:
          name: Create Kind Cluster
          command: |
            kind create cluster --name litellm-test

      # Run helm lint
      - run:
          name: Run helm lint
          command: |
            helm lint ./deploy/charts/litellm-helm

      # Run helm tests
      - run:
          name: Run helm tests
          command: |
            helm install litellm ./deploy/charts/litellm-helm -f ./deploy/charts/litellm-helm/ci/test-values.yaml
            # Wait for pod to be ready
            echo "Waiting 30 seconds for pod to be ready..."
            sleep 30

            # Print pod logs before running tests
            echo "Printing pod logs..."
            kubectl logs $(kubectl get pods -l app.kubernetes.io/name=litellm -o jsonpath="{.items[0].metadata.name}")

            # Run the helm tests
            helm test litellm --logs
            helm test litellm --logs

      # Cleanup
      - run:
          name: Cleanup
          command: |
            kind delete cluster --name litellm-test
          when: always # This ensures cleanup runs even if previous steps fail
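  # The same chart check can be reproduced locally against any kind cluster with
  # roughly the commands the job above runs (a sketch for local use, not part of
  # the CI pipeline):
  #
  #   kind create cluster --name litellm-test
  #   helm lint ./deploy/charts/litellm-helm
  #   helm install litellm ./deploy/charts/litellm-helm -f ./deploy/charts/litellm-helm/ci/test-values.yaml
  #   helm test litellm --logs
  #   kind delete cluster --name litellm-test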
  check_code_and_doc_quality:
    docker:
      - image: cimg/python:3.11
        auth:
          username: ${DOCKERHUB_USERNAME}
          password: ${DOCKERHUB_PASSWORD}
    working_directory: ~/project/litellm

    steps:
      - checkout
      - setup_google_dns
      - run:
          name: Install Dependencies
          command: |
            python -m pip install --upgrade pip
            pip install ruff
            pip install pylint
            pip install pyright
            pip install beautifulsoup4
            pip install .
            curl https://raw.githubusercontent.com/helm/helm/main/scripts/get-helm-3 | bash
      - run: python -c "from litellm import *" || (echo '🚨 import failed, this means you introduced unprotected imports! 🚨'; exit 1)
      - run: ruff check ./litellm
      # - run: python ./tests/documentation_tests/test_general_setting_keys.py
      - run: python ./tests/code_coverage_tests/check_licenses.py
      - run: python ./tests/code_coverage_tests/router_code_coverage.py
      - run: python ./tests/code_coverage_tests/callback_manager_test.py
      - run: python ./tests/code_coverage_tests/recursive_detector.py
      - run: python ./tests/code_coverage_tests/test_router_strategy_async.py
      - run: python ./tests/code_coverage_tests/litellm_logging_code_coverage.py
      - run: python ./tests/code_coverage_tests/bedrock_pricing.py
      - run: python ./tests/documentation_tests/test_env_keys.py
      - run: python ./tests/documentation_tests/test_router_settings.py
      - run: python ./tests/documentation_tests/test_api_docs.py
      - run: python ./tests/code_coverage_tests/ensure_async_clients_test.py
      - run: python ./tests/code_coverage_tests/enforce_llms_folder_style.py
      - run: python ./tests/documentation_tests/test_circular_imports.py
      - run: python ./tests/code_coverage_tests/prevent_key_leaks_in_exceptions.py
      - run: helm lint ./deploy/charts/litellm-helm
  db_migration_disable_update_check:
    machine:
      image: ubuntu-2204:2023.10.1
    resource_class: xlarge
    working_directory: ~/project
    steps:
      - checkout
      - setup_google_dns
      - run:
          name: Install Python 3.9
          command: |
            curl https://repo.anaconda.com/miniconda/Miniconda3-latest-Linux-x86_64.sh --output miniconda.sh
            bash miniconda.sh -b -p $HOME/miniconda
            export PATH="$HOME/miniconda/bin:$PATH"
            conda init bash
            source ~/.bashrc
            conda create -n myenv python=3.9 -y
            conda activate myenv
            python --version
      - run:
          name: Install Dependencies
          command: |
            pip install "pytest==7.3.1"
            pip install "pytest-asyncio==0.21.1"
            pip install aiohttp
      - run:
          name: Build Docker image
          command: |
            docker build -t myapp . -f ./docker/Dockerfile.database
      - run:
          name: Run Docker container
          command: |
            docker run -d \
              -p 4000:4000 \
              -e DATABASE_URL=$PROXY_DATABASE_URL \
              -e DISABLE_SCHEMA_UPDATE="True" \
              -v $(pwd)/litellm/proxy/example_config_yaml/bad_schema.prisma:/app/schema.prisma \
              -v $(pwd)/litellm/proxy/example_config_yaml/bad_schema.prisma:/app/litellm/proxy/schema.prisma \
              -v $(pwd)/litellm/proxy/example_config_yaml/disable_schema_update.yaml:/app/config.yaml \
              --name my-app \
              myapp:latest \
              --config /app/config.yaml \
              --port 4000
      - run:
          name: Install curl and dockerize
          command: |
            sudo apt-get update
            sudo apt-get install -y curl
            sudo wget https://github.com/jwilder/dockerize/releases/download/v0.6.1/dockerize-linux-amd64-v0.6.1.tar.gz
            sudo tar -C /usr/local/bin -xzvf dockerize-linux-amd64-v0.6.1.tar.gz
            sudo rm dockerize-linux-amd64-v0.6.1.tar.gz

      - run:
          name: Wait for container to be ready
          command: dockerize -wait http://localhost:4000 -timeout 1m
      - run:
          name: Check container logs for expected message
          command: |
            echo "=== Printing Full Container Startup Logs ==="
            docker logs my-app
            echo "=== End of Full Container Startup Logs ==="

            if docker logs my-app 2>&1 | grep -q "prisma schema out of sync with db. Consider running these sql_commands to sync the two"; then
              echo "Expected message found in logs. Test passed."
            else
              echo "Expected message not found in logs. Test failed."
              exit 1
            fi
      - run:
          name: Run Basic Proxy Startup Tests (Health Readiness and Chat Completion)
          command: |
            python -m pytest -vv tests/basic_proxy_startup_tests -x --junitxml=test-results/junit-2.xml --durations=5
          no_output_timeout: 120m

  build_and_test:
    machine:
      image: ubuntu-2204:2023.10.1
    resource_class: xlarge
    working_directory: ~/project
    steps:
      - checkout
      - setup_google_dns
      - run:
          name: Install Docker CLI (In case it's not already installed)
          command: |
            sudo apt-get update
            sudo apt-get install -y docker-ce docker-ce-cli containerd.io
      - run:
          name: Install Python 3.9
          command: |
            curl https://repo.anaconda.com/miniconda/Miniconda3-latest-Linux-x86_64.sh --output miniconda.sh
            bash miniconda.sh -b -p $HOME/miniconda
            export PATH="$HOME/miniconda/bin:$PATH"
            conda init bash
            source ~/.bashrc
            conda create -n myenv python=3.9 -y
            conda activate myenv
            python --version
      - run:
          name: Install Dependencies
          command: |
            pip install "pytest==7.3.1"
            pip install "pytest-asyncio==0.21.1"
            pip install aiohttp
            python -m pip install --upgrade pip
            python -m pip install -r .circleci/requirements.txt
            pip install "pytest==7.3.1"
            pip install "pytest-retry==1.6.3"
            pip install "pytest-mock==3.12.0"
            pip install "pytest-asyncio==0.21.1"
            pip install mypy
            pip install "google-generativeai==0.3.2"
            pip install "google-cloud-aiplatform==1.43.0"
            pip install pyarrow
            pip install "boto3==1.34.34"
            pip install "aioboto3==12.3.0"
            pip install langchain
            pip install "langfuse>=2.0.0"
            pip install "logfire==0.29.0"
            pip install numpydoc
            pip install prisma
            pip install fastapi
            pip install jsonschema
            pip install "httpx==0.24.1"
            pip install "gunicorn==21.2.0"
            pip install "anyio==3.7.1"
            pip install "aiodynamo==23.10.1"
            pip install "asyncio==3.4.3"
            pip install "PyGithub==1.59.1"
            pip install "openai==1.68.2"
      - run:
          name: Install Grype
          command: |
            curl -sSfL https://raw.githubusercontent.com/anchore/grype/main/install.sh | sudo sh -s -- -b /usr/local/bin
      - run:
          name: Build and Scan Docker Images
          command: |
            # Build and scan Dockerfile.database
            echo "Building and scanning Dockerfile.database..."
            docker build -t litellm-database:latest -f ./docker/Dockerfile.database .
            grype litellm-database:latest --fail-on high

            # Build and scan main Dockerfile
            echo "Building and scanning main Dockerfile..."
            docker build -t litellm:latest .
            grype litellm:latest --fail-on high
      - run:
          name: Build Docker image
          command: docker build -t my-app:latest -f ./docker/Dockerfile.database .
      - run:
          name: Run Docker container
          command: |
            docker run -d \
              -p 4000:4000 \
              -e DATABASE_URL=$PROXY_DATABASE_URL \
              -e AZURE_API_KEY=$AZURE_API_KEY \
              -e REDIS_HOST=$REDIS_HOST \
              -e REDIS_PASSWORD=$REDIS_PASSWORD \
              -e REDIS_PORT=$REDIS_PORT \
              -e AZURE_FRANCE_API_KEY=$AZURE_FRANCE_API_KEY \
              -e AZURE_EUROPE_API_KEY=$AZURE_EUROPE_API_KEY \
              -e MISTRAL_API_KEY=$MISTRAL_API_KEY \
              -e AWS_ACCESS_KEY_ID=$AWS_ACCESS_KEY_ID \
              -e GROQ_API_KEY=$GROQ_API_KEY \
              -e ANTHROPIC_API_KEY=$ANTHROPIC_API_KEY \
              -e COHERE_API_KEY=$COHERE_API_KEY \
              -e AWS_SECRET_ACCESS_KEY=$AWS_SECRET_ACCESS_KEY \
              -e AWS_REGION_NAME=$AWS_REGION_NAME \
              -e AUTO_INFER_REGION=True \
              -e OPENAI_API_KEY=$OPENAI_API_KEY \
              -e USE_DDTRACE=True \
              -e DD_API_KEY=$DD_API_KEY \
              -e DD_SITE=$DD_SITE \
              -e LITELLM_LICENSE=$LITELLM_LICENSE \
              -e LANGFUSE_PROJECT1_PUBLIC=$LANGFUSE_PROJECT1_PUBLIC \
              -e LANGFUSE_PROJECT2_PUBLIC=$LANGFUSE_PROJECT2_PUBLIC \
              -e LANGFUSE_PROJECT1_SECRET=$LANGFUSE_PROJECT1_SECRET \
              -e LANGFUSE_PROJECT2_SECRET=$LANGFUSE_PROJECT2_SECRET \
              --name my-app \
              -v $(pwd)/proxy_server_config.yaml:/app/config.yaml \
              my-app:latest \
              --config /app/config.yaml \
              --port 4000 \
              --detailed_debug
      - run:
          name: Install curl and dockerize
          command: |
            sudo apt-get update
            sudo apt-get install -y curl
            sudo wget https://github.com/jwilder/dockerize/releases/download/v0.6.1/dockerize-linux-amd64-v0.6.1.tar.gz
            sudo tar -C /usr/local/bin -xzvf dockerize-linux-amd64-v0.6.1.tar.gz
            sudo rm dockerize-linux-amd64-v0.6.1.tar.gz
      - run:
          name: Start outputting logs
          command: docker logs -f my-app
          background: true
      - run:
          name: Wait for app to be ready
          command: dockerize -wait http://localhost:4000 -timeout 5m
      - run:
          name: Run tests
          command: |
            pwd
            ls
            python -m pytest -s -vv tests/*.py -x --junitxml=test-results/junit.xml --durations=5 --ignore=tests/otel_tests --ignore=tests/spend_tracking_tests --ignore=tests/pass_through_tests --ignore=tests/proxy_admin_ui_tests --ignore=tests/load_tests --ignore=tests/llm_translation --ignore=tests/llm_responses_api_testing --ignore=tests/mcp_tests --ignore=tests/image_gen_tests --ignore=tests/pass_through_unit_tests
          no_output_timeout: 120m

      # Store test results
      - store_test_results:
          path: test-results
  e2e_openai_endpoints:
    machine:
      image: ubuntu-2204:2023.10.1
    resource_class: xlarge
    working_directory: ~/project
    steps:
      - checkout
      - setup_google_dns
      - run:
          name: Install Docker CLI (In case it's not already installed)
          command: |
            sudo apt-get update
            sudo apt-get install -y docker-ce docker-ce-cli containerd.io
      - run:
          name: Install Python 3.9
          command: |
            curl https://repo.anaconda.com/miniconda/Miniconda3-latest-Linux-x86_64.sh --output miniconda.sh
            bash miniconda.sh -b -p $HOME/miniconda
            export PATH="$HOME/miniconda/bin:$PATH"
            conda init bash
            source ~/.bashrc
            conda create -n myenv python=3.9 -y
            conda activate myenv
            python --version
      - run:
          name: Install Dependencies
          command: |
            pip install "pytest==7.3.1"
            pip install "pytest-asyncio==0.21.1"
            pip install aiohttp
            python -m pip install --upgrade pip
            python -m pip install -r .circleci/requirements.txt
            pip install "pytest==7.3.1"
            pip install "pytest-retry==1.6.3"
            pip install "pytest-mock==3.12.0"
            pip install "pytest-asyncio==0.21.1"
            pip install mypy
            pip install "jsonlines==4.0.0"
            pip install "google-generativeai==0.3.2"
            pip install "google-cloud-aiplatform==1.43.0"
            pip install pyarrow
            pip install "boto3==1.34.34"
            pip install "aioboto3==12.3.0"
            pip install langchain
            pip install "langchain_mcp_adapters==0.0.5"
            pip install "langfuse>=2.0.0"
            pip install "logfire==0.29.0"
            pip install numpydoc
            pip install prisma
            pip install fastapi
            pip install jsonschema
            pip install "httpx==0.24.1"
            pip install "gunicorn==21.2.0"
            pip install "anyio==3.7.1"
            pip install "aiodynamo==23.10.1"
            pip install "asyncio==3.4.3"
            pip install "PyGithub==1.59.1"
            pip install "openai==1.68.2"
      # Run pytest and generate JUnit XML report
      - run:
          name: Build Docker image
          command: docker build -t my-app:latest -f ./docker/Dockerfile.database .
      - run:
          name: Run Docker container
          command: |
            docker run -d \
              -p 4000:4000 \
              -e DATABASE_URL=$PROXY_DATABASE_URL \
              -e AZURE_API_KEY=$AZURE_BATCHES_API_KEY \
              -e AZURE_API_BASE=$AZURE_BATCHES_API_BASE \
              -e AZURE_API_VERSION="2024-05-01-preview" \
              -e REDIS_HOST=$REDIS_HOST \
              -e REDIS_PASSWORD=$REDIS_PASSWORD \
              -e REDIS_PORT=$REDIS_PORT \
              -e AZURE_FRANCE_API_KEY=$AZURE_FRANCE_API_KEY \
              -e AZURE_EUROPE_API_KEY=$AZURE_EUROPE_API_KEY \
              -e MISTRAL_API_KEY=$MISTRAL_API_KEY \
              -e AWS_ACCESS_KEY_ID=$AWS_ACCESS_KEY_ID \
              -e GROQ_API_KEY=$GROQ_API_KEY \
              -e ANTHROPIC_API_KEY=$ANTHROPIC_API_KEY \
              -e COHERE_API_KEY=$COHERE_API_KEY \
              -e AWS_SECRET_ACCESS_KEY=$AWS_SECRET_ACCESS_KEY \
              -e AWS_REGION_NAME=$AWS_REGION_NAME \
              -e AUTO_INFER_REGION=True \
              -e USE_DDTRACE=True \
              -e DD_API_KEY=$DD_API_KEY \
              -e DD_SITE=$DD_SITE \
              -e OPENAI_API_KEY=$OPENAI_API_KEY \
              -e LITELLM_LICENSE=$LITELLM_LICENSE \
              -e LANGFUSE_PROJECT1_PUBLIC=$LANGFUSE_PROJECT1_PUBLIC \
              -e LANGFUSE_PROJECT2_PUBLIC=$LANGFUSE_PROJECT2_PUBLIC \
              -e LANGFUSE_PROJECT1_SECRET=$LANGFUSE_PROJECT1_SECRET \
              -e LANGFUSE_PROJECT2_SECRET=$LANGFUSE_PROJECT2_SECRET \
              --name my-app \
              -v $(pwd)/litellm/proxy/example_config_yaml/oai_misc_config.yaml:/app/config.yaml \
              my-app:latest \
              --config /app/config.yaml \
              --port 4000 \
              --detailed_debug
      - run:
          name: Install curl and dockerize
          command: |
            sudo apt-get update
            sudo apt-get install -y curl
            sudo wget https://github.com/jwilder/dockerize/releases/download/v0.6.1/dockerize-linux-amd64-v0.6.1.tar.gz
            sudo tar -C /usr/local/bin -xzvf dockerize-linux-amd64-v0.6.1.tar.gz
            sudo rm dockerize-linux-amd64-v0.6.1.tar.gz
      - run:
          name: Start outputting logs
          command: docker logs -f my-app
          background: true
      - run:
          name: Wait for app to be ready
          command: dockerize -wait http://localhost:4000 -timeout 5m
      - run:
          name: Run tests
          command: |
            pwd
            ls
            python -m pytest -s -vv tests/openai_endpoints_tests --junitxml=test-results/junit.xml --durations=5
          no_output_timeout: 120m

      # Store test results
      - store_test_results:
          path: test-results
  proxy_logging_guardrails_model_info_tests:
    machine:
      image: ubuntu-2204:2023.10.1
    resource_class: xlarge
    working_directory: ~/project
    steps:
      - checkout
      - setup_google_dns
      - run:
          name: Install Docker CLI (In case it's not already installed)
          command: |
            sudo apt-get update
            sudo apt-get install -y docker-ce docker-ce-cli containerd.io
      - run:
          name: Install Python 3.9
          command: |
            curl https://repo.anaconda.com/miniconda/Miniconda3-latest-Linux-x86_64.sh --output miniconda.sh
            bash miniconda.sh -b -p $HOME/miniconda
            export PATH="$HOME/miniconda/bin:$PATH"
            conda init bash
            source ~/.bashrc
            conda create -n myenv python=3.9 -y
            conda activate myenv
            python --version
      - run:
          name: Install Dependencies
          command: |
            pip install "pytest==7.3.1"
            pip install "pytest-asyncio==0.21.1"
            pip install aiohttp
            python -m pip install --upgrade pip
            python -m pip install -r .circleci/requirements.txt
            pip install "pytest==7.3.1"
            pip install "pytest-retry==1.6.3"
            pip install "pytest-mock==3.12.0"
            pip install "pytest-asyncio==0.21.1"
            pip install mypy
            pip install "google-generativeai==0.3.2"
            pip install "google-cloud-aiplatform==1.43.0"
            pip install pyarrow
            pip install "boto3==1.34.34"
            pip install "aioboto3==12.3.0"
            pip install langchain
            pip install "langfuse>=2.0.0"
            pip install "logfire==0.29.0"
            pip install numpydoc
            pip install prisma
            pip install fastapi
            pip install jsonschema
            pip install "httpx==0.24.1"
            pip install "gunicorn==21.2.0"
            pip install "anyio==3.7.1"
            pip install "aiodynamo==23.10.1"
            pip install "asyncio==3.4.3"
            pip install "PyGithub==1.59.1"
            pip install "openai==1.68.2"
      - run:
          name: Build Docker image
          command: docker build -t my-app:latest -f ./docker/Dockerfile.database .
      - run:
          name: Run Docker container
          # intentionally give bad redis credentials here
          # the OTEL test - should get this as a trace
          command: |
            docker run -d \
              -p 4000:4000 \
              -e DATABASE_URL=$PROXY_DATABASE_URL \
              -e REDIS_HOST=$REDIS_HOST \
              -e REDIS_PASSWORD=$REDIS_PASSWORD \
              -e REDIS_PORT=$REDIS_PORT \
              -e LITELLM_MASTER_KEY="sk-1234" \
              -e OPENAI_API_KEY=$OPENAI_API_KEY \
              -e LITELLM_LICENSE=$LITELLM_LICENSE \
              -e OTEL_EXPORTER="in_memory" \
              -e APORIA_API_BASE_2=$APORIA_API_BASE_2 \
              -e APORIA_API_KEY_2=$APORIA_API_KEY_2 \
              -e APORIA_API_BASE_1=$APORIA_API_BASE_1 \
              -e AWS_ACCESS_KEY_ID=$AWS_ACCESS_KEY_ID \
              -e AWS_SECRET_ACCESS_KEY=$AWS_SECRET_ACCESS_KEY \
              -e USE_DDTRACE=True \
              -e DD_API_KEY=$DD_API_KEY \
              -e DD_SITE=$DD_SITE \
              -e AWS_REGION_NAME=$AWS_REGION_NAME \
              -e APORIA_API_KEY_1=$APORIA_API_KEY_1 \
              -e COHERE_API_KEY=$COHERE_API_KEY \
              -e GCS_FLUSH_INTERVAL="1" \
              --name my-app \
              -v $(pwd)/litellm/proxy/example_config_yaml/otel_test_config.yaml:/app/config.yaml \
              -v $(pwd)/litellm/proxy/example_config_yaml/custom_guardrail.py:/app/custom_guardrail.py \
              my-app:latest \
              --config /app/config.yaml \
              --port 4000 \
              --detailed_debug
      - run:
          name: Install curl and dockerize
          command: |
            sudo apt-get update
            sudo apt-get install -y curl
            sudo wget https://github.com/jwilder/dockerize/releases/download/v0.6.1/dockerize-linux-amd64-v0.6.1.tar.gz
            sudo tar -C /usr/local/bin -xzvf dockerize-linux-amd64-v0.6.1.tar.gz
            sudo rm dockerize-linux-amd64-v0.6.1.tar.gz
      - run:
          name: Start outputting logs
          command: docker logs -f my-app
          background: true
      - run:
          name: Wait for app to be ready
          command: dockerize -wait http://localhost:4000 -timeout 5m
      - run:
          name: Run tests
          command: |
            pwd
            ls
            python -m pytest -vv tests/otel_tests -x --junitxml=test-results/junit.xml --durations=5
          no_output_timeout: 120m
      # Clean up first container
      - run:
          name: Stop and remove first container
          command: |
            docker stop my-app
            docker rm my-app

      # Second Docker Container Run with Different Config
      # NOTE: We intentionally pass a "bad" license here. We need to ensure proxy starts and serves request even with bad license
      - run:
          name: Run Second Docker container
          command: |
            docker run -d \
              -p 4000:4000 \
              -e DATABASE_URL=$PROXY_DATABASE_URL \
              -e REDIS_HOST=$REDIS_HOST \
              -e REDIS_PASSWORD=$REDIS_PASSWORD \
              -e REDIS_PORT=$REDIS_PORT \
              -e LITELLM_MASTER_KEY="sk-1234" \
              -e OPENAI_API_KEY=$OPENAI_API_KEY \
              -e LITELLM_LICENSE="bad-license" \
              --name my-app-3 \
              -v $(pwd)/litellm/proxy/example_config_yaml/enterprise_config.yaml:/app/config.yaml \
              my-app:latest \
              --config /app/config.yaml \
              --port 4000 \
              --detailed_debug

      - run:
          name: Start outputting logs for second container
          command: docker logs -f my-app-3
          background: true

      - run:
          name: Wait for second app to be ready
          command: dockerize -wait http://localhost:4000 -timeout 5m

      - run:
          name: Run second round of tests
          command: |
            python -m pytest -vv tests/basic_proxy_startup_tests -x --junitxml=test-results/junit-2.xml --durations=5
          no_output_timeout: 120m

      # Store test results
      - store_test_results:
          path: test-results
  proxy_spend_accuracy_tests:
    machine:
      image: ubuntu-2204:2023.10.1
    resource_class: xlarge
    working_directory: ~/project
    steps:
      - checkout
      - setup_google_dns
      - run:
          name: Install Docker CLI (In case it's not already installed)
          command: |
            sudo apt-get update
            sudo apt-get install -y docker-ce docker-ce-cli containerd.io
      - run:
          name: Install Python 3.9
          command: |
            curl https://repo.anaconda.com/miniconda/Miniconda3-latest-Linux-x86_64.sh --output miniconda.sh
            bash miniconda.sh -b -p $HOME/miniconda
            export PATH="$HOME/miniconda/bin:$PATH"
            conda init bash
            source ~/.bashrc
            conda create -n myenv python=3.9 -y
            conda activate myenv
            python --version
      - run:
          name: Install Dependencies
          command: |
            pip install "pytest==7.3.1"
            pip install "pytest-asyncio==0.21.1"
            pip install aiohttp
            python -m pip install --upgrade pip
            python -m pip install -r requirements.txt
      - run:
          name: Build Docker image
          command: docker build -t my-app:latest -f ./docker/Dockerfile.database .
      - run:
          name: Run Docker container
          command: |
            docker run -d \
              -p 4000:4000 \
              -e DATABASE_URL=$PROXY_DATABASE_URL \
              -e REDIS_HOST=$REDIS_HOST \
              -e REDIS_PASSWORD=$REDIS_PASSWORD \
              -e REDIS_PORT=$REDIS_PORT \
              -e LITELLM_MASTER_KEY="sk-1234" \
              -e OPENAI_API_KEY=$OPENAI_API_KEY \
              -e LITELLM_LICENSE=$LITELLM_LICENSE \
              -e AWS_ACCESS_KEY_ID=$AWS_ACCESS_KEY_ID \
              -e AWS_SECRET_ACCESS_KEY=$AWS_SECRET_ACCESS_KEY \
              -e USE_DDTRACE=True \
              -e DD_API_KEY=$DD_API_KEY \
              -e DD_SITE=$DD_SITE \
              -e AWS_REGION_NAME=$AWS_REGION_NAME \
              --name my-app \
              -v $(pwd)/litellm/proxy/example_config_yaml/spend_tracking_config.yaml:/app/config.yaml \
              my-app:latest \
              --config /app/config.yaml \
              --port 4000 \
              --detailed_debug
      - run:
          name: Install curl and dockerize
          command: |
            sudo apt-get update
            sudo apt-get install -y curl
            sudo wget https://github.com/jwilder/dockerize/releases/download/v0.6.1/dockerize-linux-amd64-v0.6.1.tar.gz
            sudo tar -C /usr/local/bin -xzvf dockerize-linux-amd64-v0.6.1.tar.gz
            sudo rm dockerize-linux-amd64-v0.6.1.tar.gz
      - run:
          name: Start outputting logs
          command: docker logs -f my-app
          background: true
      - run:
          name: Wait for app to be ready
          command: dockerize -wait http://localhost:4000 -timeout 5m
      - run:
          name: Run tests
          command: |
            pwd
            ls
            python -m pytest -vv tests/spend_tracking_tests -x --junitxml=test-results/junit.xml --durations=5
          no_output_timeout: 120m
      # Clean up first container
      - run:
          name: Stop and remove first container
          command: |
            docker stop my-app
            docker rm my-app

  proxy_multi_instance_tests:
    machine:
      image: ubuntu-2204:2023.10.1
    resource_class: xlarge
    working_directory: ~/project
    steps:
      - checkout
      - setup_google_dns
      - run:
          name: Install Docker CLI (In case it's not already installed)
          command: |
            sudo apt-get update
            sudo apt-get install -y docker-ce docker-ce-cli containerd.io
      - run:
          name: Install Python 3.9
          command: |
            curl https://repo.anaconda.com/miniconda/Miniconda3-latest-Linux-x86_64.sh --output miniconda.sh
            bash miniconda.sh -b -p $HOME/miniconda
            export PATH="$HOME/miniconda/bin:$PATH"
            conda init bash
            source ~/.bashrc
            conda create -n myenv python=3.9 -y
            conda activate myenv
            python --version
      - run:
          name: Install Dependencies
          command: |
            pip install "pytest==7.3.1"
            pip install "pytest-asyncio==0.21.1"
            pip install aiohttp
            python -m pip install --upgrade pip
            python -m pip install -r requirements.txt
            pip install "pytest==7.3.1"
            pip install "pytest-retry==1.6.3"
            pip install "pytest-mock==3.12.0"
            pip install "pytest-asyncio==0.21.1"
      - run:
          name: Build Docker image
          command: docker build -t my-app:latest -f ./docker/Dockerfile.database .
      - run:
          name: Run Docker container 1
          command: |
            docker run -d \
              -p 4000:4000 \
              -e DATABASE_URL=$PROXY_DATABASE_URL \
              -e REDIS_HOST=$REDIS_HOST \
              -e REDIS_PASSWORD=$REDIS_PASSWORD \
              -e REDIS_PORT=$REDIS_PORT \
              -e LITELLM_MASTER_KEY="sk-1234" \
              -e LITELLM_LICENSE=$LITELLM_LICENSE \
              -e USE_DDTRACE=True \
              -e DD_API_KEY=$DD_API_KEY \
              -e DD_SITE=$DD_SITE \
              --name my-app \
              -v $(pwd)/litellm/proxy/example_config_yaml/multi_instance_simple_config.yaml:/app/config.yaml \
              my-app:latest \
              --config /app/config.yaml \
              --port 4000 \
              --detailed_debug
      - run:
          name: Run Docker container 2
          command: |
            docker run -d \
              -p 4001:4001 \
              -e DATABASE_URL=$PROXY_DATABASE_URL \
              -e REDIS_HOST=$REDIS_HOST \
              -e REDIS_PASSWORD=$REDIS_PASSWORD \
              -e REDIS_PORT=$REDIS_PORT \
              -e LITELLM_MASTER_KEY="sk-1234" \
              -e LITELLM_LICENSE=$LITELLM_LICENSE \
              -e USE_DDTRACE=True \
              -e DD_API_KEY=$DD_API_KEY \
              -e DD_SITE=$DD_SITE \
              --name my-app-2 \
              -v $(pwd)/litellm/proxy/example_config_yaml/multi_instance_simple_config.yaml:/app/config.yaml \
              my-app:latest \
              --config /app/config.yaml \
              --port 4001 \
              --detailed_debug
      - run:
          name: Install curl and dockerize
          command: |
            sudo apt-get update
            sudo apt-get install -y curl
            sudo wget https://github.com/jwilder/dockerize/releases/download/v0.6.1/dockerize-linux-amd64-v0.6.1.tar.gz
            sudo tar -C /usr/local/bin -xzvf dockerize-linux-amd64-v0.6.1.tar.gz
            sudo rm dockerize-linux-amd64-v0.6.1.tar.gz
      - run:
          name: Start outputting logs
          command: docker logs -f my-app
          background: true
      - run:
          name: Wait for instance 1 to be ready
          command: dockerize -wait http://localhost:4000 -timeout 5m
      - run:
          name: Wait for instance 2 to be ready
          command: dockerize -wait http://localhost:4001 -timeout 5m
      - run:
          name: Run tests
          command: |
            pwd
            ls
            python -m pytest -vv tests/multi_instance_e2e_tests -x --junitxml=test-results/junit.xml --durations=5
          no_output_timeout: 120m
      # Store test results
      - store_test_results:
          path: test-results

  proxy_store_model_in_db_tests:
    machine:
      image: ubuntu-2204:2023.10.1
    resource_class: xlarge
    working_directory: ~/project
    steps:
      - checkout
      - setup_google_dns
      - run:
          name: Install Docker CLI (In case it's not already installed)
          command: |
            sudo apt-get update
            sudo apt-get install -y docker-ce docker-ce-cli containerd.io
      - run:
          name: Install Python 3.9
          command: |
            curl https://repo.anaconda.com/miniconda/Miniconda3-latest-Linux-x86_64.sh --output miniconda.sh
            bash miniconda.sh -b -p $HOME/miniconda
            export PATH="$HOME/miniconda/bin:$PATH"
            conda init bash
            source ~/.bashrc
            conda create -n myenv python=3.9 -y
            conda activate myenv
            python --version
      - run:
          name: Install Dependencies
          command: |
            pip install "pytest==7.3.1"
            pip install "pytest-asyncio==0.21.1"
            pip install aiohttp
            python -m pip install --upgrade pip
            python -m pip install -r requirements.txt
            pip install "pytest==7.3.1"
            pip install "pytest-retry==1.6.3"
            pip install "pytest-mock==3.12.0"
            pip install "pytest-asyncio==0.21.1"
            pip install "assemblyai==0.37.0"
      - run:
          name: Build Docker image
          command: docker build -t my-app:latest -f ./docker/Dockerfile.database .
      - run:
          name: Run Docker container
          # intentionally give bad redis credentials here
          # the OTEL test - should get this as a trace
          command: |
            docker run -d \
              -p 4000:4000 \
              -e DATABASE_URL=$CLEAN_STORE_MODEL_IN_DB_DATABASE_URL \
              -e STORE_MODEL_IN_DB="True" \
              -e LITELLM_MASTER_KEY="sk-1234" \
              -e LITELLM_LICENSE=$LITELLM_LICENSE \
              --name my-app \
              -v $(pwd)/litellm/proxy/example_config_yaml/store_model_db_config.yaml:/app/config.yaml \
              my-app:latest \
              --config /app/config.yaml \
              --port 4000 \
              --detailed_debug \
      - run:
          name: Install curl and dockerize
          command: |
            sudo apt-get update
            sudo apt-get install -y curl
            sudo wget https://github.com/jwilder/dockerize/releases/download/v0.6.1/dockerize-linux-amd64-v0.6.1.tar.gz
            sudo tar -C /usr/local/bin -xzvf dockerize-linux-amd64-v0.6.1.tar.gz
            sudo rm dockerize-linux-amd64-v0.6.1.tar.gz
      - run:
          name: Start outputting logs
          command: docker logs -f my-app
          background: true
      - run:
          name: Wait for app to be ready
          command: dockerize -wait http://localhost:4000 -timeout 5m
      - run:
          name: Run tests
          command: |
            pwd
            ls
            python -m pytest -vv tests/store_model_in_db_tests -x --junitxml=test-results/junit.xml --durations=5
          no_output_timeout: 120m
      # Clean up first container

  proxy_build_from_pip_tests:
    # Change from docker to machine executor
    machine:
      image: ubuntu-2204:2023.10.1
    resource_class: xlarge
    working_directory: ~/project
    steps:
      - checkout
      - setup_google_dns
      # Remove Docker CLI installation since it's already available in machine executor
      - run:
          name: Install Python 3.13
          command: |
            curl https://repo.anaconda.com/miniconda/Miniconda3-latest-Linux-x86_64.sh --output miniconda.sh
            bash miniconda.sh -b -p $HOME/miniconda
            export PATH="$HOME/miniconda/bin:$PATH"
            conda init bash
            source ~/.bashrc
            conda create -n myenv python=3.13 -y
            conda activate myenv
            python --version
      - run:
          name: Install Dependencies
          command: |
            pip install "pytest==7.3.1"
            pip install "pytest-asyncio==0.21.1"
            pip install aiohttp
            python -m pip install --upgrade pip
            pip install "pytest==7.3.1"
            pip install "pytest-retry==1.6.3"
            pip install "pytest-mock==3.12.0"
            pip install "pytest-asyncio==0.21.1"
            pip install mypy
      - run:
          name: Build Docker image
          command: |
            cd docker/build_from_pip
            docker build -t my-app:latest -f Dockerfile.build_from_pip .
      - run:
          name: Run Docker container
          # intentionally give bad redis credentials here
          # the OTEL test - should get this as a trace
          command: |
            cd docker/build_from_pip
            docker run -d \
              -p 4000:4000 \
              -e DATABASE_URL=$PROXY_DATABASE_URL \
              -e REDIS_HOST=$REDIS_HOST \
              -e REDIS_PASSWORD=$REDIS_PASSWORD \
              -e REDIS_PORT=$REDIS_PORT \
              -e LITELLM_MASTER_KEY="sk-1234" \
              -e OPENAI_API_KEY=$OPENAI_API_KEY \
              -e LITELLM_LICENSE=$LITELLM_LICENSE \
              -e OTEL_EXPORTER="in_memory" \
              -e APORIA_API_BASE_2=$APORIA_API_BASE_2 \
              -e APORIA_API_KEY_2=$APORIA_API_KEY_2 \
              -e APORIA_API_BASE_1=$APORIA_API_BASE_1 \
              -e AWS_ACCESS_KEY_ID=$AWS_ACCESS_KEY_ID \
              -e AWS_SECRET_ACCESS_KEY=$AWS_SECRET_ACCESS_KEY \
              -e AWS_REGION_NAME=$AWS_REGION_NAME \
              -e APORIA_API_KEY_1=$APORIA_API_KEY_1 \
              -e COHERE_API_KEY=$COHERE_API_KEY \
              -e USE_DDTRACE=True \
              -e DD_API_KEY=$DD_API_KEY \
              -e DD_SITE=$DD_SITE \
              -e GCS_FLUSH_INTERVAL="1" \
              --name my-app \
              -v $(pwd)/litellm_config.yaml:/app/config.yaml \
              my-app:latest \
              --config /app/config.yaml \
              --port 4000 \
              --detailed_debug \
      - run:
          name: Install curl and dockerize
          command: |
            sudo apt-get update
            sudo apt-get install -y curl
            sudo wget https://github.com/jwilder/dockerize/releases/download/v0.6.1/dockerize-linux-amd64-v0.6.1.tar.gz
            sudo tar -C /usr/local/bin -xzvf dockerize-linux-amd64-v0.6.1.tar.gz
            sudo rm dockerize-linux-amd64-v0.6.1.tar.gz
      - run:
          name: Start outputting logs
          command: docker logs -f my-app
          background: true
      - run:
          name: Wait for app to be ready
          command: dockerize -wait http://localhost:4000 -timeout 5m
      - run:
          name: Run tests
          command: |
            python -m pytest -vv tests/basic_proxy_startup_tests -x --junitxml=test-results/junit-2.xml --durations=5
          no_output_timeout: 120m
      # Clean up first container
      - run:
          name: Stop and remove first container
          command: |
            docker stop my-app
            docker rm my-app

  proxy_pass_through_endpoint_tests:
    machine:
      image: ubuntu-2204:2023.10.1
    resource_class: xlarge
    working_directory: ~/project
    steps:
      - checkout
      - setup_google_dns
      - run:
          name: Install Docker CLI (In case it's not already installed)
          command: |
            sudo apt-get update
            sudo apt-get install -y docker-ce docker-ce-cli containerd.io
      - run:
          name: Install Python 3.9
          command: |
            curl https://repo.anaconda.com/miniconda/Miniconda3-latest-Linux-x86_64.sh --output miniconda.sh
            bash miniconda.sh -b -p $HOME/miniconda
            export PATH="$HOME/miniconda/bin:$PATH"
            conda init bash
            source ~/.bashrc
            conda create -n myenv python=3.9 -y
            conda activate myenv
            python --version
      - run:
          name: Install Dependencies
          command: |
            pip install "pytest==7.3.1"
            pip install "pytest-retry==1.6.3"
            pip install "pytest-asyncio==0.21.1"
            pip install "google-cloud-aiplatform==1.43.0"
            pip install aiohttp
            pip install "openai==1.68.2"
            pip install "assemblyai==0.37.0"
            python -m pip install --upgrade pip
            pip install "pydantic==2.10.2"
            pip install "pytest==7.3.1"
            pip install "pytest-mock==3.12.0"
            pip install "pytest-asyncio==0.21.1"
            pip install "boto3==1.34.34"
            pip install mypy
            pip install pyarrow
            pip install numpydoc
            pip install prisma
            pip install fastapi
            pip install jsonschema
            pip install "httpx==0.27.0"
            pip install "anyio==3.7.1"
            pip install "asyncio==3.4.3"
            pip install "PyGithub==1.59.1"
            pip install "google-cloud-aiplatform==1.59.0"
            pip install "anthropic==0.49.0"
            pip install "langchain_mcp_adapters==0.0.5"
            pip install "langchain_openai==0.2.1"
            pip install "langgraph==0.3.18"
      # Run pytest and generate JUnit XML report
      - run:
          name: Build Docker image
          command: docker build -t my-app:latest -f ./docker/Dockerfile.database .
      - run:
          name: Run Docker container
          command: |
            docker run -d \
              -p 4000:4000 \
              -e DATABASE_URL=$PROXY_DATABASE_URL \
              -e LITELLM_MASTER_KEY="sk-1234" \
              -e OPENAI_API_KEY=$OPENAI_API_KEY \
              -e GEMINI_API_KEY=$GEMINI_API_KEY \
              -e ANTHROPIC_API_KEY=$ANTHROPIC_API_KEY \
              -e ASSEMBLYAI_API_KEY=$ASSEMBLYAI_API_KEY \
              -e USE_DDTRACE=True \
              -e DD_API_KEY=$DD_API_KEY \
              -e DD_SITE=$DD_SITE \
              -e LITELLM_LICENSE=$LITELLM_LICENSE \
              --name my-app \
              -v $(pwd)/litellm/proxy/example_config_yaml/pass_through_config.yaml:/app/config.yaml \
              -v $(pwd)/litellm/proxy/example_config_yaml/custom_auth_basic.py:/app/custom_auth_basic.py \
              my-app:latest \
              --config /app/config.yaml \
              --port 4000 \
              --detailed_debug \
      - run:
          name: Install curl and dockerize
          command: |
            sudo apt-get update
            sudo apt-get install -y curl
            sudo wget https://github.com/jwilder/dockerize/releases/download/v0.6.1/dockerize-linux-amd64-v0.6.1.tar.gz
            sudo tar -C /usr/local/bin -xzvf dockerize-linux-amd64-v0.6.1.tar.gz
            sudo rm dockerize-linux-amd64-v0.6.1.tar.gz
      - run:
          name: Start outputting logs
          command: docker logs -f my-app
          background: true
      - run:
          name: Wait for app to be ready
          command: dockerize -wait http://localhost:4000 -timeout 5m
      # Add Ruby installation and testing before the existing Node.js and Python tests
      - run:
          name: Install Ruby and Bundler
          command: |
            # Import GPG keys first
            gpg --keyserver hkp://keyserver.ubuntu.com --recv-keys 409B6B1796C275462A1703113804BB82D39DC0E3 7D2BAF1CF37B13E2069D6956105BD0E739499BDB || {
              curl -sSL https://rvm.io/mpapis.asc | gpg --import -
              curl -sSL https://rvm.io/pkuczynski.asc | gpg --import -
            }

            # Install Ruby version manager (RVM)
            curl -sSL https://get.rvm.io | bash -s stable

            # Source RVM from the correct location
            source $HOME/.rvm/scripts/rvm

            # Install Ruby 3.2.2
            rvm install 3.2.2
            rvm use 3.2.2 --default

            # Install latest Bundler
            gem install bundler

      - run:
          name: Run Ruby tests
          command: |
            source $HOME/.rvm/scripts/rvm
            cd tests/pass_through_tests/ruby_passthrough_tests
            bundle install
            bundle exec rspec
          no_output_timeout: 30m
      # New steps to run Node.js test
      - run:
          name: Install Node.js
          command: |
            export DEBIAN_FRONTEND=noninteractive
            curl -fsSL https://deb.nodesource.com/setup_18.x | sudo -E bash -
            sudo apt-get update
            sudo apt-get install -y nodejs
            node --version
            npm --version

      - run:
          name: Install Node.js dependencies
          command: |
            npm install @google-cloud/vertexai
            npm install @google/generative-ai
            npm install --save-dev jest

      - run:
          name: Run Vertex AI, Google AI Studio Node.js tests
          command: |
            npx jest tests/pass_through_tests --verbose
          no_output_timeout: 30m
      - run:
          name: Run tests
          command: |
            pwd
            ls
            python -m pytest -vv tests/pass_through_tests/ -x --junitxml=test-results/junit.xml --durations=5
          no_output_timeout: 120m
      # Store test results
      - store_test_results:
          path: test-results

  upload-coverage:
    docker:
      - image: cimg/python:3.9
    steps:
      - checkout
      - attach_workspace:
          at: .
      # Check file locations
      - run:
          name: Check coverage file location
          command: |
            echo "Current directory:"
            ls -la
            echo -e "\nContents of tests/llm_translation:"
            ls -la tests/llm_translation
      - run:
          name: Combine Coverage
          command: |
            python -m venv venv
            . venv/bin/activate
            pip install coverage
            coverage combine llm_translation_coverage llm_responses_api_coverage mcp_coverage logging_coverage litellm_router_coverage local_testing_coverage litellm_assistants_api_coverage auth_ui_unit_tests_coverage langfuse_coverage caching_coverage litellm_proxy_unit_tests_coverage image_gen_coverage pass_through_unit_tests_coverage batches_coverage litellm_proxy_security_tests_coverage
            coverage xml
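      # Optional sanity check before uploading: a short sketch that prints the tail
      # of the combined coverage table (run inside the same venv created above), so a
      # broken combine step is visible in the job output before codecov is involved.
      #
      #   coverage report -m | tail -n 5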
      - codecov/upload:
          file: ./coverage.xml

  publish_to_pypi:
    docker:
      - image: cimg/python:3.8
    working_directory: ~/project

    environment:
      TWINE_USERNAME: __token__

    steps:
      - checkout

      - run:
          name: Copy model_prices_and_context_window File to model_prices_and_context_window_backup
          command: |
            cp model_prices_and_context_window.json litellm/model_prices_and_context_window_backup.json

      - run:
          name: Check if litellm dir, tests dir, or pyproject.toml was modified
          command: |
            if [ -n "$(git diff --name-only $CIRCLE_SHA1^..$CIRCLE_SHA1 | grep -E 'pyproject\.toml|litellm/|tests/')" ]; then
              echo "litellm, tests, or pyproject.toml updated"
            else
              echo "No changes to litellm, tests, or pyproject.toml. Skipping PyPI publish."
              circleci step halt
            fi

      - run:
          name: Checkout code
          command: git checkout $CIRCLE_SHA1

      # Check if the version in pyproject.toml was bumped and publish to PyPI
      - run:
          name: PyPI publish
          command: |
            echo "Install TOML package."
            python -m pip install toml
            VERSION=$(python -c "import toml; print(toml.load('pyproject.toml')['tool']['poetry']['version'])")
            PACKAGE_NAME=$(python -c "import toml; print(toml.load('pyproject.toml')['tool']['poetry']['name'])")
            if ! pip show -v $PACKAGE_NAME | grep -q "Version: ${VERSION}"; then
              echo "pyproject.toml modified"
              echo -e "[pypi]\nusername = $PYPI_PUBLISH_USERNAME\npassword = $PYPI_PUBLISH_PASSWORD" > ~/.pypirc
              python -m pip install --upgrade pip
              pip install build
              pip install wheel
              pip install --upgrade twine setuptools
              rm -rf build dist

              echo "Building package"
              python -m build

              echo "Twine upload to dist"
              echo "Contents of dist directory:"
              ls dist/
              twine upload --verbose dist/*
            else
              echo "Version ${VERSION} of package is already published on PyPI. Skipping PyPI publish."
              circleci step halt
            fi
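      # The guard above keys off the locally installed package version. A hedged
      # alternative sketch (using $PACKAGE_NAME and $VERSION as defined in the step
      # above) is to query PyPI's JSON API directly, mirroring the pattern that
      # publish_proxy_extras uses below:
      #
      #   LAST_VERSION=$(curl -s "https://pypi.org/pypi/${PACKAGE_NAME}/json" \
      #     | python -c "import json, sys; print(json.load(sys.stdin)['info']['version'])")
      #   if [ "$VERSION" != "$LAST_VERSION" ]; then echo "unpublished version, safe to upload"; fi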
      - run:
          name: Trigger Github Action for new Docker Container + Trigger Load Testing
          command: |
            echo "Install TOML package."
            python3 -m pip install toml
            VERSION=$(python3 -c "import toml; print(toml.load('pyproject.toml')['tool']['poetry']['version'])")
            echo "LiteLLM Version ${VERSION}"
            curl -X POST \
              -H "Accept: application/vnd.github.v3+json" \
              -H "Authorization: Bearer $GITHUB_TOKEN" \
              "https://api.github.com/repos/BerriAI/litellm/actions/workflows/ghcr_deploy.yml/dispatches" \
              -d "{\"ref\":\"main\", \"inputs\":{\"tag\":\"v${VERSION}-nightly\", \"commit_hash\":\"$CIRCLE_SHA1\"}}"
            echo "triggering load testing server for version ${VERSION} and commit ${CIRCLE_SHA1}"
            curl -X POST "https://proxyloadtester-production.up.railway.app/start/load/test?version=${VERSION}&commit_hash=${CIRCLE_SHA1}&release_type=nightly"

  publish_proxy_extras:
    docker:
      - image: cimg/python:3.8
    working_directory: ~/project/litellm-proxy-extras
    environment:
      TWINE_USERNAME: __token__

    steps:
      - checkout:
          path: ~/project

      - run:
          name: Check if litellm-proxy-extras dir or pyproject.toml was modified
          command: |
            echo "Install TOML package."
            python -m pip install toml
            # Get current version from pyproject.toml
            CURRENT_VERSION=$(python -c "import toml; print(toml.load('pyproject.toml')['tool']['poetry']['version'])")

            # Get last published version from PyPI
            LAST_VERSION=$(curl -s https://pypi.org/pypi/litellm-proxy-extras/json | python -c "import json, sys; print(json.load(sys.stdin)['info']['version'])")

            echo "Current version: $CURRENT_VERSION"
            echo "Last published version: $LAST_VERSION"

            # Compare versions using Python's packaging.version
            VERSION_COMPARE=$(python -c "from packaging import version; print(1 if version.parse('$CURRENT_VERSION') < version.parse('$LAST_VERSION') else 0)")

            echo "Version compare: $VERSION_COMPARE"
            if [ "$VERSION_COMPARE" = "1" ]; then
              echo "Error: Current version ($CURRENT_VERSION) is less than last published version ($LAST_VERSION)"
              exit 1
            fi

            # If versions are equal or current is greater, check contents
            pip download --no-deps litellm-proxy-extras==$LAST_VERSION -d /tmp

            echo "Contents of /tmp directory:"
            ls -la /tmp

            # Find the downloaded file (could be .whl or .tar.gz)
            DOWNLOADED_FILE=$(ls /tmp/litellm_proxy_extras-*)
            echo "Downloaded file: $DOWNLOADED_FILE"

            # Extract based on file extension
            if [[ "$DOWNLOADED_FILE" == *.whl ]]; then
              echo "Extracting wheel file..."
              unzip -q "$DOWNLOADED_FILE" -d /tmp/extracted
              EXTRACTED_DIR="/tmp/extracted"
            else
              echo "Extracting tar.gz file..."
              tar -xzf "$DOWNLOADED_FILE" -C /tmp
              EXTRACTED_DIR="/tmp/litellm_proxy_extras-$LAST_VERSION"
            fi

            echo "Contents of extracted package:"
            ls -R "$EXTRACTED_DIR"

            # Compare contents
            if ! diff -r "$EXTRACTED_DIR/litellm_proxy_extras" ./litellm_proxy_extras; then
              if [ "$CURRENT_VERSION" = "$LAST_VERSION" ]; then
                echo "Error: Changes detected in litellm-proxy-extras but version was not bumped"
                echo "Current version: $CURRENT_VERSION"
                echo "Last published version: $LAST_VERSION"
                echo "Changes:"
                diff -r "$EXTRACTED_DIR/litellm_proxy_extras" ./litellm_proxy_extras
                exit 1
              fi
            else
              echo "No changes detected in litellm-proxy-extras. Skipping PyPI publish."
              circleci step halt
            fi
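      # The version guard above reduces to a single packaging.version comparison.
      # Standalone sketch (assumes the `packaging` module is importable, which the
      # step above already relies on):
      #
      #   python -c "from packaging import version; print(version.parse('0.1.9') < version.parse('0.1.10'))"  # prints True
      #
      # i.e. 0.1.10 sorts after 0.1.9, which a plain string comparison would get wrong.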
      - run:
          name: Get new version
          command: |
            cd litellm-proxy-extras
            NEW_VERSION=$(python -c "import toml; print(toml.load('pyproject.toml')['tool']['poetry']['version'])")
            echo "export NEW_VERSION=$NEW_VERSION" >> $BASH_ENV

      - run:
          name: Check if versions match
          command: |
            cd ~/project
            # Check pyproject.toml
            CURRENT_VERSION=$(python -c "import toml; print(toml.load('pyproject.toml')['tool']['poetry']['dependencies']['litellm-proxy-extras'].split('\"')[1])")
            if [ "$CURRENT_VERSION" != "$NEW_VERSION" ]; then
              echo "Error: Version in pyproject.toml ($CURRENT_VERSION) doesn't match new version ($NEW_VERSION)"
              exit 1
            fi

            # Check requirements.txt
            REQ_VERSION=$(grep -oP 'litellm-proxy-extras==\K[0-9.]+' requirements.txt)
            if [ "$REQ_VERSION" != "$NEW_VERSION" ]; then
              echo "Error: Version in requirements.txt ($REQ_VERSION) doesn't match new version ($NEW_VERSION)"
              exit 1
            fi

      - run:
          name: Publish to PyPI
          command: |
            cd litellm-proxy-extras
            echo -e "[pypi]\nusername = $PYPI_PUBLISH_USERNAME\npassword = $PYPI_PUBLISH_PASSWORD" > ~/.pypirc
            python -m pip install --upgrade pip build twine setuptools wheel
            rm -rf build dist
            python -m build
            twine upload --verbose dist/*

  e2e_ui_testing:
    machine:
      image: ubuntu-2204:2023.10.1
    resource_class: xlarge
    working_directory: ~/project
    steps:
      - checkout
      - setup_google_dns
      - run:
          name: Build UI
          command: |
            # Set up nvm
            export NVM_DIR="/opt/circleci/.nvm"
            source "$NVM_DIR/nvm.sh"
            source "$NVM_DIR/bash_completion"

            # Install and use Node version
            nvm install v18.17.0
            nvm use v18.17.0

            cd ui/litellm-dashboard

            # Install dependencies first
            npm install

            # Now source the build script
            source ./build_ui.sh
      - run:
          name: Install Docker CLI (In case it's not already installed)
          command: |
            sudo apt-get update
            sudo apt-get install -y docker-ce docker-ce-cli containerd.io
      - run:
          name: Install Python 3.9
          command: |
            curl https://repo.anaconda.com/miniconda/Miniconda3-latest-Linux-x86_64.sh --output miniconda.sh
            bash miniconda.sh -b -p $HOME/miniconda
            export PATH="$HOME/miniconda/bin:$PATH"
            conda init bash
            source ~/.bashrc
            conda create -n myenv python=3.9 -y
            conda activate myenv
            python --version
      - run:
          name: Install Dependencies
          command: |
            npm install -D @playwright/test
            npm install @google-cloud/vertexai
            pip install "pytest==7.3.1"
            pip install "pytest-retry==1.6.3"
            pip install "pytest-asyncio==0.21.1"
            pip install aiohttp
            pip install "openai==1.68.2"
            python -m pip install --upgrade pip
            pip install "pydantic==2.10.2"
            pip install "pytest==7.3.1"
            pip install "pytest-mock==3.12.0"
            pip install "pytest-asyncio==0.21.1"
            pip install mypy
            pip install pyarrow
            pip install numpydoc
            pip install prisma
            pip install fastapi
            pip install jsonschema
            pip install "httpx==0.24.1"
            pip install "anyio==3.7.1"
            pip install "asyncio==3.4.3"
      - run:
          name: Install Playwright Browsers
          command: |
            npx playwright install

      - run:
          name: Build Docker image
          command: docker build -t my-app:latest -f ./docker/Dockerfile.database .
      - run:
          name: Run Docker container
          command: |
            docker run -d \
              -p 4000:4000 \
              -e DATABASE_URL=$SMALL_DATABASE_URL \
              -e LITELLM_MASTER_KEY="sk-1234" \
              -e OPENAI_API_KEY=$OPENAI_API_KEY \
              -e UI_USERNAME="admin" \
              -e UI_PASSWORD="gm" \
              -e LITELLM_LICENSE=$LITELLM_LICENSE \
              --name my-app \
              -v $(pwd)/litellm/proxy/example_config_yaml/simple_config.yaml:/app/config.yaml \
              my-app:latest \
              --config /app/config.yaml \
              --port 4000 \
              --detailed_debug
      - run:
          name: Install curl and dockerize
          command: |
            sudo apt-get update
            sudo apt-get install -y curl
            sudo wget https://github.com/jwilder/dockerize/releases/download/v0.6.1/dockerize-linux-amd64-v0.6.1.tar.gz
            sudo tar -C /usr/local/bin -xzvf dockerize-linux-amd64-v0.6.1.tar.gz
            sudo rm dockerize-linux-amd64-v0.6.1.tar.gz
      - run:
          name: Start outputting logs
          command: docker logs -f my-app
          background: true
      - run:
          name: Wait for app to be ready
          command: dockerize -wait http://localhost:4000 -timeout 5m
      - run:
          name: Run Playwright Tests
          command: |
            npx playwright test e2e_ui_tests/ --reporter=html --output=test-results
          no_output_timeout: 120m
      - store_test_results:
          path: test-results

  test_nonroot_image:
    machine:
      image: ubuntu-2204:2023.10.1
    resource_class: xlarge
    working_directory: ~/project
    steps:
      - checkout
      - setup_google_dns
      - run:
          name: Build Docker image
          command: |
            docker build -t non_root_image:latest . -f ./docker/Dockerfile.non_root
      - run:
          name: Install Container Structure Test
          command: |
            curl -LO https://github.com/GoogleContainerTools/container-structure-test/releases/download/v1.19.3/container-structure-test-linux-amd64
            chmod +x container-structure-test-linux-amd64
            sudo mv container-structure-test-linux-amd64 /usr/local/bin/container-structure-test
      - run:
          name: Run Container Structure Test
          command: |
            container-structure-test test --image non_root_image:latest --config docker/tests/nonroot.yaml
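      # A quick manual spot-check of the same property (a sketch, assuming the image
      # ships a usable `id` binary) is to ask Docker directly:
      #
      #   docker run --rm --entrypoint "" non_root_image:latest id -u
      #
      # which should print a non-zero UID for a correctly built non-root image.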

  test_bad_database_url:
    machine:
      image: ubuntu-2204:2023.10.1
    resource_class: xlarge
    working_directory: ~/project
    steps:
      - checkout
      - setup_google_dns
      - run:
          name: Build Docker image
          command: |
            docker build -t myapp . -f ./docker/Dockerfile.non_root
      - run:
          name: Run Docker container with bad DATABASE_URL
          command: |
            docker run --name my-app \
              -p 4000:4000 \
              -e DATABASE_URL="postgresql://wrong:wrong@wrong:5432/wrong" \
              myapp:latest \
              --port 4000 > docker_output.log 2>&1 || true
      - run:
          name: Display Docker logs
          command: cat docker_output.log
      - run:
          name: Check for expected error
          command: |
            if grep -q "Error: P1001: Can't reach database server at" docker_output.log && \
               grep -q "httpx.ConnectError: All connection attempts failed" docker_output.log && \
               grep -q "ERROR: Application startup failed. Exiting." docker_output.log; then
              echo "Expected error found. Test passed."
            else
              echo "Expected error not found. Test failed."
              cat docker_output.log
              exit 1
            fi

workflows:
  version: 2
  build_and_test:
    jobs:
      - local_testing:
          filters:
            branches:
              only:
                - main
                - /litellm_.*/
      - langfuse_logging_unit_tests:
          filters:
            branches:
              only:
                - main
                - /litellm_.*/
      - caching_unit_tests:
          filters:
            branches:
              only:
                - main
                - /litellm_.*/
      - litellm_proxy_unit_testing:
          filters:
            branches:
              only:
                - main
                - /litellm_.*/
      - litellm_proxy_security_tests:
          filters:
            branches:
              only:
                - main
                - /litellm_.*/
      - litellm_assistants_api_testing:
          filters:
            branches:
              only:
                - main
                - /litellm_.*/
      - litellm_router_testing:
          filters:
            branches:
              only:
                - main
                - /litellm_.*/
      - check_code_and_doc_quality:
          filters:
            branches:
              only:
                - main
                - /litellm_.*/
      - auth_ui_unit_tests:
          filters:
            branches:
              only:
                - main
                - /litellm_.*/
      - e2e_ui_testing:
          filters:
            branches:
              only:
                - main
                - /litellm_.*/
      - build_and_test:
          filters:
            branches:
              only:
                - main
                - /litellm_.*/
      - e2e_openai_endpoints:
          filters:
            branches:
              only:
                - main
                - /litellm_.*/
      - proxy_logging_guardrails_model_info_tests:
          filters:
            branches:
              only:
                - main
                - /litellm_.*/
      - proxy_spend_accuracy_tests:
          filters:
            branches:
              only:
                - main
                - /litellm_.*/
      - proxy_multi_instance_tests:
          filters:
            branches:
              only:
                - main
                - /litellm_.*/
      - proxy_store_model_in_db_tests:
          filters:
            branches:
              only:
                - main
                - /litellm_.*/
      - proxy_build_from_pip_tests:
          filters:
            branches:
              only:
                - main
                - /litellm_.*/
      - proxy_pass_through_endpoint_tests:
          filters:
            branches:
              only:
                - main
                - /litellm_.*/
      - llm_translation_testing:
          filters:
            branches:
              only:
                - main
                - /litellm_.*/
      - mcp_testing:
          filters:
            branches:
              only:
                - main
                - /litellm_.*/
      - llm_responses_api_testing:
          filters:
            branches:
              only:
                - main
                - /litellm_.*/
      - litellm_mapped_tests:
          filters:
            branches:
              only:
                - main
                - /litellm_.*/
      - batches_testing:
          filters:
            branches:
              only:
                - main
                - /litellm_.*/
      - litellm_utils_testing:
          filters:
            branches:
              only:
                - main
                - /litellm_.*/
      - pass_through_unit_testing:
          filters:
            branches:
              only:
                - main
                - /litellm_.*/
      - image_gen_testing:
          filters:
            branches:
              only:
                - main
                - /litellm_.*/
      - logging_testing:
          filters:
            branches:
              only:
                - main
                - /litellm_.*/
      - upload-coverage:
          requires:
            - llm_translation_testing
            - mcp_testing
            - llm_responses_api_testing
            - litellm_mapped_tests
            - batches_testing
            - litellm_utils_testing
            - pass_through_unit_testing
            - image_gen_testing
            - logging_testing
            - litellm_router_testing
            - caching_unit_tests
            - litellm_proxy_unit_testing
            - litellm_proxy_security_tests
            - langfuse_logging_unit_tests
            - local_testing
            - litellm_assistants_api_testing
            - auth_ui_unit_tests
      - db_migration_disable_update_check:
          filters:
            branches:
              only:
                - main
                - /litellm_.*/
      - installing_litellm_on_python:
          filters:
            branches:
              only:
                - main
                - /litellm_.*/
      - installing_litellm_on_python_3_13:
          filters:
            branches:
              only:
                - main
                - /litellm_.*/
      - helm_chart_testing:
          filters:
            branches:
              only:
                - main
                - /litellm_.*/
      - load_testing:
          filters:
            branches:
              only:
                - main
                - /litellm_.*/
      - test_bad_database_url:
          filters:
            branches:
              only:
                - main
                - /litellm_.*/
      - publish_proxy_extras:
          filters:
            branches:
              only:
                - main
      - publish_to_pypi:
          requires:
            - local_testing
            - build_and_test
            - e2e_openai_endpoints
            - load_testing
            - test_bad_database_url
            - llm_translation_testing
            - mcp_testing
            - llm_responses_api_testing
            - litellm_mapped_tests
            - batches_testing
            - litellm_utils_testing
            - pass_through_unit_testing
            - image_gen_testing
            - logging_testing
            - litellm_router_testing
            - caching_unit_tests
            - langfuse_logging_unit_tests
            - litellm_assistants_api_testing
            - auth_ui_unit_tests
            - db_migration_disable_update_check
            - e2e_ui_testing
            - litellm_proxy_unit_testing
            - litellm_proxy_security_tests
            - installing_litellm_on_python
            - installing_litellm_on_python_3_13
            - proxy_logging_guardrails_model_info_tests
            - proxy_spend_accuracy_tests
            - proxy_multi_instance_tests
            - proxy_store_model_in_db_tests
            - proxy_build_from_pip_tests
            - proxy_pass_through_endpoint_tests
            - check_code_and_doc_quality
            - publish_proxy_extras