diff --git a/.circleci/config.yml b/.circleci/config.yml index 585502710..efc0c720c 100644 --- a/.circleci/config.yml +++ b/.circleci/config.yml @@ -363,6 +363,100 @@ jobs: - store_test_results: path: test-results + proxy_pass_through_endpoint_tests: + machine: + image: ubuntu-2204:2023.10.1 + resource_class: xlarge + working_directory: ~/project + steps: + - checkout + - run: + name: Install Docker CLI (In case it's not already installed) + command: | + sudo apt-get update + sudo apt-get install -y docker-ce docker-ce-cli containerd.io + - run: + name: Install Python 3.9 + command: | + curl https://repo.anaconda.com/miniconda/Miniconda3-latest-Linux-x86_64.sh --output miniconda.sh + bash miniconda.sh -b -p $HOME/miniconda + export PATH="$HOME/miniconda/bin:$PATH" + conda init bash + source ~/.bashrc + conda create -n myenv python=3.9 -y + conda activate myenv + python --version + - run: + name: Install Dependencies + command: | + pip install "pytest==7.3.1" + pip install "pytest-retry==1.6.3" + pip install "pytest-asyncio==0.21.1" + pip install "google-cloud-aiplatform==1.43.0" + pip install aiohttp + pip install "openai==1.40.0" + python -m pip install --upgrade pip + pip install "pydantic==2.7.1" + pip install "pytest==7.3.1" + pip install "pytest-mock==3.12.0" + pip install "pytest-asyncio==0.21.1" + pip install "boto3==1.34.34" + pip install mypy + pip install pyarrow + pip install numpydoc + pip install prisma + pip install fastapi + pip install jsonschema + pip install "httpx==0.24.1" + pip install "anyio==3.7.1" + pip install "asyncio==3.4.3" + pip install "PyGithub==1.59.1" + - run: + name: Build Docker image + command: docker build -t my-app:latest -f Dockerfile.database . 
+ - run: + name: Run Docker container + command: | + docker run -d \ + -p 4000:4000 \ + -e DATABASE_URL=$PROXY_DATABASE_URL \ + -e LITELLM_MASTER_KEY="sk-1234" \ + -e OPENAI_API_KEY=$OPENAI_API_KEY \ + -e LITELLM_LICENSE=$LITELLM_LICENSE \ + --name my-app \ + -v $(pwd)/litellm/proxy/example_config_yaml/pass_through_config.yaml:/app/config.yaml \ + -v $(pwd)/litellm/proxy/example_config_yaml/custom_auth_basic.py:/app/custom_auth_basic.py \ + my-app:latest \ + --config /app/config.yaml \ + --port 4000 \ + --detailed_debug \ + - run: + name: Install curl and dockerize + command: | + sudo apt-get update + sudo apt-get install -y curl + sudo wget https://github.com/jwilder/dockerize/releases/download/v0.6.1/dockerize-linux-amd64-v0.6.1.tar.gz + sudo tar -C /usr/local/bin -xzvf dockerize-linux-amd64-v0.6.1.tar.gz + sudo rm dockerize-linux-amd64-v0.6.1.tar.gz + - run: + name: Start outputting logs + command: docker logs -f my-app + background: true + - run: + name: Wait for app to be ready + command: dockerize -wait http://localhost:4000 -timeout 5m + - run: + name: Run tests + command: | + pwd + ls + python -m pytest -vv tests/pass_through_tests/test_vertex_ai.py -x --junitxml=test-results/junit.xml --durations=5 + no_output_timeout: 120m + + # Store test results + - store_test_results: + path: test-results + publish_to_pypi: docker: - image: cimg/python:3.8 @@ -457,6 +551,12 @@ workflows: only: - main - /litellm_.*/ + - proxy_pass_through_endpoint_tests: + filters: + branches: + only: + - main + - /litellm_.*/ - installing_litellm_on_python: filters: branches: @@ -468,6 +568,7 @@ workflows: - local_testing - build_and_test - proxy_log_to_otel_tests + - proxy_pass_through_endpoint_tests filters: branches: only: diff --git a/litellm/proxy/example_config_yaml/custom_auth_basic.py b/litellm/proxy/example_config_yaml/custom_auth_basic.py new file mode 100644 index 000000000..2726b1c3d --- /dev/null +++ b/litellm/proxy/example_config_yaml/custom_auth_basic.py @@ -0,0 +1,14 @@ 
+from fastapi import Request + +from litellm.proxy._types import UserAPIKeyAuth + + +async def user_api_key_auth(request: Request, api_key: str) -> UserAPIKeyAuth: + try: + return UserAPIKeyAuth( + api_key="best-api-key-ever", + user_id="best-user-id-ever", + team_id="best-team-id-ever", + ) + except Exception: + raise diff --git a/litellm/proxy/example_config_yaml/pass_through_config.yaml b/litellm/proxy/example_config_yaml/pass_through_config.yaml new file mode 100644 index 000000000..db9558b3c --- /dev/null +++ b/litellm/proxy/example_config_yaml/pass_through_config.yaml @@ -0,0 +1,9 @@ +model_list: + - model_name: fake-openai-endpoint + litellm_params: + model: openai/fake + api_key: fake-key + api_base: https://exampleopenaiendpoint-production.up.railway.app/ +general_settings: + master_key: sk-1234 + custom_auth: custom_auth_basic.user_api_key_auth \ No newline at end of file diff --git a/tests/pass_through_tests/test_vertex_ai.py b/tests/pass_through_tests/test_vertex_ai.py new file mode 100644 index 000000000..10660ddb9 --- /dev/null +++ b/tests/pass_through_tests/test_vertex_ai.py @@ -0,0 +1,77 @@ +""" +Test Vertex AI Pass Through + +1. 
use Credentials client side, Assert SpendLog was created +""" + +import vertexai +from vertexai.preview.generative_models import GenerativeModel +import tempfile +import json +import os +from google.oauth2 import service_account +import google.auth.transport.requests + + +# Path to your service account JSON file +SERVICE_ACCOUNT_FILE = "path/to/your/service-account.json" + + +def load_vertex_ai_credentials(): + # Define the path to the vertex_key.json file + print("loading vertex ai credentials") + filepath = os.path.dirname(os.path.abspath(__file__)) + vertex_key_path = filepath + "/vertex_key.json" + + # Read the existing content of the file or create an empty dictionary + try: + with open(vertex_key_path, "r") as file: + # Read the file content + print("Read vertexai file path") + content = file.read() + + # If the file is empty or not valid JSON, create an empty dictionary + if not content or not content.strip(): + service_account_key_data = {} + else: + # Attempt to load the existing JSON content + file.seek(0) + service_account_key_data = json.load(file) + except FileNotFoundError: + # If the file doesn't exist, create an empty dictionary + service_account_key_data = {} + + # Update the service_account_key_data with environment variables + private_key_id = os.environ.get("VERTEX_AI_PRIVATE_KEY_ID", "") + private_key = os.environ.get("VERTEX_AI_PRIVATE_KEY", "") + private_key = private_key.replace("\\n", "\n") + service_account_key_data["private_key_id"] = private_key_id + service_account_key_data["private_key"] = private_key + + # Create a temporary file + with tempfile.NamedTemporaryFile(mode="w+", delete=False) as temp_file: + # Write the updated content to the temporary files + json.dump(service_account_key_data, temp_file, indent=2) + + # Export the temporary file as GOOGLE_APPLICATION_CREDENTIALS + os.environ["GOOGLE_APPLICATION_CREDENTIALS"] = os.path.abspath(temp_file.name) + + +LITE_LLM_ENDPOINT = "http://localhost:4000" + + +def 
test_basic_vertex_ai_pass_through_with_spendlog(): + load_vertex_ai_credentials() + vertexai.init( + project="adroit-crow-413218", + location="us-central1", + api_endpoint=f"{LITE_LLM_ENDPOINT}/vertex-ai", + api_transport="rest", + ) + + model = GenerativeModel(model_name="gemini-1.0-pro") + response = model.generate_content("hi") + + print("response", response) + + pass diff --git a/tests/pass_through_tests/vertex_key.json b/tests/pass_through_tests/vertex_key.json new file mode 100644 index 000000000..e2fd8512b --- /dev/null +++ b/tests/pass_through_tests/vertex_key.json @@ -0,0 +1,13 @@ +{ + "type": "service_account", + "project_id": "adroit-crow-413218", + "private_key_id": "", + "private_key": "", + "client_email": "test-adroit-crow@adroit-crow-413218.iam.gserviceaccount.com", + "client_id": "104886546564708740969", + "auth_uri": "https://accounts.google.com/o/oauth2/auth", + "token_uri": "https://oauth2.googleapis.com/token", + "auth_provider_x509_cert_url": "https://www.googleapis.com/oauth2/v1/certs", + "client_x509_cert_url": "https://www.googleapis.com/robot/v1/metadata/x509/test-adroit-crow%40adroit-crow-413218.iam.gserviceaccount.com", + "universe_domain": "googleapis.com" +}