diff --git a/litellm/proxy/proxy_cli.py b/litellm/proxy/proxy_cli.py
index d63fe0adf7..3d19e0b044 100644
--- a/litellm/proxy/proxy_cli.py
+++ b/litellm/proxy/proxy_cli.py
@@ -86,7 +86,7 @@ def is_port_in_use(port):
 @click.option('--host', default='0.0.0.0', help='Host for the server to listen on.')
 @click.option('--port', default=8000, help='Port to bind the server to.')
 @click.option('--api_base', default=None, help='API base URL.')
-@click.option('--model', default=None, help='The model name to pass to litellm expects')
+@click.option('--model', '-m', default=None, help='The model name to pass to litellm expects')
 @click.option('--alias', default=None, help='The alias for the model - use this to give a litellm model name (e.g. "huggingface/codellama/CodeLlama-7b-Instruct-hf") a more user-friendly name ("codellama")')
 @click.option('--add_key', default=None, help='The model name to pass to litellm expects')
 @click.option('--headers', default=None, help='headers for the API call')
@@ -158,6 +158,7 @@ def run_server(host, port, api_base, model, alias, add_key, headers, save, debug
         print("\033[1;32mDone successfully\033[0m")
         return
     if model and "ollama" in model:
+        print("ollama called")
         run_ollama_serve()
     if cost == True:
         print_cost_logs()
diff --git a/litellm/tests/test_proxy.py b/litellm/tests/test_proxy.py
new file mode 100644
index 0000000000..634834e008
--- /dev/null
+++ b/litellm/tests/test_proxy.py
@@ -0,0 +1,26 @@
+#### What this tests ####
+# This tests the OpenAI-proxy server
+
+import sys, os
+import traceback
+sys.path.insert(0, os.path.abspath('../..')) # Adds the parent directory to the system path
+from dotenv import load_dotenv
+
+load_dotenv()
+import unittest
+from unittest.mock import patch
+from click.testing import CliRunner
+import pytest
+import litellm
+from litellm.proxy.llm import litellm_completion
+
+def test_azure_call():
+    try:
+        data = {
+            "model": "azure/chatgpt-v-2",
+            "messages": [{"role": "user", "content": "Hey!"}]
+        }
+        result = litellm_completion(data=data, user_api_base=os.getenv("AZURE_API_BASE"), type="chat_completion", user_temperature=None, user_max_tokens=None, user_model=None, user_headers=None, user_debug=False)
+        return result
+    except Exception as e:
+        pytest.fail(f"An error occurred: {e}")