diff --git a/docs/my-website/docs/providers/openai.md b/docs/my-website/docs/providers/openai.md index 634f9dbbb..6f29b1136 100644 --- a/docs/my-website/docs/providers/openai.md +++ b/docs/my-website/docs/providers/openai.md @@ -90,11 +90,15 @@ os.environ["OPENAI_API_KEY"] = "" # set custom api base to your proxy # either set .env or litellm.api_base # os.environ["OPENAI_API_BASE"] = "" -litellm.api_base = "https://openai-proxy.berriai.repl.co" +litellm.api_base = "your-openai-proxy-url" messages = [{ "content": "Hello, how are you?","role": "user"}] # openai call -response = completion("gpt-3.5-turbo", messages) -``` \ No newline at end of file +response = completion("openai/your-model-name", messages) +``` + +If you need to set api_base dynamically, just pass it to `completion` instead - `completion(..., api_base="your-proxy-api-base")` + +For more information, check out [setting API Base/Keys](../set_keys.md) \ No newline at end of file diff --git a/litellm/__pycache__/__init__.cpython-311.pyc b/litellm/__pycache__/__init__.cpython-311.pyc index a670e0d25..72b2291b4 100644 Binary files a/litellm/__pycache__/__init__.cpython-311.pyc and b/litellm/__pycache__/__init__.cpython-311.pyc differ diff --git a/litellm/__pycache__/main.cpython-311.pyc b/litellm/__pycache__/main.cpython-311.pyc index 795fb81a0..e58c70e65 100644 Binary files a/litellm/__pycache__/main.cpython-311.pyc and b/litellm/__pycache__/main.cpython-311.pyc differ diff --git a/litellm/__pycache__/utils.cpython-311.pyc b/litellm/__pycache__/utils.cpython-311.pyc index 5c9909cc2..aaa54ab89 100644 Binary files a/litellm/__pycache__/utils.cpython-311.pyc and b/litellm/__pycache__/utils.cpython-311.pyc differ