Merge pull request #9375 from colesmcintosh/fix-litellm-proxy-docs

docs(litellm_proxy): correct parameter assignment in litellm proxy docs
Ishaan Jaff committed 2025-03-19 11:14:19 -07:00 (committed via GitHub)
commit 818c7d9089


@@ -57,7 +57,7 @@ messages = [{ "content": "Hello, how are you?","role": "user"}]
 # litellm proxy call
 response = completion(
     model="litellm_proxy/your-model-name",
-    messages,
+    messages=messages,
     api_base = "your-litellm-proxy-url",
     api_key = "your-litellm-proxy-api-key"
 )
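
For context, a minimal runnable sketch of the corrected non-streaming call. The proxy URL, API key, and model name below are placeholders I've assumed for illustration, not values taken from this repository:

# Sketch of the corrected proxy call: `messages` passed as a keyword argument.
from litellm import completion

messages = [{"content": "Hello, how are you?", "role": "user"}]

response = completion(
    model="litellm_proxy/your-model-name",
    messages=messages,                  # keyword argument, as fixed in this PR
    api_base="http://localhost:4000",   # assumed local LiteLLM proxy address
    api_key="sk-your-proxy-key",        # assumed placeholder key
)
print(response.choices[0].message.content)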
@@ -76,7 +76,7 @@ messages = [{ "content": "Hello, how are you?","role": "user"}]
 # openai call
 response = completion(
     model="litellm_proxy/your-model-name",
-    messages,
+    messages=messages,
     api_base = "your-litellm-proxy-url",
     stream=True
 )
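
And a matching sketch for the streaming variant: with stream=True, completion() returns an iterator of OpenAI-style delta chunks. Again, the proxy URL and model name are assumed placeholders:

# Streaming sketch of the corrected call, consuming chunks as they arrive.
from litellm import completion

messages = [{"content": "Hello, how are you?", "role": "user"}]

response = completion(
    model="litellm_proxy/your-model-name",
    messages=messages,                  # keyword argument, as fixed in this PR
    api_base="http://localhost:4000",   # assumed local LiteLLM proxy address
    stream=True,
)
for chunk in response:
    delta = chunk.choices[0].delta
    if delta.content:
        print(delta.content, end="", flush=True)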