Mirror of https://github.com/BerriAI/litellm.git, synced 2025-04-27 03:34:10 +00:00
Merge pull request #9375 from colesmcintosh/fix-litellm-proxy-docs
docs(litellm_proxy): correct parameter assignment in litellm proxy docs
Commit 818c7d9089
1 changed file with 2 additions and 2 deletions
@@ -57,7 +57,7 @@ messages = [{ "content": "Hello, how are you?","role": "user"}]
 # litellm proxy call
 response = completion(
     model="litellm_proxy/your-model-name",
-    messages,
+    messages=messages,
     api_base = "your-litellm-proxy-url",
     api_key = "your-litellm-proxy-api-key"
 )
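For reference, here is the corrected non-streaming call from the first hunk as a minimal runnable sketch, assuming the litellm package is installed and a LiteLLM proxy is reachable; the model name, proxy URL, and API key are placeholder values, as in the docs.

from litellm import completion

messages = [{"content": "Hello, how are you?", "role": "user"}]

# litellm proxy call: `messages` is passed as a keyword argument,
# which is the correction this PR makes to the docs snippet
response = completion(
    model="litellm_proxy/your-model-name",
    messages=messages,
    api_base="your-litellm-proxy-url",
    api_key="your-litellm-proxy-api-key",
)
print(response)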
@@ -76,7 +76,7 @@ messages = [{ "content": "Hello, how are you?","role": "user"}]
 # openai call
 response = completion(
     model="litellm_proxy/your-model-name",
-    messages,
+    messages=messages,
     api_base = "your-litellm-proxy-url",
     stream=True
 )
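The streaming variant from the second hunk, again as a sketch with placeholder values; with stream=True, completion returns an iterable of response chunks.

from litellm import completion

messages = [{"content": "Hello, how are you?", "role": "user"}]

# streaming litellm proxy call, with `messages` as a keyword argument
response = completion(
    model="litellm_proxy/your-model-name",
    messages=messages,
    api_base="your-litellm-proxy-url",
    stream=True,
)
for chunk in response:
    print(chunk)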