forked from phoenix/litellm-mirror
Update README.md
This commit is contained in:
parent
5161573bce
commit
a2448d549b
1 changed files with 1 additions and 0 deletions
|
@ -56,6 +56,7 @@ Streaming is supported for OpenAI, Azure, Anthropic models
response = completion(model="gpt-3.5-turbo", messages=messages, stream=True)
for chunk in response:
    print(chunk['choices'][0]['delta'])

# claude 2
result = litellm.completion('claude-2', messages, stream=True)
for chunk in result:
    print(chunk['choices'][0]['delta'])
|
Loading…
Add table
Add a link
Reference in a new issue