fix(anthropic_text.py): add support for async text completion calls

This commit is contained in:
Krrish Dholakia 2024-04-15 08:15:00 -07:00
parent 183fb24848
commit 1cd0551a1e
6 changed files with 324 additions and 98 deletions

View file

@ -380,6 +380,51 @@ def test_completion_claude_stream():
# test_completion_claude_stream()
def test_completion_claude_2_stream():
    """Stream a claude-2 completion and check every chunk's format.

    Fails if the stream yields no text before the finish signal.
    """
    litellm.set_verbose = True
    response = completion(
        model="claude-2",
        messages=[{"role": "user", "content": "hello from litellm"}],
        stream=True,
    )
    complete_response = ""
    # streaming_format_tests validates each chunk and reports when the
    # stream has finished; accumulate the extracted text until then.
    for index, raw_chunk in enumerate(response):
        print(raw_chunk)
        text, finished = streaming_format_tests(index, raw_chunk)
        if finished:
            break
        complete_response += text
    if not complete_response.strip():
        raise Exception("Empty response received")
    print(f"completion_response: {complete_response}")
@pytest.mark.asyncio
async def test_acompletion_claude_2_stream():
    """Async-stream a claude-2 completion and check every chunk's format.

    Fails if the stream yields no text before the finish signal.
    """
    litellm.set_verbose = True
    response = await litellm.acompletion(
        model="claude-2",
        messages=[{"role": "user", "content": "hello from litellm"}],
        stream=True,
    )
    complete_response = ""
    index = 0  # manual counter: enumerate() does not support async iterators
    # streaming_format_tests validates each chunk and reports when the
    # stream has finished; accumulate the extracted text until then.
    async for raw_chunk in response:
        print(raw_chunk)
        text, finished = streaming_format_tests(index, raw_chunk)
        if finished:
            break
        complete_response += text
        index += 1
    if not complete_response.strip():
        raise Exception("Empty response received")
    print(f"completion_response: {complete_response}")
def test_completion_palm_stream():