forked from phoenix/litellm-mirror
test - stream_options on OpenAI text_completion
parent 66053f14ae
commit a29fcc057b
1 changed file with 33 additions and 0 deletions
@@ -1534,6 +1534,39 @@ def test_openai_stream_options_call():
     assert all(chunk.usage is None for chunk in chunks[:-1])
+
+
+def test_openai_stream_options_call_text_completion():
+    litellm.set_verbose = False
+    response = litellm.text_completion(
+        model="gpt-3.5-turbo-instruct",
+        prompt="say GM - we're going to make it ",
+        stream=True,
+        stream_options={"include_usage": True},
+        max_tokens=10,
+    )
+    usage = None
+    chunks = []
+    for chunk in response:
+        print("chunk: ", chunk)
+        chunks.append(chunk)
+
+    last_chunk = chunks[-1]
+    print("last chunk: ", last_chunk)
+
+    """
+    Assert that:
+    - Last Chunk includes Usage
+    - All chunks prior to last chunk have usage=None
+    """
+
+    assert last_chunk.usage is not None
+    assert last_chunk.usage.total_tokens > 0
+    assert last_chunk.usage.prompt_tokens > 0
+    assert last_chunk.usage.completion_tokens > 0
+
+    # assert all non last chunks have usage=None
+    assert all(chunk.usage is None for chunk in chunks[:-1])
 
 
 def test_openai_text_completion_call():
     try:
         litellm.set_verbose = True
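Usage note: outside pytest, the behaviour exercised by this test can be consumed roughly as sketched below. This is a minimal sketch, not part of the commit, assuming OPENAI_API_KEY is set in the environment and the same litellm.text_completion call shape as the test; the chunk fields read here (choices[0].text, usage with prompt_tokens / completion_tokens / total_tokens) are the ones the test asserts on.

import litellm

# Stream a text completion and collect token usage from the final chunk.
# Assumes OPENAI_API_KEY is set; model/prompt mirror the test above.
response = litellm.text_completion(
    model="gpt-3.5-turbo-instruct",
    prompt="say GM - we're going to make it ",
    stream=True,
    stream_options={"include_usage": True},
    max_tokens=10,
)

text = ""
usage = None
for chunk in response:
    # Intermediate chunks carry text and usage=None; only the final chunk carries usage.
    if chunk.choices and chunk.choices[0].text:
        text += chunk.choices[0].text
    if chunk.usage is not None:
        usage = chunk.usage

print("completion:", text)
if usage is not None:
    print("tokens:", usage.prompt_tokens, usage.completion_tokens, usage.total_tokens)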