forked from phoenix/litellm-mirror
(test) add streaming sagemaker test
parent bc691cbbcd
commit b7281825d3
1 changed file with 3 additions and 0 deletions
@@ -1053,9 +1053,12 @@ def test_completion_chat_sagemaker():
         response = completion(
             model="sagemaker/jumpstart-dft-meta-textgeneration-llama-2-7b-f",
             messages=messages,
+            stream=True,
         )
         # Add any assertions here to check the response
         print(response)
+        for chunk in response:
+            print(chunk)
     except Exception as e:
         pytest.fail(f"Error occurred: {e}")
 # test_completion_chat_sagemaker()
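For reference, a minimal sketch of the full test function after this change, reconstructed from the hunk above. The `messages` value shown here is a hypothetical placeholder; in the actual test file it is defined earlier and reused across tests.

    # Sketch of the updated test, assuming litellm's completion() returns an
    # iterable of streaming chunks when stream=True (as the added loop implies).
    import pytest
    from litellm import completion

    # Hypothetical placeholder; the real fixture lives elsewhere in the test file.
    messages = [{"role": "user", "content": "Hey, how's it going?"}]

    def test_completion_chat_sagemaker():
        try:
            response = completion(
                model="sagemaker/jumpstart-dft-meta-textgeneration-llama-2-7b-f",
                messages=messages,
                stream=True,
            )
            # Add any assertions here to check the response
            print(response)
            # With stream=True the response is iterated chunk by chunk.
            for chunk in response:
                print(chunk)
        except Exception as e:
            pytest.fail(f"Error occurred: {e}")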