add max_tokens

This commit is contained in:
WilliamEspegren 2023-09-17 10:27:32 +02:00
parent 684b11b456
commit 40850ecf53

View file

@@ -10,7 +10,8 @@ def test_stream_chunk_builder():
response = completion(
model="gpt-3.5-turbo",
messages=messages,
-stream=True
+stream=True,
+max_tokens=10,
)
for chunk in response: