fix tg ai stream check

This commit is contained in:
ishaan-jaff 2023-08-17 10:49:48 -07:00
parent 35fd82fd3f
commit c22e0fe173

View file

@@ -324,7 +324,7 @@ def completion(
         ## LOGGING
         logging(model=model, input=prompt, custom_llm_provider=custom_llm_provider, logger_fn=logger_fn)
-        if stream == True or optional_params['stream_tokens'] == True:
+        if stream == True:
             return together_ai_completion_streaming({
                 "model": model,
                 "prompt": prompt,