forked from phoenix/litellm-mirror
fix tg ai stream check
parent 35fd82fd3f
commit c22e0fe173
1 changed file with 1 addition and 1 deletion
@@ -324,7 +324,7 @@ def completion(
 
         ## LOGGING
         logging(model=model, input=prompt, custom_llm_provider=custom_llm_provider, logger_fn=logger_fn)
-        if stream == True or optional_params['stream_tokens'] == True:
+        if stream == True:
             return together_ai_completion_streaming({
                 "model": model,
                 "prompt": prompt,
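The removed condition indexes optional_params directly, so any call that never sets the TogetherAI-specific stream_tokens parameter would presumably raise a KeyError before the streaming branch could run; narrowing the test to the generic stream flag avoids that. Below is a minimal sketch of that failure mode, using illustrative names (should_stream, should_stream_fixed) rather than litellm's actual internals:

# Sketch only: hypothetical helpers illustrating the check this commit changes.

def should_stream(stream, optional_params):
    # Old check: direct indexing raises KeyError whenever the caller
    # never passed the TogetherAI-specific 'stream_tokens' parameter.
    return stream == True or optional_params['stream_tokens'] == True

def should_stream_fixed(stream, optional_params):
    # New check from the commit: only the generic stream flag is consulted.
    return stream == True

if __name__ == "__main__":
    params = {"max_tokens": 256}  # note: no 'stream_tokens' key
    try:
        should_stream(False, params)
    except KeyError as e:
        print(f"old check crashes on missing key: {e}")
    print("fixed check:", should_stream_fixed(False, params))

If the provider-specific flag still needed honoring, a dict lookup with a default, e.g. optional_params.get('stream_tokens') == True, would keep the check without the crash; the commit instead drops it entirely.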