mirror of https://github.com/BerriAI/litellm.git
synced 2025-04-25 10:44:24 +00:00
fix print statement in sent_first_chunk
This commit is contained in:
parent 6b2eebfb64
commit 7afa3841e5

1 changed file with 1 addition and 1 deletion
@@ -2507,7 +2507,7 @@ class CustomStreamWrapper:
         model_response = ModelResponse(stream=True, model=self.model)
         try:
             # return this for all models
-            print(f"self.sent_first_chunk: {self.sent_first_chunk}")
+            print_verbose(f"self.sent_first_chunk: {self.sent_first_chunk}")
             if self.sent_first_chunk == False:
                 model_response.choices[0].delta.role = "assistant"
                 self.sent_first_chunk = True
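The change replaces a raw print with print_verbose, so the per-chunk debug output is only emitted when verbose logging is turned on. As a minimal sketch of why that matters, assuming the helper simply gates on a module-level verbose flag (the actual litellm helper and its flag name are not shown in this diff):

# Minimal sketch of a verbose-gated print helper; the flag name and
# behavior here are assumptions for illustration, not litellm's exact code.
set_verbose = False  # users flip this on when they want debug output

def print_verbose(print_statement):
    # Emit debug output only in verbose mode, instead of printing
    # unconditionally on every streamed chunk.
    if set_verbose:
        print(print_statement)

# Usage mirroring the changed line in CustomStreamWrapper:
sent_first_chunk = False
print_verbose(f"self.sent_first_chunk: {sent_first_chunk}")

With the raw print, every streaming response would write to stdout for each chunk; gating it behind the verbose flag keeps normal runs quiet while preserving the diagnostic when debugging.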