mirror of
https://github.com/BerriAI/litellm.git
synced 2025-04-27 11:43:54 +00:00
(feat) return custom_llm_provider in streaming response
This commit is contained in:
parent
0a3c89e741
commit
69552146f9
1 changed file with 1 addition and 0 deletions
|
@ -7452,6 +7452,7 @@ class CustomStreamWrapper:
|
|||
|
||||
def chunk_creator(self, chunk):
|
||||
model_response = ModelResponse(stream=True, model=self.model)
|
||||
model_response._hidden_params["custom_llm_provider"] = self.custom_llm_provider
|
||||
model_response.choices = [StreamingChoices()]
|
||||
model_response.choices[0].finish_reason = None
|
||||
response_obj = {}
|
||||
|
|
Loading…
Add table
Add a link
Reference in a new issue