Merge pull request #4009 from BerriAI/litellm_fix_streaming_cost_cal

fix(utils.py): fix cost calculation for openai-compatible streaming object
Krish Dholakia 2024-06-04 21:00:22 -07:00 committed by GitHub
commit e678dce88b
9 changed files with 230 additions and 88 deletions

@@ -4536,7 +4536,7 @@ def stream_chunk_builder_text_completion(chunks: list, messages: Optional[List]
def stream_chunk_builder(
chunks: list, messages: Optional[list] = None, start_time=None, end_time=None
-):
+) -> Union[ModelResponse, TextCompletionResponse]:
model_response = litellm.ModelResponse()
### SORT CHUNKS BASED ON CREATED ORDER ##
print_verbose("Goes into checking if chunk has hiddden created at param")