Rebuild chunks into an OpenAI response

Rebuild the chunks, but do not include the "usage" section
This commit is contained in:
WilliamEspegren 2023-09-17 13:07:54 +02:00
parent e07b3f5bfe
commit 5544a9251f
No known key found for this signature in database
GPG key ID: 30E0CDDF1B5262CD
4 changed files with 69 additions and 3 deletions

BIN
dist/litellm-0.1.687-py3-none-any.whl vendored Normal file

Binary file not shown.

BIN
dist/litellm-0.1.687.tar.gz vendored Normal file

Binary file not shown.

View file

@ -1124,5 +1124,55 @@ def config_completion(**kwargs):
"No config path set, please set a config path using `litellm.config_path = 'path/to/config.json'`" "No config path set, please set a config path using `litellm.config_path = 'path/to/config.json'`"
) )
def stream_chunk_builder(chunks: list, messages: list = None):
    """Rebuild a non-streaming OpenAI-style chat completion response
    from a list of streamed chunks.

    The first chunk supplies the response metadata (id, object, created,
    model) and the assistant role; the last chunk supplies the finish
    reason; the "content" fragments of every delta are concatenated in
    arrival order.

    Args:
        chunks: Streaming chunk dicts, in the order they were received.
        messages: Original request messages. Currently unused; accepted
            so future token-usage estimation can be added without an
            interface change.

    Returns:
        dict: A chat-completion-shaped response. The "usage" section is
        intentionally omitted because token counts cannot be derived
        from the chunks alone.

    Raises:
        ValueError: If ``chunks`` is empty.
    """
    if not chunks:
        raise ValueError("Cannot rebuild a response from an empty chunk list")

    first_chunk = chunks[0]
    # The role only appears in the first delta; the finish reason only
    # appears in the final chunk.
    role = first_chunk["choices"][0]["delta"]["role"]
    finish_reason = chunks[-1]["choices"][0]["finish_reason"]

    # "content" may be absent, or present with an explicit None (e.g. the
    # role-only first delta), so coalesce to "" before joining.
    combined_content = "".join(
        choice.get("delta", {}).get("content") or ""
        for chunk in chunks
        for choice in chunk["choices"]
    )

    return {
        "id": first_chunk["id"],
        "object": first_chunk["object"],
        "created": first_chunk["created"],
        "model": first_chunk["model"],
        "choices": [
            {
                "index": 0,
                "message": {
                    "role": role,
                    "content": combined_content,
                },
                "finish_reason": finish_reason,
            }
        ],
        # "usage" is deliberately not included.
    }

View file

@ -19,5 +19,21 @@ def test_stream_chunk_builder():
for chunk in response: for chunk in response:
chunks.append(chunk) chunks.append(chunk)
print(chunks) try:
rebuilt_response = stream_chunk_builder(chunks)
# exract the response from the rebuilt response
rebuilt_response["id"]
rebuilt_response["object"]
rebuilt_response["created"]
rebuilt_response["model"]
rebuilt_response["choices"]
rebuilt_response["choices"][0]["index"]
choices = rebuilt_response["choices"][0]
message = choices["message"]
role = message["role"]
content = message["content"]
finnish_reason = choices["finish_reason"]
except:
raise Exception("stream_chunk_builder failed to rebuild response")
test_stream_chunk_builder() test_stream_chunk_builder()