forked from phoenix/litellm-mirror
LiteLLM Minor Fixes and Improvements (09/10/2024) (#5618)
* fix(cost_calculator.py): move to debug for noisy warning message on cost calculation error — Fixes https://github.com/BerriAI/litellm/issues/5610
* fix(databricks/cost_calculator.py): handle model name issues for databricks models
* fix(main.py): fix stream chunk builder for multiple tool calls — Fixes https://github.com/BerriAI/litellm/issues/5591
* fix: correctly set user_alias when passed in — Fixes https://github.com/BerriAI/litellm/issues/5612
* fix(types/utils.py): allow passing role for message object — https://github.com/BerriAI/litellm/issues/5621
* fix(litellm_logging.py): fix Langfuse logging across multiple projects — fixes issue where the Langfuse logger was re-using the old logging object
* feat(proxy/_types.py): support adding key-based tags for tag-based routing — enable tag-based routing at the key level
* fix(proxy/_types.py): fix inheritance
* test(test_key_generate_prisma.py): fix test
* test: fix test
* fix(litellm_logging.py): return used callback object
This commit is contained in:
parent
a451cfc2d6
commit
0295a22561
15 changed files with 673 additions and 96 deletions
|
@@ -5310,7 +5310,7 @@ def stream_chunk_builder(
|
|||
]
|
||||
|
||||
if len(tool_call_chunks) > 0:
|
||||
argument_list = []
|
||||
argument_list: List = []
|
||||
delta = tool_call_chunks[0]["choices"][0]["delta"]
|
||||
message = response["choices"][0]["message"]
|
||||
message["tool_calls"] = []
|
||||
|
@@ -5319,6 +5319,7 @@ def stream_chunk_builder(
|
|||
type = None
|
||||
tool_calls_list = []
|
||||
prev_index = None
|
||||
prev_name = None
|
||||
prev_id = None
|
||||
curr_id = None
|
||||
curr_index = 0
|
||||
|
@@ -5346,27 +5347,32 @@ def stream_chunk_builder(
|
|||
type = tool_calls[0].type
|
||||
if prev_index is None:
|
||||
prev_index = curr_index
|
||||
if prev_name is None:
|
||||
prev_name = name
|
||||
if curr_index != prev_index: # new tool call
|
||||
combined_arguments = "".join(argument_list)
|
||||
tool_calls_list.append(
|
||||
{
|
||||
"id": prev_id,
|
||||
"index": prev_index,
|
||||
"function": {"arguments": combined_arguments, "name": name},
|
||||
"function": {
|
||||
"arguments": combined_arguments,
|
||||
"name": prev_name,
|
||||
},
|
||||
"type": type,
|
||||
}
|
||||
)
|
||||
argument_list = [] # reset
|
||||
prev_index = curr_index
|
||||
prev_id = curr_id
|
||||
prev_name = name
|
||||
|
||||
combined_arguments = (
|
||||
"".join(argument_list) or "{}"
|
||||
) # base case, return empty dict
|
||||
|
||||
tool_calls_list.append(
|
||||
{
|
||||
"id": id,
|
||||
"index": curr_index,
|
||||
"function": {"arguments": combined_arguments, "name": name},
|
||||
"type": type,
|
||||
}
|
||||
|
@@ -5422,7 +5428,7 @@ def stream_chunk_builder(
|
|||
for choice in choices:
|
||||
delta = choice.get("delta", {})
|
||||
content = delta.get("content", "")
|
||||
if content == None:
|
||||
if content is None:
|
||||
continue # openai v1.0.0 sets content = None for chunks
|
||||
content_list.append(content)
|
||||
|
||||
|
|
Loading…
Add table
Add a link
Reference in a new issue