Mirror of https://github.com/BerriAI/litellm.git — synced 2025-04-27 03:34:10 +00:00
LiteLLM Minor Fixes & Improvements (01/08/2025) - p2 (#7643)
* fix(streaming_chunk_builder_utils.py): add test for groq tool calling + streaming + combine chunks. Addresses https://github.com/BerriAI/litellm/issues/7621
* fix(streaming_utils.py): fix ModelResponseIterator for OpenAI-like chunk parser; ensures the chunk parser uses the correct tool call id when translating the chunk. Fixes https://github.com/BerriAI/litellm/issues/7621
* build(model_hub.tsx): display cost pricing on model hub
* build(model_hub.tsx): show cost-per-token pricing + complete model information
* fix(types/utils.py): fix usage object handling
This commit is contained in:
parent 39ee4c6bb4
commit 1e3370f3cb
9 changed files with 206 additions and 21 deletions
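The second fix in the commit message ("ensures the chunk parser uses the correct tool call id") is easiest to see with a small sketch. The snippet below is illustrative only and is not LiteLLM's actual streaming_utils.py code: it translates a single provider tool-call delta from a streaming chunk into an OpenAI-style `tool_calls` entry, keeping the id the provider sent for that chunk instead of minting a new one. The function name and chunk shape here are assumptions for the example.

```python
from typing import Any, Dict, Optional


def translate_tool_call_delta(provider_tool_call: Dict[str, Any]) -> Dict[str, Any]:
    """Illustrative sketch only -- not LiteLLM's streaming_utils.py code.

    Translates one provider tool-call delta into an OpenAI-style tool_calls
    entry. The key detail: reuse the id the provider sent rather than
    generating a fresh one per chunk; a new id on every chunk makes the
    argument fragments impossible to stitch back into a single tool call.
    """
    function: Dict[str, Any] = provider_tool_call.get("function") or {}
    tool_call_id: Optional[str] = provider_tool_call.get("id")  # None on continuation chunks
    return {
        "index": provider_tool_call.get("index", 0),
        "id": tool_call_id,  # keep the provider's id, do not mint a new one
        "type": "function",
        "function": {
            "name": function.get("name"),  # usually only present in the first chunk
            "arguments": function.get("arguments", ""),  # JSON fragment for this chunk
        },
    }
```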
streaming_chunk_builder_utils.py

@@ -103,7 +103,8 @@ class ChunkProcessor:
     def get_combined_tool_content(
         self, tool_call_chunks: List[Dict[str, Any]]
     ) -> List[ChatCompletionMessageToolCall]:
-        argument_list: List = []
+        argument_list: List[str] = []
+        delta = tool_call_chunks[0]["choices"][0]["delta"]
         id = None
         name = None

@@ -171,6 +172,7 @@ class ChunkProcessor:
                 ),
             )
         )

         return tool_calls_list

     def get_combined_function_call_content(
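For context on what `get_combined_tool_content` is doing, here is a minimal, standalone sketch of the same idea. It is my own illustration under assumed OpenAI-style chunk shapes, not the LiteLLM ChunkProcessor implementation: tool-call deltas arrive spread across streaming chunks, and the builder concatenates the argument fragments per tool-call index while taking the id and name from the first chunk that carries them.

```python
from typing import Any, Dict, List


def combine_tool_call_chunks(tool_call_chunks: List[Dict[str, Any]]) -> List[Dict[str, Any]]:
    """Illustrative sketch only -- not the LiteLLM ChunkProcessor implementation.

    Merges streamed tool-call deltas (OpenAI chunk shape assumed) into complete
    tool calls: argument fragments are concatenated per tool-call index, and the
    id/name are taken from the first chunk that carries them.
    """
    combined: Dict[int, Dict[str, Any]] = {}
    for chunk in tool_call_chunks:
        delta = chunk["choices"][0]["delta"]
        for tc in delta.get("tool_calls") or []:
            idx = tc.get("index", 0)
            entry = combined.setdefault(
                idx,
                {"id": None, "type": "function", "function": {"name": None, "arguments": ""}},
            )
            # id and name typically arrive only once; keep the first non-empty value.
            if tc.get("id"):
                entry["id"] = tc["id"]
            fn = tc.get("function") or {}
            if fn.get("name"):
                entry["function"]["name"] = fn["name"]
            # Argument JSON streams in fragments; concatenate in arrival order.
            entry["function"]["arguments"] += fn.get("arguments") or ""
    return [combined[i] for i in sorted(combined)]
```

For example, two chunks whose index-0 deltas carry `{"id": "call_abc", "function": {"name": "get_weather", "arguments": "{\"city\":"}}` and `{"function": {"arguments": " \"Paris\"}"}}` would combine into one tool call with the complete JSON arguments.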