Mirror of https://github.com/BerriAI/litellm.git, synced 2025-04-26 03:04:13 +00:00
test(utils.py): handle scenario where text tokens + reasoning tokens … (#10165)
* test(utils.py): handle scenario where text tokens + reasoning tokens set, but reasoning tokens not charged separately

  Addresses https://github.com/BerriAI/litellm/pull/10141#discussion_r2051555332

* fix(vertex_and_google_ai_studio.py): only set content if non-empty str
This commit is contained in:
parent 99db1b7690
commit 03b5399f86

5 changed files with 90 additions and 49 deletions
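The first change concerns usage accounting: some providers report reasoning tokens but fold them into the completion token count rather than charging them separately, so the text-token figure has to be derived instead of double counted. A minimal sketch of that idea, assuming a simplified shape (the helper split_completion_tokens and its signature are illustrative, not litellm's API):

from typing import Optional


def split_completion_tokens(
    completion_tokens: int, reasoning_tokens: Optional[int]
) -> int:
    """Illustrative helper (not litellm's API): derive the text-only token
    count when reasoning tokens are reported as part of completion_tokens
    rather than charged separately."""
    if not reasoning_tokens:
        return completion_tokens
    # Reasoning tokens are already included in completion_tokens, so the
    # text portion is whatever remains (clamped at zero for safety).
    return max(completion_tokens - reasoning_tokens, 0)


# Example: 100 completion tokens, of which 40 were reasoning tokens.
assert split_completion_tokens(100, 40) == 60
assert split_completion_tokens(100, None) == 100

The second change is the diff shown below, which only appends a part's string to the accumulated content when it is non-empty.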
@@ -587,14 +587,15 @@ class VertexGeminiConfig(VertexAIBaseConfig, BaseConfig):
                     _content_str += "data:{};base64,{}".format(
                         part["inlineData"]["mimeType"], part["inlineData"]["data"]
                     )
-                if part.get("thought") is True:
-                    if reasoning_content_str is None:
-                        reasoning_content_str = ""
-                    reasoning_content_str += _content_str
-                else:
-                    if content_str is None:
-                        content_str = ""
-                    content_str += _content_str
+                if len(_content_str) > 0:
+                    if part.get("thought") is True:
+                        if reasoning_content_str is None:
+                            reasoning_content_str = ""
+                        reasoning_content_str += _content_str
+                    else:
+                        if content_str is None:
+                            content_str = ""
+                        content_str += _content_str

         return content_str, reasoning_content_str
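The diffed block belongs to the Gemini response parsing that splits candidate parts into regular content and reasoning ("thought") content; the new guard keeps an empty part string from turning a None field into "". A self-contained sketch of the resulting behaviour, assuming a simplified dict shape for parts (the standalone function name split_parts is illustrative):

from typing import List, Optional, Tuple


def split_parts(parts: List[dict]) -> Tuple[Optional[str], Optional[str]]:
    """Sketch of the diffed logic: accumulate 'thought' parts into
    reasoning_content_str and all other parts into content_str, but only
    when the extracted string is non-empty."""
    content_str: Optional[str] = None
    reasoning_content_str: Optional[str] = None

    for part in parts:
        _content_str = ""
        if "text" in part:
            _content_str += part["text"]
        elif "inlineData" in part:  # base64 payloads become data: URIs
            _content_str += "data:{};base64,{}".format(
                part["inlineData"]["mimeType"], part["inlineData"]["data"]
            )

        # The fix: an empty part no longer turns a None field into "".
        if len(_content_str) > 0:
            if part.get("thought") is True:
                if reasoning_content_str is None:
                    reasoning_content_str = ""
                reasoning_content_str += _content_str
            else:
                if content_str is None:
                    content_str = ""
                content_str += _content_str

    return content_str, reasoning_content_str


# With the guard, a thought-only response leaves content_str as None
# instead of "", which downstream code can tell apart from an empty reply.
assert split_parts([{"text": "why", "thought": True}]) == (None, "why")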