forked from phoenix/litellm-mirror
fix(utils.py): fix codestral streaming
This commit is contained in:
parent
75fba18c9f
commit
fe7e68adc8
2 changed files with 5 additions and 2 deletions
|
@@ -45,7 +45,10 @@ def cost_router(
         - str, the specific google cost calc function it should route to.
     """
     if custom_llm_provider == "vertex_ai" and (
-        "claude" in model or "llama" in model or "mistral" in model
+        "claude" in model
+        or "llama" in model
+        or "mistral" in model
+        or "codestral" in model
     ):
         return "cost_per_token"
     elif custom_llm_provider == "gemini":
|
@@ -9711,7 +9711,7 @@ class CustomStreamWrapper:
                     print_verbose(f"completion obj content: {completion_obj['content']}")
                     if response_obj["is_finished"]:
                         self.received_finish_reason = response_obj["finish_reason"]
-                    if response_obj["usage"] is not None:
+                    if "usage" in response_obj is not None:
                         model_response.usage = litellm.Usage(
                             prompt_tokens=response_obj["usage"].prompt_tokens,
                             completion_tokens=response_obj["usage"].completion_tokens,
|
|
Loading…
Add table
Add a link
Reference in a new issue