Mirror of https://github.com/BerriAI/litellm.git (synced 2025-04-26 11:14:04 +00:00)
fix(anthropic.py): add more logging

parent d620b4dc5d · commit 55612a6a5f

1 changed file with 6 additions and 8 deletions
anthropic.py

@@ -271,17 +271,14 @@ def completion(
             streaming_model_response.choices[0].finish_reason = model_response.choices[
                 0
             ].finish_reason
             # streaming_model_response.choices = [litellm.utils.StreamingChoices()]
-            streaming_model_response.choices[0].index = model_response.choices[0].index
+            streaming_choice = litellm.utils.StreamingChoices()
+            streaming_choice.index = model_response.choices[0].index
             _tool_calls = []
             print_verbose(
                 f"type of model_response.choices[0]: {type(model_response.choices[0])}"
             )
-            print_verbose(
-                f"type of streaming_model_response.choices[0]: {type(streaming_model_response.choices[0])}"
-            )
-            if isinstance(model_response.choices[0], litellm.Choices) and isinstance(
-                streaming_model_response.choices[0], litellm.utils.StreamingChoices
-            ):
+            print_verbose(f"type of streaming_choice: {type(streaming_choice)}")
+            if isinstance(model_response.choices[0], litellm.Choices):
                 if getattr(
                     model_response.choices[0].message, "tool_calls", None
                 ) is not None and isinstance(
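The print_verbose calls in this hunk follow litellm's usual debug-logging pattern: a helper that only prints when verbose mode is switched on. A minimal sketch of that pattern, assuming a simple module-level flag standing in for litellm's real global verbosity setting:

# Hedged stand-in for litellm's print_verbose helper; the real one lives in
# litellm.utils and is gated on a global verbosity flag, not this local one.
verbose = True

def print_verbose(print_statement: str) -> None:
    # Emit debug output only when verbose logging is enabled.
    if verbose:
        print(print_statement)

streaming_choice = object()  # placeholder for litellm.utils.StreamingChoices()
print_verbose(f"type of streaming_choice: {type(streaming_choice)}")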
@@ -295,7 +292,8 @@ def completion(
                         role=model_response.choices[0].message.role,
                         tool_calls=_tool_calls,
                     )
-                    streaming_model_response.choices[0].delta = delta_obj
+                    streaming_choice.delta = delta_obj
+                    streaming_model_response.choices = [streaming_choice]
             completion_stream = model_response_iterator(
                 model_response=streaming_model_response
             )
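Taken together, the two hunks convert a completed (non-streaming) tool-call response into a one-chunk stream: a fresh StreamingChoices carries the index and a Delta with the role and tool calls, and it is attached as the response's choices list before being wrapped by model_response_iterator. A self-contained sketch of that flow, using stand-in dataclasses rather than litellm's real types (all class names and fields below are assumptions for illustration only):

from dataclasses import dataclass, field
from typing import Any, Iterator, List, Optional


@dataclass
class Delta:
    role: Optional[str] = None
    tool_calls: Optional[List[Any]] = None


@dataclass
class StreamingChoices:
    index: int = 0
    finish_reason: Optional[str] = None
    delta: Delta = field(default_factory=Delta)


@dataclass
class StreamingResponse:
    choices: List[StreamingChoices] = field(default_factory=list)


def model_response_iterator(model_response: StreamingResponse) -> Iterator[StreamingResponse]:
    # The commit wraps one fully-built response as a single-chunk stream;
    # a generator that yields it exactly once is the simplest equivalent.
    yield model_response


# Build the single streaming chunk the way the commit now does: construct a
# fresh streaming choice, populate it, then assign it as the choices list.
streaming_choice = StreamingChoices()
streaming_choice.index = 0
streaming_choice.finish_reason = "tool_calls"
streaming_choice.delta = Delta(role="assistant", tool_calls=[{"id": "call_1"}])

streaming_model_response = StreamingResponse()
streaming_model_response.choices = [streaming_choice]

completion_stream = model_response_iterator(model_response=streaming_model_response)
for chunk in completion_stream:
    print(chunk.choices[0].delta)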