Mirror of https://github.com/BerriAI/litellm.git, synced 2025-04-26 19:24:27 +00:00
fix(ollama_chat.py): fix ollama chat completion token counting
commit d189e95045 (parent 3b977679f8)
2 changed files with 6 additions and 5 deletions
@@ -983,9 +983,6 @@ class Logging:
        verbose_logger.debug(
            f"RAW RESPONSE:\n{self.model_call_details.get('original_response', self.model_call_details)}\n\n"
        )
        verbose_logger.debug(
            f"Logging Details Post-API Call: LiteLLM Params: {self.model_call_details}"
        )
        if self.logger_fn and callable(self.logger_fn):
            try:
                self.logger_fn(
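The commit title refers to token counting for Ollama chat completions, while the hunk above only shows logging context from the Logging class. As background, here is a minimal sketch of how usage counts can be derived from an Ollama /api/chat response, assuming the standard prompt_eval_count and eval_count fields that Ollama returns; the function name usage_from_ollama_chat is hypothetical and this is not litellm's actual implementation of the fix.

# Hypothetical sketch, not litellm code: build OpenAI-style usage numbers
# from an Ollama /api/chat JSON response. Assumes the response carries
# "prompt_eval_count" (prompt tokens) and "eval_count" (completion tokens).
from typing import Any, Dict


def usage_from_ollama_chat(response_json: Dict[str, Any]) -> Dict[str, int]:
    # Missing fields default to 0 (e.g. partial/streaming chunks may omit them).
    prompt_tokens = int(response_json.get("prompt_eval_count", 0))
    completion_tokens = int(response_json.get("eval_count", 0))
    return {
        "prompt_tokens": prompt_tokens,
        "completion_tokens": completion_tokens,
        "total_tokens": prompt_tokens + completion_tokens,
    }


# Example with a truncated Ollama-style response:
example = {"prompt_eval_count": 26, "eval_count": 42, "message": {"role": "assistant", "content": "..."}}
print(usage_from_ollama_chat(example))  # {'prompt_tokens': 26, 'completion_tokens': 42, 'total_tokens': 68}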