mirror of https://github.com/BerriAI/litellm.git, synced 2025-04-26 11:14:04 +00:00
build(pyproject.toml): add new dev dependencies - for type checking (#9631)
* build(pyproject.toml): add new dev dependencies - for type checking
* build: reformat files to fit black
* ci: reformat to fit black
* ci(test-litellm.yml): make tests run clear
* build(pyproject.toml): add ruff
* fix: fix ruff checks
* build(mypy/): fix mypy linting errors
* fix(hashicorp_secret_manager.py): fix passing cert for tls auth
* build(mypy/): resolve all mypy errors
* test: update test
* fix: fix black formatting
* build(pre-commit-config.yaml): use poetry run black
* fix(proxy_server.py): fix linting error
* fix: fix ruff safe representation error
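The MLflow hunk shown further down makes one pattern behind the mypy bullets concrete: an optional third-party helper that ships without type stubs is imported one symbol per line with a # type: ignore marker, inside a try/except so the integration degrades gracefully when the extra package is not installed. A minimal sketch of that pattern, assuming a hypothetical emit_chat_trace helper rather than code from this repository:

    def emit_chat_trace(span, messages):
        # One import per line so mypy can be silenced per symbol rather than per statement.
        try:
            from mlflow.tracing.utils import set_span_chat_messages  # type: ignore
        except ImportError:
            # mlflow is an optional dependency; skip tracing entirely if it is missing.
            return
        set_span_chat_messages(span, messages)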
parent 72198737f8
commit d7b294dd0a
214 changed files with 1553 additions and 1433 deletions
@@ -48,14 +48,17 @@ class MlflowLogger(CustomLogger):
     def _extract_and_set_chat_attributes(self, span, kwargs, response_obj):
         try:
-            from mlflow.tracing.utils import set_span_chat_messages, set_span_chat_tools
+            from mlflow.tracing.utils import set_span_chat_messages  # type: ignore
+            from mlflow.tracing.utils import set_span_chat_tools  # type: ignore
         except ImportError:
             return
 
         inputs = self._construct_input(kwargs)
         input_messages = inputs.get("messages", [])
-        output_messages = [c.message.model_dump(exclude_none=True)
-                           for c in getattr(response_obj, "choices", [])]
+        output_messages = [
+            c.message.model_dump(exclude_none=True)
+            for c in getattr(response_obj, "choices", [])
+        ]
         if messages := [*input_messages, *output_messages]:
             set_span_chat_messages(span, messages)
         if tools := inputs.get("tools"):
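The second hunk is a pure black reformat: the list comprehension keeps its behaviour and is only split across lines. As a quick illustration of what it produces, here is a self-contained sketch; the Message, Choice, and Response classes are hypothetical stand-ins for litellm's response objects, not types from the repository:

    from typing import List, Optional
    from pydantic import BaseModel

    # Hypothetical stand-ins, just enough to run the comprehension from the diff.
    class Message(BaseModel):
        role: str
        content: str
        tool_calls: Optional[list] = None  # stays None, so exclude_none drops it

    class Choice(BaseModel):
        message: Message

    class Response(BaseModel):
        choices: List[Choice]

    response_obj = Response(
        choices=[Choice(message=Message(role="assistant", content="hi"))]
    )

    output_messages = [
        c.message.model_dump(exclude_none=True)  # pydantic v2: dict without None fields
        for c in getattr(response_obj, "choices", [])
    ]
    print(output_messages)  # [{'role': 'assistant', 'content': 'hi'}]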