diff --git a/litellm/integrations/traceloop.py b/litellm/integrations/traceloop.py
index 33b9c92da..019f77771 100644
--- a/litellm/integrations/traceloop.py
+++ b/litellm/integrations/traceloop.py
@@ -11,7 +11,7 @@ class TraceloopLogger:
 
         try:
             tracer = self.tracer_wrapper.get_tracer()
-
+            model = kwargs.get("model")
 
 
             # LiteLLM uses the standard OpenAI library, so it's already handled by Traceloop SDK
diff --git a/litellm/llms/azure.py b/litellm/llms/azure.py
index 70036ae74..2be62ebc1 100644
--- a/litellm/llms/azure.py
+++ b/litellm/llms/azure.py
@@ -294,6 +294,12 @@ class AzureChatCompletion(BaseLLM):
             azure_client = AzureOpenAI(**azure_client_params)
         else:
             azure_client = client
+        ## LOGGING
+        logging_obj.pre_call(
+            input=data['messages'],
+            api_key=azure_client.api_key,
+            additional_args={"headers": {"Authorization": f"Bearer {azure_client.api_key}"}, "api_base": azure_client._base_url._uri_reference, "acompletion": True, "complete_input_dict": data},
+        )
         response = azure_client.chat.completions.create(**data)
         streamwrapper = CustomStreamWrapper(completion_stream=response, model=model, custom_llm_provider="azure",logging_obj=logging_obj)
         return streamwrapper