forked from phoenix/litellm-mirror
fix(azure.py): fix azure streaming logging
parent 3923c389fd
commit 7c2fad2d57
2 changed files with 7 additions and 1 deletion
@@ -11,7 +11,7 @@ class TraceloopLogger:
         try:
             tracer = self.tracer_wrapper.get_tracer()

             model = kwargs.get("model")

             # LiteLLM uses the standard OpenAI library, so it's already handled by Traceloop SDK
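For context, TraceloopLogger is one of litellm's callback integrations; because LiteLLM goes through the standard OpenAI client, the Traceloop SDK instruments those calls automatically once the callback is enabled. A minimal usage sketch, assuming litellm and the traceloop-sdk package are installed and an OpenAI API key is configured (the model name and message are placeholders):

import litellm

# Enable the Traceloop integration by its callback name; assumes
# traceloop-sdk is installed and OPENAI_API_KEY is set in the environment.
litellm.success_callback = ["traceloop"]

response = litellm.completion(
    model="gpt-3.5-turbo",  # placeholder model
    messages=[{"role": "user", "content": "hi"}],  # placeholder message
)
print(response)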
@@ -294,6 +294,12 @@ class AzureChatCompletion(BaseLLM):
                 azure_client = AzureOpenAI(**azure_client_params)
             else:
                 azure_client = client
+            ## LOGGING
+            logging_obj.pre_call(
+                input=data['messages'],
+                api_key=azure_client.api_key,
+                additional_args={"headers": {"Authorization": f"Bearer {azure_client.api_key}"}, "api_base": azure_client._base_url._uri_reference, "acompletion": True, "complete_input_dict": data},
+            )
             response = azure_client.chat.completions.create(**data)
             streamwrapper = CustomStreamWrapper(completion_stream=response, model=model, custom_llm_provider="azure",logging_obj=logging_obj)
             return streamwrapper
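The six added lines fire the logging object's pre_call hook before the streaming request is created, so Azure streaming completions record the same request metadata (messages, API base, auth header, and the complete input dict) that non-streaming calls already log. Below is a minimal sketch of that hook's contract; StubLogging is a hypothetical stand-in that only mirrors the pre_call(input=..., api_key=..., additional_args=...) call site from the diff, not litellm's actual Logging class.

# Hypothetical stand-in for the logging object used above; only the
# pre_call signature mirrors the call site in the diff.
class StubLogging:
    def pre_call(self, input, api_key, additional_args=None):
        # Record the request payload and target endpoint before the HTTP
        # call is made, so failures mid-stream are still traceable.
        print("PRE-CALL input:", input)
        print("PRE-CALL api_base:", (additional_args or {}).get("api_base"))


# Usage mirroring the diff; the data dict and api_key are placeholders.
logging_obj = StubLogging()
data = {
    "model": "azure/gpt-deployment",
    "messages": [{"role": "user", "content": "hi"}],
    "stream": True,
}
logging_obj.pre_call(
    input=data["messages"],
    api_key="<azure-api-key>",
    additional_args={"acompletion": True, "complete_input_dict": data},
)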