Mirror of https://github.com/BerriAI/litellm.git (synced 2025-04-26 03:04:13 +00:00)
fix(azure.py): fix raw response dump
This commit is contained in:
parent
18731cf42b
commit
63adb3f940
2 changed files with 6 additions and 4 deletions
|
@@ -1130,7 +1130,8 @@ class AzureChatCompletion(BaseLLM):
|
|||
else:
|
||||
azure_client = client
|
||||
## COMPLETION CALL
|
||||
response = azure_client.embeddings.with_raw_response.create(**data, timeout=timeout) # type: ignore
|
||||
raw_response = azure_client.embeddings.with_raw_response.create(**data, timeout=timeout) # type: ignore
|
||||
response = raw_response.parse()
|
||||
## LOGGING
|
||||
logging_obj.post_call(
|
||||
input=input,
|
||||
|
|
|
@@ -2126,7 +2126,8 @@ class OpenAITextCompletion(BaseLLM):
|
|||
openai_client = client
|
||||
|
||||
try:
|
||||
response = openai_client.completions.with_raw_response.create(**data)
|
||||
raw_response = openai_client.completions.with_raw_response.create(**data)
|
||||
response = raw_response.parse()
|
||||
except Exception as e:
|
||||
status_code = getattr(e, "status_code", 500)
|
||||
error_headers = getattr(e, "headers", None)
|
||||
|
@@ -2170,8 +2171,8 @@ class OpenAITextCompletion(BaseLLM):
|
|||
else:
|
||||
openai_client = client
|
||||
|
||||
response = await openai_client.completions.with_raw_response.create(**data)
|
||||
|
||||
raw_response = await openai_client.completions.with_raw_response.create(**data)
|
||||
response = raw_response.parse()
|
||||
streamwrapper = CustomStreamWrapper(
|
||||
completion_stream=response,
|
||||
model=model,
|
||||
|
|
Loading…
Add table
Add a link
Reference in a new issue