forked from phoenix/litellm-mirror
fix(openai.py): adding support for exception mapping for openai-compatible apis via http calls
This commit is contained in:
parent
81becfa2ec
commit
91c8e92e71
8 changed files with 4943 additions and 32 deletions
|
@ -445,7 +445,7 @@ def completion(
|
|||
raise e
|
||||
|
||||
if "stream" in optional_params and optional_params["stream"] == True:
|
||||
response = CustomStreamWrapper(response, model, custom_llm_provider="openai", logging_obj=logging)
|
||||
response = CustomStreamWrapper(response, model, custom_llm_provider=custom_llm_provider, logging_obj=logging)
|
||||
return response
|
||||
## LOGGING
|
||||
logging.post_call(
|
||||
|
|
Loading…
Add table
Add a link
Reference in a new issue