(fix) proxy: bug with non-OpenAI LLMs

ishaan-jaff 2023-12-05 09:05:53 -08:00
parent 943bf53b0b
commit 13261287ec


@@ -1011,14 +1011,14 @@ class Router:
         """
         if client_type == "async":
             if kwargs.get("stream") == True:
-                return deployment["stream_async_client"]
+                return deployment.get("stream_async_client", None)
             else:
-                return deployment["async_client"]
+                return deployment.get("async_client", None)
         else:
             if kwargs.get("stream") == True:
-                return deployment["stream_client"]
+                return deployment.get("stream_client", None)
             else:
-                return deployment["client"]
+                return deployment.get("client", None)
 
     def print_verbose(self, print_statement):
         if self.set_verbose or litellm.set_verbose:
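
For context, a minimal sketch of the failure mode this commit guards against, assuming a deployment entry for a non-OpenAI model that never had cached client objects attached (the deployment dict shape is illustrative, not the exact Router internals):

# Hypothetical deployment entry for a non-OpenAI model; the cached
# client keys were never populated, so they are simply absent.
deployment = {"model_name": "ollama/llama2"}

# Before this commit: direct indexing raised KeyError for such deployments.
# client = deployment["async_client"]  # KeyError: 'async_client'

# After this commit: dict.get() returns None instead, so the caller can
# detect the missing client and fall back to creating one per-request.
client = deployment.get("async_client", None)
assert client is None

Returning None pushes the "no cached client" decision up to the caller instead of crashing the proxy request for non-OpenAI LLMs.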