From 9780efca4b18aef1562b84d505f5d931784bbdf3 Mon Sep 17 00:00:00 2001
From: ishaan-jaff
Date: Wed, 29 Nov 2023 19:45:03 -0800
Subject: [PATCH] (feat) router: async client Azure, OpenAI

---
 litellm/router.py | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/litellm/router.py b/litellm/router.py
index a10f300f2..fab7e58c2 100644
--- a/litellm/router.py
+++ b/litellm/router.py
@@ -328,7 +328,7 @@ class Router:
             data["model"] = original_model_string[:index_of_model_id]
         else:
             data["model"] = original_model_string
-        model_client = deployment.get("client", None)
+        model_client = deployment.get("async_client", None)
         return await litellm.aembedding(**{**data, "input": input, "caching": self.cache_responses, "client": model_client, **kwargs})
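
For context, below is a minimal, hypothetical sketch (not the actual Router implementation) of what this one-line change does: the async embedding path now pulls a pre-built async client from the deployment entry instead of the sync one, so the awaited litellm.aembedding call receives a client it can actually await on. The deployment shape and the client construction arguments here are illustrative assumptions.

# Hypothetical illustration only -- not the litellm Router code.
import asyncio

import litellm
from openai import AsyncAzureOpenAI  # async Azure client in openai>=1.0

# Assumed deployment shape: the router caches a pre-initialized async client
# per deployment so it is not rebuilt on every request.
deployment = {
    "litellm_params": {"model": "azure/text-embedding-ada-002"},
    "async_client": AsyncAzureOpenAI(
        api_key="my-azure-key",                  # placeholder credentials
        api_version="2023-07-01-preview",
        azure_endpoint="https://my-endpoint.openai.azure.com",
    ),
}

async def aembedding_sketch(input_text: str):
    data = deployment["litellm_params"].copy()
    # The change in this patch: the async path reads "async_client" rather than
    # "client", so a synchronous client is not handed to an awaited call path.
    model_client = deployment.get("async_client", None)
    return await litellm.aembedding(**data, input=input_text, client=model_client)

# Example usage (requires valid Azure credentials):
# asyncio.run(aembedding_sketch("hello world"))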