Mirror of https://github.com/BerriAI/litellm.git (synced 2025-04-26)

commit d6e142428b (parent eaa55ac6fd)
fix(openai-+-azure): async calling

2 changed files with 23 additions and 37 deletions

The commit drops the cached _client_session / _aclient_session attributes in AzureChatCompletion and OpenAIChatCompletion and calls httpx directly on each request instead.
@@ -105,8 +105,6 @@ class AzureChatCompletion(BaseLLM):
                 acompletion: bool = False,
                 headers: Optional[dict]=None):
         super().completion()
-        if self._client_session is None:
-            self._client_session = self.create_client_session()
         exception_mapping_worked = False
         try:
             if headers is None:
@@ -142,7 +140,7 @@ class AzureChatCompletion(BaseLLM):
             elif "stream" in optional_params and optional_params["stream"] == True:
                 return self.streaming(logging_obj=logging_obj, api_base=api_base, data=data, headers=headers, model_response=model_response, model=model)
             else:
-                response = self._client_session.post(
+                response = httpx.post(
                     url=api_base,
                     json=data,
                     headers=headers,
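The non-streaming path now calls the module-level httpx.post helper instead of posting through the cached self._client_session. A minimal sketch of that pattern, with hypothetical URL, headers, and payload standing in for what the handler actually receives:

    import httpx

    # Placeholder values; the real handler gets these from the caller.
    api_base = "https://example.openai.azure.com/openai/deployments/gpt-4/chat/completions?api-version=2023-05-15"
    headers = {"api-key": "<AZURE_API_KEY>"}
    data = {"messages": [{"role": "user", "content": "Hello"}]}

    # httpx.post opens a one-off connection, sends the request, and
    # closes it before returning the Response.
    response = httpx.post(url=api_base, json=data, headers=headers)
    response_json = response.json()
    if response.status_code != 200:
        raise Exception(response.text)  # placeholder error handling

The same substitution appears again below in OpenAIChatCompletion.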
@@ -159,10 +157,8 @@ class AzureChatCompletion(BaseLLM):
             raise e

     async def acompletion(self, api_base: str, data: dict, headers: dict, model_response: ModelResponse):
-        if self._aclient_session is None:
-            self._aclient_session = self.create_aclient_session()
-        client = self._aclient_session
         try:
+            async with httpx.AsyncClient() as client:
                 response = await client.post(api_base, json=data, headers=headers)
                 response_json = response.json()
                 if response.status_code != 200:
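The async completion replaces the cached _aclient_session with a short-lived httpx.AsyncClient opened per request; the async with block guarantees the client is closed even if the request raises. A self-contained sketch of the pattern (function name and values are hypothetical):

    import asyncio
    import httpx

    async def acompletion_sketch(api_base: str, data: dict, headers: dict) -> dict:
        # One client per call; the context manager closes its
        # connection pool on exit, error or not.
        async with httpx.AsyncClient() as client:
            response = await client.post(api_base, json=data, headers=headers)
            if response.status_code != 200:
                raise Exception(response.text)  # placeholder error handling
            return response.json()

    # asyncio.run(acompletion_sketch(
    #     "https://api.openai.com/v1/chat/completions",
    #     {"model": "gpt-3.5-turbo", "messages": [{"role": "user", "content": "Hi"}]},
    #     {"Authorization": "Bearer <OPENAI_API_KEY>"},
    # ))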
@@ -186,9 +182,7 @@ class AzureChatCompletion(BaseLLM):
                   model_response: ModelResponse,
                   model: str
     ):
-        if self._client_session is None:
-            self._client_session = self.create_client_session()
-        with self._client_session.stream(
+        with httpx.stream(
             url=f"{api_base}",
             json=data,
             headers=headers,
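Synchronous streaming likewise switches to the module-level httpx.stream context manager. Note that httpx.stream takes the HTTP method as its first argument; the hunk truncates before the remaining arguments, so the sketch below assumes "POST" and placeholder values:

    import httpx

    api_base = "https://api.openai.com/v1/chat/completions"  # placeholder
    headers = {"Authorization": "Bearer <OPENAI_API_KEY>"}    # placeholder
    data = {"model": "gpt-3.5-turbo", "messages": [], "stream": True}

    # The response is yielded before the body arrives, so chunks can
    # be consumed incrementally as the server sends them.
    with httpx.stream("POST", api_base, json=data, headers=headers) as response:
        for line in response.iter_lines():
            if line:  # skip blank keep-alive lines between SSE events
                print(line)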
@@ -209,9 +203,7 @@ class AzureChatCompletion(BaseLLM):
                    headers: dict,
                    model_response: ModelResponse,
                    model: str):
-        if self._aclient_session is None:
-            self._aclient_session = self.create_aclient_session()
-        client = self._aclient_session
+        client = httpx.AsyncClient()
         async with client.stream(
             url=f"{api_base}",
             json=data,
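The async streaming path constructs a bare httpx.AsyncClient() and enters client.stream(...) on it; unlike the acompletion path above, the client itself is not wrapped in async with, so nothing closes it explicitly. The sketch below adds an outer async with for cleanup, which the commit itself does not do, and again assumes "POST" plus placeholder values:

    import asyncio
    import httpx

    async def astreaming_sketch(api_base: str, data: dict, headers: dict):
        # Outer `async with` is added in this sketch so the client's pool
        # is closed once streaming finishes; the commit skips this step.
        async with httpx.AsyncClient() as client:
            async with client.stream("POST", api_base, json=data, headers=headers) as response:
                async for line in response.aiter_lines():
                    if line:
                        print(line)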
@@ -223,7 +223,7 @@ class OpenAIChatCompletion(BaseLLM):
             elif optional_params.get("stream", False):
                 return self.streaming(logging_obj=logging_obj, api_base=api_base, data=data, headers=headers, model_response=model_response, model=model)
             else:
-                response = self._client_session.post(
+                response = httpx.post(
                     url=api_base,
                     json=data,
                     headers=headers,
@@ -262,11 +262,8 @@ class OpenAIChatCompletion(BaseLLM):
                    api_base: str,
                    data: dict, headers: dict,
                    model_response: ModelResponse):
-        kwargs = locals()
-        if self._aclient_session is None:
-            self._aclient_session = self.create_aclient_session()
-        client = self._aclient_session
         try:
+            async with httpx.AsyncClient() as client:
                 response = await client.post(api_base, json=data, headers=headers)
                 response_json = response.json()
                 if response.status_code != 200:
@@ -290,9 +287,7 @@ class OpenAIChatCompletion(BaseLLM):
                   model_response: ModelResponse,
                   model: str
     ):
-        if self._client_session is None:
-            self._client_session = self.create_client_session()
-        with self._client_session.stream(
+        with httpx.stream(
             url=f"{api_base}", # type: ignore
             json=data,
             headers=headers,
@@ -313,9 +308,8 @@ class OpenAIChatCompletion(BaseLLM):
                    headers: dict,
                    model_response: ModelResponse,
                    model: str):
-        if self._aclient_session is None:
-            self._aclient_session = self.create_aclient_session()
-        async with self._aclient_session.stream(
+        client = httpx.AsyncClient()
+        async with client.stream(
             url=f"{api_base}",
             json=data,
             headers=headers,
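Taken together, both handlers end up stateless: every call builds its own httpx client, implicitly for httpx.post / httpx.stream and explicitly for the async paths, instead of reusing a session. What the per-call pattern gives up is connection pooling; a reused client keeps TCP/TLS connections open across calls, as this small illustration shows:

    import httpx

    # Per-call pattern (what the commit adopts): new connection each time.
    for _ in range(3):
        httpx.get("https://example.com")

    # Session pattern (what the commit removes): one pooled connection,
    # reused after the first handshake.
    with httpx.Client() as client:
        for _ in range(3):
            client.get("https://example.com")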