add async support for rerank

This commit is contained in:
Ishaan Jaff 2024-08-27 17:02:48 -07:00
parent f33dfe0b95
commit b3892b871d
4 changed files with 130 additions and 30 deletions

View file

@ -28,9 +28,8 @@ class CohereRerank(BaseLLM):
rank_fields: Optional[List[str]] = None, rank_fields: Optional[List[str]] = None,
return_documents: Optional[bool] = True, return_documents: Optional[bool] = True,
max_chunks_per_doc: Optional[int] = None, max_chunks_per_doc: Optional[int] = None,
_is_async: Optional[bool] = False, # New parameter
) -> RerankResponse: ) -> RerankResponse:
client = _get_httpx_client()
request_data = RerankRequest( request_data = RerankRequest(
model=model, model=model,
query=query, query=query,
@ -43,6 +42,10 @@ class CohereRerank(BaseLLM):
request_data_dict = request_data.dict(exclude_none=True) request_data_dict = request_data.dict(exclude_none=True)
if _is_async:
return self.async_rerank(request_data_dict, api_key) # type: ignore # Call async method
client = _get_httpx_client()
response = client.post( response = client.post(
"https://api.cohere.com/v1/rerank", "https://api.cohere.com/v1/rerank",
headers={ headers={
@ -55,4 +58,21 @@ class CohereRerank(BaseLLM):
return RerankResponse(**response.json()) return RerankResponse(**response.json())
async def async_rerank(
    self,
    request_data_dict: Dict[str, Any],
    api_key: str,
) -> RerankResponse:
    """Async variant of rerank: POST the prepared request to Cohere's
    /v1/rerank endpoint and parse the JSON body into a RerankResponse.

    Args:
        request_data_dict: RerankRequest already serialized via
            ``.dict(exclude_none=True)`` by the sync entry point.
        api_key: Cohere API key, sent as a bearer token.

    Raises:
        Exception: if the API responds with a non-200 status
            (mirrors the TogetherAI implementation in this commit).
    """
    client = _get_async_httpx_client()
    response = await client.post(
        "https://api.cohere.com/v1/rerank",
        headers={
            "accept": "application/json",
            "content-type": "application/json",
            "Authorization": f"bearer {api_key}",
        },
        json=request_data_dict,
    )
    # Surface API errors explicitly instead of feeding an error body
    # to RerankResponse(**...), which would fail with a confusing
    # validation error. Matches the TogetherAI sibling's handling.
    if response.status_code != 200:
        raise Exception(response.text)
    return RerankResponse(**response.json())

View file

@ -28,6 +28,7 @@ class TogetherAIRerank(BaseLLM):
rank_fields: Optional[List[str]] = None, rank_fields: Optional[List[str]] = None,
return_documents: Optional[bool] = True, return_documents: Optional[bool] = True,
max_chunks_per_doc: Optional[int] = None, max_chunks_per_doc: Optional[int] = None,
_is_async: Optional[bool] = False,
) -> RerankResponse: ) -> RerankResponse:
client = _get_httpx_client() client = _get_httpx_client()
@ -45,6 +46,9 @@ class TogetherAIRerank(BaseLLM):
if max_chunks_per_doc is not None: if max_chunks_per_doc is not None:
raise ValueError("TogetherAI does not support max_chunks_per_doc") raise ValueError("TogetherAI does not support max_chunks_per_doc")
if _is_async:
return self.async_rerank(request_data_dict, api_key) # type: ignore # Call async method
response = client.post( response = client.post(
"https://api.together.xyz/v1/rerank", "https://api.together.xyz/v1/rerank",
headers={ headers={
@ -68,4 +72,32 @@ class TogetherAIRerank(BaseLLM):
return response return response
async def async_rerank(
    self,
    request_data_dict: Dict[str, Any],
    api_key: str,
) -> RerankResponse:
    """Async variant of rerank: POST the prepared request to TogetherAI's
    /v1/rerank endpoint and build a RerankResponse from the JSON body.

    Args:
        request_data_dict: request payload already serialized by the
            sync entry point.
        api_key: TogetherAI API key, sent as a Bearer token.

    Raises:
        Exception: if the API responds with a non-200 status.
    """
    request_headers = {
        "accept": "application/json",
        "content-type": "application/json",
        "authorization": f"Bearer {api_key}",
    }
    async_client = _get_async_httpx_client()
    response = await async_client.post(
        "https://api.together.xyz/v1/rerank",
        headers=request_headers,
        json=request_data_dict,
    )
    if response.status_code != 200:
        raise Exception(response.text)
    payload = response.json()
    # meta may be absent/None in the API response; normalize to {}.
    return RerankResponse(
        id=payload.get("id"),
        results=payload.get("results"),
        meta=payload.get("meta") or {},
    )
pass pass

View file

@ -23,11 +23,14 @@ together_rerank = TogetherAIRerank()
async def arerank( async def arerank(
model: str, model: str,
query: str, query: str,
documents: List[str], documents: List[Union[str, Dict[str, Any]]],
custom_llm_provider: Literal["cohere", "together_ai"] = "cohere", custom_llm_provider: Optional[Literal["cohere", "together_ai"]] = None,
top_n: int = 3, top_n: Optional[int] = None,
rank_fields: Optional[List[str]] = None,
return_documents: Optional[bool] = True,
max_chunks_per_doc: Optional[int] = None,
**kwargs, **kwargs,
) -> Dict[str, Any]: ) -> Union[RerankResponse, Coroutine[Any, Any, RerankResponse]]:
""" """
Async: Reranks a list of documents based on their relevance to the query Async: Reranks a list of documents based on their relevance to the query
""" """
@ -36,7 +39,16 @@ async def arerank(
kwargs["arerank"] = True kwargs["arerank"] = True
func = partial( func = partial(
rerank, model, query, documents, custom_llm_provider, top_n, **kwargs rerank,
model,
query,
documents,
custom_llm_provider,
top_n,
rank_fields,
return_documents,
max_chunks_per_doc,
**kwargs,
) )
ctx = contextvars.copy_context() ctx = contextvars.copy_context()
@ -114,6 +126,7 @@ def rerank(
return_documents=return_documents, return_documents=return_documents,
max_chunks_per_doc=max_chunks_per_doc, max_chunks_per_doc=max_chunks_per_doc,
api_key=cohere_key, api_key=cohere_key,
_is_async=_is_async,
) )
pass pass
elif _custom_llm_provider == "together_ai": elif _custom_llm_provider == "together_ai":
@ -140,6 +153,7 @@ def rerank(
return_documents=return_documents, return_documents=return_documents,
max_chunks_per_doc=max_chunks_per_doc, max_chunks_per_doc=max_chunks_per_doc,
api_key=together_key, api_key=together_key,
_is_async=_is_async,
) )
else: else:

View file

@ -61,7 +61,10 @@ def assert_response_shape(response, custom_llm_provider):
) )
def test_basic_rerank(): @pytest.mark.asyncio()
@pytest.mark.parametrize("sync_mode", [True, False])
async def test_basic_rerank(sync_mode):
if sync_mode is True:
response = litellm.rerank( response = litellm.rerank(
model="cohere/rerank-english-v3.0", model="cohere/rerank-english-v3.0",
query="hello", query="hello",
@ -75,9 +78,26 @@ def test_basic_rerank():
assert response.results is not None assert response.results is not None
assert_response_shape(response, custom_llm_provider="cohere") assert_response_shape(response, custom_llm_provider="cohere")
else:
response = await litellm.arerank(
model="cohere/rerank-english-v3.0",
query="hello",
documents=["hello", "world"],
top_n=3,
)
print("async re rank response: ", response)
assert response.id is not None
assert response.results is not None
assert_response_shape(response, custom_llm_provider="cohere")
def test_basic_rerank_together_ai(): @pytest.mark.asyncio()
@pytest.mark.parametrize("sync_mode", [True, False])
async def test_basic_rerank_together_ai(sync_mode):
if sync_mode is True:
response = litellm.rerank( response = litellm.rerank(
model="together_ai/Salesforce/Llama-Rank-V1", model="together_ai/Salesforce/Llama-Rank-V1",
query="hello", query="hello",
@ -90,4 +110,18 @@ def test_basic_rerank_together_ai():
assert response.id is not None assert response.id is not None
assert response.results is not None assert response.results is not None
assert_response_shape(response, custom_llm_provider="together_ai")
else:
response = await litellm.arerank(
model="together_ai/Salesforce/Llama-Rank-V1",
query="hello",
documents=["hello", "world"],
top_n=3,
)
print("async re rank response: ", response)
assert response.id is not None
assert response.results is not None
assert_response_shape(response, custom_llm_provider="together_ai") assert_response_shape(response, custom_llm_provider="together_ai")