litellm-mirror/litellm/rerank_api/rerank_utils.py
Krish Dholakia 09462ba80c
Add cohere v2/rerank support (#8421) (#8605)
* Add cohere v2/rerank support (#8421)

* Support v2 endpoint cohere rerank

* Add tests and docs

* Make v1 default if old params used

* Update docs

* Update docs pt 2

* Update tests

* Add e2e test

* Clean up code

* Use inheritance for new config

* Fix linting issues (#8608)

* Fix cohere v2 failing test + linting (#8672)

* Fix test and unused imports

* Fix tests

* fix: fix linting errors

* test: handle tgai instability

* fix: skip service unavailable err

* test: print logs for unstable test

* test: skip unreliable tests

---------

Co-authored-by: vibhavbhat <vibhavb00@gmail.com>
2025-02-22 22:25:29 -08:00


from typing import Any, Dict, List, Optional, Union

from litellm.llms.base_llm.rerank.transformation import BaseRerankConfig
from litellm.types.rerank import OptionalRerankParams


def get_optional_rerank_params(
    rerank_provider_config: BaseRerankConfig,
    model: str,
    drop_params: bool,
    query: str,
    documents: List[Union[str, Dict[str, Any]]],
    custom_llm_provider: Optional[str] = None,
    top_n: Optional[int] = None,
    rank_fields: Optional[List[str]] = None,
    return_documents: Optional[bool] = True,
    max_chunks_per_doc: Optional[int] = None,
    max_tokens_per_doc: Optional[int] = None,
    non_default_params: Optional[dict] = None,
) -> OptionalRerankParams:
    # Collect every explicitly passed rerank argument into a single dict,
    # starting from any caller-supplied non_default_params.
    all_non_default_params = non_default_params or {}
    if query is not None:
        all_non_default_params["query"] = query
    if top_n is not None:
        all_non_default_params["top_n"] = top_n
    if documents is not None:
        all_non_default_params["documents"] = documents
    if return_documents is not None:
        all_non_default_params["return_documents"] = return_documents
    if max_chunks_per_doc is not None:
        all_non_default_params["max_chunks_per_doc"] = max_chunks_per_doc
    if max_tokens_per_doc is not None:
        all_non_default_params["max_tokens_per_doc"] = max_tokens_per_doc

    # Let the provider-specific config map the collected params onto the
    # request parameters its rerank endpoint actually supports.
    return rerank_provider_config.map_cohere_rerank_params(
        model=model,
        drop_params=drop_params,
        query=query,
        documents=documents,
        custom_llm_provider=custom_llm_provider,
        top_n=top_n,
        rank_fields=rank_fields,
        return_documents=return_documents,
        max_chunks_per_doc=max_chunks_per_doc,
        max_tokens_per_doc=max_tokens_per_doc,
        non_default_params=all_non_default_params,
    )
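
For context, a minimal sketch of how this helper might be called with one of litellm's Cohere rerank configs. The CohereRerankConfig import path, its no-argument constructor, and the example model name are assumptions for illustration, not taken from this file:

# Illustrative usage sketch (assumed import path and model name).
from litellm.llms.cohere.rerank.transformation import CohereRerankConfig  # assumed location
from litellm.rerank_api.rerank_utils import get_optional_rerank_params

optional_params = get_optional_rerank_params(
    rerank_provider_config=CohereRerankConfig(),
    model="rerank-english-v3.0",
    drop_params=False,
    query="What is the capital of France?",
    documents=["Paris is the capital of France.", "Berlin is in Germany."],
    top_n=1,
)
# optional_params is an OptionalRerankParams dict containing only the
# arguments the provider config kept, ready to build the request body.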