diff --git a/litellm/llms/cohere_chat.py b/litellm/llms/cohere_chat.py
index c51ef8ded..adf5a0a79 100644
--- a/litellm/llms/cohere_chat.py
+++ b/litellm/llms/cohere_chat.py
@@ -43,6 +43,7 @@ class CohereChatConfig:
         presence_penalty (float, optional): Used to reduce repetitiveness of generated tokens.
         tools (List[Dict[str, str]], optional): A list of available tools (functions) that the model may suggest invoking.
         tool_results (List[Dict[str, Any]], optional): A list of results from invoking tools.
+        seed (int, optional): A seed to assist reproducibility of the model's response.
     """
 
     preamble: Optional[str] = None
@@ -62,6 +63,7 @@ class CohereChatConfig:
     presence_penalty: Optional[int] = None
     tools: Optional[list] = None
     tool_results: Optional[list] = None
+    seed: Optional[int] = None
 
     def __init__(
         self,
@@ -82,6 +84,7 @@ class CohereChatConfig:
         presence_penalty: Optional[int] = None,
         tools: Optional[list] = None,
         tool_results: Optional[list] = None,
+        seed: Optional[int] = None,
     ) -> None:
         locals_ = locals()
         for key, value in locals_.items():
diff --git a/litellm/utils.py b/litellm/utils.py
index 3dc110e85..a8abb0b1a 100644
--- a/litellm/utils.py
+++ b/litellm/utils.py
@@ -4739,6 +4739,8 @@ def get_optional_params(
             optional_params["stop_sequences"] = stop
         if tools is not None:
             optional_params["tools"] = tools
+        if seed is not None:
+            optional_params["seed"] = seed
     elif custom_llm_provider == "maritalk":
         ## check if unsupported param passed in
         supported_params = get_supported_openai_params(
@@ -5517,6 +5519,7 @@ def get_supported_openai_params(model: str, custom_llm_provider: str):
            "n",
            "tools",
            "tool_choice",
+           "seed",
        ]
    elif custom_llm_provider == "maritalk":
        return [