Merge pull request #826 from rodneyxr/ollama-fixes

Fix the typo for initial_prompt_value and fix the "too many values to unpack" error
This commit is contained in:
Ishaan Jaff 2023-11-16 07:55:53 -08:00 committed by GitHub
commit 3f95fd2da5
No known key found for this signature in database
GPG key ID: 4AEE18F83AFDEB23
2 changed files with 2 additions and 2 deletions

View file

@@ -322,7 +322,7 @@ def completion(
     max_retries = kwargs.get("max_retries", None)
     context_window_fallback_dict = kwargs.get("context_window_fallback_dict", None)
     ### CUSTOM PROMPT TEMPLATE ###
-    initial_prompt_value = kwargs.get("intial_prompt_value", None)
+    initial_prompt_value = kwargs.get("initial_prompt_value", None)
     roles = kwargs.get("roles", None)
     final_prompt_value = kwargs.get("final_prompt_value", None)
     bos_token = kwargs.get("bos_token", None)

View file

@@ -3159,7 +3159,7 @@ def register_prompt_template(model: str, roles: dict, initial_prompt_value: str
     )
     ```
     """
-    model, _ = get_llm_provider(model=model)
+    model = get_llm_provider(model=model)[0]
     litellm.custom_prompt_dict[model] = {
         "roles": roles,
         "initial_prompt_value": initial_prompt_value,