forked from phoenix/litellm-mirror
Merge pull request #826 from rodneyxr/ollama-fixes
Fix typo in `initial_prompt_value` and resolve the "too many values to unpack" error
This commit is contained in:
commit
3f95fd2da5
2 changed files with 2 additions and 2 deletions
|
@ -322,7 +322,7 @@ def completion(
|
||||||
max_retries = kwargs.get("max_retries", None)
|
max_retries = kwargs.get("max_retries", None)
|
||||||
context_window_fallback_dict = kwargs.get("context_window_fallback_dict", None)
|
context_window_fallback_dict = kwargs.get("context_window_fallback_dict", None)
|
||||||
### CUSTOM PROMPT TEMPLATE ###
|
### CUSTOM PROMPT TEMPLATE ###
|
||||||
initial_prompt_value = kwargs.get("intial_prompt_value", None)
|
initial_prompt_value = kwargs.get("initial_prompt_value", None)
|
||||||
roles = kwargs.get("roles", None)
|
roles = kwargs.get("roles", None)
|
||||||
final_prompt_value = kwargs.get("final_prompt_value", None)
|
final_prompt_value = kwargs.get("final_prompt_value", None)
|
||||||
bos_token = kwargs.get("bos_token", None)
|
bos_token = kwargs.get("bos_token", None)
|
||||||
|
|
|
@ -3159,7 +3159,7 @@ def register_prompt_template(model: str, roles: dict, initial_prompt_value: str
|
||||||
)
|
)
|
||||||
```
|
```
|
||||||
"""
|
"""
|
||||||
model, _ = get_llm_provider(model=model)
|
model = get_llm_provider(model=model)[0]
|
||||||
litellm.custom_prompt_dict[model] = {
|
litellm.custom_prompt_dict[model] = {
|
||||||
"roles": roles,
|
"roles": roles,
|
||||||
"initial_prompt_value": initial_prompt_value,
|
"initial_prompt_value": initial_prompt_value,
|
||||||
|
|
Loading…
Add table
Add a link
Reference in a new issue