Mirror of https://github.com/BerriAI/litellm.git, synced 2025-04-25 10:44:24 +00:00.
Included changes:
* test_anthropic_cache_control_hook_system_message
* test_anthropic_cache_control_hook.py
* should_run_prompt_management_hooks
* fix should_run_prompt_management_hooks
* test_anthropic_cache_control_hook_specific_index
* fix test
* fix linting errors
* ChatCompletionCachedContent
* initial commit for cache control
* fixes ui design
* fix inserting cache_control_injection_points
* fix entering cache control points
* fixes for using cache control on ui + backend
* update cache control settings on edit model page
* fix init custom logger compatible class
* fix linting errors
* fix linting errors
* fix get_chat_completion_prompt
49 lines
1.7 KiB
Python
from typing import List, Optional, Tuple
|
|
|
|
from litellm.integrations.custom_logger import CustomLogger
|
|
from litellm.integrations.prompt_management_base import (
|
|
PromptManagementBase,
|
|
PromptManagementClient,
|
|
)
|
|
from litellm.types.llms.openai import AllMessageValues
|
|
from litellm.types.utils import StandardCallbackDynamicParams
|
|
|
|
|
|
class CustomPromptManagement(CustomLogger, PromptManagementBase):
    """Extension point for user-defined prompt management integrations.

    Subclass this and override :meth:`get_chat_completion_prompt` to rewrite
    the model, messages, or optional params before a completion call. The
    default implementation is a pass-through.
    """

    def get_chat_completion_prompt(
        self,
        model: str,
        messages: List[AllMessageValues],
        non_default_params: dict,
        prompt_id: Optional[str],
        prompt_variables: Optional[dict],
        dynamic_callback_params: StandardCallbackDynamicParams,
    ) -> Tuple[str, List[AllMessageValues], dict]:
        """Return the (possibly transformed) model, messages, and optional params.

        The base implementation performs no transformation — subclasses may
        pull any of the three values from an external prompt management tool:

        - model: the model to use
        - messages: the messages to send
        - non_default_params: optional params (e.g. temperature, max_tokens)
        """
        # Pass-through: no prompt management applied at the base level.
        return (model, messages, non_default_params)

    @property
    def integration_name(self) -> str:
        """Identifier used to register/look up this integration."""
        return "custom-prompt-management"

    def should_run_prompt_management(
        self,
        prompt_id: str,
        dynamic_callback_params: StandardCallbackDynamicParams,
    ) -> bool:
        """Custom integrations always opt in to running the prompt hook."""
        return True

    def _compile_prompt_helper(
        self,
        prompt_id: str,
        prompt_variables: Optional[dict],
        dynamic_callback_params: StandardCallbackDynamicParams,
    ) -> PromptManagementClient:
        """Not supported: custom integrations transform prompts directly in
        get_chat_completion_prompt rather than compiling via a client."""
        raise NotImplementedError(
            "Custom prompt management does not support compile prompt helper"
        )