fix linting errors

Author: Ishaan Jaff
Date:   2025-04-14 19:46:46 -07:00
parent 861a4a3ea3
commit 9b714ae230
6 changed files with 17 additions and 11 deletions

@@ -94,7 +94,7 @@ class CustomLogger:  # https://docs.litellm.ai/docs/observability/custom_callbac
         model: str,
         messages: List[AllMessageValues],
         non_default_params: dict,
-        prompt_id: str,
+        prompt_id: Optional[str],
         prompt_variables: Optional[dict],
         dynamic_callback_params: StandardCallbackDynamicParams,
     ) -> Tuple[str, List[AllMessageValues], dict]:

@@ -15,7 +15,7 @@ class CustomPromptManagement(CustomLogger, PromptManagementBase):
         model: str,
         messages: List[AllMessageValues],
         non_default_params: dict,
-        prompt_id: str,
+        prompt_id: Optional[str],
         prompt_variables: Optional[dict],
         dynamic_callback_params: StandardCallbackDynamicParams,
     ) -> Tuple[str, List[AllMessageValues], dict]:

@@ -169,10 +169,14 @@ class LangfusePromptManagement(LangFuseLogger, PromptManagementBase, CustomLogge
         model: str,
         messages: List[AllMessageValues],
         non_default_params: dict,
-        prompt_id: str,
+        prompt_id: Optional[str],
         prompt_variables: Optional[dict],
         dynamic_callback_params: StandardCallbackDynamicParams,
-    ) -> Tuple[str, List[AllMessageValues], dict,]:
+    ) -> Tuple[
+        str,
+        List[AllMessageValues],
+        dict,
+    ]:
         return self.get_chat_completion_prompt(
             model,
             messages,
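The exploded return annotation is consistent with Black's magic trailing comma: a trailing comma left inside brackets makes the formatter keep the brackets expanded, one element per line. (That Black produced this hunk is an assumption; the commit message only says it fixes linting errors.) A minimal illustration, with plain dicts in place of AllMessageValues:

from typing import List, Tuple

# The pre-existing trailing comma in the subscript ...
def before() -> Tuple[str, List[dict], dict,]: ...

# ... makes Black keep the brackets exploded:
def after() -> Tuple[
    str,
    List[dict],
    dict,
]: ...

The next hunk takes the other route: deleting the trailing comma lets the annotation stay on one line.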

@@ -79,10 +79,12 @@ class PromptManagementBase(ABC):
         model: str,
         messages: List[AllMessageValues],
         non_default_params: dict,
-        prompt_id: str,
+        prompt_id: Optional[str],
         prompt_variables: Optional[dict],
         dynamic_callback_params: StandardCallbackDynamicParams,
-    ) -> Tuple[str, List[AllMessageValues], dict,]:
+    ) -> Tuple[str, List[AllMessageValues], dict]:
+        if prompt_id is None:
+            raise ValueError("prompt_id is required for Prompt Management Base class")
         if not self.should_run_prompt_management(
             prompt_id=prompt_id, dynamic_callback_params=dynamic_callback_params
         ):
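The signature widens prompt_id to Optional[str] for the type checker, while the new guard preserves the old runtime requirement that a prompt_id be supplied. A minimal sketch of the resulting contract, with plain dicts standing in for AllMessageValues and a hypothetical function name:

from typing import List, Optional, Tuple

def resolve_prompt(  # hypothetical stand-in for get_chat_completion_prompt
    model: str,
    messages: List[dict],
    non_default_params: dict,
    prompt_id: Optional[str],
) -> Tuple[str, List[dict], dict]:
    # Callers may now pass None and still type-check, but the
    # implementation fails fast rather than looking up a None prompt.
    if prompt_id is None:
        raise ValueError("prompt_id is required for Prompt Management Base class")
    return model, messages, non_default_params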

@@ -458,8 +458,8 @@ class Logging(LiteLLMLoggingBaseClass):
     def should_run_prompt_management_hooks(
         self,
-        prompt_id: str,
         non_default_params: Dict,
+        prompt_id: Optional[str] = None,
     ) -> bool:
         """
         Return True if prompt management hooks should be run
@@ -476,8 +476,8 @@ class Logging(LiteLLMLoggingBaseClass):
         self,
         model: str,
         messages: List[AllMessageValues],
-        non_default_params: dict,
-        prompt_id: str,
+        non_default_params: Dict,
+        prompt_id: Optional[str],
         prompt_variables: Optional[dict],
         prompt_management_logger: Optional[CustomLogger] = None,
     ) -> Tuple[str, List[AllMessageValues], dict]:
@@ -496,7 +496,7 @@ class Logging(LiteLLMLoggingBaseClass):
             ) = custom_logger.get_chat_completion_prompt(
                 model=model,
                 messages=messages,
-                non_default_params=non_default_params,
+                non_default_params=non_default_params or {},
                 prompt_id=prompt_id,
                 prompt_variables=prompt_variables,
                 dynamic_callback_params=self.standard_callback_dynamic_params,
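Coercing the argument with `or {}` normalizes a None value to an empty dict before it is forwarded, so the callback always receives a real mapping. The idiom in isolation, independent of litellm:

from typing import Optional

def forward(non_default_params: Optional[dict]) -> dict:
    # `x or {}` yields {} when x is None (or empty); downstream code
    # can index and .get() without a None check.
    return non_default_params or {}

assert forward(None) == {}
assert forward({"temperature": 0.2}) == {"temperature": 0.2}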

@@ -12,7 +12,7 @@ class X42PromptManagement(CustomPromptManagement):
         model: str,
         messages: List[AllMessageValues],
         non_default_params: dict,
-        prompt_id: str,
+        prompt_id: Optional[str],
         prompt_variables: Optional[dict],
         dynamic_callback_params: StandardCallbackDynamicParams,
     ) -> Tuple[str, List[AllMessageValues], dict]: