feat(factory.py): enable 'user_continue_message' for interleaving user/assistant messages when a provider requires it

Allows Bedrock to be used with AutoGen.
Author: Krrish Dholakia, 2024-08-22 11:03:33 -07:00
parent 11bfc1dca7
commit 70bf8bd4f4
6 changed files with 54 additions and 8 deletions


@@ -2323,6 +2323,7 @@ def get_litellm_params(
output_cost_per_second=None,
cooldown_time=None,
text_completion=None,
user_continue_message=None,
):
litellm_params = {
"acompletion": acompletion,
@@ -2347,6 +2348,7 @@ def get_litellm_params(
"output_cost_per_second": output_cost_per_second,
"cooldown_time": cooldown_time,
"text_completion": text_completion,
"user_continue_message": user_continue_message,
}
return litellm_params
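
For orientation, a rough sketch of how a caller might use the new parameter once it is threaded through get_litellm_params. The model name and message content are illustrative, and whether user_continue_message accepts a plain string or a full message dict is an assumption here; only the completion(.., user_continue_message=) entry point is confirmed by the error text in the hunk below.

    import litellm

    # Illustrative only: a history that starts with an assistant turn, which
    # Bedrock rejects unless a user message is supplied to interleave with it.
    response = litellm.completion(
        model="bedrock/anthropic.claude-3-sonnet-20240229-v1:0",
        messages=[{"role": "assistant", "content": "How can I help you today?"}],
        user_continue_message={"role": "user", "content": "Please continue."},
    )
    print(response.choices[0].message.content)
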
@@ -7123,6 +7125,14 @@ def exception_type(
llm_provider="bedrock",
response=original_exception.response,
)
elif "A conversation must start with a user message." in error_str:
exception_mapping_worked = True
raise BadRequestError(
message=f"BedrockException - {error_str}\n. Pass in default user message via `completion(..,user_continue_message=)` or enable `litellm.modify_params=True`.",
model=model,
llm_provider="bedrock",
response=original_exception.response,
)
elif (
"Unable to locate credentials" in error_str
or "The security token included in the request is invalid"