mirror of https://github.com/BerriAI/litellm.git
get_openai_client_cache_key
This commit is contained in:
parent bb8400a350
commit 55e669d7d8
4 changed files with 199 additions and 10 deletions
@@ -151,13 +151,23 @@ class BaseOpenAILLM:
             f"is_async={client_initialization_params.get('is_async')}",
         ]
 
-        for param in BaseOpenAILLM.get_openai_client_initialization_param_fields(
-            client_type=client_type
-        ):
+        LITELLM_CLIENT_SPECIFIC_PARAMS = [
+            "timeout",
+            "max_retries",
+            "organization",
+            "api_base",
+        ]
+        openai_client_fields = (
+            BaseOpenAILLM.get_openai_client_initialization_param_fields(
+                client_type=client_type
+            )
+            + LITELLM_CLIENT_SPECIFIC_PARAMS
+        )
+        for param in openai_client_fields:
             key_parts.append(f"{param}={client_initialization_params.get(param)}")
 
         _cache_key = ",".join(key_parts)
 
         return _cache_key
 
     @staticmethod
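A minimal standalone sketch of what this hunk changes: the client cache key is now composed from the LiteLLM-specific client parameters (timeout, max_retries, organization, api_base) in addition to the client-type fields, so clients configured against different endpoints get different keys. Here get_client_param_fields and the field list it returns are hypothetical stand-ins for BaseOpenAILLM.get_openai_client_initialization_param_fields, used for illustration only; this is not the actual litellm implementation.

    from typing import List

    # Hypothetical stand-in for BaseOpenAILLM.get_openai_client_initialization_param_fields()
    def get_client_param_fields() -> List[str]:
        # Assumed field names, for illustration only
        return ["api_key", "api_version"]

    # Params folded into the key by this commit
    LITELLM_CLIENT_SPECIFIC_PARAMS = [
        "timeout",
        "max_retries",
        "organization",
        "api_base",
    ]

    def build_cache_key(client_initialization_params: dict) -> str:
        # Mirror the key construction shown in the diff: one "param=value" part
        # per field, joined with commas.
        key_parts = [
            f"is_async={client_initialization_params.get('is_async')}",
        ]
        for param in get_client_param_fields() + LITELLM_CLIENT_SPECIFIC_PARAMS:
            key_parts.append(f"{param}={client_initialization_params.get(param)}")
        return ",".join(key_parts)

    # Two configurations that differ only in api_base now produce different keys,
    # so a cached client is no longer reused across endpoints.
    print(build_cache_key({"is_async": False, "api_base": "https://a.example.com"}))
    print(build_cache_key({"is_async": False, "api_base": "https://b.example.com"}))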