mirror of
https://github.com/BerriAI/litellm.git
synced 2025-04-25 18:54:30 +00:00
pplx - fix supports tool choice openai param (#8496)
This commit is contained in:
parent
faee508d1f
commit
ca7c5e8773
4 changed files with 63 additions and 81 deletions
|
@ -20,3 +20,23 @@ class PerplexityChatConfig(OpenAIGPTConfig):
|
|||
or get_secret_str("PERPLEXITY_API_KEY")
|
||||
)
|
||||
return api_base, dynamic_api_key
|
||||
|
||||
def get_supported_openai_params(self, model: str) -> list:
    """
    Return the subset of OpenAI params that Perplexity supports.

    Ref: https://docs.perplexity.ai/api-reference/chat-completions

    Eg. Perplexity does not support tools, tool_choice, function_call,
    functions, etc.

    Args:
        model: model name (unused — the supported set is the same for
            all Perplexity models).

    Returns:
        List of supported OpenAI parameter names.
    """
    return [
        "frequency_penalty",
        "max_tokens",
        "max_completion_tokens",
        "presence_penalty",
        "response_format",
        "stream",
        "temperature",
        # BUG FIX: the original had `"top_p" "max_retries"` (no comma) —
        # Python's implicit adjacent-string-literal concatenation produced
        # a single bogus entry "top_pmax_retries", silently dropping BOTH
        # params from the supported list.
        "top_p",
        "max_retries",
        "extra_headers",
    ]
|
||||
|
|
|
@ -8108,8 +8108,7 @@
|
|||
"input_cost_per_token": 0.00000035,
|
||||
"output_cost_per_token": 0.00000140,
|
||||
"litellm_provider": "perplexity",
|
||||
"mode": "chat" ,
|
||||
"supports_tool_choice": true
|
||||
"mode": "chat"
|
||||
},
|
||||
"perplexity/codellama-70b-instruct": {
|
||||
"max_tokens": 16384,
|
||||
|
@ -8118,8 +8117,7 @@
|
|||
"input_cost_per_token": 0.00000070,
|
||||
"output_cost_per_token": 0.00000280,
|
||||
"litellm_provider": "perplexity",
|
||||
"mode": "chat" ,
|
||||
"supports_tool_choice": true
|
||||
"mode": "chat"
|
||||
},
|
||||
"perplexity/llama-3.1-70b-instruct": {
|
||||
"max_tokens": 131072,
|
||||
|
@ -8128,8 +8126,7 @@
|
|||
"input_cost_per_token": 0.000001,
|
||||
"output_cost_per_token": 0.000001,
|
||||
"litellm_provider": "perplexity",
|
||||
"mode": "chat" ,
|
||||
"supports_tool_choice": true
|
||||
"mode": "chat"
|
||||
},
|
||||
"perplexity/llama-3.1-8b-instruct": {
|
||||
"max_tokens": 131072,
|
||||
|
@ -8138,8 +8135,7 @@
|
|||
"input_cost_per_token": 0.0000002,
|
||||
"output_cost_per_token": 0.0000002,
|
||||
"litellm_provider": "perplexity",
|
||||
"mode": "chat" ,
|
||||
"supports_tool_choice": true
|
||||
"mode": "chat"
|
||||
},
|
||||
"perplexity/llama-3.1-sonar-huge-128k-online": {
|
||||
"max_tokens": 127072,
|
||||
|
@ -8149,8 +8145,7 @@
|
|||
"output_cost_per_token": 0.000005,
|
||||
"litellm_provider": "perplexity",
|
||||
"mode": "chat",
|
||||
"deprecation_date": "2025-02-22",
|
||||
"supports_tool_choice": true
|
||||
"deprecation_date": "2025-02-22"
|
||||
},
|
||||
"perplexity/llama-3.1-sonar-large-128k-online": {
|
||||
"max_tokens": 127072,
|
||||
|
@ -8160,8 +8155,7 @@
|
|||
"output_cost_per_token": 0.000001,
|
||||
"litellm_provider": "perplexity",
|
||||
"mode": "chat",
|
||||
"deprecation_date": "2025-02-22",
|
||||
"supports_tool_choice": true
|
||||
"deprecation_date": "2025-02-22"
|
||||
},
|
||||
"perplexity/llama-3.1-sonar-large-128k-chat": {
|
||||
"max_tokens": 131072,
|
||||
|
@ -8171,8 +8165,7 @@
|
|||
"output_cost_per_token": 0.000001,
|
||||
"litellm_provider": "perplexity",
|
||||
"mode": "chat",
|
||||
"deprecation_date": "2025-02-22",
|
||||
"supports_tool_choice": true
|
||||
"deprecation_date": "2025-02-22"
|
||||
},
|
||||
"perplexity/llama-3.1-sonar-small-128k-chat": {
|
||||
"max_tokens": 131072,
|
||||
|
@ -8182,8 +8175,7 @@
|
|||
"output_cost_per_token": 0.0000002,
|
||||
"litellm_provider": "perplexity",
|
||||
"mode": "chat",
|
||||
"deprecation_date": "2025-02-22",
|
||||
"supports_tool_choice": true
|
||||
"deprecation_date": "2025-02-22"
|
||||
},
|
||||
"perplexity/llama-3.1-sonar-small-128k-online": {
|
||||
"max_tokens": 127072,
|
||||
|
@ -8193,8 +8185,7 @@
|
|||
"output_cost_per_token": 0.0000002,
|
||||
"litellm_provider": "perplexity",
|
||||
"mode": "chat" ,
|
||||
"deprecation_date": "2025-02-22",
|
||||
"supports_tool_choice": true
|
||||
"deprecation_date": "2025-02-22"
|
||||
},
|
||||
"perplexity/sonar": {
|
||||
"max_tokens": 127072,
|
||||
|
@ -8221,8 +8212,7 @@
|
|||
"input_cost_per_token": 0.00000007,
|
||||
"output_cost_per_token": 0.00000028,
|
||||
"litellm_provider": "perplexity",
|
||||
"mode": "chat" ,
|
||||
"supports_tool_choice": true
|
||||
"mode": "chat"
|
||||
},
|
||||
"perplexity/pplx-70b-chat": {
|
||||
"max_tokens": 4096,
|
||||
|
@ -8231,8 +8221,7 @@
|
|||
"input_cost_per_token": 0.00000070,
|
||||
"output_cost_per_token": 0.00000280,
|
||||
"litellm_provider": "perplexity",
|
||||
"mode": "chat" ,
|
||||
"supports_tool_choice": true
|
||||
"mode": "chat"
|
||||
},
|
||||
"perplexity/pplx-7b-online": {
|
||||
"max_tokens": 4096,
|
||||
|
@ -8242,8 +8231,7 @@
|
|||
"output_cost_per_token": 0.00000028,
|
||||
"input_cost_per_request": 0.005,
|
||||
"litellm_provider": "perplexity",
|
||||
"mode": "chat" ,
|
||||
"supports_tool_choice": true
|
||||
"mode": "chat"
|
||||
},
|
||||
"perplexity/pplx-70b-online": {
|
||||
"max_tokens": 4096,
|
||||
|
@ -8253,8 +8241,7 @@
|
|||
"output_cost_per_token": 0.00000280,
|
||||
"input_cost_per_request": 0.005,
|
||||
"litellm_provider": "perplexity",
|
||||
"mode": "chat" ,
|
||||
"supports_tool_choice": true
|
||||
"mode": "chat"
|
||||
},
|
||||
"perplexity/llama-2-70b-chat": {
|
||||
"max_tokens": 4096,
|
||||
|
@ -8263,8 +8250,7 @@
|
|||
"input_cost_per_token": 0.00000070,
|
||||
"output_cost_per_token": 0.00000280,
|
||||
"litellm_provider": "perplexity",
|
||||
"mode": "chat" ,
|
||||
"supports_tool_choice": true
|
||||
"mode": "chat"
|
||||
},
|
||||
"perplexity/mistral-7b-instruct": {
|
||||
"max_tokens": 4096,
|
||||
|
@ -8273,8 +8259,7 @@
|
|||
"input_cost_per_token": 0.00000007,
|
||||
"output_cost_per_token": 0.00000028,
|
||||
"litellm_provider": "perplexity",
|
||||
"mode": "chat" ,
|
||||
"supports_tool_choice": true
|
||||
"mode": "chat"
|
||||
},
|
||||
"perplexity/mixtral-8x7b-instruct": {
|
||||
"max_tokens": 4096,
|
||||
|
@ -8283,8 +8268,7 @@
|
|||
"input_cost_per_token": 0.00000007,
|
||||
"output_cost_per_token": 0.00000028,
|
||||
"litellm_provider": "perplexity",
|
||||
"mode": "chat",
|
||||
"supports_tool_choice": true
|
||||
"mode": "chat"
|
||||
},
|
||||
"perplexity/sonar-small-chat": {
|
||||
"max_tokens": 16384,
|
||||
|
@ -8293,8 +8277,7 @@
|
|||
"input_cost_per_token": 0.00000007,
|
||||
"output_cost_per_token": 0.00000028,
|
||||
"litellm_provider": "perplexity",
|
||||
"mode": "chat",
|
||||
"supports_tool_choice": true
|
||||
"mode": "chat"
|
||||
},
|
||||
"perplexity/sonar-small-online": {
|
||||
"max_tokens": 12000,
|
||||
|
@ -8304,8 +8287,7 @@
|
|||
"output_cost_per_token": 0.00000028,
|
||||
"input_cost_per_request": 0.005,
|
||||
"litellm_provider": "perplexity",
|
||||
"mode": "chat",
|
||||
"supports_tool_choice": true
|
||||
"mode": "chat"
|
||||
},
|
||||
"perplexity/sonar-medium-chat": {
|
||||
"max_tokens": 16384,
|
||||
|
@ -8314,8 +8296,7 @@
|
|||
"input_cost_per_token": 0.0000006,
|
||||
"output_cost_per_token": 0.0000018,
|
||||
"litellm_provider": "perplexity",
|
||||
"mode": "chat",
|
||||
"supports_tool_choice": true
|
||||
"mode": "chat"
|
||||
},
|
||||
"perplexity/sonar-medium-online": {
|
||||
"max_tokens": 12000,
|
||||
|
@ -8325,8 +8306,7 @@
|
|||
"output_cost_per_token": 0.0000018,
|
||||
"input_cost_per_request": 0.005,
|
||||
"litellm_provider": "perplexity",
|
||||
"mode": "chat",
|
||||
"supports_tool_choice": true
|
||||
"mode": "chat"
|
||||
},
|
||||
"fireworks_ai/accounts/fireworks/models/llama-v3p2-1b-instruct": {
|
||||
"max_tokens": 16384,
|
||||
|
|
|
@ -8108,8 +8108,7 @@
|
|||
"input_cost_per_token": 0.00000035,
|
||||
"output_cost_per_token": 0.00000140,
|
||||
"litellm_provider": "perplexity",
|
||||
"mode": "chat" ,
|
||||
"supports_tool_choice": true
|
||||
"mode": "chat"
|
||||
},
|
||||
"perplexity/codellama-70b-instruct": {
|
||||
"max_tokens": 16384,
|
||||
|
@ -8118,8 +8117,7 @@
|
|||
"input_cost_per_token": 0.00000070,
|
||||
"output_cost_per_token": 0.00000280,
|
||||
"litellm_provider": "perplexity",
|
||||
"mode": "chat" ,
|
||||
"supports_tool_choice": true
|
||||
"mode": "chat"
|
||||
},
|
||||
"perplexity/llama-3.1-70b-instruct": {
|
||||
"max_tokens": 131072,
|
||||
|
@ -8128,8 +8126,7 @@
|
|||
"input_cost_per_token": 0.000001,
|
||||
"output_cost_per_token": 0.000001,
|
||||
"litellm_provider": "perplexity",
|
||||
"mode": "chat" ,
|
||||
"supports_tool_choice": true
|
||||
"mode": "chat"
|
||||
},
|
||||
"perplexity/llama-3.1-8b-instruct": {
|
||||
"max_tokens": 131072,
|
||||
|
@ -8138,8 +8135,7 @@
|
|||
"input_cost_per_token": 0.0000002,
|
||||
"output_cost_per_token": 0.0000002,
|
||||
"litellm_provider": "perplexity",
|
||||
"mode": "chat" ,
|
||||
"supports_tool_choice": true
|
||||
"mode": "chat"
|
||||
},
|
||||
"perplexity/llama-3.1-sonar-huge-128k-online": {
|
||||
"max_tokens": 127072,
|
||||
|
@ -8149,8 +8145,7 @@
|
|||
"output_cost_per_token": 0.000005,
|
||||
"litellm_provider": "perplexity",
|
||||
"mode": "chat",
|
||||
"deprecation_date": "2025-02-22",
|
||||
"supports_tool_choice": true
|
||||
"deprecation_date": "2025-02-22"
|
||||
},
|
||||
"perplexity/llama-3.1-sonar-large-128k-online": {
|
||||
"max_tokens": 127072,
|
||||
|
@ -8160,8 +8155,7 @@
|
|||
"output_cost_per_token": 0.000001,
|
||||
"litellm_provider": "perplexity",
|
||||
"mode": "chat",
|
||||
"deprecation_date": "2025-02-22",
|
||||
"supports_tool_choice": true
|
||||
"deprecation_date": "2025-02-22"
|
||||
},
|
||||
"perplexity/llama-3.1-sonar-large-128k-chat": {
|
||||
"max_tokens": 131072,
|
||||
|
@ -8171,8 +8165,7 @@
|
|||
"output_cost_per_token": 0.000001,
|
||||
"litellm_provider": "perplexity",
|
||||
"mode": "chat",
|
||||
"deprecation_date": "2025-02-22",
|
||||
"supports_tool_choice": true
|
||||
"deprecation_date": "2025-02-22"
|
||||
},
|
||||
"perplexity/llama-3.1-sonar-small-128k-chat": {
|
||||
"max_tokens": 131072,
|
||||
|
@ -8182,8 +8175,7 @@
|
|||
"output_cost_per_token": 0.0000002,
|
||||
"litellm_provider": "perplexity",
|
||||
"mode": "chat",
|
||||
"deprecation_date": "2025-02-22",
|
||||
"supports_tool_choice": true
|
||||
"deprecation_date": "2025-02-22"
|
||||
},
|
||||
"perplexity/llama-3.1-sonar-small-128k-online": {
|
||||
"max_tokens": 127072,
|
||||
|
@ -8193,8 +8185,7 @@
|
|||
"output_cost_per_token": 0.0000002,
|
||||
"litellm_provider": "perplexity",
|
||||
"mode": "chat" ,
|
||||
"deprecation_date": "2025-02-22",
|
||||
"supports_tool_choice": true
|
||||
"deprecation_date": "2025-02-22"
|
||||
},
|
||||
"perplexity/sonar": {
|
||||
"max_tokens": 127072,
|
||||
|
@ -8221,8 +8212,7 @@
|
|||
"input_cost_per_token": 0.00000007,
|
||||
"output_cost_per_token": 0.00000028,
|
||||
"litellm_provider": "perplexity",
|
||||
"mode": "chat" ,
|
||||
"supports_tool_choice": true
|
||||
"mode": "chat"
|
||||
},
|
||||
"perplexity/pplx-70b-chat": {
|
||||
"max_tokens": 4096,
|
||||
|
@ -8231,8 +8221,7 @@
|
|||
"input_cost_per_token": 0.00000070,
|
||||
"output_cost_per_token": 0.00000280,
|
||||
"litellm_provider": "perplexity",
|
||||
"mode": "chat" ,
|
||||
"supports_tool_choice": true
|
||||
"mode": "chat"
|
||||
},
|
||||
"perplexity/pplx-7b-online": {
|
||||
"max_tokens": 4096,
|
||||
|
@ -8242,8 +8231,7 @@
|
|||
"output_cost_per_token": 0.00000028,
|
||||
"input_cost_per_request": 0.005,
|
||||
"litellm_provider": "perplexity",
|
||||
"mode": "chat" ,
|
||||
"supports_tool_choice": true
|
||||
"mode": "chat"
|
||||
},
|
||||
"perplexity/pplx-70b-online": {
|
||||
"max_tokens": 4096,
|
||||
|
@ -8253,8 +8241,7 @@
|
|||
"output_cost_per_token": 0.00000280,
|
||||
"input_cost_per_request": 0.005,
|
||||
"litellm_provider": "perplexity",
|
||||
"mode": "chat" ,
|
||||
"supports_tool_choice": true
|
||||
"mode": "chat"
|
||||
},
|
||||
"perplexity/llama-2-70b-chat": {
|
||||
"max_tokens": 4096,
|
||||
|
@ -8263,8 +8250,7 @@
|
|||
"input_cost_per_token": 0.00000070,
|
||||
"output_cost_per_token": 0.00000280,
|
||||
"litellm_provider": "perplexity",
|
||||
"mode": "chat" ,
|
||||
"supports_tool_choice": true
|
||||
"mode": "chat"
|
||||
},
|
||||
"perplexity/mistral-7b-instruct": {
|
||||
"max_tokens": 4096,
|
||||
|
@ -8273,8 +8259,7 @@
|
|||
"input_cost_per_token": 0.00000007,
|
||||
"output_cost_per_token": 0.00000028,
|
||||
"litellm_provider": "perplexity",
|
||||
"mode": "chat" ,
|
||||
"supports_tool_choice": true
|
||||
"mode": "chat"
|
||||
},
|
||||
"perplexity/mixtral-8x7b-instruct": {
|
||||
"max_tokens": 4096,
|
||||
|
@ -8283,8 +8268,7 @@
|
|||
"input_cost_per_token": 0.00000007,
|
||||
"output_cost_per_token": 0.00000028,
|
||||
"litellm_provider": "perplexity",
|
||||
"mode": "chat",
|
||||
"supports_tool_choice": true
|
||||
"mode": "chat"
|
||||
},
|
||||
"perplexity/sonar-small-chat": {
|
||||
"max_tokens": 16384,
|
||||
|
@ -8293,8 +8277,7 @@
|
|||
"input_cost_per_token": 0.00000007,
|
||||
"output_cost_per_token": 0.00000028,
|
||||
"litellm_provider": "perplexity",
|
||||
"mode": "chat",
|
||||
"supports_tool_choice": true
|
||||
"mode": "chat"
|
||||
},
|
||||
"perplexity/sonar-small-online": {
|
||||
"max_tokens": 12000,
|
||||
|
@ -8304,8 +8287,7 @@
|
|||
"output_cost_per_token": 0.00000028,
|
||||
"input_cost_per_request": 0.005,
|
||||
"litellm_provider": "perplexity",
|
||||
"mode": "chat",
|
||||
"supports_tool_choice": true
|
||||
"mode": "chat"
|
||||
},
|
||||
"perplexity/sonar-medium-chat": {
|
||||
"max_tokens": 16384,
|
||||
|
@ -8314,8 +8296,7 @@
|
|||
"input_cost_per_token": 0.0000006,
|
||||
"output_cost_per_token": 0.0000018,
|
||||
"litellm_provider": "perplexity",
|
||||
"mode": "chat",
|
||||
"supports_tool_choice": true
|
||||
"mode": "chat"
|
||||
},
|
||||
"perplexity/sonar-medium-online": {
|
||||
"max_tokens": 12000,
|
||||
|
@ -8325,8 +8306,7 @@
|
|||
"output_cost_per_token": 0.0000018,
|
||||
"input_cost_per_request": 0.005,
|
||||
"litellm_provider": "perplexity",
|
||||
"mode": "chat",
|
||||
"supports_tool_choice": true
|
||||
"mode": "chat"
|
||||
},
|
||||
"fireworks_ai/accounts/fireworks/models/llama-v3p2-1b-instruct": {
|
||||
"max_tokens": 16384,
|
||||
|
|
|
@ -52,6 +52,8 @@ def test_supports_tool_choice_simple_tests():
|
|||
is False
|
||||
)
|
||||
|
||||
assert litellm.utils.supports_tool_choice(model="perplexity/sonar") is False
|
||||
|
||||
|
||||
def test_check_provider_match():
|
||||
"""
|
||||
|
@ -115,7 +117,7 @@ async def test_supports_tool_choice():
|
|||
"""
|
||||
# Load model prices
|
||||
litellm._turn_on_debug()
|
||||
with open("./model_prices_and_context_window.json", "r") as f:
|
||||
with open("../../model_prices_and_context_window.json", "r") as f:
|
||||
model_prices = json.load(f)
|
||||
litellm.model_cost = model_prices
|
||||
config_manager = ProviderConfigManager()
|
||||
|
|
Loading…
Add table
Add a link
Reference in a new issue