Mirror of https://github.com/BerriAI/litellm.git, synced 2025-04-25 18:54:30 +00:00
feat(factory.py): option to add function details to prompt, if model doesn't support functions param
parent f6f7c0b891
commit 704be9dcd1
8 changed files with 130 additions and 27 deletions
litellm/tests/test_add_function_to_prompt.py (new file, 75 lines)
@@ -0,0 +1,75 @@
#### What this tests ####
# Allow the user to map the function to the prompt, if the model doesn't support function calling

import sys, os, pytest
import traceback

sys.path.insert(
    0, os.path.abspath("../..")
)  # Adds the parent directory to the system path
import litellm


## case 1: add_function_to_prompt not set
def test_function_call_non_openai_model():
    try:
        model = "claude-instant-1"
        messages = [{"role": "user", "content": "what's the weather in sf?"}]
        functions = [
            {
                "name": "get_current_weather",
                "description": "Get the current weather in a given location",
                "parameters": {
                    "type": "object",
                    "properties": {
                        "location": {
                            "type": "string",
                            "description": "The city and state, e.g. San Francisco, CA"
                        },
                        "unit": {
                            "type": "string",
                            "enum": ["celsius", "fahrenheit"]
                        }
                    },
                    "required": ["location"]
                }
            }
        ]
        response = litellm.completion(model=model, messages=messages, functions=functions)
        pytest.fail("Expected an error: model does not support the functions param")
    except Exception as e:
        pass


test_function_call_non_openai_model()


## case 2: add_function_to_prompt set
def test_function_call_non_openai_model_litellm_mod_set():
    litellm.add_function_to_prompt = True
    try:
        model = "claude-instant-1"
        messages = [{"role": "user", "content": "what's the weather in sf?"}]
        functions = [
            {
                "name": "get_current_weather",
                "description": "Get the current weather in a given location",
                "parameters": {
                    "type": "object",
                    "properties": {
                        "location": {
                            "type": "string",
                            "description": "The city and state, e.g. San Francisco, CA"
                        },
                        "unit": {
                            "type": "string",
                            "enum": ["celsius", "fahrenheit"]
                        }
                    },
                    "required": ["location"]
                }
            }
        ]
        response = litellm.completion(model=model, messages=messages, functions=functions)
        print(f"response: {response}")
    except Exception as e:
        pytest.fail(f"An error occurred: {e}")


# test_function_call_non_openai_model_litellm_mod_set()
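Case 2 above relies on litellm.add_function_to_prompt, which is meant to fold the functions schema into the prompt text when the target model (here claude-instant-1) has no native functions parameter. This commit's diff only shows the test; the actual rendering lives in factory.py and is not included here. The snippet below is a minimal sketch of the idea under that assumption: the helper name inject_functions_into_prompt and the exact prompt wording are illustrative inventions, not litellm's implementation.

# Illustrative sketch only -- helper name and prompt wording are assumptions,
# not litellm's actual factory.py behavior.
import json


def inject_functions_into_prompt(messages, functions):
    # Render each function schema as JSON and prepend it as a system message,
    # so a model without a native `functions` parameter still sees the schemas.
    function_text = "\n".join(json.dumps(f, indent=2) for f in functions)
    system_message = {
        "role": "system",
        "content": (
            "You can call the following functions. To call one, respond with "
            'a JSON object of the form {"name": ..., "arguments": {...}}.\n'
            + function_text
        ),
    }
    # Return a new message list; the caller's messages are left untouched.
    return [system_message] + list(messages)


# Example: roughly the shape of what case 2 would send once the schema is folded in.
if __name__ == "__main__":
    messages = [{"role": "user", "content": "what's the weather in sf?"}]
    functions = [{"name": "get_current_weather",
                  "description": "Get the current weather in a given location",
                  "parameters": {"type": "object",
                                 "properties": {"location": {"type": "string"}},
                                 "required": ["location"]}}]
    print(inject_functions_into_prompt(messages, functions)[0]["content"])

In the test itself all of this stays behind litellm.completion; setting litellm.add_function_to_prompt = True is the only difference between case 1 (expected to raise) and case 2 (expected to succeed).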