Mirror of https://github.com/BerriAI/litellm.git, synced 2025-04-27 11:43:54 +00:00
(test) function calling

This commit is contained in:
parent 250c891750
commit b31368ac2f

1 changed file with 5 additions and 1 deletion
@@ -15,9 +15,11 @@ from litellm import RateLimitError
import pytest
litellm.num_retries = 3
litellm.cache = None
litellm.set_verbose=False
# litellm.set_verbose=True
import json

# litellm.success_callback = ["langfuse"]

def get_current_weather(location, unit="fahrenheit"):
    """Get the current weather in a given location"""
    if "tokyo" in location.lower():
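For orientation, here is a minimal sketch of how a helper like get_current_weather is typically wired into this kind of test: it is described to the model through an OpenAI-style tools schema and offered on the first litellm.completion call. The schema, messages, and tool_choice value below are illustrative assumptions, not lines from the test file.

# Sketch (assumed, not verbatim from the test): describe get_current_weather to the
# model via an OpenAI-style tools schema and let it request tool calls.
import litellm

tools = [
    {
        "type": "function",
        "function": {
            "name": "get_current_weather",
            "description": "Get the current weather in a given location",
            "parameters": {
                "type": "object",
                "properties": {
                    "location": {"type": "string", "description": "City name, e.g. Tokyo"},
                    "unit": {"type": "string", "enum": ["celsius", "fahrenheit"]},
                },
                "required": ["location"],
            },
        },
    }
]

messages = [{"role": "user", "content": "What's the weather like in Tokyo, San Francisco, and Paris?"}]
response = litellm.completion(
    model="gpt-3.5-turbo-1106",
    messages=messages,
    tools=tools,
    tool_choice="auto",  # the model decides whether (and how many times) to call the tool
)
tool_calls = response.choices[0].message.tool_calls  # may contain several parallel calls
print("first response tool_calls:", tool_calls)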
@@ -98,6 +100,8 @@ def test_parallel_function_call():
    second_response = litellm.completion(
        model="gpt-3.5-turbo-1106",
        messages=messages,
        temperature=0.2,
        seed=22
    )  # get a new response from the model where it can see the function response
    print("second response\n", second_response)
    return second_response
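Continuing the sketch above (an assumed flow, not the file's contents): each requested tool call is executed locally with get_current_weather, its result is appended as a "tool" message, and only then is the second litellm.completion call from this hunk issued, so the model can see the function output.

# Sketch, continuing the previous snippet: feed the tool results back
# before requesting the second completion shown in the hunk above.
import json

messages.append(response.choices[0].message)  # assistant turn containing the tool_calls
for tool_call in tool_calls:
    args = json.loads(tool_call.function.arguments)
    result = get_current_weather(location=args["location"], unit=args.get("unit", "fahrenheit"))
    messages.append(
        {
            "role": "tool",
            "tool_call_id": tool_call.id,
            "name": tool_call.function.name,
            "content": result,  # get_current_weather is expected to return a JSON string here
        }
    )

second_response = litellm.completion(
    model="gpt-3.5-turbo-1106",
    messages=messages,
    temperature=0.2,
    seed=22,
)  # the model can now ground its answer in the returned weather data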