Mirror of https://github.com/BerriAI/litellm.git (synced 2025-04-25 18:54:30 +00:00)
fix(utils.py): include system fingerprint in streaming response object
commit 01c7e18f31 (parent 78421d89aa)
3 changed files with 7 additions and 3 deletions
@@ -95,16 +95,18 @@ def test_stream_chunk_builder_litellm_tool_call():
     try:
         litellm.set_verbose = False
         response = litellm.completion(
-            model="azure/chatgpt-functioncalling",
+            model="azure/gpt-4-nov-release",
             messages=messages,
             tools=tools_schema,
             stream=True,
             api_key="os.environ/AZURE_FRANCE_API_KEY",
             api_base="https://openai-france-1234.openai.azure.com",
             complete_response = True
         )

         print(f"complete response: {response}")
         print(f"complete response usage: {response.usage}")

+        assert response.system_fingerprint is not None
     except Exception as e:
         pytest.fail(f"An exception occurred - {str(e)}")
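For context, the utils.py change presumably works along these lines: when the streamed chunks are merged back into a complete response, the builder carries the system_fingerprint reported on the chunks over to the assembled response object instead of dropping it. The sketch below is illustrative only; the helper name build_complete_response and the chunk layout are assumptions, not litellm's actual internals.

# Minimal sketch (assumed names and structure, not litellm's real code):
# merge streamed chunks into one response dict while keeping system_fingerprint.
from typing import Any, Dict, List


def build_complete_response(chunks: List[Dict[str, Any]]) -> Dict[str, Any]:
    """Assemble streamed chunks into a single response, preserving system_fingerprint."""
    if not chunks:
        raise ValueError("no chunks to assemble")
    # Concatenate the delta contents from every chunk into one message.
    content = "".join(
        choice.get("delta", {}).get("content") or ""
        for chunk in chunks
        for choice in chunk.get("choices", [])
    )
    return {
        "id": chunks[0].get("id"),
        "model": chunks[0].get("model"),
        "choices": [{"message": {"role": "assistant", "content": content}}],
        # The point of the fix: surface the fingerprint from the streamed
        # chunks rather than losing it during reassembly.
        "system_fingerprint": chunks[0].get("system_fingerprint"),
    }

With a builder that behaves like this, the test above can stream with complete_response=True and still assert that response.system_fingerprint is not None.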