Mirror of https://github.com/BerriAI/litellm.git, synced 2025-04-25 18:54:30 +00:00
(test) hidden params in stream_chunk builder
This commit is contained in:
parent 485f469518
commit f3d25d2c27
1 changed file with 5 additions and 1 deletion
@@ -120,7 +120,7 @@ def test_stream_chunk_builder_litellm_tool_call():

def test_stream_chunk_builder_litellm_tool_call_regular_message():
    try:
        messages = [{"role": "user", "content": "Hey, how's it going?"}]
        litellm.set_verbose = False
        # litellm.set_verbose = True
        response = litellm.completion(
            model="gpt-3.5-turbo",
            messages=messages,

@@ -138,6 +138,10 @@ def test_stream_chunk_builder_litellm_tool_call_regular_message():
            == response.usage.completion_tokens + response.usage.prompt_tokens
        )

        # check provider is in hidden params
        print("hidden params", response._hidden_params)
        assert response._hidden_params["custom_llm_provider"] == "openai"

    except Exception as e:
        pytest.fail(f"An exception occurred - {str(e)}")
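For context, a minimal sketch of the behaviour this test exercises: streaming a completion, rebuilding the full response with litellm.stream_chunk_builder, and checking that the provider is surfaced in the response's hidden params. This is illustrative code, not the repository's test; it assumes a configured OPENAI_API_KEY and that the rebuilt response exposes _hidden_params the same way the response asserted on in the diff does.

    # Sketch only -- not the repo's test code. Assumes OPENAI_API_KEY is set.
    import litellm

    messages = [{"role": "user", "content": "Hey, how's it going?"}]

    # Stream the completion and collect the raw chunks as they arrive.
    chunks = []
    for chunk in litellm.completion(
        model="gpt-3.5-turbo",
        messages=messages,
        stream=True,
    ):
        chunks.append(chunk)

    # Reassemble the streamed chunks into a single response object.
    rebuilt = litellm.stream_chunk_builder(chunks, messages=messages)

    # Per this commit, the provider is expected to appear in the hidden params.
    print("hidden params", rebuilt._hidden_params)
    assert rebuilt._hidden_params["custom_llm_provider"] == "openai"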