Mirror of https://github.com/BerriAI/litellm.git (synced 2025-04-27 11:43:54 +00:00)
(test) custom_llm_provider in streaming response
parent 6b2a4714a6
commit f85b64b85c
1 changed file with 4 additions and 0 deletions
@@ -262,6 +262,9 @@ def test_completion_azure_stream():
         for idx, init_chunk in enumerate(response):
             chunk, finished = streaming_format_tests(idx, init_chunk)
             complete_response += chunk
+            custom_llm_provider = init_chunk._hidden_params["custom_llm_provider"]
+            print(f"custom_llm_provider: {custom_llm_provider}")
+            assert custom_llm_provider == "azure"
             if finished:
                 assert isinstance(init_chunk.choices[0], litellm.utils.StreamingChoices)
                 break
@@ -923,6 +926,7 @@ def ai21_completion_call_bad_key():
 
 # ai21_completion_call_bad_key()
 
+
 @pytest.mark.asyncio
 async def test_hf_completion_tgi_stream():
     try:
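
For context, a minimal sketch of the behaviour this test now asserts: every chunk of a streamed completion carries the serving provider in its `_hidden_params`, and for an Azure deployment that value should be "azure". The deployment name "azure/my-deployment" below is a placeholder, and Azure credentials (AZURE_API_KEY, AZURE_API_BASE, AZURE_API_VERSION) are assumed to be set in the environment; this is not part of the commit itself.

    import litellm

    # Stream a completion through LiteLLM against an Azure deployment.
    # "azure/my-deployment" is a placeholder deployment name.
    response = litellm.completion(
        model="azure/my-deployment",
        messages=[{"role": "user", "content": "Hey, how's it going?"}],
        stream=True,
    )

    for chunk in response:
        # Each streamed chunk reports which provider served it via _hidden_params,
        # which is what the added assertion in test_completion_azure_stream checks.
        provider = chunk._hidden_params["custom_llm_provider"]
        assert provider == "azure"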