mirror of https://github.com/BerriAI/litellm.git, synced 2025-04-26 11:14:04 +00:00
fix(utils.py): fix streaming
This commit is contained in: parent dfb1d34e26, commit 5b06627c09
3 changed files with 6 additions and 5 deletions
@@ -138,7 +138,7 @@ def test_vertex_ai():
 def test_vertex_ai_stream():
     load_vertex_ai_credentials()
-    litellm.set_verbose = False
+    litellm.set_verbose = True
     litellm.vertex_project = "reliablekeys"
     import random

@@ -159,6 +159,8 @@ def test_vertex_ai_stream():
             "code-gecko@latest",
             "code-bison@001",
             "text-bison@001",
+            "gemini-1.5-pro",
+            "gemini-1.5-pro-vision",
         ]:
             # our account does not have access to this model
             continue

@@ -801,7 +801,6 @@ def test_completion_bedrock_claude_stream():
             raise Exception("finish reason not set for last chunk")
         if complete_response.strip() == "":
             raise Exception("Empty response received")
-        print(f"completion_response: {complete_response}")
     except RateLimitError:
         pass
     except Exception as e:

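Note: the Bedrock hunk only drops a debug print, but the surrounding assertions show the streaming-validation pattern the test relies on: accumulate the streamed text, then require a finish reason on the last chunk and a non-empty assembled response. A minimal sketch of that pattern, assuming OpenAI-style streaming deltas (the `stream_chunks` iterable and chunk layout here are illustrative, not the actual test fixtures):

```python
def validate_stream(stream_chunks):
    """Accumulate streamed text and check the final chunk's finish reason."""
    complete_response = ""
    finish_reason = None
    for chunk in stream_chunks:
        choice = chunk["choices"][0]
        complete_response += choice["delta"].get("content") or ""
        finish_reason = choice.get("finish_reason")
    # Mirrors the assertions in test_completion_bedrock_claude_stream()
    if finish_reason is None:
        raise Exception("finish reason not set for last chunk")
    if complete_response.strip() == "":
        raise Exception("Empty response received")
    return complete_response
```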
@@ -8705,7 +8705,7 @@ class CustomStreamWrapper:
 
             model_response.model = self.model
             print_verbose(
-                f"model_response finish reason 3: {model_response.choices[0].finish_reason}"
+                f"model_response finish reason 3: {model_response.choices[0].finish_reason}; response_obj={response_obj}"
             )
             ## FUNCTION CALL PARSING
             if (

@@ -8773,11 +8773,11 @@ class CustomStreamWrapper:
 
             ## RETURN ARG
             if (
-                response_obj.get("text", None) is not None
+                completion_obj["content"] is not None
                 or response_obj.get("original_chunk", None) is not None
             ):
                 hold = False
-                if response_obj.get("content", None) is not None:
+                if completion_obj["content"] is not None:
                     hold, model_response_str = self.check_special_tokens(
                         chunk=completion_obj["content"],
                         finish_reason=model_response.choices[0].finish_reason,
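The utils.py hunk above is the actual streaming fix: whether a chunk is returned is now decided by `completion_obj["content"]` (the text the wrapper is about to emit) rather than by `response_obj.get("text")` / `response_obj.get("content")`, which not every provider's parsed chunk populates. A minimal sketch of that gating idea, with a simplified stand-in for `check_special_tokens` (this is an illustration of the logic, not the CustomStreamWrapper implementation):

```python
def should_emit_chunk(completion_obj, response_obj, finish_reason):
    """Return True if a streaming chunk should be passed to the caller."""

    def check_special_tokens(chunk, finish_reason):
        # Simplified stand-in: hold back text that could be the start of a
        # special token until the finish reason tells us the stream is done.
        special_tokens = ["<|assistant|>", "<|user|>", "</s>"]
        if finish_reason is None and chunk and any(
            tok.startswith(chunk) and tok != chunk for tok in special_tokens
        ):
            return True, chunk  # hold
        return False, chunk  # safe to emit

    # Emit when there is content to send or a raw provider chunk to pass through.
    if (
        completion_obj["content"] is not None
        or response_obj.get("original_chunk", None) is not None
    ):
        hold = False
        if completion_obj["content"] is not None:
            hold, _ = check_special_tokens(completion_obj["content"], finish_reason)
        return not hold
    return False
```

Keying the check off `completion_obj` means the decision follows what will actually be sent downstream, regardless of which fields the provider-specific parser filled in on `response_obj`.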