forked from phoenix/litellm-mirror
(feat) bedrock add finish_reason to streaming responses
parent 0937d07d6d
commit e01d83cea6
2 changed files with 11 additions and 6 deletions
@@ -656,14 +656,14 @@ def test_completion_replicate_stream_bad_key():
 # test_completion_replicate_stream_bad_key()

-def test_completion_bedrock_ai21_stream():
+def test_completion_bedrock_claude_stream():
     try:
-        litellm.set_verbose=True
+        litellm.set_verbose=False
         response = completion(
             model="bedrock/anthropic.claude-instant-v1",
             messages=[{"role": "user", "content": "Be as verbose as possible and give as many details as possible, how does a court case get to the Supreme Court?"}],
             temperature=1,
-            max_tokens=4096,
+            max_tokens=20,
             stream=True,
         )
         print(response)

@@ -687,7 +687,7 @@ def test_completion_bedrock_ai21_stream():
     except Exception as e:
         pytest.fail(f"Error occurred: {e}")

-test_completion_bedrock_ai21_stream()
+# test_completion_bedrock_claude_stream()


def test_completion_sagemaker_stream():
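For reference, here is a minimal sketch (not part of this commit) of how a caller would see the finish_reason that this change surfaces when streaming from Bedrock. The model id, prompt, temperature, and max_tokens are taken from the test above; the chunk layout is assumed to be litellm's usual OpenAI-compatible streaming shape, and AWS credentials are assumed to be configured in the environment.

    from litellm import completion

    response = completion(
        model="bedrock/anthropic.claude-instant-v1",
        messages=[{"role": "user", "content": "How does a court case get to the Supreme Court?"}],
        temperature=1,
        max_tokens=20,
        stream=True,
    )

    for chunk in response:
        # Chunks follow the OpenAI streaming shape; finish_reason stays empty/None
        # until the final chunk, where this commit maps Bedrock's stop_reason
        # (anthropic) or completionReason (titan) onto it.
        choice = chunk.choices[0]
        if choice.delta.content:
            print(choice.delta.content, end="", flush=True)
        if choice.finish_reason:
            print(f"\n[finish_reason: {choice.finish_reason}]")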
@@ -3236,9 +3236,14 @@ class CustomStreamWrapper:
             finish_reason = ""
             if "outputText" in chunk_data:
                 text = chunk_data['outputText']
-            if "completion" in chunk_data:
+            # anthropic mapping
+            elif "completion" in chunk_data:
                 text = chunk_data['completion'] # bedrock.anthropic
-            if chunk_data.get("completionReason", None):
+                stop_reason = chunk_data.get("stop_reason", None)
+                if stop_reason != None:
+                    is_finished = True
+                    finish_reason = stop_reason
+            elif chunk_data.get("completionReason", None):
                 is_finished = True
                 finish_reason = chunk_data["completionReason"]
             elif chunk.get("error", None):
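To make the new branch logic easier to check in isolation, here is a self-contained sketch that applies the same if/elif chain to hand-written payloads. parse_bedrock_chunk is a hypothetical helper for illustration only, and the sample chunk dicts are assumed shapes (anthropic chunks carrying completion/stop_reason, titan-style chunks carrying completionReason), not captured Bedrock output.

    def parse_bedrock_chunk(chunk_data: dict):
        # Hypothetical helper mirroring the if/elif chain added in the hunk above.
        text = ""
        is_finished = False
        finish_reason = ""
        if "outputText" in chunk_data:
            text = chunk_data["outputText"]
        # anthropic mapping
        elif "completion" in chunk_data:
            text = chunk_data["completion"]  # bedrock.anthropic
            stop_reason = chunk_data.get("stop_reason", None)
            if stop_reason is not None:
                is_finished = True
                finish_reason = stop_reason
        elif chunk_data.get("completionReason", None):
            is_finished = True
            finish_reason = chunk_data["completionReason"]
        return text, is_finished, finish_reason

    # Mid-stream anthropic chunk: text arrives, stop_reason is still null.
    print(parse_bedrock_chunk({"completion": " to the Supreme", "stop_reason": None}))
    # Final anthropic chunk: stop_reason becomes the finish_reason.
    print(parse_bedrock_chunk({"completion": "", "stop_reason": "stop_sequence"}))
    # Titan-style end-of-stream marker: completionReason becomes the finish_reason.
    print(parse_bedrock_chunk({"completionReason": "FINISH"}))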