Update spec
commit d9d34433fc
parent 9a5803a429

9 changed files with 118 additions and 39 deletions

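The change is a straight rename of two enum members: every producer that builds a ToolCallDelta and every consumer that inspects delta.parse_status switches from success/failure to succeeded/failed. For orientation, a minimal sketch of what ToolCallParseStatus plausibly looks like after this commit; only the two members visible in this diff are shown, and the repository's enum may define additional states not listed here.

from enum import Enum

class ToolCallParseStatus(Enum):
    # Members as they appear after this commit; other states may exist
    # in the real enum but are not visible in this diff.
    succeeded = "succeeded"  # previously ToolCallParseStatus.success
    failed = "failed"        # previously ToolCallParseStatus.failure
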
@@ -415,7 +415,7 @@ class ChatAgent(ShieldRunnerMixin):
                     step_type=StepType.tool_execution.value,
                     step_id=step_id,
                     delta=ToolCallDelta(
-                        parse_status=ToolCallParseStatus.success,
+                        parse_status=ToolCallParseStatus.succeeded,
                         content=ToolCall(
                             call_id="",
                             tool_name=MEMORY_QUERY_TOOL,

@@ -511,7 +511,7 @@ class ChatAgent(ShieldRunnerMixin):

                 delta = event.delta
                 if delta.type == "tool_call":
-                    if delta.parse_status == ToolCallParseStatus.success:
+                    if delta.parse_status == ToolCallParseStatus.succeeded:
                         tool_calls.append(delta.content)
                 if stream:
                     yield AgentTurnResponseStreamChunk(

@@ -431,7 +431,7 @@ class MetaReferenceInferenceImpl(
                     event_type=ChatCompletionResponseEventType.progress,
                     delta=ToolCallDelta(
                         content="",
-                        parse_status=ToolCallParseStatus.failure,
+                        parse_status=ToolCallParseStatus.failed,
                     ),
                     stop_reason=stop_reason,
                 )

@@ -443,7 +443,7 @@ class MetaReferenceInferenceImpl(
                     event_type=ChatCompletionResponseEventType.progress,
                     delta=ToolCallDelta(
                         content=tool_call,
-                        parse_status=ToolCallParseStatus.success,
+                        parse_status=ToolCallParseStatus.succeeded,
                     ),
                     stop_reason=stop_reason,
                 )

@@ -215,7 +215,7 @@ async def convert_chat_completion_response_stream(
                 event_type=event_type,
                 delta=ToolCallDelta(
                     content=tool_call,
-                    parse_status=ToolCallParseStatus.success,
+                    parse_status=ToolCallParseStatus.succeeded,
                 ),
             )
         )

@@ -501,7 +501,7 @@ async def convert_openai_chat_completion_stream(
                     event_type=next(event_type),
                     delta=ToolCallDelta(
                         content=_convert_openai_tool_calls(choice.delta.tool_calls)[0],
-                        parse_status=ToolCallParseStatus.success,
+                        parse_status=ToolCallParseStatus.succeeded,
                     ),
                     logprobs=_convert_openai_logprobs(choice.logprobs),
                 )

@@ -475,7 +475,7 @@ class TestInference:

        last = grouped[ChatCompletionResponseEventType.progress][-1]
        # assert last.event.stop_reason == expected_stop_reason
-        assert last.event.delta.parse_status == ToolCallParseStatus.success
+        assert last.event.delta.parse_status == ToolCallParseStatus.succeeded
        assert last.event.delta.content.type == "tool_call"

        call = last.event.delta.content

@@ -245,7 +245,7 @@ async def process_chat_completion_stream_response(
                 event_type=ChatCompletionResponseEventType.progress,
                 delta=ToolCallDelta(
                     content="",
-                    parse_status=ToolCallParseStatus.failure,
+                    parse_status=ToolCallParseStatus.failed,
                 ),
                 stop_reason=stop_reason,
             )

@@ -257,7 +257,7 @@ async def process_chat_completion_stream_response(
                 event_type=ChatCompletionResponseEventType.progress,
                 delta=ToolCallDelta(
                     content=tool_call,
-                    parse_status=ToolCallParseStatus.success,
+                    parse_status=ToolCallParseStatus.succeeded,
                 ),
                 stop_reason=stop_reason,
             )

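Taken together, the hunks above all apply the same rename at the producer and consumer sides of the streaming path. A hypothetical consumer-side sketch of the updated check follows; the import path, event shape, and helper name are assumptions for illustration, not code from this repository.

from typing import Any, AsyncIterator, List

# Import path is an assumption; adjust to wherever ToolCallParseStatus
# is defined in this repository.
from llama_stack.apis.inference import ToolCallParseStatus

async def collect_tool_calls(stream: AsyncIterator[Any]) -> List[Any]:
    # Gather successfully parsed tool calls from a streamed chat completion,
    # using the renamed enum members.
    tool_calls = []
    async for chunk in stream:
        delta = chunk.event.delta
        if delta.type == "tool_call":
            if delta.parse_status == ToolCallParseStatus.succeeded:  # was .success
                tool_calls.append(delta.content)
            elif delta.parse_status == ToolCallParseStatus.failed:   # was .failure
                raise RuntimeError("tool call arguments could not be parsed")
    return tool_calls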