Mirror of https://github.com/meta-llama/llama-stack.git

Commit be6819fa92 ("pre-commit")
Parent: 31f211f0ad
8 changed files with 14 additions and 34 deletions

@@ -7,7 +7,7 @@
 from enum import Enum
 from typing import Any, Literal, Protocol

-from pydantic import BaseModel, Field
+from pydantic import BaseModel
 from typing_extensions import runtime_checkable

 from llama_stack.apis.common.content_types import URL, InterleavedContent

@@ -11,6 +11,7 @@
 # top-level folder for each specific model found within the models/ directory at
 # the top-level of this source tree.

+import json
 import textwrap

 from llama_stack.models.llama.datatypes import (

@@ -184,7 +185,7 @@ def usecases() -> list[UseCase | str]:
                 ToolCall(
                     call_id="tool_call_id",
                     tool_name=BuiltinTool.wolfram_alpha,
-                    arguments={"query": "100th decimal of pi"},
+                    arguments=json.dumps({"query": "100th decimal of pi"}),
                 )
             ],
         ),

@@ -11,6 +11,7 @@
 # top-level folder for each specific model found within the models/ directory at
 # the top-level of this source tree.

+import json
 import textwrap

 from llama_stack.models.llama.datatypes import (

@@ -185,7 +186,7 @@ def usecases() -> list[UseCase | str]:
                 ToolCall(
                     call_id="tool_call_id",
                     tool_name=BuiltinTool.wolfram_alpha,
-                    arguments={"query": "100th decimal of pi"},
+                    arguments=json.dumps({"query": "100th decimal of pi"}),
                 )
             ],
         ),
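
Both prompt-format modules receive the same pair of changes: import json is added next to import textwrap, and the example ToolCall in usecases() now passes its arguments through json.dumps. The sketch below illustrates the resulting pattern; it assumes ToolCall and BuiltinTool are exported by llama_stack.models.llama.datatypes (the module already imported in these hunks) and that arguments is now a plain JSON string field, which is inferred from the diff rather than confirmed.

import json

# Assumed import path, based on the import block shown in the hunks above.
from llama_stack.models.llama.datatypes import BuiltinTool, ToolCall

# The documentation example now serializes its arguments to a JSON string up front.
call = ToolCall(
    call_id="tool_call_id",
    tool_name=BuiltinTool.wolfram_alpha,
    arguments=json.dumps({"query": "100th decimal of pi"}),
)

# A consumer that needs structured arguments decodes the string again.
assert json.loads(call.arguments) == {"query": "100th decimal of pi"}

The added import json in both files exists only to support this serialization in the examples.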

@@ -1171,7 +1171,6 @@ async def convert_openai_chat_completion_stream(
                 )

         try:
-            arguments = json.loads(buffer["arguments"])
             tool_call = ToolCall(
                 call_id=buffer["call_id"],
                 tool_name=buffer["name"],
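
On the streaming side, the converter stops decoding the buffered argument string before constructing the ToolCall; the accumulated JSON text is handed over as-is. A rough sketch of that flow under the same assumption about ToolCall.arguments being a string; the buffer layout and the sample deltas are illustrative, and only the keys visible in the hunk (call_id, name, arguments) come from the source.

from llama_stack.models.llama.datatypes import ToolCall

# Hypothetical accumulation of streamed tool-call deltas; the concrete values
# here are made up for illustration.
buffer = {"call_id": "call_1", "name": "wolfram_alpha", "arguments": ""}
for delta in ('{"query": ', '"100th decimal of pi"}'):
    buffer["arguments"] += delta  # argument fragments arrive as string chunks

# The accumulated JSON text is passed through unchanged rather than being
# decoded into a dict first.
tool_call = ToolCall(
    call_id=buffer["call_id"],
    tool_name=buffer["name"],
    arguments=buffer["arguments"],
)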