Mirror of https://github.com/meta-llama/llama-stack.git, synced 2025-10-04 12:07:34 +00:00
Moved ToolPromptFormat and jinja templates to llama_models.llama3.api
parent ab8193c88c
commit 5655266d58

13 changed files with 21 additions and 388 deletions
@@ -7,7 +7,7 @@
 from .datatypes import *  # noqa: F403
 from typing import Optional, Protocol

-from llama_models.llama3.api.datatypes import ToolDefinition
+from llama_models.llama3.api.datatypes import ToolDefinition, ToolPromptFormat

 # this dependency is annoying and we need a forked up version anyway
 from llama_models.schema_utils import webmethod
@@ -16,7 +16,7 @@ from llama_models.schema_utils import webmethod
 @json_schema_type
 class CompletionRequest(BaseModel):
     model: str
-    content: InterleavedTextAttachment
+    content: InterleavedTextMedia
     sampling_params: Optional[SamplingParams] = SamplingParams()

     stream: Optional[bool] = False
@@ -41,7 +41,7 @@ class CompletionResponseStreamChunk(BaseModel):
 @json_schema_type
 class BatchCompletionRequest(BaseModel):
     model: str
-    content_batch: List[InterleavedTextAttachment]
+    content_batch: List[InterleavedTextMedia]
     sampling_params: Optional[SamplingParams] = SamplingParams()
     logprobs: Optional[LogProbConfig] = None
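For context, a minimal usage sketch of the two request types touched above. It assumes InterleavedTextMedia accepts a plain string, that CompletionRequest and BatchCompletionRequest are importable from the module this diff edits (the path is not visible in this view), and that ToolPromptFormat is the enum that llama_models.llama3.api.datatypes now exports alongside ToolDefinition; the model id and prompt strings are purely illustrative.

# Hypothetical sketch; any name not shown in the diff above is an assumption.
from llama_models.llama3.api.datatypes import ToolDefinition, ToolPromptFormat

# CompletionRequest / BatchCompletionRequest come from the module edited in this
# commit; its import path is not shown here, so it is left implicit.

request = CompletionRequest(
    model="Meta-Llama-3.1-8B-Instruct",            # illustrative model id
    content="Summarize the last commit message.",  # InterleavedTextMedia, assumed to accept str
    stream=False,                                  # sampling_params keeps its default
)

batch = BatchCompletionRequest(
    model="Meta-Llama-3.1-8B-Instruct",
    content_batch=["prompt one", "prompt two"],    # List[InterleavedTextMedia]
)

# ToolPromptFormat now lives next to ToolDefinition in llama_models.llama3.api;
# it selects how tool definitions are rendered into the prompt, e.g.:
fmt = ToolPromptFormat.json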