mirror of
https://github.com/meta-llama/llama-stack.git
synced 2025-10-04 12:07:34 +00:00
Added non-streaming ollama inference impl
This commit is contained in:
parent
5b9c05c5dd
commit
0e75e73fa7
4 changed files with 332 additions and 1 deletions
|
@ -23,6 +23,7 @@ from .datatypes import QuantizationConfig
|
|||
class ImplType(Enum):
    """How an API implementation is hosted.

    Used as the discriminator value (`impl_type`) on the per-impl config
    models, so a tagged union of configs can be resolved from serialized data.
    """

    inline = "inline"  # runs in-process
    remote = "remote"  # generic remote module, reached via URL
    ollama = "ollama"  # served by an ollama server (added by this commit)
@json_schema_type
|
||||
|
@ -80,10 +81,17 @@ class RemoteImplConfig(BaseModel):
|
|||
url: str = Field(..., description="The URL of the remote module")
|
||||
|
||||
|
||||
@json_schema_type
class OllamaImplConfig(BaseModel):
    """Configuration for an inference implementation backed by an ollama server.

    `impl_type` is pinned to the `ollama` enum value so this model can act as
    one arm of the `impl_type`-discriminated union in `InferenceConfig`.
    """

    impl_type: Literal[ImplType.ollama.value] = ImplType.ollama.value
    # Model identifier as known to the ollama catalog (not a local path).
    model: str = Field(..., description="The name of the model in ollama catalog")
    # Base URL of the running ollama server, e.g. http://localhost:11434.
    url: str = Field(..., description="The URL for the ollama server")
|
||||
|
||||
|
||||
@json_schema_type
class InferenceConfig(BaseModel):
    """Top-level inference configuration.

    `impl_config` is a tagged union: pydantic selects the concrete config
    class (inline, remote, or ollama) by matching the `impl_type`
    discriminator field declared on each member model.
    """

    impl_config: Annotated[
        Union[InlineImplConfig, RemoteImplConfig, OllamaImplConfig],
        Field(discriminator="impl_type"),
    ]
|
||||
|
||||
|
|
Loading…
Add table
Add a link
Reference in a new issue