Mirror of https://github.com/meta-llama/llama-stack.git, synced 2025-10-04 20:14:13 +00:00.
Added non-streaming ollama inference impl
This commit is contained in:
parent
5b9c05c5dd
commit
0e75e73fa7
4 changed files with 332 additions and 1 deletions
@@ -12,6 +12,10 @@ async def get_inference_api_instance(config: InferenceConfig):
         from .inference import InferenceImpl

         return InferenceImpl(config.impl_config)
     elif config.impl_config.impl_type == ImplType.ollama.value:
         from .inference import OllamaInference

         return OllamaInference(config.impl_config)

     from .client import InferenceClient
Loading…
Add table
Add a link
Reference in a new issue