Kai Wu 2024-12-03 19:34:15 -08:00
parent 98c97d3104
commit 09b6ec25d0


@@ -13,13 +13,13 @@ from llama_models.datatypes import CoreModelId
 from llama_models.llama3.api.chat_format import ChatFormat
 from llama_models.llama3.api.datatypes import Message
 from llama_models.llama3.api.tokenizer import Tokenizer
+from ollama import AsyncClient
 
 from llama_stack.providers.utils.inference.model_registry import (
     build_model_alias,
     build_model_alias_with_just_provider_model_id,
     ModelRegistryHelper,
 )
-from ollama import AsyncClient
 
 from llama_stack.apis.inference import *  # noqa: F403
 from llama_stack.providers.datatypes import ModelsProtocolPrivate
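
The hunk only reorders imports, but for context, the sketch below shows how these names typically fit together in the Ollama inference adapter: aliases built with build_model_alias / build_model_alias_with_just_provider_model_id are handed to a ModelRegistryHelper, and AsyncClient from the ollama package is the handle to the local Ollama server. This is a minimal illustration, not the code touched by this commit; the model tags and the trimmed class body are assumptions for the example.

# Illustrative sketch only; the real adapter in ollama.py is larger and also
# implements the Inference and ModelsProtocolPrivate protocols.
from llama_models.datatypes import CoreModelId
from ollama import AsyncClient

from llama_stack.providers.utils.inference.model_registry import (
    ModelRegistryHelper,
    build_model_alias,
    build_model_alias_with_just_provider_model_id,
)

# Map Ollama model tags to core Llama model descriptors; the
# *_with_just_provider_model_id variant registers the provider tag
# without attaching additional aliases. Tags here are examples.
model_aliases = [
    build_model_alias(
        "llama3.1:8b-instruct-fp16",
        CoreModelId.llama3_1_8b_instruct.value,
    ),
    build_model_alias_with_just_provider_model_id(
        "llama3.1:8b",
        CoreModelId.llama3_1_8b_instruct.value,
    ),
]


class OllamaInferenceAdapter:
    def __init__(self, url: str) -> None:
        # The helper resolves user-facing model identifiers to provider tags.
        self.register_helper = ModelRegistryHelper(model_aliases)
        self.url = url

    @property
    def client(self) -> AsyncClient:
        # AsyncClient talks to the Ollama server at the configured URL.
        return AsyncClient(host=self.url)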