fix: don't import from llama_models (#1436)
# What does this PR do?

Some imports were not switched to the in-tree copy of the modules.

This is a follow-up to: https://github.com/meta-llama/llama-stack/pull/1344

Closes #1435

## Test Plan

Manually started the server...

Signed-off-by: Ihar Hrachyshka <ihar.hrachyshka@gmail.com>
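The fix is a mechanical import switch from the external `llama_models` package to the in-tree copies under `llama_stack`; a minimal sketch of the pattern, with the module paths taken from the diff below:

```python
# Before (removed): imports resolved against the external llama_models package.
# from llama_models.llama3.api.chat_format import ChatFormat
# from llama_models.llama3.api.tokenizer import Tokenizer

# After (added): the same symbols now come from the in-tree copies in llama_stack.
from llama_stack.models.llama.llama3.chat_format import ChatFormat
from llama_stack.models.llama.llama3.tokenizer import Tokenizer
```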
parent 6cf79437b3
commit 4d4be03176

1 changed file with 2 additions and 3 deletions
@@ -7,9 +7,6 @@
 import time
 from typing import Any, AsyncGenerator, AsyncIterator, Dict, List, Optional, Union
 
-from llama_models.llama3.api.chat_format import ChatFormat
-from llama_models.llama3.api.tokenizer import Tokenizer
-
 from llama_stack import logcat
 from llama_stack.apis.common.content_types import (
     URL,
@@ -62,6 +59,8 @@ from llama_stack.apis.tools import (
     ToolRuntime,
 )
 from llama_stack.apis.vector_io import Chunk, QueryChunksResponse, VectorIO
+from llama_stack.models.llama.llama3.chat_format import ChatFormat
+from llama_stack.models.llama.llama3.tokenizer import Tokenizer
 from llama_stack.providers.datatypes import RoutingTable
 from llama_stack.providers.utils.telemetry.tracing import get_current_span
 
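Beyond manually starting the server, a lighter-weight check could be a hypothetical smoke test (not part of the PR's stated test plan) that the in-tree modules import cleanly without pulling `llama_models` back in:

```python
# Hypothetical smoke test (not the PR's stated test plan): confirm the in-tree
# copies import cleanly and that llama_models is not imported transitively.
import sys

from llama_stack.models.llama.llama3.chat_format import ChatFormat  # noqa: F401
from llama_stack.models.llama.llama3.tokenizer import Tokenizer  # noqa: F401

leaked = [name for name in sys.modules if name == "llama_models" or name.startswith("llama_models.")]
assert not leaked, f"llama_models was imported transitively: {leaked}"
print("in-tree imports OK")
```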