chore: remove dependency on llama_models completely (#1344)

Author: Ashwin Bharambe, 2025-03-01 12:48:08 -08:00 (committed by GitHub)
Parent: 7131d5ddeb
Commit: 8bbd52bb9f
GPG key ID: B5690EEEBB952194 (no known key found for this signature in database)
43 changed files with 131358 additions and 202 deletions


@@ -13,9 +13,6 @@ import re
 from typing import List, Optional, Tuple, Union
 
 import httpx
-from llama_models.datatypes import StopReason
-from llama_models.llama3.api.chat_format import ChatFormat
-from llama_models.llama3.api.tokenizer import Tokenizer
 from PIL import Image as PIL_Image
 
 from llama_stack.apis.common.content_types import (
@@ -44,9 +41,11 @@ from llama_stack.models.llama.datatypes import (
     RawMessage,
     RawTextItem,
     Role,
+    StopReason,
     ToolPromptFormat,
     is_multimodal,
 )
+from llama_stack.models.llama.llama3.chat_format import ChatFormat
 from llama_stack.models.llama.llama3.prompt_templates import (
     BuiltinToolGenerator,
     FunctionTagCustomToolGenerator,
@@ -54,6 +53,7 @@ from llama_stack.models.llama.llama3.prompt_templates import (
     PythonListCustomToolGenerator,
     SystemDefaultGenerator,
 )
+from llama_stack.models.llama.llama3.tokenizer import Tokenizer
 from llama_stack.models.llama.sku_list import resolve_model
 from llama_stack.providers.utils.inference import supported_inference_models
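
For reference, here is a minimal sketch of what this migration means for downstream code: the same symbols are now imported from the vendored copies under llama_stack.models.llama rather than from the external llama_models package. The import paths are taken directly from the hunks above; the usage lines assume the vendored Tokenizer and ChatFormat keep the same API surface (get_instance() classmethod, tokenizer-taking constructor) as the original llama_models classes.

```python
# Previously (removed by this commit): symbols came from the external llama_models package.
# from llama_models.datatypes import StopReason
# from llama_models.llama3.api.chat_format import ChatFormat
# from llama_models.llama3.api.tokenizer import Tokenizer

# Now: the same symbols are imported from vendored modules inside llama_stack.
from llama_stack.models.llama.datatypes import StopReason
from llama_stack.models.llama.llama3.chat_format import ChatFormat
from llama_stack.models.llama.llama3.tokenizer import Tokenizer

# Usage sketch (assumes the vendored classes mirror the llama_models API):
tokenizer = Tokenizer.get_instance()  # load the bundled tokenizer model
formatter = ChatFormat(tokenizer)     # formats RawMessages into model input tokens
stop = StopReason.end_of_turn         # enum member name assumed unchanged
```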