Mirror of https://github.com/meta-llama/llama-stack.git (synced 2025-12-20 06:38:43 +00:00)
Move InterleavedContent to api/common/content_types.py
parent a30aaaa2e5
commit 4936794de1

16 changed files with 71 additions and 55 deletions
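In short: this commit consolidates the interleaved-content types (InterleavedContent, InterleavedContentItem, ImageContentItem, TextContentItem, URL) into llama_stack.apis.common.content_types, and the hunks below update providers and tests that previously imported them from llama_stack.apis.inference or llama_stack.apis.common.deployment_types. A minimal before/after sketch of the import change, with module paths taken from the hunks below:

# Before: callers pulled the types from inference / deployment_types.
# from llama_stack.apis.common.deployment_types import URL
# from llama_stack.apis.inference import InterleavedContent, TextContentItem

# After: import them from the dedicated content_types module.
from llama_stack.apis.common.content_types import (
    ImageContentItem,
    InterleavedContent,
    InterleavedContentItem,
    TextContentItem,
    URL,
)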
@@ -12,6 +12,7 @@ from typing import Any, Dict, List, Optional
 from llama_models.llama3.api.datatypes import *  # noqa: F403
 from llama_stack.apis.inference import *  # noqa: F403
 from llama_stack.apis.safety import *  # noqa: F403
+from llama_stack.apis.common.content_types import ImageContentItem, TextContentItem
 from llama_stack.distribution.datatypes import Api

 from llama_stack.providers.datatypes import ShieldsProtocolPrivate
@@ -21,8 +21,8 @@ from llama_stack.providers.utils.inference.model_registry import (
 )

 from llama_stack.apis.inference import *  # noqa: F403
+from llama_stack.apis.common.content_types import ImageContentItem, TextContentItem
 from llama_stack.providers.datatypes import ModelsProtocolPrivate

 from llama_stack.providers.utils.inference.openai_compat import (
     get_sampling_options,
     OpenAICompatCompletionChoice,
@@ -11,6 +11,7 @@ import pytest

 from llama_models.llama3.api.datatypes import *  # noqa: F403
 from llama_stack.apis.inference import *  # noqa: F403
+from llama_stack.apis.common.content_types import ImageContentItem, TextContentItem

 from .utils import group_chunks
@@ -8,7 +8,7 @@ import pytest
 import pytest_asyncio

 from llama_stack.apis.common.type_system import *  # noqa: F403
-from llama_stack.apis.common.deployment_types import URL
+from llama_stack.apis.common.content_types import URL
 from llama_stack.apis.datasets import DatasetInput
 from llama_stack.apis.models import ModelInput
@@ -10,7 +10,7 @@ from urllib.parse import unquote

 import pandas

-from llama_stack.apis.common.deployment_types import URL
+from llama_stack.apis.common.content_types import URL

 from llama_stack.providers.utils.memory.vector_store import parse_data_url
@@ -13,6 +13,8 @@ from llama_models.llama3.api.datatypes import StopReason
 from llama_stack.apis.inference import *  # noqa: F403
 from pydantic import BaseModel

+from llama_stack.apis.common.content_types import ImageContentItem, TextContentItem
+
 from llama_stack.providers.utils.inference.prompt_adapter import (
     convert_image_content_to_url,
 )
@@ -34,19 +34,21 @@ from llama_models.llama3.prompt_templates import (
 from llama_models.sku_list import resolve_model
 from PIL import Image as PIL_Image

-from llama_stack.apis.common.deployment_types import URL
+from llama_stack.apis.common.content_types import (
+    ImageContentItem,
+    InterleavedContent,
+    InterleavedContentItem,
+    TextContentItem,
+    URL,
+)

 from llama_stack.apis.inference import (
     ChatCompletionRequest,
     CompletionRequest,
-    ImageContentItem,
-    InterleavedContent,
-    InterleavedContentItem,
     Message,
     ResponseFormat,
     ResponseFormatType,
     SystemMessage,
-    TextContentItem,
     ToolChoice,
     UserMessage,
 )
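For orientation, a hedged sketch of how the relocated types might be composed into multimodal content after this change; the field names (text, url, uri) and the use of a plain list of items as interleaved content are assumptions about content_types.py, not something this diff shows:

from llama_stack.apis.common.content_types import (
    ImageContentItem,
    TextContentItem,
    URL,
)

# Assumed field names -- check content_types.py at this commit before relying on them.
interleaved = [
    TextContentItem(text="Describe this picture."),
    ImageContentItem(url=URL(uri="https://example.com/picture.png")),
]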
@@ -8,7 +8,7 @@ import base64
 import mimetypes
 import os

-from llama_stack.apis.common.deployment_types import URL
+from llama_stack.apis.common.content_types import URL


 def data_url_from_file(file_path: str) -> URL:
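The hunk above only swaps the URL import for the data_url_from_file helper; its signature is visible in the diff, and the neighbouring base64 / mimetypes imports suggest it packs a local file into a base64 data: URI, though that behaviour is an inference rather than something the diff states. A usage sketch, assuming it is called from within the same (unnamed in this extract) utility module:

# Hypothetical usage of the helper whose signature appears above.
# The returned URL is presumed to wrap a "data:<mime>;base64,..." URI.
url = data_url_from_file("/tmp/example.png")  # path is illustrative
print(url)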
@@ -21,7 +21,7 @@ from pypdf import PdfReader
 from llama_models.llama3.api.datatypes import *  # noqa: F403
 from llama_models.llama3.api.tokenizer import Tokenizer

-from llama_stack.apis.inference import InterleavedContent, TextContentItem
+from llama_stack.apis.common.content_types import InterleavedContent, TextContentItem
 from llama_stack.apis.memory import *  # noqa: F403
 from llama_stack.apis.memory_banks import VectorMemoryBank
 from llama_stack.providers.datatypes import Api