diff --git a/llama_stack/providers/utils/inference/openai_compat.py b/llama_stack/providers/utils/inference/openai_compat.py
index 871e39aaa..ba63be2b6 100644
--- a/llama_stack/providers/utils/inference/openai_compat.py
+++ b/llama_stack/providers/utils/inference/openai_compat.py
@@ -4,17 +4,28 @@
 # This source code is licensed under the terms described in the LICENSE file in
 # the root directory of this source tree.

-from typing import AsyncGenerator, Optional
+from typing import AsyncGenerator, List, Optional

 from llama_models.llama3.api.chat_format import ChatFormat
-from llama_models.llama3.api.datatypes import StopReason
-
-from llama_stack.apis.inference import *  # noqa: F403
+from llama_models.llama3.api.datatypes import SamplingParams, StopReason
 from pydantic import BaseModel

 from llama_stack.apis.common.content_types import ImageContentItem, TextContentItem
+from llama_stack.apis.inference import (
+    ChatCompletionResponse,
+    ChatCompletionResponseEvent,
+    ChatCompletionResponseEventType,
+    ChatCompletionResponseStreamChunk,
+    CompletionMessage,
+    CompletionResponse,
+    CompletionResponseStreamChunk,
+    Message,
+    ToolCallDelta,
+    ToolCallParseStatus,
+)
+
 from llama_stack.providers.utils.inference.prompt_adapter import (
     convert_image_content_to_url,
 )
diff --git a/llama_stack/providers/utils/kvstore/kvstore.py b/llama_stack/providers/utils/kvstore/kvstore.py
index 469f400d0..79cad28b1 100644
--- a/llama_stack/providers/utils/kvstore/kvstore.py
+++ b/llama_stack/providers/utils/kvstore/kvstore.py
@@ -4,8 +4,10 @@
 # This source code is licensed under the terms described in the LICENSE file in
 # the root directory of this source tree.

-from .api import *  # noqa: F403
-from .config import *  # noqa: F403
+from typing import List, Optional
+
+from .api import KVStore
+from .config import KVStoreConfig, KVStoreType


 def kvstore_dependencies():