[remove import *][2/n] remove rest of import * in implementations (#690)

# What does this PR do?

- see https://github.com/meta-llama/llama-stack/pull/689
  ![image](https://github.com/user-attachments/assets/76946a67-7373-43b5-8a03-0ad201aa543b)

- leaving `tools/builtin.py` as-is to avoid merge conflicts; the general before/after pattern is sketched below
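
The change follows the same pattern as #689: each `from module import *` is replaced with an explicit list of the symbols the file actually uses. A minimal sketch of the pattern, using names that appear in the diff below (the surrounding snippet is illustrative, not any one file from this PR):

```python
# Before: a wildcard import pulls in every public name and needs a
# "# noqa: F403" to silence flake8.
from llama_stack.apis.inference import *  # noqa: F403

# After: only the names this module actually uses are imported.
from llama_stack.apis.inference import (
    ChatCompletionResponse,
    CompletionMessage,
    Message,
)
```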


## Test Plan

- see https://github.com/meta-llama/llama-stack/pull/689
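
In addition to the checks from #689, a quick scan can confirm that no wildcard imports remain in the implementations. A minimal sketch, assuming the source tree lives under `llama_stack/` and that `tools/builtin.py` is intentionally skipped (this script is illustrative, not part of the PR):

```python
# Hypothetical check: list any remaining "from ... import *" lines.
import pathlib
import re

WILDCARD = re.compile(r"^\s*from\s+[\w.]+\s+import\s+\*", re.MULTILINE)

for path in pathlib.Path("llama_stack").rglob("*.py"):
    if path.as_posix().endswith("tools/builtin.py"):
        continue  # left unchanged in this PR to avoid conflicts
    text = path.read_text(encoding="utf-8")
    for match in WILDCARD.finditer(text):
        line_no = text.count("\n", 0, match.start()) + 1
        print(f"{path}:{line_no}: {match.group().strip()}")
```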

## Sources

Please link relevant resources if necessary.


## Before submitting

- [ ] This PR fixes a typo or improves the docs (you can dismiss the
other checks if that's the case).
- [ ] Ran pre-commit to handle lint / formatting issues.
- [ ] Read the [contributor
guideline](https://github.com/meta-llama/llama-stack/blob/main/CONTRIBUTING.md),
      Pull Request section?
- [ ] Updated relevant documentation.
- [ ] Wrote necessary unit or integration tests.
Commit 6be563434e (parent bb0a3f5c8e), authored by Xi Yan on 2024-12-27 15:32:04 -08:00 and committed by GitHub.
84 changed files with 824 additions and 321 deletions


@@ -4,17 +4,28 @@
# This source code is licensed under the terms described in the LICENSE file in
# the root directory of this source tree.
from typing import AsyncGenerator, Optional
from typing import AsyncGenerator, List, Optional
from llama_models.llama3.api.chat_format import ChatFormat
from llama_models.llama3.api.datatypes import StopReason
from llama_stack.apis.inference import * # noqa: F403
from llama_models.llama3.api.datatypes import SamplingParams, StopReason
from pydantic import BaseModel
from llama_stack.apis.common.content_types import ImageContentItem, TextContentItem
from llama_stack.apis.inference import (
    ChatCompletionResponse,
    ChatCompletionResponseEvent,
    ChatCompletionResponseEventType,
    ChatCompletionResponseStreamChunk,
    CompletionMessage,
    CompletionResponse,
    CompletionResponseStreamChunk,
    Message,
    ToolCallDelta,
    ToolCallParseStatus,
)
from llama_stack.providers.utils.inference.prompt_adapter import (
    convert_image_content_to_url,
)


@@ -4,8 +4,10 @@
# This source code is licensed under the terms described in the LICENSE file in
# the root directory of this source tree.
from .api import * # noqa: F403
from .config import * # noqa: F403
from typing import List, Optional
from .api import KVStore
from .config import KVStoreConfig, KVStoreType
def kvstore_dependencies():


@@ -9,7 +9,7 @@ from typing import List, Optional
from redis.asyncio import Redis
from ..api import * # noqa: F403
from ..api import KVStore
from ..config import RedisKVStoreConfig


@@ -11,7 +11,7 @@ from typing import List, Optional
import aiosqlite
from ..api import * # noqa: F403
from ..api import KVStore
from ..config import SqliteKVStoreConfig


@@ -15,14 +15,17 @@ from urllib.parse import unquote
import chardet
import httpx
import numpy as np
from llama_models.llama3.api.tokenizer import Tokenizer
from numpy.typing import NDArray
from pypdf import PdfReader
from llama_models.llama3.api.datatypes import * # noqa: F403
from llama_models.llama3.api.tokenizer import Tokenizer
from llama_stack.apis.common.content_types import InterleavedContent, TextContentItem
from llama_stack.apis.memory import * # noqa: F403
from llama_stack.apis.common.content_types import (
    InterleavedContent,
    TextContentItem,
    URL,
)
from llama_stack.apis.memory import Chunk, MemoryBankDocument, QueryDocumentsResponse
from llama_stack.apis.memory_banks import VectorMemoryBank
from llama_stack.providers.datatypes import Api
from llama_stack.providers.utils.inference.prompt_adapter import (


@@ -12,10 +12,18 @@ import threading
import uuid
from datetime import datetime
from functools import wraps
from typing import Any, Callable, Dict, List
from typing import Any, Callable, Dict, List, Optional
from llama_stack.apis.telemetry import * # noqa: F403
from llama_stack.apis.telemetry import (
    LogSeverity,
    Span,
    SpanEndPayload,
    SpanStartPayload,
    SpanStatus,
    StructuredLogEvent,
    Telemetry,
    UnstructuredLogEvent,
)
from llama_stack.providers.utils.telemetry.trace_protocol import serialize_value
log = logging.getLogger(__name__)