Mirror of https://github.com/meta-llama/llama-stack.git (synced 2025-10-04 04:04:14 +00:00)

fix int tests

parent 51a3e9930e
commit b3b31bf357
3 changed files with 37 additions and 37 deletions
@@ -11,6 +11,8 @@
import uuid
from typing import Annotated, Any, Literal, Protocol, runtime_checkable

from pydantic import BaseModel, Field

from llama_stack.apis.inference import InterleavedContent
from llama_stack.apis.vector_dbs import VectorDB
from llama_stack.apis.version import LLAMA_STACK_API_V1
@@ -19,8 +21,6 @@ from llama_stack.providers.utils.vector_io.vector_utils import generate_chunk_id
from llama_stack.schema_utils import json_schema_type, webmethod
from llama_stack.strong_typing.schema import register_schema

from pydantic import BaseModel, Field


@json_schema_type
class ChunkMetadata(BaseModel):
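For context on the schema-registration imports kept together in the hunk above, here is a minimal sketch of how json_schema_type is applied to a Pydantic model in this module, following the bare-decorator usage visible on ChunkMetadata; the ExampleMetadata class and its fields are hypothetical placeholders, not part of the diff:

from pydantic import BaseModel

from llama_stack.schema_utils import json_schema_type


@json_schema_type                  # same bare-decorator usage as ChunkMetadata above
class ExampleMetadata(BaseModel):  # hypothetical model, for illustration only
    document_id: str               # hypothetical field
    source: str | None = None      # hypothetical field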
@@ -350,12 +350,7 @@ class VectorStoreFileLastError(BaseModel):
    message: str


-VectorStoreFileStatus = (
-    Literal["completed"]
-    | Literal["in_progress"]
-    | Literal["cancelled"]
-    | Literal["failed"]
-)
+VectorStoreFileStatus = Literal["completed"] | Literal["in_progress"] | Literal["cancelled"] | Literal["failed"]
register_schema(VectorStoreFileStatus, name="VectorStoreFileStatus")

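The single-line form added above is behaviorally identical to the removed parenthesized union. As a hedged illustration of how such a Literal-union status alias is consumed, here is a small sketch; ExampleFileObject and its fields are hypothetical stand-ins, not taken from the diff:

from typing import Literal

from pydantic import BaseModel, ValidationError

VectorStoreFileStatus = Literal["completed"] | Literal["in_progress"] | Literal["cancelled"] | Literal["failed"]


class ExampleFileObject(BaseModel):  # hypothetical stand-in, not a llama-stack class
    id: str
    status: VectorStoreFileStatus  # pydantic rejects values outside the four literals


print(ExampleFileObject(id="file-1", status="in_progress").status)  # validates fine

try:
    ExampleFileObject(id="file-2", status="queued")  # "queued" is not in the union
except ValidationError as exc:
    print(len(exc.errors()), "validation error")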
@@ -561,9 +556,7 @@ class VectorIO(Protocol):
        """
        ...

-    @webmethod(
-        route="/vector_stores/{vector_store_id}", method="GET", level=LLAMA_STACK_API_V1
-    )
+    @webmethod(route="/vector_stores/{vector_store_id}", method="GET", level=LLAMA_STACK_API_V1)
    async def openai_retrieve_vector_store(
        self,
        vector_store_id: str,
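For readers unfamiliar with the decorator being reflowed above: a decorator of this shape typically just records routing metadata (route, HTTP method, API level) on the protocol method so a server can wire it to an endpoint. Below is a simplified stand-in that mimics that shape; it is not llama-stack's actual implementation, and everything beyond the three parameter names shown in the diff is assumed:

from typing import Any, Callable


def webmethod(route: str, method: str, level: str) -> Callable[[Callable[..., Any]], Callable[..., Any]]:
    """Simplified stand-in: record routing metadata on the decorated function."""

    def decorator(fn: Callable[..., Any]) -> Callable[..., Any]:
        fn.__webmethod__ = {"route": route, "method": method, "level": level}  # type: ignore[attr-defined]
        return fn

    return decorator


@webmethod(route="/vector_stores/{vector_store_id}", method="GET", level="v1")  # "v1" stands in for LLAMA_STACK_API_V1
async def openai_retrieve_vector_store(vector_store_id: str) -> dict:
    return {"id": vector_store_id}


print(openai_retrieve_vector_store.__webmethod__["route"])  # /vector_stores/{vector_store_id}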
@@ -616,7 +616,6 @@ class OpenAIVectorStoreMixin(ABC):
            chunk_overlap_tokens,
            attributes,
        )

        if not chunks:
            vector_store_file_object.status = "failed"
            vector_store_file_object.last_error = VectorStoreFileLastError(
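The branch above is the failure path for file attachment: if chunking yields nothing, the file object is marked failed and a last_error is recorded. A hedged sketch of that shape follows; only the message field on VectorStoreFileLastError is visible in this diff, and the ExampleVectorStoreFileObject class and helper below are hypothetical:

from typing import Literal

from pydantic import BaseModel

VectorStoreFileStatus = Literal["completed"] | Literal["in_progress"] | Literal["cancelled"] | Literal["failed"]


class VectorStoreFileLastError(BaseModel):
    message: str  # the only field visible in this diff; the real model may define more


class ExampleVectorStoreFileObject(BaseModel):  # hypothetical stand-in
    id: str
    status: VectorStoreFileStatus = "in_progress"
    last_error: VectorStoreFileLastError | None = None


def mark_result(file_object: ExampleVectorStoreFileObject, chunks: list[str]) -> None:
    # Mirror of the branch above: an empty chunk list means the attachment failed.
    if not chunks:
        file_object.status = "failed"
        file_object.last_error = VectorStoreFileLastError(message="No chunks were produced from the file")
    else:
        file_object.status = "completed"


obj = ExampleVectorStoreFileObject(id="file-1")
mark_result(obj, chunks=[])
print(obj.status)  # failed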
@@ -877,7 +876,6 @@ class OpenAIVectorStoreMixin(ABC):
        batch_info = self.openai_file_batches[batch_id]
        batch_object = batch_info["batch_object"]
        vector_store_id = batch_object.vector_store_id

        for file_id in file_ids:
            try:
                # Process each file
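The hunk above sits in the file-batch processing loop: look up the batch record, then process each file id inside its own try block so one bad file does not abort the whole batch. A hedged sketch of that structure follows; apart from the names visible in the diff (openai_file_batches, batch_object, vector_store_id, file_ids), everything below is assumed, including where file_ids comes from:

import asyncio
from typing import Any


class ExampleBatchProcessor:  # hypothetical stand-in for the mixin's batch logic
    def __init__(self) -> None:
        # batch_id -> {"batch_object": ..., "file_ids": [...]}, mirroring the lookup in the diff
        self.openai_file_batches: dict[str, dict[str, Any]] = {}

    async def _attach_file(self, vector_store_id: str, file_id: str) -> None:
        # Placeholder for the real per-file work (chunking, embedding, persisting).
        await asyncio.sleep(0)

    async def process_batch(self, batch_id: str) -> dict[str, int]:
        batch_info = self.openai_file_batches[batch_id]
        batch_object = batch_info["batch_object"]
        vector_store_id = batch_object["vector_store_id"]  # attribute access on a model in the real code
        completed = failed = 0

        for file_id in batch_info["file_ids"]:
            try:
                # Process each file independently; a failure only affects that file.
                await self._attach_file(vector_store_id, file_id)
                completed += 1
            except Exception:
                failed += 1

        return {"completed": completed, "failed": failed}


async def _demo() -> None:
    processor = ExampleBatchProcessor()
    processor.openai_file_batches["batch-1"] = {
        "batch_object": {"vector_store_id": "vs-1"},
        "file_ids": ["file-1", "file-2"],
    }
    print(await processor.process_batch("batch-1"))


asyncio.run(_demo())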