From 72e606355d9dba05142d848bd98ae85a777e7050 Mon Sep 17 00:00:00 2001
From: Nathan Weinberg <31703736+nathan-weinberg@users.noreply.github.com>
Date: Wed, 16 Jul 2025 11:24:57 -0400
Subject: [PATCH] fix: add shutdown function for localfs provider (#2781)

# What does this PR do?
The localfs files provider had no `shutdown` method, which was causing an unnecessary logger warning on server shutdown.

## Test Plan
Run `LLAMA_STACK_DIR=. ENABLE_OLLAMA=ollama OLLAMA_INFERENCE_MODEL=llama3.2:3b llama stack build --template starter --image-type venv --run`
and then `Ctrl-C` to shut down.

Signed-off-by: Nathan Weinberg
---
 llama_stack/providers/inline/files/localfs/files.py | 3 +++
 1 file changed, 3 insertions(+)

diff --git a/llama_stack/providers/inline/files/localfs/files.py b/llama_stack/providers/inline/files/localfs/files.py
index 851ce2a6a..bdf8c42c7 100644
--- a/llama_stack/providers/inline/files/localfs/files.py
+++ b/llama_stack/providers/inline/files/localfs/files.py
@@ -51,6 +51,9 @@ class LocalfsFilesImpl(Files):
             },
         )
 
+    async def shutdown(self) -> None:
+        pass
+
     def _generate_file_id(self) -> str:
         """Generate a unique file ID for OpenAI API."""
         return f"file-{uuid.uuid4().hex}"
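
For context, a minimal sketch of why a no-op `shutdown` silences the warning. This is not the actual llama_stack teardown code: the `shutdown_providers` helper, the log message, and the simplified `LocalfsFilesImpl` below are assumptions made for illustration. The assumed behavior is that the stack's teardown loop calls `shutdown()` on each provider impl and warns when the method is missing, so defining even an empty `shutdown()` removes the warning.

```python
# Illustrative sketch only -- not the real llama_stack shutdown path.
import asyncio
import logging

logger = logging.getLogger(__name__)


class LocalfsFilesImpl:
    """Simplified stand-in for the localfs files provider."""

    async def shutdown(self) -> None:
        # No resources to release for the local filesystem provider.
        pass


async def shutdown_providers(impls: list[object]) -> None:
    """Hypothetical teardown loop: call shutdown() on each impl, warn if absent."""
    for impl in impls:
        shutdown = getattr(impl, "shutdown", None)
        if shutdown is None:
            # Without the method, this branch would emit the unwanted warning.
            logger.warning("No shutdown method for %s", type(impl).__name__)
            continue
        await shutdown()


if __name__ == "__main__":
    logging.basicConfig(level=logging.WARNING)
    # With shutdown() defined, teardown completes silently.
    asyncio.run(shutdown_providers([LocalfsFilesImpl()]))
```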