[build-system]
# PEP 517 build backend; setuptools >= 61 is required for PEP 621 [project] metadata.
requires = ["setuptools>=61.0"]
build-backend = "setuptools.build_meta"

[project]
name = "llama-stack-provider-inference-ollama"
version = "0.1.0"
description = "Ollama inference provider for running local models through the Ollama runtime"
authors = [{ name = "Meta Llama", email = "llama-oss@meta.com" }]
requires-python = ">=3.12"
license = { text = "MIT" }
dependencies = [
    "ollama",
    "aiohttp",
    "h11>=0.16.0",  # floor on h11, presumably to pick up the security fix shipped in 0.16.0
]

[tool.setuptools.packages.find]
# Discover only the llama_stack* packages, starting from the repository root.
where = ["."]
include = ["llama_stack*"]
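
# Minimal sketch of a local development install, assuming the standard
# pip/setuptools editable workflow (the exact command is not specified by this file):
#   python -m pip install -e .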