llama-stack-mirror/llama_stack/providers/remote/inference/fireworks/pyproject.toml
Charlie Doern 26a490b7fc test: convert tests to use show
Signed-off-by: Charlie Doern <cdoern@redhat.com>
2025-09-22 10:48:38 -04:00

21 lines
598 B
TOML

[build-system]
requires = ["setuptools>=61.0"]
build-backend = "setuptools.build_meta"

[project]
name = "llama-stack-provider-inference-fireworks"
version = "0.1.0"
description = "Fireworks AI inference provider for Llama models and other AI models on the Fireworks platform"
authors = [{ name = "Meta Llama", email = "llama-oss@meta.com" }]
requires-python = ">=3.12"
license = { text = "MIT" }
dependencies = [
    "fireworks-ai",
    "grpcio>=1.67.1,<1.71.0", # Pin grpcio version for compatibility
]

[tool.setuptools.packages.find]
where = ["."]
include = ["llama_stack*"]