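# Packaging metadata for the Fireworks AI inference provider for Llama Stack
# (distributed as "llama-stack-provider-inference-fireworks").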
[build-system]
requires = ["setuptools>=61.0"]
build-backend = "setuptools.build_meta"

[project]
name = "llama-stack-provider-inference-fireworks"
version = "0.1.0"
description = "Fireworks AI inference provider for Llama models and other AI models on the Fireworks platform"
authors = [{ name = "Meta Llama", email = "llama-oss@meta.com" }]
requires-python = ">=3.12"
license = { "text" = "MIT" }
dependencies = [
    "fireworks-ai",
    "grpcio>=1.67.1,<1.71.0", # Pin grpcio version for compatibility
]

[tool.setuptools.packages.find]
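# setuptools package discovery: "where" searches from the project root and
# "include" limits the build to packages matching the "llama_stack*" glob.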
where = ["."]
include = ["llama_stack*"]