Move to pyproject.toml so the project is uv-compatible
parent 95786d5bdc
commit c6d9ff2054
3 changed files with 59 additions and 47 deletions
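
With the project metadata and dependencies declared in pyproject.toml, uv can resolve and install the package directly from the source tree. A minimal sketch of the workflow this enables (the exact commands are an assumption about usage, not part of this commit):

# Create a virtual environment and do an editable install with the
# dev extras defined under [project.optional-dependencies]:
uv venv
uv pip install -e ".[dev]"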
MANIFEST.in
@@ -1,4 +1,4 @@
-include requirements.txt
+include pyproject.toml
 include distributions/dependencies.json
 include llama_stack/distribution/*.sh
 include llama_stack/cli/scripts/*.sh
pyproject.toml
@@ -1,3 +1,61 @@
 [build-system]
 requires = ["setuptools>=61.0"]
 build-backend = "setuptools.build_meta"
+
+[project]
+name = "llama_stack"
+version = "0.1.0"
+authors = [
+    { name = "Meta Llama", email = "llama-oss@meta.com" },
+]
+description = "Llama Stack"
+readme = "README.md"
+requires-python = ">=3.10"
+license = { text = "MIT" }
+classifiers = [
+    "License :: OSI Approved :: MIT License",
+    "Programming Language :: Python :: 3",
+    "Operating System :: OS Independent",
+    "Intended Audience :: Developers",
+    "Intended Audience :: Information Technology",
+    "Intended Audience :: Science/Research",
+    "Topic :: Scientific/Engineering :: Artificial Intelligence",
+    "Topic :: Scientific/Engineering :: Information Analysis",
+]
+dependencies = [
+    "blobfile",
+    "fire",
+    "httpx",
+    "huggingface-hub",
+    "llama-models>=0.1.0",
+    "llama-stack-client>=0.1.0",
+    "prompt-toolkit",
+    "python-dotenv",
+    "pydantic>=2",
+    "requests",
+    "rich",
+    "setuptools",
+    "termcolor",
+]
+
+[project.optional-dependencies]
+dev = [
+    "pytest",
+    "pytest-asyncio",
+    "pytest-nbval",  # For notebook testing
+    "black",
+    "ruff",
+    "mypy",
+    "types-requests",
+    "types-setuptools",
+]
+
+[project.urls]
+Homepage = "https://github.com/meta-llama/llama-stack"
+
+[project.scripts]
+llama = "llama_stack.cli.llama:main"
+install-wheel-from-presigned = "llama_stack.cli.scripts.run:install_wheel_from_presigned"
+
+[tool.setuptools]
+packages = ["llama_stack"]
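
One behavioral note on the new [tool.setuptools] table: an explicit packages = ["llama_stack"] names only the top-level package, whereas the deleted setup.py used find_packages(), which also collects subpackages such as llama_stack.cli. If find_packages()-style discovery is wanted, setuptools' find directive is the pyproject equivalent (a sketch, not part of this commit):

# Alternative to the explicit list above; discovers subpackages
# automatically, the way find_packages() did:
[tool.setuptools.packages.find]
include = ["llama_stack", "llama_stack.*"]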
setup.py
@@ -1,46 +0,0 @@
-# Copyright (c) Meta Platforms, Inc. and affiliates.
-# All rights reserved.
-#
-# This source code is licensed under the terms described in the LICENSE file in
-# the root directory of this source tree.
-
-from setuptools import find_packages, setup
-
-
-# Function to read the requirements.txt file
-def read_requirements():
-    with open("requirements.txt") as req:
-        content = req.readlines()
-    return [line.strip() for line in content]
-
-
-setup(
-    name="llama_stack",
-    version="0.1.0",
-    author="Meta Llama",
-    author_email="llama-oss@meta.com",
-    description="Llama Stack",
-    entry_points={
-        "console_scripts": [
-            "llama = llama_stack.cli.llama:main",
-            "install-wheel-from-presigned = llama_stack.cli.scripts.run:install_wheel_from_presigned",
-        ]
-    },
-    long_description=open("README.md").read(),
-    long_description_content_type="text/markdown",
-    url="https://github.com/meta-llama/llama-stack",
-    packages=find_packages(),
-    classifiers=[
-        "License :: OSI Approved :: MIT License",
-        "Programming Language :: Python :: 3",
-        "Operating System :: OS Independent",
-        "Intended Audience :: Developers",
-        "Intended Audience :: Information Technology",
-        "Intended Audience :: Science/Research",
-        "Topic :: Scientific/Engineering :: Artificial Intelligence",
-        "Topic :: Scientific/Engineering :: Information Analysis",
-    ],
-    python_requires=">=3.10",
-    install_requires=read_requirements(),
-    include_package_data=True,
-)
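
Both the old entry_points/console_scripts block and the new [project.scripts] table register the same two commands, so the migration can be sanity-checked after an install. A quick sketch (assumes llama_stack is installed in the active environment, e.g. via the uv commands above):

# Verify that both console scripts declared in [project.scripts] are
# registered in the current environment (Python 3.10+ selection API).
from importlib.metadata import entry_points

console_scripts = entry_points(group="console_scripts")
for name in ("llama", "install-wheel-from-presigned"):
    matches = [ep for ep in console_scripts if ep.name == name]
    print(f"{name} -> {matches[0].value if matches else 'NOT FOUND'}")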