llama-stack-mirror/llama_stack/cli/table.py
Ihar Hrachyshka cc700b2f68
feat: support listing all for llama stack list-providers (#1056)
# What does this PR do?
Add support for listing the providers of all APIs with `llama stack list-providers` (the `api` argument becomes optional).

For ease of reading, sort the output rows by type.

Before the change.

```
 llama stack list-providers
usage: llama stack list-providers [-h] {inference,safety,agents,vector_io,datasetio,scoring,eval,post_training,tool_runtime,telemetry}
llama stack list-providers: error: the following arguments are required: api
```

After the change.

```
+---------------+----------------------------------+----------------------------------------------------------------------------------+
| API Type      | Provider Type                    | PIP Package Dependencies                                                         |
+---------------+----------------------------------+----------------------------------------------------------------------------------+
| agents        | inline::meta-reference           | matplotlib,pillow,pandas,scikit-learn,aiosqlite,psycopg2-binary,redis            |
+---------------+----------------------------------+----------------------------------------------------------------------------------+
| datasetio     | inline::localfs                  | pandas                                                                           |
+---------------+----------------------------------+----------------------------------------------------------------------------------+
| datasetio     | remote::huggingface              | datasets                                                                         |
+---------------+----------------------------------+----------------------------------------------------------------------------------+
| eval          | inline::meta-reference           |                                                                                  |
+---------------+----------------------------------+----------------------------------------------------------------------------------+
| inference     | inline::meta-reference           | accelerate,blobfile,fairscale,torch,torchvision,transformers,zmq,lm-format-      |
|               |                                  | enforcer,sentence-transformers                                                   |
+---------------+----------------------------------+----------------------------------------------------------------------------------+
| inference     | inline::meta-reference-quantized | accelerate,blobfile,fairscale,torch,torchvision,transformers,zmq,lm-format-      |
|               |                                  | enforcer,sentence-transformers,fbgemm-gpu,torchao==0.5.0                         |
+---------------+----------------------------------+----------------------------------------------------------------------------------+
| inference     | inline::sentence-transformers    | sentence-transformers                                                            |
+---------------+----------------------------------+----------------------------------------------------------------------------------+
| inference     | inline::vllm                     | vllm                                                                             |
+---------------+----------------------------------+----------------------------------------------------------------------------------+
| inference     | remote::bedrock                  | boto3                                                                            |
+---------------+----------------------------------+----------------------------------------------------------------------------------+
| inference     | remote::cerebras                 | cerebras_cloud_sdk                                                               |
+---------------+----------------------------------+----------------------------------------------------------------------------------+
| inference     | remote::databricks               | openai                                                                           |
+---------------+----------------------------------+----------------------------------------------------------------------------------+
| inference     | remote::fireworks                | fireworks-ai                                                                     |
+---------------+----------------------------------+----------------------------------------------------------------------------------+
| inference     | remote::groq                     | groq                                                                             |
+---------------+----------------------------------+----------------------------------------------------------------------------------+
| inference     | remote::hf::endpoint             | huggingface_hub,aiohttp                                                          |
+---------------+----------------------------------+----------------------------------------------------------------------------------+
| inference     | remote::hf::serverless           | huggingface_hub,aiohttp                                                          |
+---------------+----------------------------------+----------------------------------------------------------------------------------+
| inference     | remote::nvidia                   | openai                                                                           |
+---------------+----------------------------------+----------------------------------------------------------------------------------+
| inference     | remote::ollama                   | ollama,aiohttp                                                                   |
+---------------+----------------------------------+----------------------------------------------------------------------------------+
| inference     | remote::runpod                   | openai                                                                           |
+---------------+----------------------------------+----------------------------------------------------------------------------------+
| inference     | remote::sambanova                | openai                                                                           |
+---------------+----------------------------------+----------------------------------------------------------------------------------+
| inference     | remote::tgi                      | huggingface_hub,aiohttp                                                          |
+---------------+----------------------------------+----------------------------------------------------------------------------------+
| inference     | remote::together                 | together                                                                         |
+---------------+----------------------------------+----------------------------------------------------------------------------------+
| inference     | remote::vllm                     | openai                                                                           |
+---------------+----------------------------------+----------------------------------------------------------------------------------+
| post_training | inline::torchtune                | torch,torchtune==0.5.0,torchao==0.8.0,numpy                                      |
+---------------+----------------------------------+----------------------------------------------------------------------------------+
| safety        | inline::code-scanner             | codeshield                                                                       |
+---------------+----------------------------------+----------------------------------------------------------------------------------+
| safety        | inline::llama-guard              |                                                                                  |
+---------------+----------------------------------+----------------------------------------------------------------------------------+
| safety        | inline::meta-reference           | transformers,torch --index-url https://download.pytorch.org/whl/cpu              |
+---------------+----------------------------------+----------------------------------------------------------------------------------+
| safety        | inline::prompt-guard             | transformers,torch --index-url https://download.pytorch.org/whl/cpu              |
+---------------+----------------------------------+----------------------------------------------------------------------------------+
| safety        | remote::bedrock                  | boto3                                                                            |
+---------------+----------------------------------+----------------------------------------------------------------------------------+
| scoring       | inline::basic                    |                                                                                  |
+---------------+----------------------------------+----------------------------------------------------------------------------------+
| scoring       | inline::braintrust               | autoevals,openai                                                                 |
+---------------+----------------------------------+----------------------------------------------------------------------------------+
| scoring       | inline::llm-as-judge             |                                                                                  |
+---------------+----------------------------------+----------------------------------------------------------------------------------+
| telemetry     | inline::meta-reference           | opentelemetry-sdk,opentelemetry-exporter-otlp-proto-http                         |
+---------------+----------------------------------+----------------------------------------------------------------------------------+
| tool_runtime  | inline::code-interpreter         |                                                                                  |
+---------------+----------------------------------+----------------------------------------------------------------------------------+
| tool_runtime  | inline::rag-runtime              |                                                                                  |
+---------------+----------------------------------+----------------------------------------------------------------------------------+
| tool_runtime  | remote::bing-search              | requests                                                                         |
+---------------+----------------------------------+----------------------------------------------------------------------------------+
| tool_runtime  | remote::brave-search             | requests                                                                         |
+---------------+----------------------------------+----------------------------------------------------------------------------------+
| tool_runtime  | remote::model-context-protocol   | mcp                                                                              |
+---------------+----------------------------------+----------------------------------------------------------------------------------+
| tool_runtime  | remote::tavily-search            | requests                                                                         |
+---------------+----------------------------------+----------------------------------------------------------------------------------+
| tool_runtime  | remote::wolfram-alpha            | requests                                                                         |
+---------------+----------------------------------+----------------------------------------------------------------------------------+
| vector_io     | inline::chromadb                 | blobfile,chardet,pypdf,tqdm,numpy,scikit-                                        |
|               |                                  | learn,scipy,nltk,sentencepiece,transformers,torch torchvision --index-url        |
|               |                                  | https://download.pytorch.org/whl/cpu,sentence-transformers --no-deps,chromadb    |
+---------------+----------------------------------+----------------------------------------------------------------------------------+
| vector_io     | inline::faiss                    | blobfile,chardet,pypdf,tqdm,numpy,scikit-                                        |
|               |                                  | learn,scipy,nltk,sentencepiece,transformers,torch torchvision --index-url        |
|               |                                  | https://download.pytorch.org/whl/cpu,sentence-transformers --no-deps,faiss-cpu   |
+---------------+----------------------------------+----------------------------------------------------------------------------------+
| vector_io     | inline::meta-reference           | blobfile,chardet,pypdf,tqdm,numpy,scikit-                                        |
|               |                                  | learn,scipy,nltk,sentencepiece,transformers,torch torchvision --index-url        |
|               |                                  | https://download.pytorch.org/whl/cpu,sentence-transformers --no-deps,faiss-cpu   |
+---------------+----------------------------------+----------------------------------------------------------------------------------+
| vector_io     | remote::chromadb                 | blobfile,chardet,pypdf,tqdm,numpy,scikit-                                        |
|               |                                  | learn,scipy,nltk,sentencepiece,transformers,torch torchvision --index-url        |
|               |                                  | https://download.pytorch.org/whl/cpu,sentence-transformers --no-deps,chromadb-   |
|               |                                  | client                                                                           |
+---------------+----------------------------------+----------------------------------------------------------------------------------+
| vector_io     | remote::pgvector                 | blobfile,chardet,pypdf,tqdm,numpy,scikit-                                        |
|               |                                  | learn,scipy,nltk,sentencepiece,transformers,torch torchvision --index-url        |
|               |                                  | https://download.pytorch.org/whl/cpu,sentence-transformers --no-                 |
|               |                                  | deps,psycopg2-binary                                                             |
+---------------+----------------------------------+----------------------------------------------------------------------------------+
| vector_io     | remote::qdrant                   | blobfile,chardet,pypdf,tqdm,numpy,scikit-                                        |
|               |                                  | learn,scipy,nltk,sentencepiece,transformers,torch torchvision --index-url        |
|               |                                  | https://download.pytorch.org/whl/cpu,sentence-transformers --no-deps,qdrant-     |
|               |                                  | client                                                                           |
+---------------+----------------------------------+----------------------------------------------------------------------------------+
| vector_io     | remote::weaviate                 | blobfile,chardet,pypdf,tqdm,numpy,scikit-                                        |
|               |                                  | learn,scipy,nltk,sentencepiece,transformers,torch torchvision --index-url        |
|               |                                  | https://download.pytorch.org/whl/cpu,sentence-transformers --no-deps,weaviate-   |
|               |                                  | client                                                                           |
+---------------+----------------------------------+----------------------------------------------------------------------------------+
```

[//]: # (If resolving an issue, uncomment and update the line below)
[//]: # (Closes #[issue-number])

## Test Plan

Manually.

[//]: # (## Documentation)

Signed-off-by: Ihar Hrachyshka <ihar.hrachyshka@gmail.com>
2025-02-12 22:03:28 -08:00

78 lines
2.3 KiB
Python

# Copyright (c) Meta Platforms, Inc. and affiliates.
# All rights reserved.
#
# This source code is licensed under the terms described in the LICENSE file in
# the root directory of this source tree.
import re
import textwrap
from typing import Iterable
from termcolor import cprint
def strip_ansi_colors(text):
    """Return *text* with all ANSI escape sequences (e.g. color codes) removed."""
    # re caches compiled patterns, so calling re.sub directly is equivalent
    # to compiling the pattern on every invocation as before.
    return re.sub(r"\x1B(?:[@-Z\\-_]|\[[0-?]*[ -/]*[@-~])", "", text)
def format_row(row, col_widths):
    """Render one table row as newline-joined, '|'-delimited text.

    Each cell is word-wrapped to its column width (blank lines preserved,
    long words never broken) and right-padded so columns stay aligned.
    Padding is computed on the color-stripped text so ANSI escape codes
    do not skew the alignment.
    """

    def wrap_cell(text, width):
        pieces = []
        for physical_line in text.split("\n"):
            if not physical_line.strip():
                # Keep blank lines instead of dropping them during wrapping.
                pieces.append("")
            else:
                pieces.extend(
                    textwrap.wrap(physical_line, width, break_long_words=False, replace_whitespace=False)
                )
        return pieces

    cells = [wrap_cell(text, width) for text, width in zip(row, col_widths)]
    # Tallest wrapped cell determines how many physical lines this row spans.
    height = max(len(cell) for cell in cells)

    rendered = []
    for line_no in range(height):
        padded = []
        for cell, width in zip(cells, col_widths):
            chunk = cell[line_no] if line_no < len(cell) else ""
            padding = width - len(strip_ansi_colors(chunk))
            padded.append(chunk + " " * padding)
        rendered.append("| " + " | ".join(padded) + " |")

    return "\n".join(rendered)
def print_table(rows, headers=None, separate_rows: bool = False, sort_by: Iterable[int] = tuple()):
    """Pretty-print *rows* as an ASCII table to stdout.

    :param rows: sequence of rows; falsy cells are rendered as empty strings.
    :param headers: optional column headers, printed in bold between dividers.
    :param separate_rows: when True, print a divider line after every row.
    :param sort_by: column indices to sort the rows by before printing.
    """

    def cell_width(text):
        # Widest physical line of the cell, ignoring ANSI color codes.
        return max(len(line) for line in strip_ansi_colors(text).split("\n"))

    # Work on a normalized copy so the caller's list is left untouched.
    rows = [[cell if cell else "" for cell in row] for row in rows]
    if sort_by:
        rows.sort(key=lambda row: tuple(row[i] for i in sort_by))

    columns = list(zip(*rows))
    if headers:
        col_widths = [
            max(cell_width(header), max(cell_width(cell) for cell in column))
            for header, column in zip(headers, columns)
        ]
    else:
        col_widths = [max(cell_width(cell) for cell in column) for column in columns]
    # Cap column width at 80 chars; format_row wraps anything wider.
    col_widths = [min(width, 80) for width in col_widths]

    divider = "+" + "+".join("-" * (width + 2) for width in col_widths) + "+"

    if headers:
        print(divider)
        cprint(format_row(headers, col_widths), "white", attrs=["bold"])
        print(divider)

    for row in rows:
        print(format_row(row, col_widths))
        if separate_rows:
            print(divider)

    # Without per-row dividers, a single closing divider ends the table.
    if not separate_rows:
        print(divider)