From a7dfcef004ad2f8d5685eae5f734e20ec7584503 Mon Sep 17 00:00:00 2001
From: Hardik Shah
Date: Fri, 24 Jan 2025 13:04:05 -0800
Subject: [PATCH] make --report take a path when testing url endpoints

---
 tests/client-sdk/conftest.py | 17 +++++++++++++----
 tests/client-sdk/report.py   | 22 ++++++++--------------
 2 files changed, 21 insertions(+), 18 deletions(-)

diff --git a/tests/client-sdk/conftest.py b/tests/client-sdk/conftest.py
index 779c10e21..2a366e985 100644
--- a/tests/client-sdk/conftest.py
+++ b/tests/client-sdk/conftest.py
@@ -16,8 +16,15 @@ from report import Report
 def pytest_configure(config):
     config.option.tbstyle = "short"
     config.option.disable_warnings = True
-    if config.getoption("--report"):
-        config.pluginmanager.register(Report())
+    # Note:
+    # if report_path is not provided (aka no option --report in the pytest command),
+    # it will be set to False
+    # if --report is given with no value, it will be set to None (in this case we infer report_path)
+    # if --report /a/b is provided, it will be set to the path provided
+    # We want to handle all these cases and hence explicitly check for False
+    report_path = config.getoption("--report")
+    if report_path is not False:
+        config.pluginmanager.register(Report(report_path))
 
 
 TEXT_MODEL = "meta-llama/Llama-3.1-8B-Instruct"
@@ -27,9 +34,11 @@ VISION_MODEL = "meta-llama/Llama-3.2-11B-Vision-Instruct"
 def pytest_addoption(parser):
     parser.addoption(
         "--report",
+        action="store",
         default=False,
-        action="store_true",
-        help="Knob to determine if we should generate report, e.g. --output=True",
+        nargs="?",
+        type=str,
+        help="Path where the test report should be written, e.g. --report=/path/to/report.md",
     )
     parser.addoption(
         "--inference-model",
diff --git a/tests/client-sdk/report.py b/tests/client-sdk/report.py
index cf7a84d7f..f8f224a37 100644
--- a/tests/client-sdk/report.py
+++ b/tests/client-sdk/report.py
@@ -9,6 +9,7 @@ import importlib
 import os
 from collections import defaultdict
 from pathlib import Path
+from typing import Optional
 from urllib.parse import urlparse
 
 import pytest
@@ -85,7 +86,7 @@ SUPPORTED_MODELS = {
 
 
 class Report:
-    def __init__(self):
+    def __init__(self, report_path: Optional[str] = None):
         if os.environ.get("LLAMA_STACK_CONFIG"):
             config_path_or_template_name = get_env_or_fail("LLAMA_STACK_CONFIG")
             if config_path_or_template_name.endswith(".yaml"):
@@ -107,23 +108,16 @@ class Report:
             self.image_name = self.client.async_client.config.image_name
         elif os.environ.get("LLAMA_STACK_BASE_URL"):
             url = get_env_or_fail("LLAMA_STACK_BASE_URL")
-            hostname = urlparse(url).netloc
-            domain = hostname.split(".")[-2]
-            self.image_name = domain
-
+            self.image_name = urlparse(url).netloc
+            if report_path is None:
+                raise ValueError(
+                    "Report path must be provided when LLAMA_STACK_BASE_URL is set"
+                )
+            self.output_path = Path(report_path)
             self.client = LlamaStackClient(
                 base_url=url,
                 provider_data=None,
             )
 
-            # We assume that the domain maps to a template
-            # i.e. https://llamastack-preview.fireworks.ai --> "fireworks" template
-            # and add report in that directory
-            output_dir = Path(
-                importlib.resources.files("llama_stack") / f"templates/{domain}/"
-            )
-            if not output_dir.exists():
-                raise ValueError(f"Output dir {output_dir} does not exist")
-            self.output_path = Path(output_dir / "remote-hosted-report.md")
         else:
             raise ValueError("LLAMA_STACK_CONFIG or LLAMA_STACK_BASE_URL must be set")