llama-stack/tests/client-sdk/conftest.py
Sixian Yi edf56884a7
add pytest option to generate a functional report for distribution (#833)
# What does this PR do?

Add a pytest option (`--report`) to generate a functional report
for a Llama Stack distribution.

## Test Plan
```
export LLAMA_STACK_CONFIG=./llama_stack/templates/fireworks/run.yaml
/opt/miniconda3/envs/stack/bin/pytest -s -v tests/client-sdk/  --report
```

Verify that a report file is generated under
`./llama_stack/templates/fireworks/report.md`
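
For context, here is a minimal sketch of how a pytest plugin along the lines
of `Report` can collect outcomes and emit a markdown report. The hook names
are standard pytest hooks; the class body, output path, and report layout are
illustrative, not the actual `report.py` implementation:

```
from collections import defaultdict


class Report:
    def __init__(self):
        # outcome ("passed" / "failed" / "skipped") -> list of test node ids
        self.outcomes = defaultdict(list)

    def pytest_runtest_logreport(self, report):
        # record each test's result from its "call" phase
        if report.when == "call":
            self.outcomes[report.outcome].append(report.nodeid)

    def pytest_sessionfinish(self, session):
        # write a simple markdown summary at the end of the session
        with open("report.md", "w") as f:
            f.write("# Functional Report\n\n")
            for outcome, nodeids in self.outcomes.items():
                f.write(f"## {outcome} ({len(nodeids)})\n\n")
                for nodeid in nodeids:
                    f.write(f"- `{nodeid}`\n")
```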


## Before submitting

- [ ] This PR fixes a typo or improves the docs (you can dismiss the
other checks if that's the case).
- [ ] Ran pre-commit to handle lint / formatting issues.
- [ ] Read the [contributor
guideline](https://github.com/meta-llama/llama-stack/blob/main/CONTRIBUTING.md),
      Pull Request section?
- [ ] Updated relevant documentation.
- [ ] Wrote necessary unit or integration tests.
2025-01-21 21:18:23 -08:00


# Copyright (c) Meta Platforms, Inc. and affiliates.
# All rights reserved.
#
# This source code is licensed under the terms described in the LICENSE file in
# the root directory of this source tree.

import os

import pytest
from llama_stack import LlamaStackAsLibraryClient
from llama_stack.providers.tests.env import get_env_or_fail
from llama_stack_client import LlamaStackClient

from report import Report


def pytest_configure(config):
    # Shorten tracebacks and silence warnings for readable output; register
    # the Report plugin only when --report is passed.
    config.option.tbstyle = "short"
    config.option.disable_warnings = True
    if config.getoption("--report"):
        config.pluginmanager.register(Report())


def pytest_addoption(parser):
    parser.addoption(
        "--report",
        default=False,
        action="store_true",
        help="Knob to determine if we should generate a report, e.g. --report",
    )


# Default model identifiers exercised by the client-sdk tests.
TEXT_MODEL = "meta-llama/Llama-3.1-8B-Instruct"
INFERENCE_MODEL = "meta-llama/Llama-3.2-11B-Vision-Instruct"


@pytest.fixture(scope="session")
def provider_data():
    # Check the environment for the Tavily and Brave search secrets and
    # inject whichever are set into the provider data.
    provider_data = {}
    if os.environ.get("TAVILY_SEARCH_API_KEY"):
        provider_data["tavily_search_api_key"] = os.environ["TAVILY_SEARCH_API_KEY"]
    if os.environ.get("BRAVE_SEARCH_API_KEY"):
        provider_data["brave_search_api_key"] = os.environ["BRAVE_SEARCH_API_KEY"]
    return provider_data if len(provider_data) > 0 else None
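
# Example (hypothetical key values) of enabling the search tools before
# running the suite:
#
#   export TAVILY_SEARCH_API_KEY=...
#   export BRAVE_SEARCH_API_KEY=...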


@pytest.fixture(scope="session")
def llama_stack_client(provider_data):
    # Prefer an in-process library client when LLAMA_STACK_CONFIG points at a
    # run config; otherwise connect to a running stack via LLAMA_STACK_BASE_URL.
    if os.environ.get("LLAMA_STACK_CONFIG"):
        client = LlamaStackAsLibraryClient(
            get_env_or_fail("LLAMA_STACK_CONFIG"),
            provider_data=provider_data,
            skip_logger_removal=True,
        )
        client.initialize()
    elif os.environ.get("LLAMA_STACK_BASE_URL"):
        client = LlamaStackClient(
            base_url=get_env_or_fail("LLAMA_STACK_BASE_URL"),
            provider_data=provider_data,
        )
    else:
        raise ValueError("LLAMA_STACK_CONFIG or LLAMA_STACK_BASE_URL must be set")
    return client
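

# Example usage in a client-sdk test (hypothetical test module; pytest
# injects the session-scoped fixture by name):
#
#     def test_models_list(llama_stack_client):
#         models = llama_stack_client.models.list()
#         assert len(models) > 0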