mirror of
https://github.com/meta-llama/llama-stack.git
synced 2025-12-03 09:53:45 +00:00
feat(tests): add TypeScript client integration test support (#4185)
Integration tests can now validate the TypeScript SDK alongside Python tests when running against server-mode stacks. Currently, this only adds a _small_ number of tests. We should extend only if truly needed -- this smoke check may be sufficient. When `RUN_CLIENT_TS_TESTS=1` is set, the test script runs TypeScript tests after Python tests pass. Tests are mapped via `tests/integration/client-typescript/suites.json` which defines which TypeScript test files correspond to each Python suite/setup combination. The fact that we need exact "test_id"s (which are actually generated by pytest) to be hardcoded inside the TypeScript tests (so we hit the recorded paths) is a big smell and it might become grating, but maybe the benefit is worth it if we keep this test suite _small_ and targeted. ## Test Plan Run with TypeScript tests enabled: ```bash OPENAI_API_KEY=dummy RUN_CLIENT_TS_TESTS=1 \ scripts/integration-tests.sh --stack-config server:ci-tests --suite responses --setup gpt ```
This commit is contained in:
parent
4e9633f7c3
commit
40b11efac4
15 changed files with 6208 additions and 10 deletions
|
|
@ -16,16 +16,16 @@ import sys
|
|||
from tests.integration.suites import SETUP_DEFINITIONS, SUITE_DEFINITIONS
|
||||
|
||||
|
||||
def get_setup_env_vars(setup_name, suite_name=None):
|
||||
def get_setup_config(setup_name, suite_name=None):
|
||||
"""
|
||||
Get environment variables for a setup, with optional suite default fallback.
|
||||
Get full configuration (env vars + defaults) for a setup.
|
||||
|
||||
Args:
|
||||
setup_name: Name of the setup (e.g., 'ollama', 'gpt')
|
||||
suite_name: Optional suite name to get default setup if setup_name is None
|
||||
|
||||
Returns:
|
||||
Dictionary of environment variables
|
||||
Dictionary with 'env' and 'defaults' keys
|
||||
"""
|
||||
# If no setup specified, try to get default from suite
|
||||
if not setup_name and suite_name:
|
||||
|
|
@ -34,7 +34,7 @@ def get_setup_env_vars(setup_name, suite_name=None):
|
|||
setup_name = suite.default_setup
|
||||
|
||||
if not setup_name:
|
||||
return {}
|
||||
return {"env": {}, "defaults": {}}
|
||||
|
||||
setup = SETUP_DEFINITIONS.get(setup_name)
|
||||
if not setup:
|
||||
|
|
@ -44,27 +44,31 @@ def get_setup_env_vars(setup_name, suite_name=None):
|
|||
)
|
||||
sys.exit(1)
|
||||
|
||||
return setup.env
|
||||
return {"env": setup.env, "defaults": setup.defaults}
|
||||
|
||||
|
||||
def main():
|
||||
parser = argparse.ArgumentParser(description="Extract environment variables from a test setup")
|
||||
parser = argparse.ArgumentParser(description="Extract environment variables and defaults from a test setup")
|
||||
parser.add_argument("--setup", help="Setup name (e.g., ollama, gpt)")
|
||||
parser.add_argument("--suite", help="Suite name to get default setup from if --setup not provided")
|
||||
parser.add_argument("--format", choices=["bash", "json"], default="bash", help="Output format (default: bash)")
|
||||
|
||||
args = parser.parse_args()
|
||||
|
||||
env_vars = get_setup_env_vars(args.setup, args.suite)
|
||||
config = get_setup_config(args.setup, args.suite)
|
||||
|
||||
if args.format == "bash":
|
||||
# Output as bash export statements
|
||||
for key, value in env_vars.items():
|
||||
# Output env vars as bash export statements
|
||||
for key, value in config["env"].items():
|
||||
print(f"export {key}='{value}'")
|
||||
# Output defaults as bash export statements with LLAMA_STACK_TEST_ prefix
|
||||
for key, value in config["defaults"].items():
|
||||
env_key = f"LLAMA_STACK_TEST_{key.upper()}"
|
||||
print(f"export {env_key}='{value}'")
|
||||
elif args.format == "json":
|
||||
import json
|
||||
|
||||
print(json.dumps(env_vars))
|
||||
print(json.dumps(config))
|
||||
|
||||
|
||||
if __name__ == "__main__":
|
||||
|
|
|
|||
|
|
@ -181,6 +181,10 @@ echo "$SETUP_ENV"
|
|||
eval "$SETUP_ENV"
|
||||
echo ""
|
||||
|
||||
# Export suite and setup names for TypeScript tests
|
||||
export LLAMA_STACK_TEST_SUITE="$TEST_SUITE"
|
||||
export LLAMA_STACK_TEST_SETUP="$TEST_SETUP"
|
||||
|
||||
ROOT_DIR="$THIS_DIR/.."
|
||||
cd $ROOT_DIR
|
||||
|
||||
|
|
@ -212,6 +216,71 @@ find_available_port() {
|
|||
return 1
|
||||
}
|
||||
|
||||
run_client_ts_tests() {
|
||||
if ! command -v npm &>/dev/null; then
|
||||
echo "npm could not be found; ensure Node.js is installed"
|
||||
return 1
|
||||
fi
|
||||
|
||||
pushd tests/integration/client-typescript >/dev/null
|
||||
|
||||
# Determine if TS_CLIENT_PATH is a directory path or an npm version
|
||||
if [[ -d "$TS_CLIENT_PATH" ]]; then
|
||||
# It's a directory path - use local checkout
|
||||
if [[ ! -f "$TS_CLIENT_PATH/package.json" ]]; then
|
||||
echo "Error: $TS_CLIENT_PATH exists but doesn't look like llama-stack-client-typescript (no package.json)"
|
||||
popd >/dev/null
|
||||
return 1
|
||||
fi
|
||||
echo "Using local llama-stack-client-typescript from: $TS_CLIENT_PATH"
|
||||
|
||||
# Build the TypeScript client first
|
||||
echo "Building TypeScript client..."
|
||||
pushd "$TS_CLIENT_PATH" >/dev/null
|
||||
npm install --silent
|
||||
npm run build --silent
|
||||
popd >/dev/null
|
||||
|
||||
# Install other dependencies first
|
||||
if [[ "${CI:-}" == "true" || "${CI:-}" == "1" ]]; then
|
||||
npm ci --silent
|
||||
else
|
||||
npm install --silent
|
||||
fi
|
||||
|
||||
# Then install the client from local directory
|
||||
echo "Installing llama-stack-client from: $TS_CLIENT_PATH"
|
||||
npm install "$TS_CLIENT_PATH" --silent
|
||||
else
|
||||
# It's an npm version specifier - install from npm
|
||||
echo "Installing llama-stack-client@${TS_CLIENT_PATH} from npm"
|
||||
if [[ "${CI:-}" == "true" || "${CI:-}" == "1" ]]; then
|
||||
npm ci --silent
|
||||
npm install "llama-stack-client@${TS_CLIENT_PATH}" --silent
|
||||
else
|
||||
npm install "llama-stack-client@${TS_CLIENT_PATH}" --silent
|
||||
fi
|
||||
fi
|
||||
|
||||
# Verify installation
|
||||
echo "Verifying llama-stack-client installation..."
|
||||
if npm list llama-stack-client 2>/dev/null | grep -q llama-stack-client; then
|
||||
echo "✅ llama-stack-client successfully installed"
|
||||
npm list llama-stack-client
|
||||
else
|
||||
echo "❌ llama-stack-client not found in node_modules"
|
||||
echo "Installed packages:"
|
||||
npm list --depth=0
|
||||
popd >/dev/null
|
||||
return 1
|
||||
fi
|
||||
|
||||
echo "Running TypeScript tests for suite $TEST_SUITE (setup $TEST_SETUP)"
|
||||
npm test
|
||||
|
||||
popd >/dev/null
|
||||
}
|
||||
|
||||
# Start Llama Stack Server if needed
|
||||
if [[ "$STACK_CONFIG" == *"server:"* && "$COLLECT_ONLY" == false ]]; then
|
||||
# Find an available port for the server
|
||||
|
|
@ -221,6 +290,7 @@ if [[ "$STACK_CONFIG" == *"server:"* && "$COLLECT_ONLY" == false ]]; then
|
|||
exit 1
|
||||
fi
|
||||
export LLAMA_STACK_PORT
|
||||
export TEST_API_BASE_URL="http://localhost:$LLAMA_STACK_PORT"
|
||||
echo "Will use port: $LLAMA_STACK_PORT"
|
||||
|
||||
stop_server() {
|
||||
|
|
@ -298,6 +368,7 @@ if [[ "$STACK_CONFIG" == *"docker:"* && "$COLLECT_ONLY" == false ]]; then
|
|||
exit 1
|
||||
fi
|
||||
export LLAMA_STACK_PORT
|
||||
export TEST_API_BASE_URL="http://localhost:$LLAMA_STACK_PORT"
|
||||
echo "Will use port: $LLAMA_STACK_PORT"
|
||||
|
||||
echo "=== Building Docker Image for distribution: $DISTRO ==="
|
||||
|
|
@ -506,5 +577,10 @@ else
|
|||
exit 1
|
||||
fi
|
||||
|
||||
# Run TypeScript client tests if TS_CLIENT_PATH is set
|
||||
if [[ $exit_code -eq 0 && -n "${TS_CLIENT_PATH:-}" && "${LLAMA_STACK_TEST_STACK_CONFIG_TYPE:-}" == "server" ]]; then
|
||||
run_client_ts_tests
|
||||
fi
|
||||
|
||||
echo ""
|
||||
echo "=== Integration Tests Complete ==="
|
||||
|
|
|
|||
Loading…
Add table
Add a link
Reference in a new issue