diff --git a/.github/actions/setup-typescript-client/action.yml b/.github/actions/setup-typescript-client/action.yml
index 05b7eb53f..8b78ba70c 100644
--- a/.github/actions/setup-typescript-client/action.yml
+++ b/.github/actions/setup-typescript-client/action.yml
@@ -33,4 +33,3 @@ runs:
       echo "::error::Invalid client-version: ${{ inputs.client-version }}"
       exit 1
     fi
-
diff --git a/scripts/integration-tests.sh b/scripts/integration-tests.sh
index 189f69747..ae88da68a 100755
--- a/scripts/integration-tests.sh
+++ b/scripts/integration-tests.sh
@@ -237,15 +237,56 @@ run_client_ts_tests() {
       return 1
     fi
     echo "Using local llama-stack-client-typescript from: $TS_CLIENT_PATH"
-    npm install --install-links "$TS_CLIENT_PATH"
+
+    # Build the TypeScript client first
+    echo "Building TypeScript client..."
+    pushd "$TS_CLIENT_PATH" >/dev/null
+    npm install --silent
+    npm run build --silent
+    popd >/dev/null
+
+    # Install the other project dependencies before the client
+    if [[ "${CI:-}" == "true" || "${CI:-}" == "1" ]]; then
+      npm ci --silent
+    else
+      npm install --silent
+    fi
+
+    # Then install the client from the local directory
+    echo "Installing llama-stack-client from: $TS_CLIENT_PATH"
+    npm install "$TS_CLIENT_PATH" --silent
+
+    # Verify installation
+    echo "Verifying llama-stack-client installation..."
+    if npm list llama-stack-client 2>/dev/null | grep -q llama-stack-client; then
+      echo "✅ llama-stack-client successfully installed"
+      npm list llama-stack-client
+    else
+      echo "❌ llama-stack-client not found in node_modules"
+      echo "Installed packages:"
+      npm list --depth=0
+      return 1
+    fi
   else
     # It's an npm version specifier - install from npm
     echo "Installing llama-stack-client@${TS_CLIENT_PATH} from npm"
     if [[ "${CI:-}" == "true" || "${CI:-}" == "1" ]]; then
-      npm ci
-      npm install "llama-stack-client@${TS_CLIENT_PATH}"
+      npm ci --silent
+      npm install "llama-stack-client@${TS_CLIENT_PATH}" --silent
     else
-      npm install "llama-stack-client@${TS_CLIENT_PATH}"
+      npm install "llama-stack-client@${TS_CLIENT_PATH}" --silent
+    fi
+
+    # Verify installation
+    echo "Verifying llama-stack-client installation..."
+    if npm list llama-stack-client 2>/dev/null | grep -q llama-stack-client; then
+      echo "✅ llama-stack-client successfully installed"
+      npm list llama-stack-client
+    else
+      echo "❌ llama-stack-client not found in node_modules"
+      echo "Installed packages:"
+      npm list --depth=0
+      return 1
     fi
   fi
 
diff --git a/tests/integration/README.md b/tests/integration/README.md
index c4c1f52bd..3559b785c 100644
--- a/tests/integration/README.md
+++ b/tests/integration/README.md
@@ -217,15 +217,15 @@ def test_asymmetric_embeddings(llama_stack_client, embedding_model_id):
 TypeScript SDK tests can run alongside Python tests when testing against `server:` stacks.
 Set `TS_CLIENT_PATH` to the path or version of `llama-stack-client-typescript` to enable:
 
 ```bash
-# Use published npm package
+# Use published npm package (responses suite)
 TS_CLIENT_PATH=^0.3.2 scripts/integration-tests.sh --stack-config server:ci-tests --suite responses --setup gpt
 
 # Use local checkout from ~/.cache (recommended for development)
 git clone https://github.com/llamastack/llama-stack-client-typescript.git ~/.cache/llama-stack-client-typescript
 TS_CLIENT_PATH=~/.cache/llama-stack-client-typescript scripts/integration-tests.sh --stack-config server:ci-tests --suite responses --setup gpt
 
-# Use any local path
-TS_CLIENT_PATH=/path/to/llama-stack-client-typescript scripts/integration-tests.sh --stack-config server:ci-tests --suite responses --setup gpt
+# Run base suite with TypeScript tests
+TS_CLIENT_PATH=~/.cache/llama-stack-client-typescript scripts/integration-tests.sh --stack-config server:ci-tests --suite base --setup ollama
 ```
 
 TypeScript tests run immediately after Python tests pass, using the same replay fixtures. The mapping between Python suites/setups and TypeScript test files is defined in `tests/integration/client-typescript/suites.json`.
diff --git a/tests/integration/client-typescript/__tests__/responses.test.ts b/tests/integration/client-typescript/__tests__/responses.test.ts
index b4ea475c3..ef0f4eef1 100644
--- a/tests/integration/client-typescript/__tests__/responses.test.ts
+++ b/tests/integration/client-typescript/__tests__/responses.test.ts
@@ -12,6 +12,7 @@
  */
 
 import { createTestClient, requireTextModel } from '../setup';
+import { getResponseOutputText } from 'llama-stack-client';
 
 describe('Responses API - Basic', () => {
   // Test cases matching llama-stack/tests/integration/responses/fixtures/test_cases.py
@@ -46,7 +47,7 @@
     });
 
     // Verify response has content
-    const outputText = response.output_text.toLowerCase().trim();
+    const outputText = getResponseOutputText(response).toLowerCase().trim();
     expect(outputText.length).toBeGreaterThan(0);
     expect(outputText).toContain(expected.toLowerCase());
 
@@ -58,7 +59,7 @@
 
     // Verify stored response matches
     const retrievedResponse = await client.responses.retrieve(response.id);
-    expect(retrievedResponse.output_text).toBe(response.output_text);
+    expect(getResponseOutputText(retrievedResponse)).toBe(getResponseOutputText(response));
 
     // Test follow-up with previous_response_id
     const nextResponse = await client.responses.create({
@@ -66,7 +67,7 @@
       input: 'Repeat your previous response in all caps.',
       previous_response_id: response.id,
     });
-    const nextOutputText = nextResponse.output_text.trim();
+    const nextOutputText = getResponseOutputText(nextResponse).trim();
 
     expect(nextOutputText).toContain(expected.toUpperCase());
   });
@@ -104,7 +105,7 @@
       expect(chunk.response.id).toBe(responseId);
 
       // Verify content quality
-      const outputText = chunk.response.output_text.toLowerCase().trim();
+      const outputText = getResponseOutputText(chunk.response).toLowerCase().trim();
       expect(outputText.length).toBeGreaterThan(0);
       expect(outputText).toContain(expected.toLowerCase());
 
@@ -127,6 +128,6 @@
 
     // Verify stored response matches streamed response
     const retrievedResponse = await client.responses.retrieve(responseId);
-    expect(retrievedResponse.output_text).toBe(lastEvent.response.output_text);
+    expect(getResponseOutputText(retrievedResponse)).toBe(getResponseOutputText(lastEvent.response));
   });
 });
diff --git a/tests/integration/client-typescript/setup.ts b/tests/integration/client-typescript/setup.ts
index 21d368c7b..2943d15cf 100644
--- a/tests/integration/client-typescript/setup.ts
+++ b/tests/integration/client-typescript/setup.ts
@@ -29,7 +29,7 @@ function loadTestConfig() {
   }
 
   // If setup is specified, load config from Python
-  let textModel = process.env['LLAMA_STACK_TEST_MODEL'];
+  let textModel = process.env['LLAMA_STACK_TEST_TEXT_MODEL'];
   let embeddingModel = process.env['LLAMA_STACK_TEST_EMBEDDING_MODEL'];
 
   if (setupName && !textModel) {
@@ -114,7 +114,8 @@ export function skipIfNoModel(modelType: 'text' | 'embedding'): typeof test {
   const model = modelType === 'text' ? TEST_CONFIG.textModel : TEST_CONFIG.embeddingModel;
 
   if (!model) {
-    const message = `Skipping: ${modelType} model not configured (set LLAMA_STACK_TEST_${modelType.toUpperCase()}_MODEL)`;
+    const envVar = modelType === 'text' ? 'LLAMA_STACK_TEST_TEXT_MODEL' : 'LLAMA_STACK_TEST_EMBEDDING_MODEL';
+    const message = `Skipping: ${modelType} model not configured (set ${envVar})`;
     return test.skip.bind(test) as typeof test;
   }
 
@@ -128,7 +129,7 @@
 export function requireTextModel(): string {
   if (!TEST_CONFIG.textModel) {
     throw new Error(
-      'LLAMA_STACK_TEST_MODEL environment variable is required. ' +
+      'LLAMA_STACK_TEST_TEXT_MODEL environment variable is required. ' +
         'Run tests using: ./scripts/integration-test.sh',
     );
   }
diff --git a/tests/integration/client-typescript/suites.json b/tests/integration/client-typescript/suites.json
index ace230acb..5c5b83058 100644
--- a/tests/integration/client-typescript/suites.json
+++ b/tests/integration/client-typescript/suites.json
@@ -5,16 +5,8 @@
     "files": ["__tests__/responses.test.ts"]
   },
   {
-    "suite": "responses",
-    "files": ["__tests__/responses.test.ts"]
-  },
-  {
-    "suite": "inference",
+    "suite": "base",
     "setup": "ollama",
     "files": ["__tests__/inference.test.ts"]
-  },
-  {
-    "suite": "inference",
-    "files": ["__tests__/inference.test.ts"]
   }
 ]
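
Note for reviewers: the test changes above swap the `response.output_text` convenience property for the `getResponseOutputText` helper, which the tests import from `llama-stack-client`. The sketch below is only a rough mental model of the behavior the assertions rely on; it assumes the helper concatenates the `output_text` content parts across the response's `output` items, and it is not the library's actual implementation:

```typescript
// Hypothetical sketch of getResponseOutputText's observable behavior.
// Assumption: an OpenAI-style Responses shape, where `output` is a list of
// items and message items carry content parts tagged 'output_text'.

interface ContentPart {
  type: string;
  text?: string;
}

interface OutputItem {
  type: string;
  content?: ContentPart[];
}

function getResponseOutputTextSketch(response: { output: OutputItem[] }): string {
  const pieces: string[] = [];
  for (const item of response.output) {
    // Skip non-message items (tool calls, reasoning, etc.).
    if (item.type !== 'message' || !item.content) continue;
    for (const part of item.content) {
      // Only text parts contribute to the aggregated output text.
      if (part.type === 'output_text' && typeof part.text === 'string') {
        pieces.push(part.text);
      }
    }
  }
  return pieces.join('');
}
```

Under this model, asserting that `getResponseOutputText(retrievedResponse)` equals `getResponseOutputText(lastEvent.response)` checks that the stored and streamed responses carry the same aggregated text, independent of whether the server populates an `output_text` field on either object.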