Some checks failed:

- SqlStore Integration Tests / test-postgres (3.12) (push): Failing after 0s
- SqlStore Integration Tests / test-postgres (3.13) (push): Failing after 0s
- Integration Auth Tests / test-matrix (oauth2_token) (push): Failing after 1s
- Python Package Build Test / build (3.12) (push): Failing after 1s
- Python Package Build Test / build (3.13) (push): Failing after 1s
- Integration Tests (Replay) / Integration Tests (, , , client=, ) (push): Failing after 3s
- Test External Providers Installed via Module / test-external-providers-from-module (venv) (push): Skipped
- Vector IO Integration Tests / test-matrix (push): Failing after 5s
- API Conformance Tests / check-schema-compatibility (push): Successful in 9s
- Test External API and Providers / test-external (venv) (push): Failing after 4s
- Unit Tests / unit-tests (3.12) (push): Failing after 4s
- Unit Tests / unit-tests (3.13) (push): Failing after 4s
- UI Tests / ui-tests (22) (push): Successful in 38s
- Pre-commit / pre-commit (push): Successful in 1m27s
# What does this PR do?

Allows passing through `extra_body` parameters to inference providers. With this, we moved the two vLLM-specific parameters out of the completions API and into `extra_body`.

Before/After
<img width="1883" height="324" alt="image" src="https://github.com/user-attachments/assets/acb27c08-c748-46c9-b1da-0de64e9908a1" />

closes #2720

## Test Plan

CI and added new test

```
❯ uv run pytest -s -v tests/integration/ --stack-config=server:starter --inference-mode=record -k 'not( builtin_tool or safety_with_image or code_interpreter or test_rag ) and test_openai_completion_guided_choice' --setup=vllm --suite=base --color=yes
Uninstalled 3 packages in 125ms
Installed 3 packages in 19ms
INFO 2025-10-10 14:29:54,317 tests.integration.conftest:118 tests: Applying setup 'vllm' for suite base
INFO 2025-10-10 14:29:54,331 tests.integration.conftest:47 tests: Test stack config type: server (stack_config=server:starter)
============================== test session starts ==============================
platform darwin -- Python 3.12.11, pytest-8.4.2, pluggy-1.6.0 -- /Users/erichuang/projects/llama-stack-1/.venv/bin/python
cachedir: .pytest_cache
metadata: {'Python': '3.12.11', 'Platform': 'macOS-15.6.1-arm64-arm-64bit', 'Packages': {'pytest': '8.4.2', 'pluggy': '1.6.0'}, 'Plugins': {'anyio': '4.9.0', 'html': '4.1.1', 'socket': '0.7.0', 'asyncio': '1.1.0', 'json-report': '1.5.0', 'timeout': '2.4.0', 'metadata': '3.1.1', 'cov': '6.2.1', 'nbval': '0.11.0'}}
rootdir: /Users/erichuang/projects/llama-stack-1
configfile: pyproject.toml
plugins: anyio-4.9.0, html-4.1.1, socket-0.7.0, asyncio-1.1.0, json-report-1.5.0, timeout-2.4.0, metadata-3.1.1, cov-6.2.1, nbval-0.11.0
asyncio: mode=Mode.AUTO, asyncio_default_fixture_loop_scope=None, asyncio_default_test_loop_scope=function
collected 285 items / 284 deselected / 1 selected

tests/integration/inference/test_openai_completion.py::test_openai_completion_guided_choice[txt=vllm/Qwen/Qwen3-0.6B]
instantiating llama_stack_client
Starting llama stack server with config 'starter' on port 8321...
Waiting for server at http://localhost:8321... (0.0s elapsed)
Waiting for server at http://localhost:8321... (0.5s elapsed)
Waiting for server at http://localhost:8321... (5.1s elapsed)
Waiting for server at http://localhost:8321... (5.6s elapsed)
Waiting for server at http://localhost:8321... (10.1s elapsed)
Waiting for server at http://localhost:8321... (10.6s elapsed)
Server is ready at http://localhost:8321
llama_stack_client instantiated in 11.773s
PASSED
Terminating llama stack server process...
Terminating process 98444 and its group...
Server process and children terminated gracefully
============================== slowest 10 durations ==============================
11.88s setup tests/integration/inference/test_openai_completion.py::test_openai_completion_guided_choice[txt=vllm/Qwen/Qwen3-0.6B]
3.02s call tests/integration/inference/test_openai_completion.py::test_openai_completion_guided_choice[txt=vllm/Qwen/Qwen3-0.6B]
0.01s teardown tests/integration/inference/test_openai_completion.py::test_openai_completion_guided_choice[txt=vllm/Qwen/Qwen3-0.6B]
================== 1 passed, 284 deselected, 3 warnings in 16.21s ==================
```
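For reviewers, a minimal sketch of what a client call looks like after this change, using the standard OpenAI Python SDK's `extra_body` mechanism. The base URL, API key, prompt, and `guided_choice` values are illustrative (not taken from this PR); `guided_choice` appears to be one of the vLLM-specific parameters now routed through `extra_body`, per the test name above.

```python
from openai import OpenAI

# Illustrative: point the standard OpenAI client at a locally running
# Llama Stack's OpenAI-compatible endpoint (adjust base_url/api_key to
# your deployment; values here are placeholders).
client = OpenAI(base_url="http://localhost:8321/v1", api_key="not-needed")

# Provider-specific options are no longer top-level completion arguments;
# they are sent via extra_body and passed through to the backing provider
# (vLLM in the test plan above).
response = client.completions.create(
    model="vllm/Qwen/Qwen3-0.6B",
    prompt="The best programming language is",
    extra_body={"guided_choice": ["Python", "Rust"]},  # vLLM-specific; values illustrative
)
print(response.choices[0].text)
```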
543 lines · 16 KiB · JSON
{
  "test_id": null,
  "request": {
    "method": "POST",
    "url": "https://api.fireworks.ai/inference/v1/v1/models",
    "headers": {},
    "body": {},
    "endpoint": "/v1/models",
    "model": ""
  },
  "response": {
    "body": [
      {
        "__type__": "openai.types.model.Model",
        "__data__": {
          "id": "accounts/fireworks/models/flux-1-dev-fp8",
          "created": 1729532889,
          "object": "model",
          "owned_by": "fireworks",
          "kind": "FLUMINA_BASE_MODEL",
          "supports_chat": false,
          "supports_image_input": false,
          "supports_tools": false
        }
      },
      {
        "__type__": "openai.types.model.Model",
        "__data__": {
          "id": "accounts/fireworks/models/flux-kontext-max",
          "created": 1750714611,
          "object": "model",
          "owned_by": "fireworks",
          "kind": "FLUMINA_BASE_MODEL",
          "supports_chat": true,
          "supports_image_input": true,
          "supports_tools": false
        }
      },
      {
        "__type__": "openai.types.model.Model",
        "__data__": {
          "id": "accounts/fireworks/models/flux-kontext-pro",
          "created": 1750488264,
          "object": "model",
          "owned_by": "fireworks",
          "kind": "FLUMINA_BASE_MODEL",
          "supports_chat": true,
          "supports_image_input": true,
          "supports_tools": false
        }
      },
      {
        "__type__": "openai.types.model.Model",
        "__data__": {
          "id": "accounts/sentientfoundation-serverless/models/dobby-mini-unhinged-plus-llama-3-1-8b",
          "created": 1748467427,
          "object": "model",
          "owned_by": "sentientfoundation-serverless",
          "kind": "HF_BASE_MODEL",
          "supports_chat": true,
          "supports_image_input": false,
          "supports_tools": false,
          "context_length": 131072
        }
      },
      {
        "__type__": "openai.types.model.Model",
        "__data__": {
          "id": "accounts/sentientfoundation/models/dobby-unhinged-llama-3-3-70b-new",
          "created": 1739563474,
          "object": "model",
          "owned_by": "sentientfoundation",
          "kind": "HF_BASE_MODEL",
          "supports_chat": true,
          "supports_image_input": false,
          "supports_tools": false,
          "context_length": 131072
        }
      },
      {
        "__type__": "openai.types.model.Model",
        "__data__": {
          "id": "accounts/fireworks/models/gpt-oss-120b",
          "created": 1754345600,
          "object": "model",
          "owned_by": "fireworks",
          "kind": "HF_BASE_MODEL",
          "supports_chat": true,
          "supports_image_input": false,
          "supports_tools": true,
          "context_length": 131072
        }
      },
      {
        "__type__": "openai.types.model.Model",
        "__data__": {
          "id": "accounts/fireworks/models/qwen3-235b-a22b-instruct-2507",
          "created": 1753124424,
          "object": "model",
          "owned_by": "fireworks",
          "kind": "HF_BASE_MODEL",
          "supports_chat": true,
          "supports_image_input": false,
          "supports_tools": true,
          "context_length": 262144
        }
      },
      {
        "__type__": "openai.types.model.Model",
        "__data__": {
          "id": "accounts/fireworks/models/qwen3-235b-a22b-thinking-2507",
          "created": 1753455434,
          "object": "model",
          "owned_by": "fireworks",
          "kind": "HF_BASE_MODEL",
          "supports_chat": true,
          "supports_image_input": false,
          "supports_tools": false,
          "context_length": 262144
        }
      },
      {
        "__type__": "openai.types.model.Model",
        "__data__": {
          "id": "accounts/fireworks/models/deepseek-v3-0324",
          "created": 1742827220,
          "object": "model",
          "owned_by": "fireworks",
          "kind": "HF_BASE_MODEL",
          "supports_chat": true,
          "supports_image_input": false,
          "supports_tools": true,
          "context_length": 163840
        }
      },
      {
        "__type__": "openai.types.model.Model",
        "__data__": {
          "id": "accounts/fireworks/models/kimi-k2-instruct",
          "created": 1752259096,
          "object": "model",
          "owned_by": "fireworks",
          "kind": "HF_BASE_MODEL",
          "supports_chat": true,
          "supports_image_input": false,
          "supports_tools": true,
          "context_length": 131072
        }
      },
      {
        "__type__": "openai.types.model.Model",
        "__data__": {
          "id": "accounts/fireworks/models/gpt-oss-20b",
          "created": 1754345466,
          "object": "model",
          "owned_by": "fireworks",
          "kind": "HF_BASE_MODEL",
          "supports_chat": true,
          "supports_image_input": false,
          "supports_tools": false,
          "context_length": 131072
        }
      },
      {
        "__type__": "openai.types.model.Model",
        "__data__": {
          "id": "accounts/fireworks/models/kimi-k2-instruct-0905",
          "created": 1757018994,
          "object": "model",
          "owned_by": "fireworks",
          "kind": "HF_BASE_MODEL",
          "supports_chat": true,
          "supports_image_input": false,
          "supports_tools": true,
          "context_length": 262144
        }
      },
      {
        "__type__": "openai.types.model.Model",
        "__data__": {
          "id": "accounts/fireworks/models/llama-v3p3-70b-instruct",
          "created": 1733442103,
          "object": "model",
          "owned_by": "fireworks",
          "kind": "HF_BASE_MODEL",
          "supports_chat": true,
          "supports_image_input": false,
          "supports_tools": false,
          "context_length": 131072
        }
      },
      {
        "__type__": "openai.types.model.Model",
        "__data__": {
          "id": "accounts/fireworks/models/qwen3-235b-a22b",
          "created": 1745885249,
          "object": "model",
          "owned_by": "fireworks",
          "kind": "HF_BASE_MODEL",
          "supports_chat": true,
          "supports_image_input": false,
          "supports_tools": true,
          "context_length": 131072
        }
      },
      {
        "__type__": "openai.types.model.Model",
        "__data__": {
          "id": "accounts/fireworks/models/glm-4p5-air",
          "created": 1754089426,
          "object": "model",
          "owned_by": "fireworks",
          "kind": "HF_BASE_MODEL",
          "supports_chat": true,
          "supports_image_input": false,
          "supports_tools": true,
          "context_length": 131072
        }
      },
      {
        "__type__": "openai.types.model.Model",
        "__data__": {
          "id": "accounts/fireworks/models/deepseek-v3p1",
          "created": 1755758988,
          "object": "model",
          "owned_by": "fireworks",
          "kind": "HF_BASE_MODEL",
          "supports_chat": true,
          "supports_image_input": false,
          "supports_tools": true,
          "context_length": 163840
        }
      },
      {
        "__type__": "openai.types.model.Model",
        "__data__": {
          "id": "accounts/fireworks/models/flux-1-schnell-fp8",
          "created": 1729535376,
          "object": "model",
          "owned_by": "fireworks",
          "kind": "FLUMINA_BASE_MODEL",
          "supports_chat": false,
          "supports_image_input": false,
          "supports_tools": false
        }
      },
      {
        "__type__": "openai.types.model.Model",
        "__data__": {
          "id": "accounts/fireworks/models/llama-v3p1-405b-instruct",
          "created": 1721428386,
          "object": "model",
          "owned_by": "fireworks",
          "kind": "HF_BASE_MODEL",
          "supports_chat": true,
          "supports_image_input": false,
          "supports_tools": true,
          "context_length": 131072
        }
      },
      {
        "__type__": "openai.types.model.Model",
        "__data__": {
          "id": "accounts/fireworks/models/llama4-scout-instruct-basic",
          "created": 1743878279,
          "object": "model",
          "owned_by": "fireworks",
          "kind": "HF_BASE_MODEL",
          "supports_chat": true,
          "supports_image_input": true,
          "supports_tools": true,
          "context_length": 1048576
        }
      },
      {
        "__type__": "openai.types.model.Model",
        "__data__": {
          "id": "accounts/fireworks/models/qwen3-30b-a3b",
          "created": 1745878133,
          "object": "model",
          "owned_by": "fireworks",
          "kind": "HF_BASE_MODEL",
          "supports_chat": true,
          "supports_image_input": false,
          "supports_tools": true,
          "context_length": 131072
        }
      },
      {
        "__type__": "openai.types.model.Model",
        "__data__": {
          "id": "accounts/fireworks/models/llama-v3p1-70b-instruct",
          "created": 1721287357,
          "object": "model",
          "owned_by": "fireworks",
          "kind": "HF_BASE_MODEL",
          "supports_chat": true,
          "supports_image_input": false,
          "supports_tools": true,
          "context_length": 131072
        }
      },
      {
        "__type__": "openai.types.model.Model",
        "__data__": {
          "id": "accounts/fireworks/models/deepseek-r1-0528",
          "created": 1748456377,
          "object": "model",
          "owned_by": "fireworks",
          "kind": "HF_BASE_MODEL",
          "supports_chat": true,
          "supports_image_input": false,
          "supports_tools": true,
          "context_length": 163840
        }
      },
      {
        "__type__": "openai.types.model.Model",
        "__data__": {
          "id": "accounts/fireworks/models/mixtral-8x22b-instruct",
          "created": 1713375508,
          "object": "model",
          "owned_by": "fireworks",
          "kind": "HF_BASE_MODEL",
          "supports_chat": true,
          "supports_image_input": false,
          "supports_tools": true,
          "context_length": 65536
        }
      },
      {
        "__type__": "openai.types.model.Model",
        "__data__": {
          "id": "accounts/fireworks/models/llama4-maverick-instruct-basic",
          "created": 1743878495,
          "object": "model",
          "owned_by": "fireworks",
          "kind": "HF_BASE_MODEL",
          "supports_chat": true,
          "supports_image_input": true,
          "supports_tools": true,
          "context_length": 1048576
        }
      },
      {
        "__type__": "openai.types.model.Model",
        "__data__": {
          "id": "accounts/fireworks/models/qwen2p5-vl-32b-instruct",
          "created": 1743392739,
          "object": "model",
          "owned_by": "fireworks",
          "kind": "HF_BASE_MODEL",
          "supports_chat": true,
          "supports_image_input": true,
          "supports_tools": false,
          "context_length": 128000
        }
      },
      {
        "__type__": "openai.types.model.Model",
        "__data__": {
          "id": "accounts/fireworks/models/deepseek-v3p1-terminus",
          "created": 1758586241,
          "object": "model",
          "owned_by": "fireworks",
          "kind": "HF_BASE_MODEL",
          "supports_chat": true,
          "supports_image_input": false,
          "supports_tools": true,
          "context_length": 163840
        }
      },
      {
        "__type__": "openai.types.model.Model",
        "__data__": {
          "id": "accounts/fireworks/models/llama-v3p1-8b-instruct",
          "created": 1721692808,
          "object": "model",
          "owned_by": "fireworks",
          "kind": "HF_BASE_MODEL",
          "supports_chat": true,
          "supports_image_input": false,
          "supports_tools": false,
          "context_length": 131072
        }
      },
      {
        "__type__": "openai.types.model.Model",
        "__data__": {
          "id": "accounts/fireworks/models/qwen3-coder-480b-a35b-instruct",
          "created": 1753211090,
          "object": "model",
          "owned_by": "fireworks",
          "kind": "HF_BASE_MODEL",
          "supports_chat": true,
          "supports_image_input": false,
          "supports_tools": true,
          "context_length": 262144
        }
      },
      {
        "__type__": "openai.types.model.Model",
        "__data__": {
          "id": "accounts/fireworks/models/qwen3-30b-a3b-thinking-2507",
          "created": 1753916446,
          "object": "model",
          "owned_by": "fireworks",
          "kind": "HF_BASE_MODEL",
          "supports_chat": true,
          "supports_image_input": false,
          "supports_tools": false
        }
      },
      {
        "__type__": "openai.types.model.Model",
        "__data__": {
          "id": "accounts/fireworks/models/qwen3-embedding-8b",
          "created": 1755707090,
          "object": "model",
          "owned_by": "fireworks",
          "kind": "EMBEDDING_MODEL",
          "supports_chat": true,
          "supports_image_input": false,
          "supports_tools": false,
          "context_length": 40960
        }
      },
      {
        "__type__": "openai.types.model.Model",
        "__data__": {
          "id": "accounts/fireworks/models/qwen3-reranker-8b",
          "created": 1759865045,
          "object": "model",
          "owned_by": "fireworks",
          "kind": "EMBEDDING_MODEL",
          "supports_chat": true,
          "supports_image_input": false,
          "supports_tools": false,
          "context_length": 40960
        }
      },
      {
        "__type__": "openai.types.model.Model",
        "__data__": {
          "id": "accounts/fireworks/models/glm-4p5",
          "created": 1753809636,
          "object": "model",
          "owned_by": "fireworks",
          "kind": "HF_BASE_MODEL",
          "supports_chat": true,
          "supports_image_input": false,
          "supports_tools": true,
          "context_length": 131072
        }
      },
      {
        "__type__": "openai.types.model.Model",
        "__data__": {
          "id": "accounts/fireworks/models/qwen3-coder-30b-a3b-instruct",
          "created": 1754063588,
          "object": "model",
          "owned_by": "fireworks",
          "kind": "HF_BASE_MODEL",
          "supports_chat": true,
          "supports_image_input": false,
          "supports_tools": false,
          "context_length": 262144
        }
      },
      {
        "__type__": "openai.types.model.Model",
        "__data__": {
          "id": "accounts/fireworks/models/deepseek-r1",
          "created": 1737397673,
          "object": "model",
          "owned_by": "fireworks",
          "kind": "HF_BASE_MODEL",
          "supports_chat": true,
          "supports_image_input": false,
          "supports_tools": false,
          "context_length": 163840
        }
      },
      {
        "__type__": "openai.types.model.Model",
        "__data__": {
          "id": "accounts/fireworks/models/deepseek-v3",
          "created": 1735576668,
          "object": "model",
          "owned_by": "fireworks",
          "kind": "HF_BASE_MODEL",
          "supports_chat": true,
          "supports_image_input": false,
          "supports_tools": true,
          "context_length": 131072
        }
      },
      {
        "__type__": "openai.types.model.Model",
        "__data__": {
          "id": "accounts/fireworks/models/deepseek-r1-basic",
          "created": 1742306746,
          "object": "model",
          "owned_by": "fireworks",
          "kind": "HF_BASE_MODEL",
          "supports_chat": true,
          "supports_image_input": false,
          "supports_tools": false,
          "context_length": 163840
        }
      },
      {
        "__type__": "openai.types.model.Model",
        "__data__": {
          "id": "accounts/fireworks/models/qwen3-30b-a3b-instruct-2507",
          "created": 1753808388,
          "object": "model",
          "owned_by": "fireworks",
          "kind": "HF_BASE_MODEL",
          "supports_chat": true,
          "supports_image_input": false,
          "supports_tools": false,
          "context_length": 262144
        }
      },
      {
        "__type__": "openai.types.model.Model",
        "__data__": {
          "id": "accounts/tvergho-87e44d/models/debatecards-70b-ft-3epoch-dpo-v2",
          "created": 1743381121,
          "object": "model",
          "owned_by": "tvergho-87e44d",
          "kind": "HF_PEFT_ADDON",
          "supports_chat": true,
          "supports_image_input": false,
          "supports_tools": false
        }
      }
    ],
    "is_streaming": false
  },
  "id_normalization_mapping": {}
}
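The JSON above appears to be a recorded Fireworks `/v1/models` response of the kind the record/replay test infrastructure stores as a fixture. A small sketch of inspecting such a recording with the standard library (the file path is hypothetical):

```python
import json

# Path is hypothetical; recordings like the one above are plain JSON files.
with open("models-fireworks.json") as f:
    recording = json.load(f)

print(recording["request"]["endpoint"])  # "/v1/models"

# Each item in response.body is a serialized openai.types.model.Model.
for entry in recording["response"]["body"]:
    data = entry["__data__"]
    print(data["id"], data["kind"], data.get("context_length", "-"))
```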