Mirror of https://github.com/meta-llama/llama-stack.git, synced 2025-10-21 16:07:16 +00:00.

Recent commits (all pushed by phoenix-oss):
chore(python-deps): bump pymilvus from 2.6.0 to 2.6.1 (#3285)
  #1883, commit 5c873d53db
refactor: remove lama-api-client from pyproject.toml (#3299)
  #1867, commit 4a59961a6c
chore(ui-deps): bump @types/node from 20.17.47 to 24.3.0 in /llama_stack/ui (#3290)
  #1847, commit 26b4340de3
fix: Remove bfcl scoring function as not supported (#3281)
  #1808, commit efdb5558b8
chore(dev): add inequality support to sqlstore where clause (#3272)
  #1798, commit ed418653ec
fix: docker failing to start container [fireworks-ai] (#3267)
  #1784, commit 30117dea22
feat: Add example notebook for Langchain + LLAMAStack integration (#3228)
  #1726, commit 2666029427 (a hedged sketch of this pattern follows after this list)
chore(github-deps): bump astral-sh/setup-uv from 6.5.0 to 6.6.0 (#3247)
  #1712, commit eed25fc6e4
feat: Add optional idempotency support to batches API (#3171)
  #1691, commit cffc4edf47 (a hedged sketch follows after this list)
chore(github-deps): bump astral-sh/setup-uv from 6.4.3 to 6.5.0 (#3179)
  #1646, commit 58e164b8bc
fix(cli): image name should not default to CONDA_DEFAULT_ENV (#2806)
  #859, commit d64e096c5f
feat: Add webmethod for deleting openai responses (#2160)
  #486, commit be9bf68246
fix: allow running vector tests with embedding dimension (#2467)
  #318, commit 6039d922c0
fix: #1867 InferenceRouter has no attribute formatter (#2422)
  #215, commit 4e37b49cdc
feat: Add OpenAI compat /v1/vector_store APIs (#2423)
  #207, commit 5ac43268e8 (a hedged sketch follows after this list)
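
The Langchain + LLAMAStack entry (#3228) refers to an example notebook. The sketch below shows only the general pattern of pointing Langchain's OpenAI-compatible chat model at a Llama Stack server; the base_url, api_key, and model name are placeholder assumptions, not values taken from that notebook.

```python
# Minimal sketch, assuming the Llama Stack server exposes an OpenAI-compatible
# chat endpoint. All concrete values below are placeholders.
from langchain_openai import ChatOpenAI

llm = ChatOpenAI(
    base_url="http://localhost:8321/v1",        # assumed server address and route
    api_key="not-needed-for-local-server",      # placeholder; local servers often ignore it
    model="meta-llama/Llama-3.1-8B-Instruct",   # assumed model identifier
)

# Send a single chat turn through the OpenAI-compatible endpoint.
print(llm.invoke("Say hello in one short sentence.").content)
```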
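
The batches entry (#3171) adds optional idempotency to the batches API. The plain-HTTP sketch below illustrates the general idea only: the /v1/batches route, the request fields, and in particular the idempotency_key name are assumptions based on the commit title, not the actual llama-stack schema.

```python
# Minimal sketch of idempotent batch creation, not the actual llama-stack API.
import requests

BASE_URL = "http://localhost:8321/v1"  # assumed server address

payload = {
    "input_file_id": "file-abc123",                 # hypothetical uploaded input file
    "endpoint": "/v1/chat/completions",             # OpenAI-style batch target
    "completion_window": "24h",
    "idempotency_key": "nightly-eval-2025-10-21",   # assumed optional key name
}

# Submitting the same payload twice: with idempotency honored, the second call
# should return the already-created batch instead of creating a duplicate.
first = requests.post(f"{BASE_URL}/batches", json=payload).json()
second = requests.post(f"{BASE_URL}/batches", json=payload).json()
print(first.get("id"), second.get("id"))  # expected to match when the key is honored
```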
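
The last entry (#2423) adds OpenAI-compatible vector store routes. Below is a minimal sketch of exercising such a route over plain HTTP; the base URL and the exact /v1/vector_stores path follow the OpenAI convention and are assumptions that may differ from the routes added by that commit.

```python
# Minimal sketch of an OpenAI-style vector store call against an assumed
# Llama Stack endpoint; adjust BASE_URL and paths to your deployment.
import requests

BASE_URL = "http://localhost:8321/v1"  # assumed server address

# Create a vector store (field names follow the OpenAI spec).
resp = requests.post(f"{BASE_URL}/vector_stores", json={"name": "docs"})
resp.raise_for_status()
print("created vector store:", resp.json().get("id"))

# List existing vector stores.
resp = requests.get(f"{BASE_URL}/vector_stores")
resp.raise_for_status()
for vs in resp.json().get("data", []):
    print(vs.get("id"), vs.get("name"))
```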