mirror of
https://github.com/meta-llama/llama-stack.git
synced 2025-12-11 19:56:03 +00:00
chore: bump openai package version
To match https://github.com/llamastack/llama-stack/pull/3847. We must not update the lock manually, but must always reflect the update in pyproject.toml; the lock is a snapshot of state at build time. Signed-off-by: Sébastien Han <seb@redhat.com>
This commit is contained in:
parent
9d6e589120
commit
58d4fd6805
2 changed files with 3 additions and 3 deletions
4
uv.lock
generated
4
uv.lock
generated
|
|
@@ -1,5 +1,5 @@
|
|||
version = 1
|
||||
revision = 3
|
||||
revision = 2
|
||||
requires-python = ">=3.12"
|
||||
resolution-markers = [
|
||||
"(python_full_version >= '3.13' and platform_machine != 'aarch64' and sys_platform == 'linux') or (python_full_version >= '3.13' and sys_platform != 'darwin' and sys_platform != 'linux')",
|
||||
|
|
@@ -1891,7 +1891,7 @@ requires-dist = [
|
|||
{ name = "jsonschema" },
|
||||
{ name = "llama-stack-client", specifier = ">=0.3.0" },
|
||||
{ name = "llama-stack-client", marker = "extra == 'ui'", specifier = ">=0.3.0" },
|
||||
{ name = "openai", specifier = ">=1.107" },
|
||||
{ name = "openai", specifier = ">=2.5.0" },
|
||||
{ name = "opentelemetry-exporter-otlp-proto-http", specifier = ">=1.30.0" },
|
||||
{ name = "opentelemetry-sdk", specifier = ">=1.30.0" },
|
||||
{ name = "pandas", marker = "extra == 'ui'" },
|
||||
|
|
|
|||
Loading…
Add table
Add a link
Reference in a new issue