Mirror of https://github.com/meta-llama/llama-stack.git (synced 2025-12-11 19:56:03 +00:00)
chore: bump openai package version
To match https://github.com/llamastack/llama-stack/pull/3847. We must not update the lock manually; the update must always be reflected in pyproject.toml, since the lock is a snapshot of the resolved dependency state at build time.

Signed-off-by: Sébastien Han <seb@redhat.com>
parent 9d6e589120
commit 58d4fd6805
2 changed files with 3 additions and 3 deletions
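As a quick illustration of the policy above, the sketch below (not part of the commit) reads the openai constraint from pyproject.toml, the only file where the bump is made by hand. It assumes Python 3.12+ (for tomllib, in line with the requires-python pin in the lock) and that it runs from the repository root; the variable names are illustrative only.

    import tomllib

    # pyproject.toml is the hand-edited source of truth for the constraint.
    with open("pyproject.toml", "rb") as f:
        project = tomllib.load(f)["project"]

    # project.dependencies holds PEP 508 strings such as "openai>=2.5.0";
    # the startswith() match is crude but good enough for a sketch.
    openai_req = next(d for d in project["dependencies"] if d.startswith("openai"))
    print(openai_req)  # expected to print "openai>=2.5.0" after this commit

After editing that one line, uv.lock is regenerated with uv lock (or as part of uv sync) rather than modified by hand, which is what the two hunks below reflect.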
pyproject.toml

@@ -31,7 +31,7 @@ dependencies = [
     "jinja2>=3.1.6",
     "jsonschema",
     "llama-stack-client>=0.3.0",
-    "openai>=1.107", # for expires_after support
+    "openai>=2.5.0",
     "prompt-toolkit",
     "python-dotenv",
     "pyjwt[crypto]>=2.10.0", # Pull crypto to support RS256 for jwt. Requires 2.10.0+ for ssl_context support.
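The hunk above raises the floor from 1.107 to 2.5.0. A minimal, hedged way to confirm that an installed environment already satisfies the new floor is sketched below; it is standard-library only and uses a naive numeric compare, so pre-release strings (for example a hypothetical "2.6.0rc1") would need the packaging library instead.

    from importlib.metadata import version

    installed = version("openai")  # e.g. "2.5.0"
    floor = (2, 5, 0)              # the new minimum declared in pyproject.toml

    # Naive compare: fine for plain release versions, not for pre-releases.
    parts = tuple(int(p) for p in installed.split(".")[:3])
    print(f"openai {installed} installed; the project now requires >= 2.5.0")
    assert parts >= floor, "openai is older than the new floor; re-run uv sync"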
uv.lock (generated), 4 changed lines
@@ -1,5 +1,5 @@
 version = 1
-revision = 3
+revision = 2
 requires-python = ">=3.12"
 resolution-markers = [
     "(python_full_version >= '3.13' and platform_machine != 'aarch64' and sys_platform == 'linux') or (python_full_version >= '3.13' and sys_platform != 'darwin' and sys_platform != 'linux')",
@@ -1891,7 +1891,7 @@ requires-dist = [
     { name = "jsonschema" },
     { name = "llama-stack-client", specifier = ">=0.3.0" },
     { name = "llama-stack-client", marker = "extra == 'ui'", specifier = ">=0.3.0" },
-    { name = "openai", specifier = ">=1.107" },
+    { name = "openai", specifier = ">=2.5.0" },
     { name = "opentelemetry-exporter-otlp-proto-http", specifier = ">=1.30.0" },
     { name = "opentelemetry-sdk", specifier = ">=1.30.0" },
     { name = "pandas", marker = "extra == 'ui'" },
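Because the lock is a build-time snapshot, its requires-dist entry for openai should mirror pyproject.toml once uv lock has been re-run. The sketch below checks that, assuming uv's lock layout in which the project's own [[package]] entry (assumed here to be named "llama-stack" after name normalization) carries a metadata table holding the requires-dist list shown in the hunk above.

    import tomllib

    with open("uv.lock", "rb") as f:
        lock = tomllib.load(f)

    # Locate the project's own entry, then its declared (not resolved) dependencies.
    project = next(p for p in lock["package"] if p["name"] == "llama-stack")
    openai_entry = next(
        r for r in project["metadata"]["requires-dist"] if r["name"] == "openai"
    )

    print("locked openai specifier:", openai_entry["specifier"])  # ">=2.5.0" after this commit
    assert openai_entry["specifier"] == ">=2.5.0", (
        "stale lock: edit pyproject.toml and regenerate with uv lock"
    )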