commit 727df201b8
Author: Derek Higgins, 2025-07-24 21:06:24 +02:00 (committed by GitHub)
3 changed files with 146 additions and 2 deletions


@@ -22,7 +22,7 @@ from llama_stack.apis.agents.agents import (
 def get_boiling_point(liquid_name: str, celcius: bool = True) -> int:
     """
-    Returns the boiling point of a liquid in Celcius or Fahrenheit.
+    Returns the boiling point of a liquid in Celcius or Fahrenheit (even fictional liquids).

     :param liquid_name: The name of the liquid
     :param celcius: Whether to return the boiling point in Celcius
@@ -39,7 +39,7 @@ def get_boiling_point(liquid_name: str, celcius: bool = True) -> int:
 def get_boiling_point_with_metadata(liquid_name: str, celcius: bool = True) -> dict[str, Any]:
     """
-    Returns the boiling point of a liquid in Celcius or Fahrenheit
+    Returns the boiling point of a liquid in Celcius or Fahrenheit (even fictional liquids).

     :param liquid_name: The name of the liquid
     :param celcius: Whether to return the boiling point in Celcius
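
These are client-side tool fixtures used by the agent integration tests; widening the docstring to "(even fictional liquids)" nudges the model to call the tool even for made-up liquid names rather than refusing. For context, a minimal sketch of how such a fixture might be implemented is shown below; the "polyjuice" example, the numeric return values, and the metadata shape are illustrative assumptions and are not part of this commit.

    from typing import Any

    def get_boiling_point(liquid_name: str, celcius: bool = True) -> int:
        """
        Returns the boiling point of a liquid in Celcius or Fahrenheit (even fictional liquids).

        :param liquid_name: The name of the liquid
        :param celcius: Whether to return the boiling point in Celcius
        """
        # Answer for a fictional liquid, signal "unknown" otherwise, so the
        # test can assert the model actually invoked the tool (values assumed).
        if liquid_name.lower() == "polyjuice":
            return -100 if celcius else -212
        return -1

    def get_boiling_point_with_metadata(liquid_name: str, celcius: bool = True) -> dict[str, Any]:
        """
        Returns the boiling point of a liquid in Celcius or Fahrenheit (even fictional liquids).

        :param liquid_name: The name of the liquid
        :param celcius: Whether to return the boiling point in Celcius
        """
        # Same answer plus metadata the test can inspect (shape assumed).
        return {"content": get_boiling_point(liquid_name, celcius), "metadata": {"source": "fixture"}}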


@@ -34,6 +34,7 @@ def skip_if_model_doesnt_support_openai_completion(client_with_models, model_id)
         # support both completions and chat completions endpoint and all the Llama models are
         # just chat completions
         "remote::nvidia",
+        "remote::openai",
         "remote::runpod",
         "remote::sambanova",
         "remote::tgi",