From 5a2b9e121c2ea014c8c987f78cf83967a1b1c8f8 Mon Sep 17 00:00:00 2001
From: Xi Yan
Date: Fri, 7 Mar 2025 12:52:26 -0800
Subject: [PATCH] fix: return result for together's get_params (#1484)

# What does this PR do?

- return results for together's get_params
- fix issue
- the `return params` was accidentally deleted in
  https://github.com/meta-llama/llama-stack/pull/1362/files#diff-d9345410ea64589cee96487b22eab0d45f7497a80c25dca295cecd254decb204

[//]: # (If resolving an issue, uncomment and update the line below)
[//]: # (Closes #[issue-number])

## Test Plan

```
npm test examples
```

[//]: # (## Documentation)
---
 llama_stack/providers/remote/inference/together/together.py | 5 ++---
 1 file changed, 2 insertions(+), 3 deletions(-)

diff --git a/llama_stack/providers/remote/inference/together/together.py b/llama_stack/providers/remote/inference/together/together.py
index f701c0da7..2046d4aae 100644
--- a/llama_stack/providers/remote/inference/together/together.py
+++ b/llama_stack/providers/remote/inference/together/together.py
@@ -32,9 +32,7 @@ from llama_stack.apis.inference import (
 )
 from llama_stack.distribution.request_headers import NeedsRequestProviderData
 from llama_stack.log import get_logger
-from llama_stack.providers.utils.inference.model_registry import (
-    ModelRegistryHelper,
-)
+from llama_stack.providers.utils.inference.model_registry import ModelRegistryHelper
 from llama_stack.providers.utils.inference.openai_compat import (
     convert_message_to_openai_dict,
     get_sampling_options,
@@ -227,6 +225,7 @@ class TogetherInferenceAdapter(ModelRegistryHelper, Inference, NeedsRequestProviderData):
             **self._build_options(request.sampling_params, request.logprobs, request.response_format),
         }
         logger.debug(f"params to together: {params}")
+        return params
 
     async def embeddings(
         self,
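
For context, here is a self-contained sketch of the failure mode this patch addresses: a helper that builds a request dict, logs it, but never returns it implicitly hands `None` back to its caller. The `_build_params_buggy` / `_build_params_fixed` functions below are hypothetical stand-ins for illustration only, not the actual llama-stack code; the real `_get_params` in `together.py` is shaped similarly, as the diff above shows.

```python
# Hypothetical stand-ins illustrating the bug fixed by this patch;
# not the actual llama-stack implementation.
import logging

logger = logging.getLogger(__name__)


def _build_params_buggy(model: str, prompt: str) -> dict | None:
    params = {"model": model, "prompt": prompt}
    logger.debug(f"params to together: {params}")
    # Missing `return params`: the function implicitly returns None,
    # so any caller that unpacks the result fails downstream.


def _build_params_fixed(model: str, prompt: str) -> dict:
    params = {"model": model, "prompt": prompt}
    logger.debug(f"params to together: {params}")
    return params  # the statement restored by this patch


if __name__ == "__main__":
    assert _build_params_buggy("llama-3", "hi") is None
    assert _build_params_fixed("llama-3", "hi")["model"] == "llama-3"
```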