From cf672c74b081cfa373ba66bce725e585f3a04ae5 Mon Sep 17 00:00:00 2001
From: Sixian Yi
Date: Wed, 29 Jan 2025 23:40:35 -0800
Subject: [PATCH] error handling

---
 llama_stack/providers/remote/inference/together/together.py | 6 ++----
 1 file changed, 2 insertions(+), 4 deletions(-)

diff --git a/llama_stack/providers/remote/inference/together/together.py b/llama_stack/providers/remote/inference/together/together.py
index e2bbb2220..605b3ce97 100644
--- a/llama_stack/providers/remote/inference/together/together.py
+++ b/llama_stack/providers/remote/inference/together/together.py
@@ -9,7 +9,6 @@ from typing import AsyncGenerator, List, Optional, Union
 from llama_models.datatypes import CoreModelId
 from llama_models.llama3.api.chat_format import ChatFormat
 from llama_models.llama3.api.tokenizer import Tokenizer
-from termcolor import cprint
 from together import Together
 
 from llama_stack.apis.common.content_types import InterleavedContent
@@ -181,9 +180,8 @@ class TogetherInferenceAdapter(
 
         if logprobs and logprobs.top_k:
             if logprobs.top_k != 1:
-                cprint(
-                    "Together only supports logprobs top_k=1. Overriding.",
-                    "Yello",
+                raise ValueError(
+                    f"Unsupported value: Together only supports logprobs top_k=1. {logprobs.top_k} was provided",
                 )
 
             options["logprobs"] = 1