From ffbc23add1d027a95b7bb5f668264476fbedf57a Mon Sep 17 00:00:00 2001
From: Xi Yan
Date: Wed, 15 Jan 2025 19:00:56 -0800
Subject: [PATCH] openai_compat: rename OpenAILogprobs to OpenAICompatLogprobs

---
 llama_stack/providers/utils/inference/openai_compat.py | 6 +++---
 1 file changed, 3 insertions(+), 3 deletions(-)

diff --git a/llama_stack/providers/utils/inference/openai_compat.py b/llama_stack/providers/utils/inference/openai_compat.py
index 5bc4224a8..f6350ed51 100644
--- a/llama_stack/providers/utils/inference/openai_compat.py
+++ b/llama_stack/providers/utils/inference/openai_compat.py
@@ -46,7 +46,7 @@ class OpenAICompatCompletionChoiceDelta(BaseModel):
     content: str
 
 
-class OpenAILogprobs(BaseModel):
+class OpenAICompatLogprobs(BaseModel):
     text_offset: Optional[List[int]] = None
 
     token_logprobs: Optional[List[float]] = None
@@ -60,7 +60,7 @@ class OpenAICompatCompletionChoice(BaseModel):
     finish_reason: Optional[str] = None
     text: Optional[str] = None
     delta: Optional[OpenAICompatCompletionChoiceDelta] = None
-    logprobs: Optional[OpenAILogprobs] = None
+    logprobs: Optional[OpenAICompatLogprobs] = None
 
 
 class OpenAICompatCompletionResponse(BaseModel):
@@ -117,7 +117,7 @@ def get_stop_reason(finish_reason: str) -> StopReason:
 
 
 def convert_openai_completion_logprobs(
-    logprobs: Optional[OpenAILogprobs],
+    logprobs: Optional[OpenAICompatLogprobs],
 ) -> Optional[List[TokenLogProbs]]:
     if not logprobs:
         return None
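
Reviewer note (not part of the patch): a minimal usage sketch of the renamed model against the converter touched in the last hunk. Only the two fields shown in the first hunk and the None early-return shown in the last hunk are confirmed by this patch; the populated-logprobs conversion path and the sample values are assumptions for illustration.

# Hedged sketch: exercises OpenAICompatLogprobs as renamed by this patch.
# Only text_offset/token_logprobs and the "if not logprobs: return None"
# branch appear in the hunks; everything else here is assumed.
from llama_stack.providers.utils.inference.openai_compat import (
    OpenAICompatCompletionChoice,
    OpenAICompatLogprobs,
    convert_openai_completion_logprobs,
)

# Build a choice carrying logprobs under the new class name
# (sample offsets/values are made up for the sketch).
choice = OpenAICompatCompletionChoice(
    finish_reason="stop",
    text="Hello",
    logprobs=OpenAICompatLogprobs(
        text_offset=[0, 5],
        token_logprobs=[-0.12, -1.40],
    ),
)

# Confirmed by the last hunk: missing logprobs short-circuit to None.
assert convert_openai_completion_logprobs(None) is None

# Assumed from the signature: populated logprobs convert to a
# list of TokenLogProbs.
converted = convert_openai_completion_logprobs(choice.logprobs)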