Mirror of https://github.com/meta-llama/llama-stack.git (synced 2025-12-17 21:57:18 +00:00)
This commit is contained in:
parent 94645dd5f6
commit a2be32c27d
2 changed files with 19 additions and 11 deletions
@@ -10,12 +10,13 @@ from typing import Any, AsyncGenerator, Dict, Generator, List, Optional
 from llama_models.llama3.api.datatypes import (
     BuiltinTool,
-    CompletionMessage,
+    # CompletionMessage,
     StopReason,
-    TokenLogProbs,
+    # TokenLogProbs,
     ToolCall,
     ToolDefinition,
 )
+from llama_stack.apis.inference import CompletionMessage, TokenLogProbs
 from openai import AsyncStream
 from openai.types.chat import (
     ChatCompletionAssistantMessageParam as OpenAIChatCompletionAssistantMessage,
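The hunk above only relocates the CompletionMessage and TokenLogProbs imports from llama_models.llama3.api.datatypes to llama_stack.apis.inference. As a rough illustration (not part of this commit), downstream code would construct the same types via the new import path, assuming the llama_stack definitions keep the llama_models field names:

# Sketch only, not code from this commit. Assumes CompletionMessage keeps its
# content / stop_reason / tool_calls fields and TokenLogProbs keeps its
# logprobs_by_token mapping after the move to llama_stack.apis.inference.
from llama_models.llama3.api.datatypes import StopReason
from llama_stack.apis.inference import CompletionMessage, TokenLogProbs

message = CompletionMessage(
    content="Hello from the provider.",
    stop_reason=StopReason.end_of_turn,
    tool_calls=[],
)
token_logprobs = TokenLogProbs(logprobs_by_token={"Hello": -0.12})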
@@ -339,7 +340,7 @@ def _convert_openai_tool_calls(
 def _convert_openai_logprobs(
     logprobs: OpenAIChoiceLogprobs,
-) -> Optional[List[TokenLogProbs]]:
+) -> Optional[List[Any]]:
     """
     Convert an OpenAI ChoiceLogprobs into a list of TokenLogProbs.
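The body of _convert_openai_logprobs is not shown in this hunk. For orientation, a minimal sketch of such a conversion might look like the following; it assumes OpenAIChoiceLogprobs is the OpenAI SDK's ChoiceLogprobs type and that TokenLogProbs exposes a logprobs_by_token mapping, so treat the names and fields as illustrative rather than the repository's actual implementation:

# Sketch only, not code from this commit. Assumes the OpenAI SDK's
# ChoiceLogprobs / ChatCompletionTokenLogprob types and a TokenLogProbs model
# with a `logprobs_by_token: Dict[str, float]` field.
from typing import Any, List, Optional

from openai.types.chat.chat_completion import ChoiceLogprobs as OpenAIChoiceLogprobs

from llama_stack.apis.inference import TokenLogProbs


def _convert_openai_logprobs_sketch(
    logprobs: Optional[OpenAIChoiceLogprobs],
) -> Optional[List[Any]]:
    if logprobs is None or not logprobs.content:
        return None
    # Each ChatCompletionTokenLogprob carries the chosen token plus its
    # top_logprobs alternatives; fold them into one token -> logprob mapping.
    return [
        TokenLogProbs(
            logprobs_by_token={top.token: top.logprob for top in token_logprob.top_logprobs}
        )
        for token_logprob in logprobs.content
    ]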