Mirror of https://github.com/meta-llama/llama-stack.git (synced 2025-12-06 10:37:22 +00:00)
Restore responses unit tests
parent a078f089d9
commit ff60bb31e6
8 changed files with 2244 additions and 0 deletions
@@ -0,0 +1,23 @@
# Copyright (c) Meta Platforms, Inc. and affiliates.
# All rights reserved.
#
# This source code is licensed under the terms described in the LICENSE file in
# the root directory of this source tree.

import os

import yaml

from llama_stack.apis.inference import (
    OpenAIChatCompletion,
)

FIXTURES_DIR = os.path.dirname(os.path.abspath(__file__))


def load_chat_completion_fixture(filename: str) -> OpenAIChatCompletion:
    fixture_path = os.path.join(FIXTURES_DIR, filename)

    with open(fixture_path) as f:
        data = yaml.safe_load(f)
        return OpenAIChatCompletion(**data)
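For context, a minimal sketch (not part of this diff) of how a responses unit test might feed this helper into a mocked inference API so no real model call is made; the import path, mocked method name, and fixture file name below are assumptions, since the test files themselves are not shown in this excerpt.

    # Hypothetical usage sketch: return a canned OpenAIChatCompletion from a mock.
    from unittest.mock import AsyncMock

    from fixtures import load_chat_completion_fixture  # import path assumed

    mock_inference_api = AsyncMock()
    mock_inference_api.openai_chat_completion.return_value = load_chat_completion_fixture(
        "simple_chat_completion.yaml"  # fixture file name assumed
    )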
@@ -0,0 +1,9 @@
id: chat-completion-123
choices:
  - message:
      content: "Dublin"
      role: assistant
    finish_reason: stop
    index: 0
created: 1234567890
model: meta-llama/Llama-3.1-8B-Instruct
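A hedged sketch of what this fixture yields once parsed by the loader above; the asserted values come straight from the YAML, while the file name and import path are assumptions.

    # Hypothetical check: the YAML round-trips into a typed OpenAIChatCompletion.
    from fixtures import load_chat_completion_fixture  # import path assumed

    completion = load_chat_completion_fixture("simple_chat_completion.yaml")  # file name assumed
    assert completion.id == "chat-completion-123"
    assert completion.choices[0].message.content == "Dublin"
    assert completion.choices[0].finish_reason == "stop"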
@@ -0,0 +1,14 @@
id: chat-completion-123
choices:
  - message:
      tool_calls:
        - id: tool_call_123
          type: function
          function:
            name: web_search
            arguments: '{"query":"What is the capital of Ireland?"}'
      role: assistant
    finish_reason: stop
    index: 0
created: 1234567890
model: meta-llama/Llama-3.1-8B-Instruct
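Similarly, a hedged sketch of consuming the tool-call fixture; the function name and JSON-encoded arguments string come from the YAML above, while the file name and import path are assumptions.

    # Hypothetical check: the tool-call fixture exposes a function name plus
    # JSON-encoded arguments that a test can decode.
    import json

    from fixtures import load_chat_completion_fixture  # import path assumed

    completion = load_chat_completion_fixture("tool_call_completion.yaml")  # file name assumed
    tool_call = completion.choices[0].message.tool_calls[0]
    assert tool_call.function.name == "web_search"
    assert json.loads(tool_call.function.arguments) == {"query": "What is the capital of Ireland?"}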