Mirror of https://github.com/meta-llama/llama-stack.git, synced 2025-10-14 22:33:48 +00:00
chore: OpenAIMixin implements ModelsProtocolPrivate (#3662)
# What does this PR do?

Add ModelsProtocolPrivate methods to OpenAIMixin. This allows providers built on OpenAIMixin to share a common model-management interface.

## Test Plan

CI with new tests.
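A minimal, hypothetical sketch of the pattern the summary describes: a mixin supplying ModelsProtocolPrivate-style model management so each OpenAI-compatible provider does not re-implement it. The names below (`Model`, `register_model`, `unregister_model`, `MyOpenAICompatProvider`) are illustrative stand-ins, not llama-stack's actual definitions.

```python
# Illustrative sketch only: hypothetical stand-ins for the mixin pattern,
# not the real llama-stack interfaces.
import asyncio
from dataclasses import dataclass
from typing import Protocol


@dataclass
class Model:
    identifier: str
    provider_resource_id: str


class ModelsProtocolPrivate(Protocol):
    """Private per-provider model-management surface (hypothetical shape)."""

    async def register_model(self, model: Model) -> Model: ...

    async def unregister_model(self, model_id: str) -> None: ...


class OpenAIMixin:
    """Mixin offering default ModelsProtocolPrivate-style behavior to providers."""

    def __init__(self) -> None:
        self._models: dict[str, Model] = {}

    async def register_model(self, model: Model) -> Model:
        # Shared implementation inherited by every provider that uses the mixin.
        self._models[model.identifier] = model
        return model

    async def unregister_model(self, model_id: str) -> None:
        self._models.pop(model_id, None)


class MyOpenAICompatProvider(OpenAIMixin):
    """A concrete provider gets model registration for free via the mixin."""


async def _demo() -> None:
    provider = MyOpenAICompatProvider()
    await provider.register_model(Model("llama3.2:3b-instruct-fp16", "llama3.2:3b-instruct-fp16"))
    await provider.unregister_model("llama3.2:3b-instruct-fp16")


if __name__ == "__main__":
    asyncio.run(_demo())
```

The design point, per the summary, is that concrete providers inherit these methods from the mixin instead of each duplicating its own implementation.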
Parent: cf422da825
Commit: 873a400544
8 changed files with 243 additions and 11 deletions
tests/integration/recordings/responses/53d2488c9ea9.json (new file, 40 additions)
@@ -0,0 +1,40 @@
{
  "request": {
    "method": "POST",
    "url": "http://localhost:11434/api/generate",
    "headers": {},
    "body": {
      "model": "llama3.2:3b-instruct-fp16",
      "options": {
        "temperature": 0.0001,
        "top_p": 0.9
      },
      "stream": true
    },
    "endpoint": "/api/generate",
    "model": "llama3.2:3b-instruct-fp16"
  },
  "response": {
    "body": [
      {
        "__type__": "ollama._types.GenerateResponse",
        "__data__": {
          "model": "llama3.2:3b-instruct-fp16",
          "created_at": "2025-10-01T20:38:48.732564955Z",
          "done": true,
          "done_reason": "load",
          "total_duration": null,
          "load_duration": null,
          "prompt_eval_count": null,
          "prompt_eval_duration": null,
          "eval_count": null,
          "eval_duration": null,
          "response": "",
          "thinking": null,
          "context": null
        }
      }
    ],
    "is_streaming": true
  }
}
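The new JSON file above is an integration-test recording of an Ollama `/api/generate` exchange, replayed in CI so tests do not need a live server. Below is a hedged, self-contained sketch of how such a recording could be looked up by a request hash; the directory matches the path above, but the hashing scheme and helper names (`request_key`, `load_recorded_response`) are assumptions for illustration, not llama-stack's actual replay harness.

```python
# Hypothetical replay helper: recordings are keyed by a hash of the normalized
# request so a test harness can return the canned response instead of calling
# Ollama. The normalization and hashing here are illustrative assumptions.
import hashlib
import json
from pathlib import Path

RECORDINGS_DIR = Path("tests/integration/recordings/responses")


def request_key(method: str, url: str, body: dict) -> str:
    payload = json.dumps({"method": method, "url": url, "body": body}, sort_keys=True)
    return hashlib.sha256(payload.encode()).hexdigest()[:12]


def load_recorded_response(method: str, url: str, body: dict) -> list[dict]:
    path = RECORDINGS_DIR / f"{request_key(method, url, body)}.json"
    recording = json.loads(path.read_text())
    # Streaming recordings store the response as a list of chunks.
    return recording["response"]["body"]
```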