Mirror of https://github.com/meta-llama/llama-stack.git — synced 2025-07-27 06:28:50 +00:00
test: add integration test for module
Add a test which installs ramalama-stack (https://github.com/containers/ramalama-stack), one of our validated external providers: https://llama-stack.readthedocs.io/en/latest/providers/external.html

Signed-off-by: Charlie Doern <cdoern@redhat.com>
This commit is contained in:
parent
776fabed9e
commit
045f4b5a1a
3 changed files with 98 additions and 0 deletions
14
tests/external/ramalama-stack/build.yaml
vendored
Normal file
14
tests/external/ramalama-stack/build.yaml
vendored
Normal file
|
@@ -0,0 +1,14 @@
version: 2
distribution_spec:
  description: Use (an external) Ramalama server for running LLM inference
  container_image: null
  providers:
    inference:
      - provider_id: ramalama
        provider_type: remote::ramalama
        module: ramalama_stack==0.3.0a0
image_type: venv
image_name: ramalama-stack-test
additional_pip_packages:
  - aiosqlite
  - sqlalchemy[asyncio]
|
12
tests/external/ramalama-stack/run.yaml
vendored
Normal file
12
tests/external/ramalama-stack/run.yaml
vendored
Normal file
|
@@ -0,0 +1,12 @@
version: 2
image_name: ramalama
apis:
  - inference
providers:
  inference:
    - provider_id: ramalama
      provider_type: remote::ramalama
      module: ramalama_stack==0.3.0a0
      config: {}
server:
  port: 8321
|
Loading…
Add table
Add a link
Reference in a new issue