Add CentML as a remote inference provider

Honglin Cao 2025-01-08 11:15:29 -05:00 committed by Honglin Cao
parent ead9397e22
commit dc1ff40413
10 changed files with 798 additions and 25 deletions


@@ -0,0 +1,32 @@
version: '2'
name: centml
distribution_spec:
  description: Use CentML for running LLM inference
  providers:
    inference:
    - remote::centml
    memory:
    - inline::faiss
    - remote::chromadb
    - remote::pgvector
    safety:
    - inline::llama-guard
    agents:
    - inline::meta-reference
    telemetry:
    - inline::meta-reference
    eval:
    - inline::meta-reference
    datasetio:
    - remote::huggingface
    - inline::localfs
    scoring:
    - inline::basic
    - inline::llm-as-judge
    - inline::braintrust
    tool_runtime:
    - remote::brave-search
    - remote::tavily-search
    - inline::code-interpreter
    - inline::memory-runtime
image_type: conda
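
A build spec like this only declares which provider implementations get bundled into the distribution; at run time the server still needs a matching provider entry in the generated run config. The fragment below is a minimal sketch of what that inference entry might look like: the provider_id value, the config field names (url, api_key), the endpoint URL, and the env-substitution are assumptions for illustration, not taken from this commit.

# Hypothetical run.yaml fragment wiring up the remote CentML provider.
# Field names and the endpoint URL are assumed, not from this PR.
providers:
  inference:
  - provider_id: centml
    provider_type: remote::centml
    config:
      url: https://api.centml.com/openai/v1   # assumed OpenAI-compatible endpoint
      api_key: ${env.CENTML_API_KEY}          # assumed to be read from the environment

The distribution itself would typically be built from this template with something like `llama stack build --template centml --image-type conda`, which is what the top-level `image_type: conda` field refers to.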