Mirror of https://github.com/meta-llama/llama-stack.git (synced 2025-12-31 07:59:59 +00:00)
Commit d946a83d85 — passthrough provider
Parent: d33b8ea3dc
2 changed files with 28 additions and 3 deletions
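In short, judging from the two hunks below: the passthrough inference adapter now casts its chat_completion parameters to plain JSON values and drops None entries before forwarding the call to the downstream client, and the Llama Guard shield entry in the bundled run config is commented out.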
@@ -137,8 +137,33 @@ class PassthroughInferenceAdapter(Inference):
         params = {key: value for key, value in params.items() if value is not None}

+        json_params = {}
+        from llama_stack.distribution.library_client import (
+            convert_pydantic_to_json_value,
+        )
+
+        # cast everything to json dict
+        for key, value in params.items():
+            json_input = convert_pydantic_to_json_value(value)
+            if isinstance(json_input, dict):
+                json_input = {k: v for k, v in json_input.items() if v is not None}
+            elif isinstance(json_input, list):
+                json_input = [x for x in json_input if x is not None]
+                new_input = []
+                for x in json_input:
+                    if isinstance(x, dict):
+                        x = {k: v for k, v in x.items() if v is not None}
+                    new_input.append(x)
+                json_input = new_input
+
+            if key != "tools":
+                json_params[key] = json_input
+
+        from rich.pretty import pprint
+
+        pprint(json_params)
         # only pass through the not None params
-        return client.inference.chat_completion(**params)
+        return client.inference.chat_completion(**json_params)

     async def embeddings(
         self,
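The cleanup pass added above can be exercised on its own. The sketch below mirrors the None-stripping logic under the assumption that pydantic v2 models serialize via model_dump(mode="json"); _to_json_value, clean_params, and the Message model are illustrative stand-ins, not llama-stack code (convert_pydantic_to_json_value is the real helper the diff imports).

    # Minimal, self-contained sketch of the cleanup pass (requires pydantic v2).
    # _to_json_value is a hypothetical stand-in for convert_pydantic_to_json_value.
    from typing import Any

    from pydantic import BaseModel


    def _to_json_value(value: Any) -> Any:
        # Dump pydantic models to plain dicts; recurse into lists; pass the rest through.
        if isinstance(value, BaseModel):
            return value.model_dump(mode="json")
        if isinstance(value, list):
            return [_to_json_value(v) for v in value]
        return value


    def clean_params(params: dict[str, Any]) -> dict[str, Any]:
        # Drop top-level None params first, as the adapter does before its loop.
        params = {key: value for key, value in params.items() if value is not None}
        json_params: dict[str, Any] = {}
        for key, value in params.items():
            json_input = _to_json_value(value)
            if isinstance(json_input, dict):
                # strip None-valued fields so they are not sent over the wire
                json_input = {k: v for k, v in json_input.items() if v is not None}
            elif isinstance(json_input, list):
                # drop None elements and strip None-valued fields from dict elements
                json_input = [
                    {k: v for k, v in x.items() if v is not None} if isinstance(x, dict) else x
                    for x in json_input
                    if x is not None
                ]
            if key != "tools":  # the diff skips "tools" entirely
                json_params[key] = json_input
        return json_params


    class Message(BaseModel):
        role: str
        content: str
        context: str | None = None  # optional field that should be dropped when None


    if __name__ == "__main__":
        cleaned = clean_params(
            {
                "model_id": "llama3.2-11b-vision-instruct",
                "messages": [Message(role="user", content="hi")],
                "stream": None,
            }
        )
        print(cleaned)
        # -> {'model_id': 'llama3.2-11b-vision-instruct',
        #     'messages': [{'role': 'user', 'content': 'hi'}]}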
@@ -103,8 +103,8 @@ models:
   provider_id: passthrough
   provider_model_id: llama3.2-11b-vision-instruct
   model_type: llm
-shields:
-- shield_id: meta-llama/Llama-Guard-3-8B
+# shields:
+# - shield_id: meta-llama/Llama-Guard-3-8B
 vector_dbs: []
 datasets: []
 scoring_fns: []
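The config hunk only changes what gets registered at startup: with the shields block commented out, the parsed run config presumably has no shields, so the Llama-Guard-3-8B shield is no longer registered. A quick way to confirm (assuming a local run.yaml with this change applied; the file path is an assumption):

    # Inspect the parsed config (requires PyYAML; "run.yaml" path is an assumption).
    import yaml

    with open("run.yaml") as f:
        cfg = yaml.safe_load(f)

    # With the shield entries commented out, the key is missing from the parsed config.
    print(cfg.get("shields", []))  # -> []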