forked from phoenix-oss/llama-stack-mirror
Make Llama Guard 1B the default
parent cc5029a716
commit 4a75d922a9
5 changed files with 14 additions and 10 deletions
@@ -59,7 +59,7 @@ async def run_main(host: str, port: int, stream: bool):
     response = await client.get_model("Meta-Llama3.1-8B-Instruct")
     cprint(f"get_model response={response}", "blue")

-    response = await client.get_model("Llama-Guard-3-8B")
+    response = await client.get_model("Llama-Guard-3-1B")
     cprint(f"get_model response={response}", "red")

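For context, a minimal, self-contained sketch of the flow the hunk above touches. Only the get_model and cprint calls are taken from the diff; the StubModelsClient class, the port 5000 default, and the way the client is constructed are placeholders, since the real client setup lies outside this hunk.

import asyncio

from termcolor import cprint


class StubModelsClient:
    """Placeholder for the real models client used by the example script.

    The actual client construction is not shown in this hunk; this stub just
    echoes the requested identifier so the diffed flow can be run locally.
    """

    async def get_model(self, identifier: str) -> dict:
        return {"identifier": identifier}


async def run_main(host: str, port: int) -> None:
    client = StubModelsClient()  # the real script builds its client from host/port

    # Unchanged by this commit: look up the instruct model.
    response = await client.get_model("Meta-Llama3.1-8B-Instruct")
    cprint(f"get_model response={response}", "blue")

    # Changed by this commit: the example now defaults to the 1B Llama Guard model.
    response = await client.get_model("Llama-Guard-3-1B")
    cprint(f"get_model response={response}", "red")


if __name__ == "__main__":
    asyncio.run(run_main("localhost", 5000))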