Mirror of https://github.com/meta-llama/llama-stack.git (synced 2025-07-29 07:14:20 +00:00)
updated dependency and client model name

This commit is contained in:
parent: d3e269fcf2
commit: 493f0d99b2

2 changed files with 2 additions and 1 deletion
@@ -33,6 +33,7 @@ COMMON_DEPENDENCIES = [
     "Pillow",
     "pydantic==1.10.13",
     "pydantic_core==2.18.2",
+    "python-dotenv",
     "python-openapi",
     "requests",
     "tiktoken",
@@ -66,7 +66,7 @@ async def run_main(host: str, port: int, stream: bool):
     cprint(f"User>{message.content}", "green")
     iterator = client.chat_completion(
         ChatCompletionRequest(
-            model="Meta-Llama-3.1-8B-Instruct",
+            model="Meta-Llama3.1-8B-Instruct",
             messages=[message],
             stream=stream,
         )
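For context, the second hunk changes the call site inside the example inference client's run_main coroutine. The sketch below shows roughly how that call site reads after this commit. Only the cprint line, the client.chat_completion(ChatCompletionRequest(...)) call, and the new model string come from the diff itself; the import paths, the UserMessage construction, the InferenceClient setup, and the chunk handling are assumptions filled in for illustration and may not match the repo at this commit.

import asyncio

from termcolor import cprint

# Assumed imports -- the concrete module paths are not part of the diff; adjust
# them to wherever ChatCompletionRequest, UserMessage, and the inference client
# actually live at this commit.
from llama_stack.apis.inference import ChatCompletionRequest, UserMessage
from llama_stack.apis.inference.client import InferenceClient


async def run_main(host: str, port: int, stream: bool):
    # Assumed setup: the diff does not show how the client or message are built.
    client = InferenceClient(f"http://{host}:{port}")
    message = UserMessage(role="user", content="write me a 2 sentence poem about the moon")

    cprint(f"User>{message.content}", "green")

    # This is the call the commit touches: the model descriptor becomes
    # "Meta-Llama3.1-8B-Instruct" (previously "Meta-Llama-3.1-8B-Instruct").
    iterator = client.chat_completion(
        ChatCompletionRequest(
            model="Meta-Llama3.1-8B-Instruct",
            messages=[message],
            stream=stream,
        )
    )

    # Assumed consumption of the (possibly streaming) response.
    async for chunk in iterator:
        print(chunk, flush=True)


if __name__ == "__main__":
    asyncio.run(run_main("localhost", 5000, stream=True))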