From 493f0d99b210b9a56101a875c4a769542d24acc1 Mon Sep 17 00:00:00 2001
From: Hardik Shah
Date: Fri, 2 Aug 2024 15:37:40 -0700
Subject: [PATCH] updated dependency and client model name

---
 llama_toolchain/distribution/registry.py | 1 +
 llama_toolchain/inference/client.py      | 2 +-
 2 files changed, 2 insertions(+), 1 deletion(-)

diff --git a/llama_toolchain/distribution/registry.py b/llama_toolchain/distribution/registry.py
index ceb101cd4..48124c7d1 100644
--- a/llama_toolchain/distribution/registry.py
+++ b/llama_toolchain/distribution/registry.py
@@ -33,6 +33,7 @@ COMMON_DEPENDENCIES = [
     "Pillow",
     "pydantic==1.10.13",
     "pydantic_core==2.18.2",
+    "python-dotenv",
     "python-openapi",
     "requests",
     "tiktoken",
diff --git a/llama_toolchain/inference/client.py b/llama_toolchain/inference/client.py
index 4e9dd5ee2..331580190 100644
--- a/llama_toolchain/inference/client.py
+++ b/llama_toolchain/inference/client.py
@@ -66,7 +66,7 @@ async def run_main(host: str, port: int, stream: bool):
     cprint(f"User>{message.content}", "green")
     iterator = client.chat_completion(
         ChatCompletionRequest(
-            model="Meta-Llama-3.1-8B-Instruct",
+            model="Meta-Llama3.1-8B-Instruct",
             messages=[message],
             stream=stream,
         )
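
Note on the registry change: this patch only adds python-dotenv to
COMMON_DEPENDENCIES; no code in this diff actually loads a .env file. As a
minimal sketch of how the new dependency is typically consumed downstream
(the key names INFERENCE_HOST and INFERENCE_PORT are hypothetical examples,
not taken from this patch):

    # sketch only -- assumes a .env file sits in the working directory
    import os
    from dotenv import load_dotenv  # provided by the python-dotenv package

    load_dotenv()  # copies key=value pairs from .env into os.environ
    host = os.getenv("INFERENCE_HOST", "localhost")  # hypothetical key
    port = int(os.getenv("INFERENCE_PORT", "5000"))  # hypothetical key

The client.py hunk shows the consuming side of the renamed model: the string
passed as ChatCompletionRequest.model must match the identifier the inference
server registers, so the hyphen between "Llama" and "3.1" is dropped here to
keep the two in sync.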