work with 2 different models

This commit is contained in:
Xi Yan 2024-09-19 21:40:37 -07:00
parent 7071c46422
commit 4b083eec03
2 changed files with 37 additions and 19 deletions

View file

@ -100,6 +100,17 @@ async def run_main(host: str, port: int, stream: bool):
async for log in EventLogger().log(iterator):
log.print()
cprint(f"User>{message.content}", "green")
iterator = client.chat_completion(
ChatCompletionRequest(
model="Meta-Llama3.1-8B",
messages=[message],
stream=stream,
)
)
async for log in EventLogger().log(iterator):
log.print()
def main(host: str, port: int, stream: bool = True):
    """Synchronous entry point: build the chat coroutine and run it to completion."""
    chat_loop = run_main(host, port, stream)
    asyncio.run(chat_loop)